diff --git a/Dockerfile b/Dockerfile index b9886fbd8..b449368dd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM gradle:6.5.0-jdk8 ENV ANDROID_SDK_URL https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip ENV ANDROID_API_LEVEL android-30 -ENV ANDROID_BUILD_TOOLS_VERSION 30.0.2 +ENV ANDROID_BUILD_TOOLS_VERSION 30.0.3 ENV ANDROID_HOME /usr/local/android-sdk-linux ENV ANDROID_NDK_VERSION 21.1.6352462 ENV ANDROID_VERSION 30 diff --git a/TMessagesProj/build.gradle b/TMessagesProj/build.gradle index e1456360c..3fb8cbc5e 100644 --- a/TMessagesProj/build.gradle +++ b/TMessagesProj/build.gradle @@ -12,24 +12,25 @@ configurations { configurations.all { exclude group: 'com.google.firebase', module: 'firebase-core' + exclude group: 'androidx.recyclerview', module: 'recyclerview' } dependencies { implementation 'androidx.core:core:1.3.2' implementation 'androidx.palette:palette:1.0.0' - implementation 'androidx.exifinterface:exifinterface:1.3.1' + implementation 'androidx.exifinterface:exifinterface:1.3.2' implementation 'androidx.dynamicanimation:dynamicanimation:1.0.0' implementation 'androidx.multidex:multidex:2.0.1' implementation "androidx.sharetarget:sharetarget:1.0.0" compileOnly 'org.checkerframework:checker-qual:2.5.2' compileOnly 'org.checkerframework:checker-compat-qual:2.5.0' - implementation 'com.google.firebase:firebase-messaging:20.3.0' - implementation 'com.google.firebase:firebase-config:19.2.0' - implementation 'com.google.firebase:firebase-datatransport:17.0.8' + implementation 'com.google.firebase:firebase-messaging:21.0.1' + implementation 'com.google.firebase:firebase-config:20.0.2' + implementation 'com.google.firebase:firebase-datatransport:17.0.10' implementation 'com.google.firebase:firebase-appindexing:19.1.0' implementation 'com.google.android.gms:play-services-maps:17.0.0' - implementation 'com.google.android.gms:play-services-auth:18.1.0' + implementation 'com.google.android.gms:play-services-auth:19.0.0' implementation 
'com.google.android.gms:play-services-vision:16.2.0' implementation 'com.google.android.gms:play-services-wearable:17.0.0' implementation 'com.google.android.gms:play-services-location:17.1.0' @@ -40,12 +41,12 @@ dependencies { implementation 'com.stripe:stripe-android:2.0.2' implementation files('libs/libgsaverification-client.aar') - coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:1.0.10' + coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:1.1.1' } android { compileSdkVersion 30 - buildToolsVersion '30.0.2' + buildToolsVersion '30.0.3' ndkVersion "21.1.6352462" defaultConfig.applicationId = "org.telegram.messenger" @@ -97,9 +98,11 @@ android { jniDebuggable true signingConfig signingConfigs.debug applicationIdSuffix ".beta" - minifyEnabled true + minifyEnabled false + shrinkResources false multiDexEnabled true proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + ndk.debugSymbolLevel = 'FULL' } /*debugAsan { @@ -139,6 +142,7 @@ android { minifyEnabled true multiDexEnabled true proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + ndk.debugSymbolLevel = 'FULL' } release { @@ -149,6 +153,7 @@ android { shrinkResources false multiDexEnabled true proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + ndk.debugSymbolLevel = 'FULL' } } @@ -285,7 +290,7 @@ android { } } - defaultConfig.versionCode = 2139 + defaultConfig.versionCode = 2195 applicationVariants.all { variant -> variant.outputs.all { output -> @@ -303,8 +308,8 @@ android { defaultConfig { minSdkVersion 16 - targetSdkVersion 28 - versionName "7.2.1" + targetSdkVersion 29 + versionName "7.3.0" vectorDrawables.generatedDensities = ['mdpi', 'hdpi', 'xhdpi', 'xxhdpi'] diff --git a/TMessagesProj/config/debug/AndroidManifest.xml b/TMessagesProj/config/debug/AndroidManifest.xml index 0add1a21d..c01d13c3b 100644 --- a/TMessagesProj/config/debug/AndroidManifest.xml +++ 
b/TMessagesProj/config/debug/AndroidManifest.xml @@ -26,6 +26,7 @@ android:hardwareAccelerated="@bool/useHardwareAcceleration" android:largeHeap="true" android:supportsRtl="false" + android:requestLegacyExternalStorage="true" tools:replace="android:supportsRtl"> diff --git a/TMessagesProj/config/debug/AndroidManifest_SDK23.xml b/TMessagesProj/config/debug/AndroidManifest_SDK23.xml index 500bbc0be..079cc4817 100644 --- a/TMessagesProj/config/debug/AndroidManifest_SDK23.xml +++ b/TMessagesProj/config/debug/AndroidManifest_SDK23.xml @@ -30,6 +30,7 @@ android:hardwareAccelerated="@bool/useHardwareAcceleration" android:largeHeap="true" android:supportsRtl="false" + android:requestLegacyExternalStorage="true" tools:replace="android:supportsRtl"> diff --git a/TMessagesProj/config/release/AndroidManifest.xml b/TMessagesProj/config/release/AndroidManifest.xml index 9600f7b57..7bb790100 100644 --- a/TMessagesProj/config/release/AndroidManifest.xml +++ b/TMessagesProj/config/release/AndroidManifest.xml @@ -27,6 +27,7 @@ android:hardwareAccelerated="@bool/useHardwareAcceleration" android:largeHeap="true" android:supportsRtl="false" + android:requestLegacyExternalStorage="true" tools:replace="android:supportsRtl"> diff --git a/TMessagesProj/config/release/AndroidManifest_SDK23.xml b/TMessagesProj/config/release/AndroidManifest_SDK23.xml index 0d58e3d78..093b6a195 100644 --- a/TMessagesProj/config/release/AndroidManifest_SDK23.xml +++ b/TMessagesProj/config/release/AndroidManifest_SDK23.xml @@ -30,6 +30,7 @@ android:hardwareAccelerated="@bool/useHardwareAcceleration" android:largeHeap="true" android:supportsRtl="false" + android:requestLegacyExternalStorage="true" tools:replace="android:supportsRtl"> diff --git a/TMessagesProj/jni/CMakeLists.txt b/TMessagesProj/jni/CMakeLists.txt index b22a566f5..9a0d4c89a 100644 --- a/TMessagesProj/jni/CMakeLists.txt +++ b/TMessagesProj/jni/CMakeLists.txt @@ -395,7 +395,7 @@ target_compile_definitions(sqlite PUBLIC #voip 
include(${CMAKE_HOME_DIRECTORY}/voip/CMakeLists.txt) -set(NATIVE_LIB "tmessages.34") +set(NATIVE_LIB "tmessages.35") #tmessages add_library(${NATIVE_LIB} SHARED @@ -642,7 +642,8 @@ target_sources(${NATIVE_LIB} PRIVATE third_party/libyuv/source/scale_neon64.cc third_party/libyuv/source/scale_win.cc third_party/libyuv/source/scale.cc - third_party/libyuv/source/video_common.cc) + third_party/libyuv/source/video_common.cc + third_party/libyuv/source/scale_uv.cc) target_include_directories(${NATIVE_LIB} PUBLIC opus/include diff --git a/TMessagesProj/jni/gifvideo.cpp b/TMessagesProj/jni/gifvideo.cpp index 90ac488a6..9a7a51c5a 100644 --- a/TMessagesProj/jni/gifvideo.cpp +++ b/TMessagesProj/jni/gifvideo.cpp @@ -290,7 +290,7 @@ extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDr info->src = new char[len + 1]; memcpy(info->src, srcString, len); info->src[len] = '\0'; - if (srcString != 0) { + if (srcString != nullptr) { env->ReleaseStringUTFChars(src, srcString); } @@ -364,7 +364,7 @@ extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDr dataArr[PARAM_NUM_WIDTH] = info->video_stream->codecpar->width; dataArr[PARAM_NUM_HEIGHT] = info->video_stream->codecpar->height; AVDictionaryEntry *rotate_tag = av_dict_get(info->video_stream->metadata, "rotate", NULL, 0); - if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0")) { + if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0") != 0) { char *tail; dataArr[PARAM_NUM_ROTATION] = (jint) av_strtod(rotate_tag->value, &tail); if (*tail) { @@ -373,7 +373,7 @@ extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDr } else { dataArr[PARAM_NUM_ROTATION] = 0; } - if (info->video_stream->codecpar->codec_id == AV_CODEC_ID_H264) { + if (info->video_stream->codecpar->codec_id == AV_CODEC_ID_H264 || info->video_stream->codecpar->codec_id == AV_CODEC_ID_HEVC) { dataArr[PARAM_NUM_FRAMERATE] = (jint) 
av_q2d(info->video_stream->avg_frame_rate); } else { dataArr[PARAM_NUM_FRAMERATE] = (jint) av_q2d(info->video_stream->r_frame_rate); @@ -605,16 +605,19 @@ static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray da jint *dataArr = env->GetIntArrayElements(data, 0); int32_t wantedWidth; int32_t wantedHeight; + + AndroidBitmapInfo bitmapInfo; + AndroidBitmap_getInfo(env, bitmap, &bitmapInfo); + int32_t bitmapWidth = bitmapInfo.width; + int32_t bitmapHeight = bitmapInfo.height; if (dataArr != nullptr) { wantedWidth = dataArr[0]; wantedHeight = dataArr[1]; dataArr[3] = (jint) (1000 * info->frame->best_effort_timestamp * av_q2d(info->video_stream->time_base)); env->ReleaseIntArrayElements(data, dataArr, 0); } else { - AndroidBitmapInfo bitmapInfo; - AndroidBitmap_getInfo(env, bitmap, &bitmapInfo); - wantedWidth = bitmapInfo.width; - wantedHeight = bitmapInfo.height; + wantedWidth = bitmapWidth; + wantedHeight = bitmapHeight; } void *pixels; @@ -622,17 +625,17 @@ static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray da if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == info->frame->width) { if (info->sws_ctx == nullptr) { if (info->frame->format > AV_PIX_FMT_NONE && info->frame->format < AV_PIX_FMT_NB) { - info->sws_ctx = sws_getContext(info->frame->width, info->frame->height, (AVPixelFormat) info->frame->format, info->frame->width, info->frame->height, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL); + info->sws_ctx = sws_getContext(info->frame->width, info->frame->height, (AVPixelFormat) info->frame->format, bitmapWidth, bitmapHeight, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL); } else if (info->video_dec_ctx->pix_fmt > AV_PIX_FMT_NONE && info->video_dec_ctx->pix_fmt < AV_PIX_FMT_NB) { - info->sws_ctx = sws_getContext(info->video_dec_ctx->width, info->video_dec_ctx->height, info->video_dec_ctx->pix_fmt, info->video_dec_ctx->width, 
info->video_dec_ctx->height, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL); + info->sws_ctx = sws_getContext(info->video_dec_ctx->width, info->video_dec_ctx->height, info->video_dec_ctx->pix_fmt, bitmapWidth, bitmapHeight, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL); } } if (info->sws_ctx == nullptr || ((intptr_t) pixels) % 16 != 0) { if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) { if (info->frame->colorspace == AVColorSpace::AVCOL_SPC_BT709) { - libyuv::H420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height); + libyuv::H420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight); } else { - libyuv::I420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height); + libyuv::I420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight); } } else if (info->frame->format == AV_PIX_FMT_BGRA) { libyuv::ABGRToARGB(info->frame->data[0], info->frame->linesize[0], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height); diff --git a/TMessagesProj/jni/tgnet/Handshake.cpp b/TMessagesProj/jni/tgnet/Handshake.cpp index 9b378b401..f679b336f 100644 --- a/TMessagesProj/jni/tgnet/Handshake.cpp +++ b/TMessagesProj/jni/tgnet/Handshake.cpp @@ -206,7 +206,7 @@ inline bool factorizeValue(uint64_t what, uint32_t &p, uint32_t &q) { inline bool 
check_prime(BIGNUM *p) { int result = 0; - if (!BN_primality_test(&result, p, BN_prime_checks, bnContext, 0, NULL)) { + if (!BN_primality_test(&result, p, 64, bnContext, 0, NULL)) { if (LOGS_ENABLED) DEBUG_E("OpenSSL error at BN_primality_test"); return false; } diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/AUTHORS b/TMessagesProj/jni/third_party/libvpx/source/libvpx/AUTHORS index 3eb03e923..352c91fed 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/AUTHORS +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/AUTHORS @@ -25,6 +25,7 @@ Angie Chiang Aron Rosenberg Attila Nagy Birk Magnussen +Brian Foley Brion Vibber changjun.yang Charles 'Buck' Krasic @@ -33,6 +34,7 @@ Chi Yo Tsai chm Chris Cunningham Christian Duvivier +Clement Courbet Daniele Castagna Daniel Kang Dan Zhu @@ -91,6 +93,7 @@ John Koleszar Johnny Klonaris John Stark Jon Kunkee +Jorge E. Moreira Joshua Bleecher Snyder Joshua Litt Julia Robson @@ -125,6 +128,7 @@ Mirko Bonadei Moriyoshi Koizumi Morton Jonuschat Nathan E. Egge +Neil Birkbeck Nico Weber Niveditha Rau Parag Salasakar @@ -165,6 +169,7 @@ Shimon Doodkin Shiyou Yin Shubham Tandle Shunyao Li +Sreerenj Balachandran Stefan Holmer Suman Sunkara Supradeep T R @@ -185,6 +190,7 @@ Vignesh Venkatasubramanian Vitaly Buka Vlad Tsyrklevich Wan-Teh Chang +Wonkap Jang xiwei gu Yaowu Xu Yi Luo diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/CHANGELOG b/TMessagesProj/jni/third_party/libvpx/source/libvpx/CHANGELOG index 345d3dc4d..e731fc612 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/CHANGELOG +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/CHANGELOG @@ -1,3 +1,27 @@ +2020-07-29 v1.9.0 "Quacking Duck" + This release adds support for NV12, a separate library for rate control, as + well as incremental improvements. + + - Upgrading: + NV12 support is added to this release. + A new interface is added for VP9 rate control. The new library libvp9rc.a + must be linked by applications. 
+ Googletest is updated to v1.10.0. + simple_encode.cc is compiled into a new library libsimple_encode.a with + CONFIG_RATE_CTRL. + + - Enhancement: + Various changes to improve VP9 SVC, rate control, quality and speed to real + time encoding. + + - Bug fixes: + Fix key frame update refresh simulcast flexible svc. + Fix to disable_16x16part speed feature for real time encoding. + Fix some signed integer overflows for VP9 rate control. + Fix initialization of delta_q_uv. + Fix condition in regulate_q for cyclic refresh. + Various fixes to dynamic resizing for VP9 SVC. + 2019-12-09 v1.8.2 "Pekin Duck" This release collects incremental improvements to many aspects of the library. diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/README.libvpx b/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/README.libvpx index 36735ffbb..195654f7b 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/README.libvpx +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/README.libvpx @@ -1,5 +1,5 @@ URL: https://git.videolan.org/git/x264.git -Version: d23d18655249944c1ca894b451e2c82c7a584c62 +Version: 3e5aed95cc470f37e2db3e6506a8deb89b527720 License: ISC License File: LICENSE @@ -12,10 +12,8 @@ Get configuration from vpx_config.asm. Prefix functions with vpx by default. Manage name mangling (prefixing with '_') manually because 'PREFIX' does not exist in libvpx. -Expand PIC default to macho64 and respect CONFIG_PIC from libvpx -Set 'private_extern' visibility for macho targets. Copy PIC 'GLOBAL' macros from x86_abi_support.asm Use .text instead of .rodata on macho to avoid broken tables in PIC mode. -Use .text with no alignment for aout -Only use 'hidden' visibility with Chromium +Use .text with no alignment for aout. +Only use 'hidden' visibility with Chromium. Prefix ARCH_* with VPX_. 
diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/x86inc.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/x86inc.asm index 3d722fec0..3d55e921c 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/x86inc.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/third_party/x86inc/x86inc.asm @@ -1,12 +1,12 @@ ;***************************************************************************** ;* x86inc.asm: x264asm abstraction layer ;***************************************************************************** -;* Copyright (C) 2005-2016 x264 project +;* Copyright (C) 2005-2019 x264 project ;* ;* Authors: Loren Merritt +;* Henrik Gramner ;* Anton Mitrofanov ;* Fiona Glaser -;* Henrik Gramner ;* ;* Permission to use, copy, modify, and/or distribute this software for any ;* purpose with or without fee is hereby granted, provided that the above @@ -67,19 +67,19 @@ %endif %define FORMAT_ELF 0 +%define FORMAT_MACHO 0 %ifidn __OUTPUT_FORMAT__,elf %define FORMAT_ELF 1 %elifidn __OUTPUT_FORMAT__,elf32 %define FORMAT_ELF 1 %elifidn __OUTPUT_FORMAT__,elf64 %define FORMAT_ELF 1 -%endif - -%define FORMAT_MACHO 0 -%ifidn __OUTPUT_FORMAT__,macho32 - %define FORMAT_MACHO 1 +%elifidn __OUTPUT_FORMAT__,macho + %define FORMAT_MACHO 1 +%elifidn __OUTPUT_FORMAT__,macho32 + %define FORMAT_MACHO 1 %elifidn __OUTPUT_FORMAT__,macho64 - %define FORMAT_MACHO 1 + %define FORMAT_MACHO 1 %endif ; Set PREFIX for libvpx builds. @@ -103,7 +103,11 @@ ; works around the issue. It appears to be specific to the way libvpx ; handles the tables. %macro SECTION_RODATA 0-1 16 - %ifidn __OUTPUT_FORMAT__,macho32 + %ifidn __OUTPUT_FORMAT__,win32 + SECTION .rdata align=%1 + %elif WIN64 + SECTION .rdata align=%1 + %elifidn __OUTPUT_FORMAT__,macho32 SECTION .text align=%1 fakegot: %elifidn __OUTPUT_FORMAT__,aout @@ -113,8 +117,7 @@ %endif %endmacro -; PIC macros are copied from vpx_ports/x86_abi_support.asm. 
The "define PIC" -; from original code is added in for 64bit. +; PIC macros from vpx_ports/x86_abi_support.asm. %ifidn __OUTPUT_FORMAT__,elf32 %define ABI_IS_32BIT 1 %elifidn __OUTPUT_FORMAT__,macho32 @@ -203,10 +206,24 @@ %ifndef GET_GOT_DEFINED %define GET_GOT_DEFINED 0 %endif -; Done with PIC macros +; End PIC macros from vpx_ports/x86_abi_support.asm. + +; libvpx explicitly sets visibilty in shared object builds. Avoid setting +; visibility to hidden as it may break builds that split sources on e.g., +; directory boundaries. +%ifdef CHROMIUM + %define VISIBILITY hidden + %define HAVE_PRIVATE_EXTERN 1 +%else + %define VISIBILITY + %define HAVE_PRIVATE_EXTERN 0 +%endif %ifdef __NASM_VER__ %use smartalign + %if __NASM_VERSION_ID__ < 0x020e0000 ; 2.14 + %define HAVE_PRIVATE_EXTERN 0 + %endif %endif ; Macros to eliminate most code duplication between x86_32 and x86_64: @@ -324,6 +341,18 @@ DECLARE_REG_TMP_SIZE 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14 %define gprsize 4 %endif +%macro LEA 2 +%if VPX_ARCH_X86_64 + lea %1, [%2] +%elif PIC + call $+5 ; special-cased to not affect the RSB on most CPU:s + pop %1 + add %1, (%2)-$+1 +%else + mov %1, %2 +%endif +%endmacro + %macro PUSH 1 push %1 %ifidn rstk, rsp @@ -385,6 +414,10 @@ DECLARE_REG_TMP_SIZE 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14 %endif %endmacro +%if VPX_ARCH_X86_64 == 0 + %define movsxd movifnidn +%endif + %macro movsxdifnidn 2 %ifnidn %1, %2 movsxd %1, %2 @@ -433,6 +466,8 @@ DECLARE_REG_TMP_SIZE 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14 %endmacro %define required_stack_alignment ((mmsize + 15) & ~15) +%define vzeroupper_required (mmsize > 16 && (VPX_ARCH_X86_64 == 0 || xmm_regs_used > 16 || notcpuflag(avx512))) +%define high_mm_regs (16*cpuflag(avx512)) %macro ALLOC_STACK 1-2 0 ; stack_size, n_xmm_regs (for win64 only) %ifnum %1 @@ -483,10 +518,18 @@ DECLARE_REG_TMP_SIZE 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14 %ifnum %1 %if %1 != 0 && required_stack_alignment > STACK_ALIGNMENT %if %1 > 0 + ; Reserve an additional register for 
storing the original stack pointer, but avoid using + ; eax/rax for this purpose since it can potentially get overwritten as a return value. %assign regs_used (regs_used + 1) + %if VPX_ARCH_X86_64 && regs_used == 7 + %assign regs_used 8 + %elif VPX_ARCH_X86_64 == 0 && regs_used == 1 + %assign regs_used 2 + %endif %endif %if VPX_ARCH_X86_64 && regs_used < 5 + UNIX64 * 3 - ; Ensure that we don't clobber any registers containing arguments + ; Ensure that we don't clobber any registers containing arguments. For UNIX64 we also preserve r6 (rax) + ; since it's used as a hidden argument in vararg functions to specify the number of vector registers used. %assign regs_used 5 + UNIX64 * 3 %endif %endif @@ -516,10 +559,10 @@ DECLARE_REG 7, rdi, 64 DECLARE_REG 8, rsi, 72 DECLARE_REG 9, rbx, 80 DECLARE_REG 10, rbp, 88 -DECLARE_REG 11, R12, 96 -DECLARE_REG 12, R13, 104 -DECLARE_REG 13, R14, 112 -DECLARE_REG 14, R15, 120 +DECLARE_REG 11, R14, 96 +DECLARE_REG 12, R15, 104 +DECLARE_REG 13, R12, 112 +DECLARE_REG 14, R13, 120 %macro PROLOGUE 2-5+ 0 ; #args, #regs, #xmm_regs, [stack_size,] arg_names... %assign num_args %1 @@ -538,15 +581,16 @@ DECLARE_REG 14, R15, 120 %macro WIN64_PUSH_XMM 0 ; Use the shadow space to store XMM6 and XMM7, the rest needs stack space allocated. 
- %if xmm_regs_used > 6 + %if xmm_regs_used > 6 + high_mm_regs movaps [rstk + stack_offset + 8], xmm6 %endif - %if xmm_regs_used > 7 + %if xmm_regs_used > 7 + high_mm_regs movaps [rstk + stack_offset + 24], xmm7 %endif - %if xmm_regs_used > 8 + %assign %%xmm_regs_on_stack xmm_regs_used - high_mm_regs - 8 + %if %%xmm_regs_on_stack > 0 %assign %%i 8 - %rep xmm_regs_used-8 + %rep %%xmm_regs_on_stack movaps [rsp + (%%i-8)*16 + stack_size + 32], xmm %+ %%i %assign %%i %%i+1 %endrep @@ -555,53 +599,56 @@ DECLARE_REG 14, R15, 120 %macro WIN64_SPILL_XMM 1 %assign xmm_regs_used %1 - ASSERT xmm_regs_used <= 16 - %if xmm_regs_used > 8 + ASSERT xmm_regs_used <= 16 + high_mm_regs + %assign %%xmm_regs_on_stack xmm_regs_used - high_mm_regs - 8 + %if %%xmm_regs_on_stack > 0 ; Allocate stack space for callee-saved xmm registers plus shadow space and align the stack. - %assign %%pad (xmm_regs_used-8)*16 + 32 + %assign %%pad %%xmm_regs_on_stack*16 + 32 %assign stack_size_padded %%pad + ((-%%pad-stack_offset-gprsize) & (STACK_ALIGNMENT-1)) SUB rsp, stack_size_padded %endif WIN64_PUSH_XMM %endmacro -%macro WIN64_RESTORE_XMM_INTERNAL 1 +%macro WIN64_RESTORE_XMM_INTERNAL 0 %assign %%pad_size 0 - %if xmm_regs_used > 8 - %assign %%i xmm_regs_used - %rep xmm_regs_used-8 + %assign %%xmm_regs_on_stack xmm_regs_used - high_mm_regs - 8 + %if %%xmm_regs_on_stack > 0 + %assign %%i xmm_regs_used - high_mm_regs + %rep %%xmm_regs_on_stack %assign %%i %%i-1 - movaps xmm %+ %%i, [%1 + (%%i-8)*16 + stack_size + 32] + movaps xmm %+ %%i, [rsp + (%%i-8)*16 + stack_size + 32] %endrep %endif %if stack_size_padded > 0 %if stack_size > 0 && required_stack_alignment > STACK_ALIGNMENT mov rsp, rstkm %else - add %1, stack_size_padded + add rsp, stack_size_padded %assign %%pad_size stack_size_padded %endif %endif - %if xmm_regs_used > 7 - movaps xmm7, [%1 + stack_offset - %%pad_size + 24] + %if xmm_regs_used > 7 + high_mm_regs + movaps xmm7, [rsp + stack_offset - %%pad_size + 24] %endif - %if xmm_regs_used > 6 - 
movaps xmm6, [%1 + stack_offset - %%pad_size + 8] + %if xmm_regs_used > 6 + high_mm_regs + movaps xmm6, [rsp + stack_offset - %%pad_size + 8] %endif %endmacro -%macro WIN64_RESTORE_XMM 1 - WIN64_RESTORE_XMM_INTERNAL %1 +%macro WIN64_RESTORE_XMM 0 + WIN64_RESTORE_XMM_INTERNAL %assign stack_offset (stack_offset-stack_size_padded) + %assign stack_size_padded 0 %assign xmm_regs_used 0 %endmacro -%define has_epilogue regs_used > 7 || xmm_regs_used > 6 || mmsize == 32 || stack_size > 0 +%define has_epilogue regs_used > 7 || stack_size > 0 || vzeroupper_required || xmm_regs_used > 6+high_mm_regs %macro RET 0 - WIN64_RESTORE_XMM_INTERNAL rsp + WIN64_RESTORE_XMM_INTERNAL POP_IF_USED 14, 13, 12, 11, 10, 9, 8, 7 - %if mmsize == 32 + %if vzeroupper_required vzeroupper %endif AUTO_REP_RET @@ -620,14 +667,15 @@ DECLARE_REG 7, R10, 16 DECLARE_REG 8, R11, 24 DECLARE_REG 9, rbx, 32 DECLARE_REG 10, rbp, 40 -DECLARE_REG 11, R12, 48 -DECLARE_REG 12, R13, 56 -DECLARE_REG 13, R14, 64 -DECLARE_REG 14, R15, 72 +DECLARE_REG 11, R14, 48 +DECLARE_REG 12, R15, 56 +DECLARE_REG 13, R12, 64 +DECLARE_REG 14, R13, 72 -%macro PROLOGUE 2-5+ ; #args, #regs, #xmm_regs, [stack_size,] arg_names... +%macro PROLOGUE 2-5+ 0 ; #args, #regs, #xmm_regs, [stack_size,] arg_names... 
%assign num_args %1 %assign regs_used %2 + %assign xmm_regs_used %3 ASSERT regs_used >= num_args SETUP_STACK_POINTER %4 ASSERT regs_used <= 15 @@ -637,7 +685,7 @@ DECLARE_REG 14, R15, 72 DEFINE_ARGS_INTERNAL %0, %4, %5 %endmacro -%define has_epilogue regs_used > 9 || mmsize == 32 || stack_size > 0 +%define has_epilogue regs_used > 9 || stack_size > 0 || vzeroupper_required %macro RET 0 %if stack_size_padded > 0 @@ -648,7 +696,7 @@ DECLARE_REG 14, R15, 72 %endif %endif POP_IF_USED 14, 13, 12, 11, 10, 9 - %if mmsize == 32 + %if vzeroupper_required vzeroupper %endif AUTO_REP_RET @@ -693,7 +741,7 @@ DECLARE_ARG 7, 8, 9, 10, 11, 12, 13, 14 DEFINE_ARGS_INTERNAL %0, %4, %5 %endmacro -%define has_epilogue regs_used > 3 || mmsize == 32 || stack_size > 0 +%define has_epilogue regs_used > 3 || stack_size > 0 || vzeroupper_required %macro RET 0 %if stack_size_padded > 0 @@ -704,7 +752,7 @@ DECLARE_ARG 7, 8, 9, 10, 11, 12, 13, 14 %endif %endif POP_IF_USED 6, 5, 4, 3 - %if mmsize == 32 + %if vzeroupper_required vzeroupper %endif AUTO_REP_RET @@ -715,7 +763,7 @@ DECLARE_ARG 7, 8, 9, 10, 11, 12, 13, 14 %if WIN64 == 0 %macro WIN64_SPILL_XMM 1 %endmacro - %macro WIN64_RESTORE_XMM 1 + %macro WIN64_RESTORE_XMM 0 %endmacro %macro WIN64_PUSH_XMM 0 %endmacro @@ -726,7 +774,7 @@ DECLARE_ARG 7, 8, 9, 10, 11, 12, 13, 14 ; We can automatically detect "follows a branch", but not a branch target. ; (SSSE3 is a sufficient condition to know that your cpu doesn't have this problem.) 
%macro REP_RET 0 - %if has_epilogue + %if has_epilogue || cpuflag(ssse3) RET %else rep ret @@ -758,7 +806,7 @@ DECLARE_ARG 7, 8, 9, 10, 11, 12, 13, 14 BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, jna, jnae, jb, jbe, jnb, jnbe, jc, jnc, js, jns, jo, jno, jp, jnp -%macro TAIL_CALL 2 ; callee, is_nonadjacent +%macro TAIL_CALL 1-2 1 ; callee, is_nonadjacent %if has_epilogue call %1 RET @@ -788,35 +836,25 @@ BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, %endmacro %macro cglobal_internal 2-3+ annotate_function_size - %if %1 - %xdefine %%FUNCTION_PREFIX private_prefix - ; libvpx explicitly sets visibility in shared object builds. Avoid - ; setting visibility to hidden as it may break builds that split - ; sources on e.g., directory boundaries. - %ifdef CHROMIUM - %xdefine %%VISIBILITY hidden - %else - %xdefine %%VISIBILITY - %endif - %else - %xdefine %%FUNCTION_PREFIX public_prefix - %xdefine %%VISIBILITY - %endif %ifndef cglobaled_%2 - %xdefine %2 mangle(%%FUNCTION_PREFIX %+ _ %+ %2) + %if %1 + %xdefine %2 mangle(private_prefix %+ _ %+ %2) + %else + %xdefine %2 mangle(public_prefix %+ _ %+ %2) + %endif %xdefine %2.skip_prologue %2 %+ .skip_prologue CAT_XDEFINE cglobaled_, %2, 1 %endif %xdefine current_function %2 %xdefine current_function_section __SECT__ %if FORMAT_ELF - global %2:function %%VISIBILITY - %elif FORMAT_MACHO - %ifdef __NASM_VER__ - global %2 + %if %1 + global %2:function VISIBILITY %else - global %2:private_extern + global %2:function %endif + %elif FORMAT_MACHO && HAVE_PRIVATE_EXTERN && %1 + global %2:private_extern %else global %2 %endif @@ -827,12 +865,24 @@ BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, %assign stack_offset 0 ; stack pointer offset relative to the return address %assign stack_size 0 ; amount of stack space that can be freely used inside a function %assign stack_size_padded 0 ; total amount of allocated stack space, including space for 
callee-saved xmm registers on WIN64 and alignment padding - %assign xmm_regs_used 0 ; number of XMM registers requested, used for dealing with callee-saved registers on WIN64 + %assign xmm_regs_used 0 ; number of XMM registers requested, used for dealing with callee-saved registers on WIN64 and vzeroupper %ifnidn %3, "" PROLOGUE %3 %endif %endmacro +; Create a global symbol from a local label with the correct name mangling and type +%macro cglobal_label 1 + %if FORMAT_ELF + global current_function %+ %1:function VISIBILITY + %elif FORMAT_MACHO && HAVE_PRIVATE_EXTERN + global current_function %+ %1:private_extern + %else + global current_function %+ %1 + %endif + %1: +%endmacro + %macro cextern 1 %xdefine %1 mangle(private_prefix %+ _ %+ %1) CAT_XDEFINE cglobaled_, %1, 1 @@ -851,7 +901,9 @@ BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, %macro const 1-2+ %xdefine %1 mangle(private_prefix %+ _ %+ %1) %if FORMAT_ELF - global %1:data hidden + global %1:data VISIBILITY + %elif FORMAT_MACHO && HAVE_PRIVATE_EXTERN + global %1:private_extern %else global %1 %endif @@ -890,24 +942,26 @@ BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, %assign cpuflags_sse (1<<4) | cpuflags_mmx2 %assign cpuflags_sse2 (1<<5) | cpuflags_sse %assign cpuflags_sse2slow (1<<6) | cpuflags_sse2 -%assign cpuflags_sse3 (1<<7) | cpuflags_sse2 -%assign cpuflags_ssse3 (1<<8) | cpuflags_sse3 -%assign cpuflags_sse4 (1<<9) | cpuflags_ssse3 -%assign cpuflags_sse42 (1<<10)| cpuflags_sse4 -%assign cpuflags_avx (1<<11)| cpuflags_sse42 -%assign cpuflags_xop (1<<12)| cpuflags_avx -%assign cpuflags_fma4 (1<<13)| cpuflags_avx -%assign cpuflags_fma3 (1<<14)| cpuflags_avx -%assign cpuflags_avx2 (1<<15)| cpuflags_fma3 +%assign cpuflags_lzcnt (1<<7) | cpuflags_sse2 +%assign cpuflags_sse3 (1<<8) | cpuflags_sse2 +%assign cpuflags_ssse3 (1<<9) | cpuflags_sse3 +%assign cpuflags_sse4 (1<<10)| cpuflags_ssse3 +%assign cpuflags_sse42 (1<<11)| cpuflags_sse4 
+%assign cpuflags_aesni (1<<12)| cpuflags_sse42 +%assign cpuflags_gfni (1<<13)| cpuflags_sse42 +%assign cpuflags_avx (1<<14)| cpuflags_sse42 +%assign cpuflags_xop (1<<15)| cpuflags_avx +%assign cpuflags_fma4 (1<<16)| cpuflags_avx +%assign cpuflags_fma3 (1<<17)| cpuflags_avx +%assign cpuflags_bmi1 (1<<18)| cpuflags_avx|cpuflags_lzcnt +%assign cpuflags_bmi2 (1<<19)| cpuflags_bmi1 +%assign cpuflags_avx2 (1<<20)| cpuflags_fma3|cpuflags_bmi2 +%assign cpuflags_avx512 (1<<21)| cpuflags_avx2 ; F, CD, BW, DQ, VL -%assign cpuflags_cache32 (1<<16) -%assign cpuflags_cache64 (1<<17) -%assign cpuflags_slowctz (1<<18) -%assign cpuflags_lzcnt (1<<19) -%assign cpuflags_aligned (1<<20) ; not a cpu feature, but a function variant -%assign cpuflags_atom (1<<21) -%assign cpuflags_bmi1 (1<<22)|cpuflags_lzcnt -%assign cpuflags_bmi2 (1<<23)|cpuflags_bmi1 +%assign cpuflags_cache32 (1<<22) +%assign cpuflags_cache64 (1<<23) +%assign cpuflags_aligned (1<<24) ; not a cpu feature, but a function variant +%assign cpuflags_atom (1<<25) ; Returns a boolean value expressing whether or not the specified cpuflag is enabled. %define cpuflag(x) (((((cpuflags & (cpuflags_ %+ x)) ^ (cpuflags_ %+ x)) - 1) >> 31) & 1) @@ -950,7 +1004,7 @@ BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, %if VPX_ARCH_X86_64 || cpuflag(sse2) %ifdef __NASM_VER__ - ALIGNMODE k8 + ALIGNMODE p6 %else CPU amdnop %endif @@ -963,11 +1017,12 @@ BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, %endif %endmacro -; Merge mmx and sse* +; Merge mmx, sse*, and avx* ; m# is a simd register of the currently selected size ; xm# is the corresponding xmm register if mmsize >= 16, otherwise the same as m# ; ym# is the corresponding ymm register if mmsize >= 32, otherwise the same as m# -; (All 3 remain in sync through SWAP.) +; zm# is the corresponding zmm register if mmsize >= 64, otherwise the same as m# +; (All 4 remain in sync through SWAP.) 
%macro CAT_XDEFINE 3 %xdefine %1%2 %3 @@ -977,69 +1032,99 @@ BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, %undef %1%2 %endmacro +%macro DEFINE_MMREGS 1 ; mmtype + %assign %%prev_mmregs 0 + %ifdef num_mmregs + %assign %%prev_mmregs num_mmregs + %endif + + %assign num_mmregs 8 + %if VPX_ARCH_X86_64 && mmsize >= 16 + %assign num_mmregs 16 + %if cpuflag(avx512) || mmsize == 64 + %assign num_mmregs 32 + %endif + %endif + + %assign %%i 0 + %rep num_mmregs + CAT_XDEFINE m, %%i, %1 %+ %%i + CAT_XDEFINE nn%1, %%i, %%i + %assign %%i %%i+1 + %endrep + %if %%prev_mmregs > num_mmregs + %rep %%prev_mmregs - num_mmregs + CAT_UNDEF m, %%i + CAT_UNDEF nn %+ mmtype, %%i + %assign %%i %%i+1 + %endrep + %endif + %xdefine mmtype %1 +%endmacro + +; Prefer registers 16-31 over 0-15 to avoid having to use vzeroupper +%macro AVX512_MM_PERMUTATION 0-1 0 ; start_reg + %if VPX_ARCH_X86_64 && cpuflag(avx512) + %assign %%i %1 + %rep 16-%1 + %assign %%i_high %%i+16 + SWAP %%i, %%i_high + %assign %%i %%i+1 + %endrep + %endif +%endmacro + %macro INIT_MMX 0-1+ %assign avx_enabled 0 %define RESET_MM_PERMUTATION INIT_MMX %1 %define mmsize 8 - %define num_mmregs 8 %define mova movq %define movu movq %define movh movd %define movnta movntq - %assign %%i 0 - %rep 8 - CAT_XDEFINE m, %%i, mm %+ %%i - CAT_XDEFINE nnmm, %%i, %%i - %assign %%i %%i+1 - %endrep - %rep 8 - CAT_UNDEF m, %%i - CAT_UNDEF nnmm, %%i - %assign %%i %%i+1 - %endrep INIT_CPUFLAGS %1 + DEFINE_MMREGS mm %endmacro %macro INIT_XMM 0-1+ %assign avx_enabled 0 %define RESET_MM_PERMUTATION INIT_XMM %1 %define mmsize 16 - %define num_mmregs 8 - %if VPX_ARCH_X86_64 - %define num_mmregs 16 - %endif %define mova movdqa %define movu movdqu %define movh movq %define movnta movntdq - %assign %%i 0 - %rep num_mmregs - CAT_XDEFINE m, %%i, xmm %+ %%i - CAT_XDEFINE nnxmm, %%i, %%i - %assign %%i %%i+1 - %endrep INIT_CPUFLAGS %1 + DEFINE_MMREGS xmm + %if WIN64 + AVX512_MM_PERMUTATION 6 ; Swap callee-saved registers with 
volatile registers + %endif %endmacro %macro INIT_YMM 0-1+ %assign avx_enabled 1 %define RESET_MM_PERMUTATION INIT_YMM %1 %define mmsize 32 - %define num_mmregs 8 - %if VPX_ARCH_X86_64 - %define num_mmregs 16 - %endif %define mova movdqa %define movu movdqu %undef movh %define movnta movntdq - %assign %%i 0 - %rep num_mmregs - CAT_XDEFINE m, %%i, ymm %+ %%i - CAT_XDEFINE nnymm, %%i, %%i - %assign %%i %%i+1 - %endrep INIT_CPUFLAGS %1 + DEFINE_MMREGS ymm + AVX512_MM_PERMUTATION +%endmacro + +%macro INIT_ZMM 0-1+ + %assign avx_enabled 1 + %define RESET_MM_PERMUTATION INIT_ZMM %1 + %define mmsize 64 + %define mova movdqa + %define movu movdqu + %undef movh + %define movnta movntdq + INIT_CPUFLAGS %1 + DEFINE_MMREGS zmm + AVX512_MM_PERMUTATION %endmacro INIT_XMM @@ -1048,18 +1133,26 @@ INIT_XMM %define mmmm%1 mm%1 %define mmxmm%1 mm%1 %define mmymm%1 mm%1 + %define mmzmm%1 mm%1 %define xmmmm%1 mm%1 %define xmmxmm%1 xmm%1 %define xmmymm%1 xmm%1 + %define xmmzmm%1 xmm%1 %define ymmmm%1 mm%1 %define ymmxmm%1 xmm%1 %define ymmymm%1 ymm%1 + %define ymmzmm%1 ymm%1 + %define zmmmm%1 mm%1 + %define zmmxmm%1 xmm%1 + %define zmmymm%1 ymm%1 + %define zmmzmm%1 zmm%1 %define xm%1 xmm %+ m%1 %define ym%1 ymm %+ m%1 + %define zm%1 zmm %+ m%1 %endmacro %assign i 0 -%rep 16 +%rep 32 DECLARE_MMCAST i %assign i i+1 %endrep @@ -1129,25 +1222,42 @@ INIT_XMM %endif %assign %%i 0 %rep num_mmregs - CAT_XDEFINE %%f, %%i, m %+ %%i + %xdefine %%tmp m %+ %%i + CAT_XDEFINE %%f, %%i, regnumof %+ %%tmp %assign %%i %%i+1 %endrep %endmacro -%macro LOAD_MM_PERMUTATION 1 ; name to load from - %ifdef %1_m0 +%macro LOAD_MM_PERMUTATION 0-1 ; name to load from + %if %0 + %xdefine %%f %1_m + %else + %xdefine %%f current_function %+ _m + %endif + %xdefine %%tmp %%f %+ 0 + %ifnum %%tmp + RESET_MM_PERMUTATION %assign %%i 0 %rep num_mmregs - CAT_XDEFINE m, %%i, %1_m %+ %%i - CAT_XDEFINE nn, m %+ %%i, %%i + %xdefine %%tmp %%f %+ %%i + CAT_XDEFINE %%m, %%i, m %+ %%tmp %assign %%i %%i+1 %endrep + %rep num_mmregs + 
%assign %%i %%i-1 + CAT_XDEFINE m, %%i, %%m %+ %%i + CAT_XDEFINE nn, m %+ %%i, %%i + %endrep %endif %endmacro ; Append cpuflags to the callee's name iff the appended name is known and the plain name isn't %macro call 1 - call_internal %1 %+ SUFFIX, %1 + %ifid %1 + call_internal %1 %+ SUFFIX, %1 + %else + call %1 + %endif %endmacro %macro call_internal 2 %xdefine %%i %2 @@ -1190,12 +1300,17 @@ INIT_XMM ;============================================================================= %assign i 0 -%rep 16 +%rep 32 %if i < 8 CAT_XDEFINE sizeofmm, i, 8 + CAT_XDEFINE regnumofmm, i, i %endif CAT_XDEFINE sizeofxmm, i, 16 CAT_XDEFINE sizeofymm, i, 32 + CAT_XDEFINE sizeofzmm, i, 64 + CAT_XDEFINE regnumofxmm, i, i + CAT_XDEFINE regnumofymm, i, i + CAT_XDEFINE regnumofzmm, i, i %assign i i+1 %endrep %undef i @@ -1214,7 +1329,7 @@ INIT_XMM ;%1 == instruction ;%2 == minimal instruction set ;%3 == 1 if float, 0 if int -;%4 == 1 if non-destructive or 4-operand (xmm, xmm, xmm, imm), 0 otherwise +;%4 == 1 if 4-operand emulation, 0 if 3-operand emulation, 255 otherwise (no emulation) ;%5 == 1 if commutative (i.e. 
doesn't matter which src arg is which), 0 if not ;%6+: operands %macro RUN_AVX_INSTR 6-9+ @@ -1238,8 +1353,22 @@ INIT_XMM %ifdef cpuname %if notcpuflag(%2) %error use of ``%1'' %2 instruction in cpuname function: current_function - %elif cpuflags_%2 < cpuflags_sse && notcpuflag(sse2) && __sizeofreg > 8 + %elif %3 == 0 && __sizeofreg == 16 && notcpuflag(sse2) %error use of ``%1'' sse2 instruction in cpuname function: current_function + %elif %3 == 0 && __sizeofreg == 32 && notcpuflag(avx2) + %error use of ``%1'' avx2 instruction in cpuname function: current_function + %elif __sizeofreg == 16 && notcpuflag(sse) + %error use of ``%1'' sse instruction in cpuname function: current_function + %elif __sizeofreg == 32 && notcpuflag(avx) + %error use of ``%1'' avx instruction in cpuname function: current_function + %elif __sizeofreg == 64 && notcpuflag(avx512) + %error use of ``%1'' avx512 instruction in cpuname function: current_function + %elifidn %1, pextrw ; special case because the base instruction is mmx2, + %ifnid %6 ; but sse4 is required for memory operands + %if notcpuflag(sse4) + %error use of ``%1'' sse4 instruction in cpuname function: current_function + %endif + %endif %endif %endif %endif @@ -1247,14 +1376,12 @@ INIT_XMM %if __emulate_avx %xdefine __src1 %7 %xdefine __src2 %8 - %ifnidn %6, %7 - %if %0 >= 9 - CHECK_AVX_INSTR_EMU {%1 %6, %7, %8, %9}, %6, %8, %9 - %else - CHECK_AVX_INSTR_EMU {%1 %6, %7, %8}, %6, %8 - %endif - %if %5 && %4 == 0 - %ifnid %8 + %if %5 && %4 == 0 + %ifnidn %6, %7 + %ifidn %6, %8 + %xdefine __src1 %8 + %xdefine __src2 %7 + %elifnnum sizeof%8 ; 3-operand AVX instructions with a memory arg can only have it in src2, ; whereas SSE emulation prefers to have it in src1 (i.e. the mov). ; So, if the instruction is commutative with a memory arg, swap them. 
@@ -1262,6 +1389,13 @@ INIT_XMM %xdefine __src2 %7 %endif %endif + %endif + %ifnidn %6, __src1 + %if %0 >= 9 + CHECK_AVX_INSTR_EMU {%1 %6, %7, %8, %9}, %6, __src2, %9 + %else + CHECK_AVX_INSTR_EMU {%1 %6, %7, %8}, %6, __src2 + %endif %if __sizeofreg == 8 MOVQ %6, __src1 %elif %3 @@ -1278,9 +1412,40 @@ INIT_XMM %elif %0 >= 9 __instr %6, %7, %8, %9 %elif %0 == 8 - __instr %6, %7, %8 + %if avx_enabled && %5 + %xdefine __src1 %7 + %xdefine __src2 %8 + %ifnum regnumof%7 + %ifnum regnumof%8 + %if regnumof%7 < 8 && regnumof%8 >= 8 && regnumof%8 < 16 && sizeof%8 <= 32 + ; Most VEX-encoded instructions require an additional byte to encode when + ; src2 is a high register (e.g. m8..15). If the instruction is commutative + ; we can swap src1 and src2 when doing so reduces the instruction length. + %xdefine __src1 %8 + %xdefine __src2 %7 + %endif + %endif + %endif + __instr %6, __src1, __src2 + %else + __instr %6, %7, %8 + %endif %elif %0 == 7 - __instr %6, %7 + %if avx_enabled && %5 + %xdefine __src1 %6 + %xdefine __src2 %7 + %ifnum regnumof%6 + %ifnum regnumof%7 + %if regnumof%6 < 8 && regnumof%7 >= 8 && regnumof%7 < 16 && sizeof%7 <= 32 + %xdefine __src1 %7 + %xdefine __src2 %6 + %endif + %endif + %endif + __instr %6, __src1, __src2 + %else + __instr %6, %7 + %endif %else __instr %6 %endif @@ -1289,9 +1454,9 @@ INIT_XMM ;%1 == instruction ;%2 == minimal instruction set ;%3 == 1 if float, 0 if int -;%4 == 1 if non-destructive or 4-operand (xmm, xmm, xmm, imm), 0 otherwise +;%4 == 1 if 4-operand emulation, 0 if 3-operand emulation, 255 otherwise (no emulation) ;%5 == 1 if commutative (i.e. 
doesn't matter which src arg is which), 0 if not -%macro AVX_INSTR 1-5 fnord, 0, 1, 0 +%macro AVX_INSTR 1-5 fnord, 0, 255, 0 %macro %1 1-10 fnord, fnord, fnord, fnord, %1, %2, %3, %4, %5 %ifidn %2, fnord RUN_AVX_INSTR %6, %7, %8, %9, %10, %1 @@ -1307,77 +1472,112 @@ INIT_XMM %endmacro %endmacro -; Instructions with both VEX and non-VEX encodings +; Instructions with both VEX/EVEX and legacy encodings ; Non-destructive instructions are written without parameters AVX_INSTR addpd, sse2, 1, 0, 1 AVX_INSTR addps, sse, 1, 0, 1 -AVX_INSTR addsd, sse2, 1, 0, 1 -AVX_INSTR addss, sse, 1, 0, 1 +AVX_INSTR addsd, sse2, 1, 0, 0 +AVX_INSTR addss, sse, 1, 0, 0 AVX_INSTR addsubpd, sse3, 1, 0, 0 AVX_INSTR addsubps, sse3, 1, 0, 0 -AVX_INSTR aesdec, fnord, 0, 0, 0 -AVX_INSTR aesdeclast, fnord, 0, 0, 0 -AVX_INSTR aesenc, fnord, 0, 0, 0 -AVX_INSTR aesenclast, fnord, 0, 0, 0 -AVX_INSTR aesimc -AVX_INSTR aeskeygenassist +AVX_INSTR aesdec, aesni, 0, 0, 0 +AVX_INSTR aesdeclast, aesni, 0, 0, 0 +AVX_INSTR aesenc, aesni, 0, 0, 0 +AVX_INSTR aesenclast, aesni, 0, 0, 0 +AVX_INSTR aesimc, aesni +AVX_INSTR aeskeygenassist, aesni AVX_INSTR andnpd, sse2, 1, 0, 0 AVX_INSTR andnps, sse, 1, 0, 0 AVX_INSTR andpd, sse2, 1, 0, 1 AVX_INSTR andps, sse, 1, 0, 1 -AVX_INSTR blendpd, sse4, 1, 0, 0 -AVX_INSTR blendps, sse4, 1, 0, 0 -AVX_INSTR blendvpd, sse4, 1, 0, 0 -AVX_INSTR blendvps, sse4, 1, 0, 0 +AVX_INSTR blendpd, sse4, 1, 1, 0 +AVX_INSTR blendps, sse4, 1, 1, 0 +AVX_INSTR blendvpd, sse4 ; can't be emulated +AVX_INSTR blendvps, sse4 ; can't be emulated +AVX_INSTR cmpeqpd, sse2, 1, 0, 1 +AVX_INSTR cmpeqps, sse, 1, 0, 1 +AVX_INSTR cmpeqsd, sse2, 1, 0, 0 +AVX_INSTR cmpeqss, sse, 1, 0, 0 +AVX_INSTR cmplepd, sse2, 1, 0, 0 +AVX_INSTR cmpleps, sse, 1, 0, 0 +AVX_INSTR cmplesd, sse2, 1, 0, 0 +AVX_INSTR cmpless, sse, 1, 0, 0 +AVX_INSTR cmpltpd, sse2, 1, 0, 0 +AVX_INSTR cmpltps, sse, 1, 0, 0 +AVX_INSTR cmpltsd, sse2, 1, 0, 0 +AVX_INSTR cmpltss, sse, 1, 0, 0 +AVX_INSTR cmpneqpd, sse2, 1, 0, 1 +AVX_INSTR cmpneqps, sse, 
1, 0, 1 +AVX_INSTR cmpneqsd, sse2, 1, 0, 0 +AVX_INSTR cmpneqss, sse, 1, 0, 0 +AVX_INSTR cmpnlepd, sse2, 1, 0, 0 +AVX_INSTR cmpnleps, sse, 1, 0, 0 +AVX_INSTR cmpnlesd, sse2, 1, 0, 0 +AVX_INSTR cmpnless, sse, 1, 0, 0 +AVX_INSTR cmpnltpd, sse2, 1, 0, 0 +AVX_INSTR cmpnltps, sse, 1, 0, 0 +AVX_INSTR cmpnltsd, sse2, 1, 0, 0 +AVX_INSTR cmpnltss, sse, 1, 0, 0 +AVX_INSTR cmpordpd, sse2 1, 0, 1 +AVX_INSTR cmpordps, sse 1, 0, 1 +AVX_INSTR cmpordsd, sse2 1, 0, 0 +AVX_INSTR cmpordss, sse 1, 0, 0 AVX_INSTR cmppd, sse2, 1, 1, 0 AVX_INSTR cmpps, sse, 1, 1, 0 AVX_INSTR cmpsd, sse2, 1, 1, 0 AVX_INSTR cmpss, sse, 1, 1, 0 -AVX_INSTR comisd, sse2 -AVX_INSTR comiss, sse -AVX_INSTR cvtdq2pd, sse2 -AVX_INSTR cvtdq2ps, sse2 -AVX_INSTR cvtpd2dq, sse2 -AVX_INSTR cvtpd2ps, sse2 -AVX_INSTR cvtps2dq, sse2 -AVX_INSTR cvtps2pd, sse2 -AVX_INSTR cvtsd2si, sse2 -AVX_INSTR cvtsd2ss, sse2 -AVX_INSTR cvtsi2sd, sse2 -AVX_INSTR cvtsi2ss, sse -AVX_INSTR cvtss2sd, sse2 -AVX_INSTR cvtss2si, sse -AVX_INSTR cvttpd2dq, sse2 -AVX_INSTR cvttps2dq, sse2 -AVX_INSTR cvttsd2si, sse2 -AVX_INSTR cvttss2si, sse +AVX_INSTR cmpunordpd, sse2, 1, 0, 1 +AVX_INSTR cmpunordps, sse, 1, 0, 1 +AVX_INSTR cmpunordsd, sse2, 1, 0, 0 +AVX_INSTR cmpunordss, sse, 1, 0, 0 +AVX_INSTR comisd, sse2, 1 +AVX_INSTR comiss, sse, 1 +AVX_INSTR cvtdq2pd, sse2, 1 +AVX_INSTR cvtdq2ps, sse2, 1 +AVX_INSTR cvtpd2dq, sse2, 1 +AVX_INSTR cvtpd2ps, sse2, 1 +AVX_INSTR cvtps2dq, sse2, 1 +AVX_INSTR cvtps2pd, sse2, 1 +AVX_INSTR cvtsd2si, sse2, 1 +AVX_INSTR cvtsd2ss, sse2, 1, 0, 0 +AVX_INSTR cvtsi2sd, sse2, 1, 0, 0 +AVX_INSTR cvtsi2ss, sse, 1, 0, 0 +AVX_INSTR cvtss2sd, sse2, 1, 0, 0 +AVX_INSTR cvtss2si, sse, 1 +AVX_INSTR cvttpd2dq, sse2, 1 +AVX_INSTR cvttps2dq, sse2, 1 +AVX_INSTR cvttsd2si, sse2, 1 +AVX_INSTR cvttss2si, sse, 1 AVX_INSTR divpd, sse2, 1, 0, 0 AVX_INSTR divps, sse, 1, 0, 0 AVX_INSTR divsd, sse2, 1, 0, 0 AVX_INSTR divss, sse, 1, 0, 0 AVX_INSTR dppd, sse4, 1, 1, 0 AVX_INSTR dpps, sse4, 1, 1, 0 -AVX_INSTR extractps, sse4 +AVX_INSTR extractps, sse4, 1 
+AVX_INSTR gf2p8affineinvqb, gfni, 0, 1, 0 +AVX_INSTR gf2p8affineqb, gfni, 0, 1, 0 +AVX_INSTR gf2p8mulb, gfni, 0, 0, 0 AVX_INSTR haddpd, sse3, 1, 0, 0 AVX_INSTR haddps, sse3, 1, 0, 0 AVX_INSTR hsubpd, sse3, 1, 0, 0 AVX_INSTR hsubps, sse3, 1, 0, 0 AVX_INSTR insertps, sse4, 1, 1, 0 AVX_INSTR lddqu, sse3 -AVX_INSTR ldmxcsr, sse +AVX_INSTR ldmxcsr, sse, 1 AVX_INSTR maskmovdqu, sse2 AVX_INSTR maxpd, sse2, 1, 0, 1 AVX_INSTR maxps, sse, 1, 0, 1 -AVX_INSTR maxsd, sse2, 1, 0, 1 -AVX_INSTR maxss, sse, 1, 0, 1 +AVX_INSTR maxsd, sse2, 1, 0, 0 +AVX_INSTR maxss, sse, 1, 0, 0 AVX_INSTR minpd, sse2, 1, 0, 1 AVX_INSTR minps, sse, 1, 0, 1 -AVX_INSTR minsd, sse2, 1, 0, 1 -AVX_INSTR minss, sse, 1, 0, 1 -AVX_INSTR movapd, sse2 -AVX_INSTR movaps, sse +AVX_INSTR minsd, sse2, 1, 0, 0 +AVX_INSTR minss, sse, 1, 0, 0 +AVX_INSTR movapd, sse2, 1 +AVX_INSTR movaps, sse, 1 AVX_INSTR movd, mmx -AVX_INSTR movddup, sse3 +AVX_INSTR movddup, sse3, 1 AVX_INSTR movdqa, sse2 AVX_INSTR movdqu, sse2 AVX_INSTR movhlps, sse, 1, 0, 0 @@ -1386,24 +1586,24 @@ AVX_INSTR movhps, sse, 1, 0, 0 AVX_INSTR movlhps, sse, 1, 0, 0 AVX_INSTR movlpd, sse2, 1, 0, 0 AVX_INSTR movlps, sse, 1, 0, 0 -AVX_INSTR movmskpd, sse2 -AVX_INSTR movmskps, sse +AVX_INSTR movmskpd, sse2, 1 +AVX_INSTR movmskps, sse, 1 AVX_INSTR movntdq, sse2 AVX_INSTR movntdqa, sse4 -AVX_INSTR movntpd, sse2 -AVX_INSTR movntps, sse +AVX_INSTR movntpd, sse2, 1 +AVX_INSTR movntps, sse, 1 AVX_INSTR movq, mmx AVX_INSTR movsd, sse2, 1, 0, 0 -AVX_INSTR movshdup, sse3 -AVX_INSTR movsldup, sse3 +AVX_INSTR movshdup, sse3, 1 +AVX_INSTR movsldup, sse3, 1 AVX_INSTR movss, sse, 1, 0, 0 -AVX_INSTR movupd, sse2 -AVX_INSTR movups, sse -AVX_INSTR mpsadbw, sse4 +AVX_INSTR movupd, sse2, 1 +AVX_INSTR movups, sse, 1 +AVX_INSTR mpsadbw, sse4, 0, 1, 0 AVX_INSTR mulpd, sse2, 1, 0, 1 AVX_INSTR mulps, sse, 1, 0, 1 -AVX_INSTR mulsd, sse2, 1, 0, 1 -AVX_INSTR mulss, sse, 1, 0, 1 +AVX_INSTR mulsd, sse2, 1, 0, 0 +AVX_INSTR mulss, sse, 1, 0, 0 AVX_INSTR orpd, sse2, 1, 0, 1 AVX_INSTR orps, 
sse, 1, 0, 1 AVX_INSTR pabsb, ssse3 @@ -1421,14 +1621,18 @@ AVX_INSTR paddsb, mmx, 0, 0, 1 AVX_INSTR paddsw, mmx, 0, 0, 1 AVX_INSTR paddusb, mmx, 0, 0, 1 AVX_INSTR paddusw, mmx, 0, 0, 1 -AVX_INSTR palignr, ssse3 +AVX_INSTR palignr, ssse3, 0, 1, 0 AVX_INSTR pand, mmx, 0, 0, 1 AVX_INSTR pandn, mmx, 0, 0, 0 AVX_INSTR pavgb, mmx2, 0, 0, 1 AVX_INSTR pavgw, mmx2, 0, 0, 1 -AVX_INSTR pblendvb, sse4, 0, 0, 0 -AVX_INSTR pblendw, sse4 -AVX_INSTR pclmulqdq +AVX_INSTR pblendvb, sse4 ; can't be emulated +AVX_INSTR pblendw, sse4, 0, 1, 0 +AVX_INSTR pclmulqdq, fnord, 0, 1, 0 +AVX_INSTR pclmulhqhqdq, fnord, 0, 0, 0 +AVX_INSTR pclmulhqlqdq, fnord, 0, 0, 0 +AVX_INSTR pclmullqhqdq, fnord, 0, 0, 0 +AVX_INSTR pclmullqlqdq, fnord, 0, 0, 0 AVX_INSTR pcmpestri, sse42 AVX_INSTR pcmpestrm, sse42 AVX_INSTR pcmpistri, sse42 @@ -1452,10 +1656,10 @@ AVX_INSTR phminposuw, sse4 AVX_INSTR phsubw, ssse3, 0, 0, 0 AVX_INSTR phsubd, ssse3, 0, 0, 0 AVX_INSTR phsubsw, ssse3, 0, 0, 0 -AVX_INSTR pinsrb, sse4 -AVX_INSTR pinsrd, sse4 -AVX_INSTR pinsrq, sse4 -AVX_INSTR pinsrw, mmx2 +AVX_INSTR pinsrb, sse4, 0, 1, 0 +AVX_INSTR pinsrd, sse4, 0, 1, 0 +AVX_INSTR pinsrq, sse4, 0, 1, 0 +AVX_INSTR pinsrw, mmx2, 0, 1, 0 AVX_INSTR pmaddwd, mmx, 0, 0, 1 AVX_INSTR pmaddubsw, ssse3, 0, 0, 0 AVX_INSTR pmaxsb, sse4, 0, 0, 1 @@ -1527,27 +1731,27 @@ AVX_INSTR punpcklwd, mmx, 0, 0, 0 AVX_INSTR punpckldq, mmx, 0, 0, 0 AVX_INSTR punpcklqdq, sse2, 0, 0, 0 AVX_INSTR pxor, mmx, 0, 0, 1 -AVX_INSTR rcpps, sse, 1, 0, 0 +AVX_INSTR rcpps, sse, 1 AVX_INSTR rcpss, sse, 1, 0, 0 -AVX_INSTR roundpd, sse4 -AVX_INSTR roundps, sse4 -AVX_INSTR roundsd, sse4 -AVX_INSTR roundss, sse4 -AVX_INSTR rsqrtps, sse, 1, 0, 0 +AVX_INSTR roundpd, sse4, 1 +AVX_INSTR roundps, sse4, 1 +AVX_INSTR roundsd, sse4, 1, 1, 0 +AVX_INSTR roundss, sse4, 1, 1, 0 +AVX_INSTR rsqrtps, sse, 1 AVX_INSTR rsqrtss, sse, 1, 0, 0 AVX_INSTR shufpd, sse2, 1, 1, 0 AVX_INSTR shufps, sse, 1, 1, 0 -AVX_INSTR sqrtpd, sse2, 1, 0, 0 -AVX_INSTR sqrtps, sse, 1, 0, 0 +AVX_INSTR sqrtpd, sse2, 1 
+AVX_INSTR sqrtps, sse, 1 AVX_INSTR sqrtsd, sse2, 1, 0, 0 AVX_INSTR sqrtss, sse, 1, 0, 0 -AVX_INSTR stmxcsr, sse +AVX_INSTR stmxcsr, sse, 1 AVX_INSTR subpd, sse2, 1, 0, 0 AVX_INSTR subps, sse, 1, 0, 0 AVX_INSTR subsd, sse2, 1, 0, 0 AVX_INSTR subss, sse, 1, 0, 0 -AVX_INSTR ucomisd, sse2 -AVX_INSTR ucomiss, sse +AVX_INSTR ucomisd, sse2, 1 +AVX_INSTR ucomiss, sse, 1 AVX_INSTR unpckhpd, sse2, 1, 0, 0 AVX_INSTR unpckhps, sse, 1, 0, 0 AVX_INSTR unpcklpd, sse2, 1, 0, 0 @@ -1560,6 +1764,38 @@ AVX_INSTR pfadd, 3dnow, 1, 0, 1 AVX_INSTR pfsub, 3dnow, 1, 0, 0 AVX_INSTR pfmul, 3dnow, 1, 0, 1 +;%1 == instruction +;%2 == minimal instruction set +%macro GPR_INSTR 2 + %macro %1 2-5 fnord, %1, %2 + %ifdef cpuname + %if notcpuflag(%5) + %error use of ``%4'' %5 instruction in cpuname function: current_function + %endif + %endif + %ifidn %3, fnord + %4 %1, %2 + %else + %4 %1, %2, %3 + %endif + %endmacro +%endmacro + +GPR_INSTR andn, bmi1 +GPR_INSTR bextr, bmi1 +GPR_INSTR blsi, bmi1 +GPR_INSTR blsr, bmi1 +GPR_INSTR blsmsk, bmi1 +GPR_INSTR bzhi, bmi2 +GPR_INSTR mulx, bmi2 +GPR_INSTR pdep, bmi2 +GPR_INSTR pext, bmi2 +GPR_INSTR popcnt, sse42 +GPR_INSTR rorx, bmi2 +GPR_INSTR sarx, bmi2 +GPR_INSTR shlx, bmi2 +GPR_INSTR shrx, bmi2 + ; base-4 constants for shuffles %assign i 0 %rep 256 @@ -1610,7 +1846,7 @@ FMA_INSTR pmadcswd, pmaddwd, paddd v%5%6 %1, %2, %3, %4 %elifidn %1, %2 ; If %3 or %4 is a memory operand it needs to be encoded as the last operand. - %ifid %3 + %ifnum sizeof%3 v%{5}213%6 %2, %3, %4 %else v%{5}132%6 %2, %4, %3 @@ -1635,15 +1871,53 @@ FMA4_INSTR fmsubadd, pd, ps FMA4_INSTR fnmadd, pd, ps, sd, ss FMA4_INSTR fnmsub, pd, ps, sd, ss -; workaround: vpbroadcastq is broken in x86_32 due to a yasm bug (fixed in 1.3.0) -%ifdef __YASM_VER__ - %if __YASM_VERSION_ID__ < 0x01030000 && VPX_ARCH_X86_64 == 0 - %macro vpbroadcastq 2 - %if sizeof%1 == 16 - movddup %1, %2 - %else - vbroadcastsd %1, %2 +; Macros for converting VEX instructions to equivalent EVEX ones. 
+%macro EVEX_INSTR 2-3 0 ; vex, evex, prefer_evex + %macro %1 2-7 fnord, fnord, %1, %2, %3 + %ifidn %3, fnord + %define %%args %1, %2 + %elifidn %4, fnord + %define %%args %1, %2, %3 + %else + %define %%args %1, %2, %3, %4 + %endif + %assign %%evex_required cpuflag(avx512) & %7 + %ifnum regnumof%1 + %if regnumof%1 >= 16 || sizeof%1 > 32 + %assign %%evex_required 1 %endif - %endmacro - %endif -%endif + %endif + %ifnum regnumof%2 + %if regnumof%2 >= 16 || sizeof%2 > 32 + %assign %%evex_required 1 + %endif + %endif + %ifnum regnumof%3 + %if regnumof%3 >= 16 || sizeof%3 > 32 + %assign %%evex_required 1 + %endif + %endif + %if %%evex_required + %6 %%args + %else + %5 %%args ; Prefer VEX over EVEX due to shorter instruction length + %endif + %endmacro +%endmacro + +EVEX_INSTR vbroadcastf128, vbroadcastf32x4 +EVEX_INSTR vbroadcasti128, vbroadcasti32x4 +EVEX_INSTR vextractf128, vextractf32x4 +EVEX_INSTR vextracti128, vextracti32x4 +EVEX_INSTR vinsertf128, vinsertf32x4 +EVEX_INSTR vinserti128, vinserti32x4 +EVEX_INSTR vmovdqa, vmovdqa32 +EVEX_INSTR vmovdqu, vmovdqu32 +EVEX_INSTR vpand, vpandd +EVEX_INSTR vpandn, vpandnd +EVEX_INSTR vpor, vpord +EVEX_INSTR vpxor, vpxord +EVEX_INSTR vrcpps, vrcp14ps, 1 ; EVEX versions have higher precision +EVEX_INSTR vrcpss, vrcp14ss, 1 +EVEX_INSTR vrsqrtps, vrsqrt14ps, 1 +EVEX_INSTR vrsqrtss, vrsqrt14ss, 1 diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/tools_common.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/tools_common.c index 59978b7f9..cbecfbb41 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/tools_common.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/tools_common.c @@ -91,10 +91,13 @@ int read_yuv_frame(struct VpxInputContext *input_ctx, vpx_image_t *yuv_frame) { for (plane = 0; plane < 3; ++plane) { uint8_t *ptr; - const int w = vpx_img_plane_width(yuv_frame, plane); + int w = vpx_img_plane_width(yuv_frame, plane); const int h = vpx_img_plane_height(yuv_frame, plane); int r; - + 
// Assuming that for nv12 we read all chroma data at one time + if (yuv_frame->fmt == VPX_IMG_FMT_NV12 && plane > 1) break; + // Fixing NV12 chroma width it is odd + if (yuv_frame->fmt == VPX_IMG_FMT_NV12 && plane == 1) w = (w + 1) & ~1; /* Determine the correct plane based on the image format. The for-loop * always counts in Y,U,V order, but this may not match the order of * the data on disk. diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/extend.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/extend.c index f4dbce2cd..b52e9fe93 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/extend.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/extend.c @@ -11,30 +11,40 @@ #include "extend.h" #include "vpx_mem/vpx_mem.h" -static void copy_and_extend_plane(unsigned char *s, /* source */ - int sp, /* source pitch */ - unsigned char *d, /* destination */ - int dp, /* destination pitch */ - int h, /* height */ - int w, /* width */ - int et, /* extend top border */ - int el, /* extend left border */ - int eb, /* extend bottom border */ - int er) { /* extend right border */ - int i; +static void copy_and_extend_plane( + unsigned char *s, /* source */ + int sp, /* source pitch */ + unsigned char *d, /* destination */ + int dp, /* destination pitch */ + int h, /* height */ + int w, /* width */ + int et, /* extend top border */ + int el, /* extend left border */ + int eb, /* extend bottom border */ + int er, /* extend right border */ + int interleave_step) { /* step between pixels of the current plane */ + int i, j; unsigned char *src_ptr1, *src_ptr2; unsigned char *dest_ptr1, *dest_ptr2; int linesize; + if (interleave_step < 1) interleave_step = 1; + /* copy the left and right most columns out */ src_ptr1 = s; - src_ptr2 = s + w - 1; + src_ptr2 = s + (w - 1) * interleave_step; dest_ptr1 = d - el; dest_ptr2 = d + w; for (i = 0; i < h; ++i) { memset(dest_ptr1, src_ptr1[0], el); - memcpy(dest_ptr1 
+ el, src_ptr1, w); + if (interleave_step == 1) { + memcpy(dest_ptr1 + el, src_ptr1, w); + } else { + for (j = 0; j < w; j++) { + dest_ptr1[el + j] = src_ptr1[interleave_step * j]; + } + } memset(dest_ptr2, src_ptr2[0], er); src_ptr1 += sp; src_ptr2 += sp; @@ -69,9 +79,12 @@ void vp8_copy_and_extend_frame(YV12_BUFFER_CONFIG *src, int eb = dst->border + dst->y_height - src->y_height; int er = dst->border + dst->y_width - src->y_width; + // detect nv12 colorspace + int chroma_step = src->v_buffer - src->u_buffer == 1 ? 2 : 1; + copy_and_extend_plane(src->y_buffer, src->y_stride, dst->y_buffer, dst->y_stride, src->y_height, src->y_width, et, el, eb, - er); + er, 1); et = dst->border >> 1; el = dst->border >> 1; @@ -80,11 +93,11 @@ void vp8_copy_and_extend_frame(YV12_BUFFER_CONFIG *src, copy_and_extend_plane(src->u_buffer, src->uv_stride, dst->u_buffer, dst->uv_stride, src->uv_height, src->uv_width, et, el, - eb, er); + eb, er, chroma_step); copy_and_extend_plane(src->v_buffer, src->uv_stride, dst->v_buffer, dst->uv_stride, src->uv_height, src->uv_width, et, el, - eb, er); + eb, er, chroma_step); } void vp8_copy_and_extend_frame_with_rect(YV12_BUFFER_CONFIG *src, @@ -98,6 +111,8 @@ void vp8_copy_and_extend_frame_with_rect(YV12_BUFFER_CONFIG *src, int dst_y_offset = srcy * dst->y_stride + srcx; int src_uv_offset = ((srcy * src->uv_stride) >> 1) + (srcx >> 1); int dst_uv_offset = ((srcy * dst->uv_stride) >> 1) + (srcx >> 1); + // detect nv12 colorspace + int chroma_step = src->v_buffer - src->u_buffer == 1 ? 2 : 1; /* If the side is not touching the bounder then don't extend. 
*/ if (srcy) et = 0; @@ -107,7 +122,7 @@ void vp8_copy_and_extend_frame_with_rect(YV12_BUFFER_CONFIG *src, copy_and_extend_plane(src->y_buffer + src_y_offset, src->y_stride, dst->y_buffer + dst_y_offset, dst->y_stride, srch, srcw, - et, el, eb, er); + et, el, eb, er, 1); et = (et + 1) >> 1; el = (el + 1) >> 1; @@ -118,11 +133,11 @@ void vp8_copy_and_extend_frame_with_rect(YV12_BUFFER_CONFIG *src, copy_and_extend_plane(src->u_buffer + src_uv_offset, src->uv_stride, dst->u_buffer + dst_uv_offset, dst->uv_stride, srch, - srcw, et, el, eb, er); + srcw, et, el, eb, er, chroma_step); copy_and_extend_plane(src->v_buffer + src_uv_offset, src->uv_stride, dst->v_buffer + dst_uv_offset, dst->uv_stride, srch, - srcw, et, el, eb, er); + srcw, et, el, eb, er, chroma_step); } /* note the extension is only for the last row, for intra prediction purpose */ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/generic/systemdependent.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/generic/systemdependent.c index 75ce7ef35..cd1b02c9c 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/generic/systemdependent.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/generic/systemdependent.c @@ -16,6 +16,8 @@ #include "vpx_ports/x86.h" #elif VPX_ARCH_PPC #include "vpx_ports/ppc.h" +#elif VPX_ARCH_MIPS +#include "vpx_ports/mips.h" #endif #include "vp8/common/onyxc_int.h" #include "vp8/common/systemdependent.h" @@ -96,6 +98,8 @@ void vp8_machine_specific_config(VP8_COMMON *ctx) { ctx->cpu_caps = x86_simd_caps(); #elif VPX_ARCH_PPC ctx->cpu_caps = ppc_simd_caps(); +#elif VPX_ARCH_MIPS + ctx->cpu_caps = mips_cpu_caps(); #else // generic-gnu targets. 
ctx->cpu_caps = 0; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/dequantize_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/dequantize_mmi.c index b3f8084ae..b9330a666 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/dequantize_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/dequantize_mmi.c @@ -100,7 +100,7 @@ void vp8_dequant_idct_add_mmi(int16_t *input, int16_t *dq, unsigned char *dest, vp8_short_idct4x4llm_mmi(input, dest, stride, dest, stride); __asm__ volatile( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gssdlc1 %[ftmp0], 0x07(%[input]) \n\t" "gssdrc1 %[ftmp0], 0x00(%[input]) \n\t" "sdl $0, 0x0f(%[input]) \n\t" diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/idctllm_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/idctllm_mmi.c index 5e48f5916..a35689dd3 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/idctllm_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/idctllm_mmi.c @@ -13,25 +13,25 @@ #include "vpx_ports/asmdefs_mmi.h" #define TRANSPOSE_4H \ - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" \ + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" \ MMI_LI(%[tmp0], 0x93) \ "mtc1 %[tmp0], %[ftmp10] \n\t" \ "punpcklhw %[ftmp5], %[ftmp1], %[ftmp0] \n\t" \ "punpcklhw %[ftmp9], %[ftmp2], %[ftmp0] \n\t" \ "pshufh %[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp5], %[ftmp5], %[ftmp9] \n\t" \ + "por %[ftmp5], %[ftmp5], %[ftmp9] \n\t" \ "punpckhhw %[ftmp6], %[ftmp1], %[ftmp0] \n\t" \ "punpckhhw %[ftmp9], %[ftmp2], %[ftmp0] \n\t" \ "pshufh %[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp6], %[ftmp6], %[ftmp9] \n\t" \ + "por %[ftmp6], %[ftmp6], %[ftmp9] \n\t" \ "punpcklhw %[ftmp7], %[ftmp3], %[ftmp0] \n\t" \ "punpcklhw %[ftmp9], %[ftmp4], %[ftmp0] \n\t" \ "pshufh 
%[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp7], %[ftmp7], %[ftmp9] \n\t" \ + "por %[ftmp7], %[ftmp7], %[ftmp9] \n\t" \ "punpckhhw %[ftmp8], %[ftmp3], %[ftmp0] \n\t" \ "punpckhhw %[ftmp9], %[ftmp4], %[ftmp0] \n\t" \ "pshufh %[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp8], %[ftmp8], %[ftmp9] \n\t" \ + "por %[ftmp8], %[ftmp8], %[ftmp9] \n\t" \ "punpcklwd %[ftmp1], %[ftmp5], %[ftmp7] \n\t" \ "punpckhwd %[ftmp2], %[ftmp5], %[ftmp7] \n\t" \ "punpcklwd %[ftmp3], %[ftmp6], %[ftmp8] \n\t" \ @@ -41,15 +41,19 @@ void vp8_short_idct4x4llm_mmi(int16_t *input, unsigned char *pred_ptr, int pred_stride, unsigned char *dst_ptr, int dst_stride) { double ftmp[12]; - uint32_t tmp[0]; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_04) = { 0x0004000400040004ULL }; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_4e7b) = { 0x4e7b4e7b4e7b4e7bULL }; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_22a3) = { 0x22a322a322a322a3ULL }; + uint64_t tmp[1]; + double ff_ph_04, ff_ph_4e7b, ff_ph_22a3; __asm__ volatile ( + "dli %[tmp0], 0x0004000400040004 \n\t" + "dmtc1 %[tmp0], %[ff_ph_04] \n\t" + "dli %[tmp0], 0x4e7b4e7b4e7b4e7b \n\t" + "dmtc1 %[tmp0], %[ff_ph_4e7b] \n\t" + "dli %[tmp0], 0x22a322a322a322a3 \n\t" + "dmtc1 %[tmp0], %[ff_ph_22a3] \n\t" MMI_LI(%[tmp0], 0x02) - "mtc1 %[tmp0], %[ftmp11] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gsldlc1 %[ftmp1], 0x07(%[ip]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[ip]) \n\t" @@ -186,9 +190,10 @@ void vp8_short_idct4x4llm_mmi(int16_t *input, unsigned char *pred_ptr, [ftmp6]"=&f"(ftmp[6]), [ftmp7]"=&f"(ftmp[7]), [ftmp8]"=&f"(ftmp[8]), [ftmp9]"=&f"(ftmp[9]), [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), [tmp0]"=&r"(tmp[0]), - [pred_ptr]"+&r"(pred_ptr), [dst_ptr]"+&r"(dst_ptr) - : [ip]"r"(input), [ff_ph_22a3]"f"(ff_ph_22a3), - [ff_ph_4e7b]"f"(ff_ph_4e7b), [ff_ph_04]"f"(ff_ph_04), + [pred_ptr]"+&r"(pred_ptr), [dst_ptr]"+&r"(dst_ptr), + [ff_ph_4e7b]"=&f"(ff_ph_4e7b), 
[ff_ph_04]"=&f"(ff_ph_04), + [ff_ph_22a3]"=&f"(ff_ph_22a3) + : [ip]"r"(input), [pred_stride]"r"((mips_reg)pred_stride), [dst_stride]"r"((mips_reg)dst_stride) : "memory" @@ -198,12 +203,13 @@ void vp8_short_idct4x4llm_mmi(int16_t *input, unsigned char *pred_ptr, void vp8_dc_only_idct_add_mmi(int16_t input_dc, unsigned char *pred_ptr, int pred_stride, unsigned char *dst_ptr, int dst_stride) { - int a1 = ((input_dc + 4) >> 3); - double ftmp[5]; + int a0 = ((input_dc + 4) >> 3); + double a1, ftmp[5]; int low32; __asm__ volatile ( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "dmtc1 %[a0], %[a1] \n\t" "pshufh %[a1], %[a1], %[ftmp0] \n\t" "ulw %[low32], 0x00(%[pred_ptr]) \n\t" "mtc1 %[low32], %[ftmp1] \n\t" @@ -244,9 +250,9 @@ void vp8_dc_only_idct_add_mmi(int16_t input_dc, unsigned char *pred_ptr, "gsswrc1 %[ftmp1], 0x00(%[dst_ptr]) \n\t" : [ftmp0]"=&f"(ftmp[0]), [ftmp1]"=&f"(ftmp[1]), [ftmp2]"=&f"(ftmp[2]), [ftmp3]"=&f"(ftmp[3]), [ftmp4]"=&f"(ftmp[4]), [low32]"=&r"(low32), - [dst_ptr]"+&r"(dst_ptr), [pred_ptr]"+&r"(pred_ptr) + [dst_ptr]"+&r"(dst_ptr), [pred_ptr]"+&r"(pred_ptr), [a1]"=&f"(a1) : [dst_stride]"r"((mips_reg)dst_stride), - [pred_stride]"r"((mips_reg)pred_stride), [a1]"f"(a1) + [pred_stride]"r"((mips_reg)pred_stride), [a0]"r"(a0) : "memory" ); } @@ -254,14 +260,15 @@ void vp8_dc_only_idct_add_mmi(int16_t input_dc, unsigned char *pred_ptr, void vp8_short_inv_walsh4x4_mmi(int16_t *input, int16_t *mb_dqcoeff) { int i; int16_t output[16]; - double ftmp[12]; - uint32_t tmp[1]; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_03) = { 0x0003000300030003ULL }; + double ff_ph_03, ftmp[12]; + uint64_t tmp[1]; __asm__ volatile ( + "dli %[tmp0], 0x0003000300030003 \n\t" + "dmtc1 %[tmp0], %[ff_ph_03] \n\t" MMI_LI(%[tmp0], 0x03) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "mtc1 %[tmp0], %[ftmp11] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" "gsldlc1 %[ftmp1], 0x07(%[ip]) \n\t" "gsldrc1 %[ftmp1], 
0x00(%[ip]) \n\t" "gsldlc1 %[ftmp2], 0x0f(%[ip]) \n\t" @@ -317,8 +324,8 @@ void vp8_short_inv_walsh4x4_mmi(int16_t *input, int16_t *mb_dqcoeff) { [ftmp3]"=&f"(ftmp[3]), [ftmp4]"=&f"(ftmp[4]), [ftmp5]"=&f"(ftmp[5]), [ftmp6]"=&f"(ftmp[6]), [ftmp7]"=&f"(ftmp[7]), [ftmp8]"=&f"(ftmp[8]), [ftmp9]"=&f"(ftmp[9]), [ftmp10]"=&f"(ftmp[10]), - [ftmp11]"=&f"(ftmp[11]), [tmp0]"=&r"(tmp[0]) - : [ip]"r"(input), [op]"r"(output), [ff_ph_03]"f"(ff_ph_03) + [ftmp11]"=&f"(ftmp[11]), [tmp0]"=&r"(tmp[0]), [ff_ph_03]"=&f"(ff_ph_03) + : [ip]"r"(input), [op]"r"(output) : "memory" ); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/loopfilter_filters_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/loopfilter_filters_mmi.c index f2182f95c..a07a7e3b4 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/loopfilter_filters_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/loopfilter_filters_mmi.c @@ -13,28 +13,25 @@ #include "vp8/common/onyxc_int.h" #include "vpx_ports/asmdefs_mmi.h" -DECLARE_ALIGNED(8, static const uint64_t, ff_ph_01) = { 0x0001000100010001ULL }; -DECLARE_ALIGNED(8, static const uint64_t, - ff_ph_003f) = { 0x003f003f003f003fULL }; -DECLARE_ALIGNED(8, static const uint64_t, - ff_ph_0900) = { 0x0900090009000900ULL }; -DECLARE_ALIGNED(8, static const uint64_t, - ff_ph_1200) = { 0x1200120012001200ULL }; -DECLARE_ALIGNED(8, static const uint64_t, - ff_ph_1b00) = { 0x1b001b001b001b00ULL }; -DECLARE_ALIGNED(8, static const uint64_t, ff_pb_fe) = { 0xfefefefefefefefeULL }; -DECLARE_ALIGNED(8, static const uint64_t, ff_pb_80) = { 0x8080808080808080ULL }; -DECLARE_ALIGNED(8, static const uint64_t, ff_pb_04) = { 0x0404040404040404ULL }; -DECLARE_ALIGNED(8, static const uint64_t, ff_pb_03) = { 0x0303030303030303ULL }; -DECLARE_ALIGNED(8, static const uint64_t, ff_pb_01) = { 0x0101010101010101ULL }; - void vp8_loop_filter_horizontal_edge_mmi( unsigned char 
*src_ptr, int src_pixel_step, const unsigned char *blimit, const unsigned char *limit, const unsigned char *thresh, int count) { - uint32_t tmp[1]; + uint64_t tmp[1]; mips_reg addr[2]; double ftmp[12]; + double ff_ph_01, ff_pb_fe, ff_pb_80, ff_pb_04, ff_pb_03; + /* clang-format off */ __asm__ volatile ( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ff_ph_01] \n\t" + "dli %[tmp0], 0xfefefefefefefefe \n\t" + "dmtc1 %[tmp0], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x8080808080808080 \n\t" + "dmtc1 %[tmp0], %[ff_pb_80] \n\t" + "dli %[tmp0], 0x0404040404040404 \n\t" + "dmtc1 %[tmp0], %[ff_pb_04] \n\t" + "dli %[tmp0], 0x0303030303030303 \n\t" + "dmtc1 %[tmp0], %[ff_pb_03] \n\t" "1: \n\t" "gsldlc1 %[ftmp10], 0x07(%[limit]) \n\t" "gsldrc1 %[ftmp10], 0x00(%[limit]) \n\t" @@ -56,14 +53,14 @@ void vp8_loop_filter_horizontal_edge_mmi( "gsldrc1 %[ftmp4], 0x00(%[addr1]) \n\t" "pasubub %[ftmp1], %[ftmp3], %[ftmp4] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp10] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" MMI_SUBU(%[addr1], %[src_ptr], %[src_pixel_step]) "gsldlc1 %[ftmp5], 0x07(%[addr1]) \n\t" "gsldrc1 %[ftmp5], 0x00(%[addr1]) \n\t" "pasubub %[ftmp9], %[ftmp4], %[ftmp5] \n\t" "psubusb %[ftmp1], %[ftmp9], %[ftmp10] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" "gsldlc1 %[ftmp6], 0x07(%[src_ptr]) \n\t" "gsldrc1 %[ftmp6], 0x00(%[src_ptr]) \n\t" @@ -72,35 +69,35 @@ void vp8_loop_filter_horizontal_edge_mmi( "gsldrc1 %[ftmp7], 0x00(%[addr0]) \n\t" "pasubub %[ftmp11], %[ftmp7], %[ftmp6] \n\t" "psubusb %[ftmp1], %[ftmp11], %[ftmp10] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" MMI_ADDU(%[addr1], %[src_ptr], %[src_pixel_step_x2]) "gsldlc1 %[ftmp8], 0x07(%[addr1]) \n\t" "gsldrc1 %[ftmp8], 0x00(%[addr1]) \n\t" "pasubub %[ftmp1], %[ftmp8], %[ftmp7] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp10] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], 
%[ftmp0], %[ftmp1] \n\t" MMI_ADDU(%[addr1], %[addr0], %[src_pixel_step_x2]) "gsldlc1 %[ftmp2], 0x07(%[addr1]) \n\t" "gsldrc1 %[ftmp2], 0x00(%[addr1]) \n\t" "pasubub %[ftmp1], %[ftmp2], %[ftmp8] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp10] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" "pasubub %[ftmp1], %[ftmp5], %[ftmp6] \n\t" "paddusb %[ftmp1], %[ftmp1], %[ftmp1] \n\t" "pasubub %[ftmp2], %[ftmp4], %[ftmp7] \n\t" - "and %[ftmp2], %[ftmp2], %[ff_pb_fe] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 %[tmp0], %[ftmp10] \n\t" + "pand %[ftmp2], %[ftmp2], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp10] \n\t" "psrlh %[ftmp2], %[ftmp2], %[ftmp10] \n\t" "paddusb %[ftmp1], %[ftmp1], %[ftmp2] \n\t" "gsldlc1 %[ftmp10], 0x07(%[blimit]) \n\t" "gsldrc1 %[ftmp10], 0x00(%[blimit]) \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp10] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" - "xor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" "pcmpeqb %[ftmp0], %[ftmp0], %[ftmp10] \n\t" "gsldlc1 %[ftmp10], 0x07(%[thresh]) \n\t" @@ -108,72 +105,72 @@ void vp8_loop_filter_horizontal_edge_mmi( "psubusb %[ftmp1], %[ftmp9], %[ftmp10] \n\t" "psubusb %[ftmp2], %[ftmp11], %[ftmp10] \n\t" "paddb %[ftmp1], %[ftmp1], %[ftmp2] \n\t" - "xor %[ftmp2], %[ftmp2], %[ftmp2] \n\t" + "pxor %[ftmp2], %[ftmp2], %[ftmp2] \n\t" "pcmpeqb %[ftmp1], %[ftmp1], %[ftmp2] \n\t" "pcmpeqb %[ftmp2], %[ftmp2], %[ftmp2] \n\t" - "xor %[ftmp1], %[ftmp1], %[ftmp2] \n\t" + "pxor %[ftmp1], %[ftmp1], %[ftmp2] \n\t" - "xor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" - "xor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" - "xor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" + "pxor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" "psubsb %[ftmp2], %[ftmp4], 
%[ftmp7] \n\t" - "and %[ftmp2], %[ftmp2], %[ftmp1] \n\t" + "pand %[ftmp2], %[ftmp2], %[ftmp1] \n\t" "psubsb %[ftmp3], %[ftmp6], %[ftmp5] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp3] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp3] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp3] \n\t" - "and %[ftmp2], %[ftmp2], %[ftmp0] \n\t" + "pand %[ftmp2], %[ftmp2], %[ftmp0] \n\t" "paddsb %[ftmp8], %[ftmp2], %[ff_pb_03] \n\t" "paddsb %[ftmp9], %[ftmp2], %[ff_pb_04] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp11], %[ftmp11], %[ftmp11] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp11], %[ftmp11], %[ftmp11] \n\t" "punpcklbh %[ftmp0], %[ftmp0], %[ftmp8] \n\t" "punpckhbh %[ftmp11], %[ftmp11], %[ftmp8] \n\t" - "li %[tmp0], 0x0b \n\t" - "mtc1 %[tmp0], %[ftmp10] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp10] \n\t" "psrah %[ftmp0], %[ftmp0], %[ftmp10] \n\t" "psrah %[ftmp11], %[ftmp11], %[ftmp10] \n\t" "packsshb %[ftmp8], %[ftmp0], %[ftmp11] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "punpcklbh %[ftmp0], %[ftmp0], %[ftmp9] \n\t" "psrah %[ftmp0], %[ftmp0], %[ftmp10] \n\t" - "xor %[ftmp11], %[ftmp11], %[ftmp11] \n\t" + "pxor %[ftmp11], %[ftmp11], %[ftmp11] \n\t" "punpckhbh %[ftmp9], %[ftmp11], %[ftmp9] \n\t" "psrah %[ftmp9], %[ftmp9], %[ftmp10] \n\t" "paddsh %[ftmp11], %[ftmp0], %[ff_ph_01] \n\t" "packsshb %[ftmp0], %[ftmp0], %[ftmp9] \n\t" "paddsh %[ftmp9], %[ftmp9], %[ff_ph_01] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 %[tmp0], %[ftmp10] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp10] \n\t" "psrah %[ftmp11], %[ftmp11], %[ftmp10] \n\t" "psrah %[ftmp9], %[ftmp9], %[ftmp10] \n\t" "packsshb %[ftmp11], %[ftmp11], %[ftmp9] \n\t" "pandn %[ftmp1], %[ftmp1], %[ftmp11] \n\t" "paddsb %[ftmp5], %[ftmp5], %[ftmp8] \n\t" - "xor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" MMI_SUBU(%[addr1], %[src_ptr], %[src_pixel_step]) "gssdlc1 %[ftmp5], 0x07(%[addr1]) \n\t" "gssdrc1 
%[ftmp5], 0x00(%[addr1]) \n\t" MMI_SUBU(%[addr1], %[src_ptr], %[src_pixel_step_x2]) "paddsb %[ftmp4], %[ftmp4], %[ftmp1] \n\t" - "xor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" "gssdlc1 %[ftmp4], 0x07(%[addr1]) \n\t" "gssdrc1 %[ftmp4], 0x00(%[addr1]) \n\t" "psubsb %[ftmp6], %[ftmp6], %[ftmp0] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" "gssdlc1 %[ftmp6], 0x07(%[src_ptr]) \n\t" "gssdrc1 %[ftmp6], 0x00(%[src_ptr]) \n\t" "psubsb %[ftmp7], %[ftmp7], %[ftmp1] \n\t" - "xor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" + "pxor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" "gssdlc1 %[ftmp7], 0x07(%[addr0]) \n\t" "gssdrc1 %[ftmp7], 0x00(%[addr0]) \n\t" @@ -188,17 +185,18 @@ void vp8_loop_filter_horizontal_edge_mmi( [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), [tmp0]"=&r"(tmp[0]), [addr0]"=&r"(addr[0]), [addr1]"=&r"(addr[1]), - [src_ptr]"+&r"(src_ptr), [count]"+&r"(count) + [src_ptr]"+&r"(src_ptr), [count]"+&r"(count), + [ff_ph_01]"=&f"(ff_ph_01), [ff_pb_fe]"=&f"(ff_pb_fe), + [ff_pb_80]"=&f"(ff_pb_80), [ff_pb_04]"=&f"(ff_pb_04), + [ff_pb_03]"=&f"(ff_pb_03) : [limit]"r"(limit), [blimit]"r"(blimit), [thresh]"r"(thresh), [src_pixel_step]"r"((mips_reg)src_pixel_step), [src_pixel_step_x2]"r"((mips_reg)(src_pixel_step<<1)), - [src_pixel_step_x4]"r"((mips_reg)(src_pixel_step<<2)), - [ff_ph_01]"f"(ff_ph_01), [ff_pb_fe]"f"(ff_pb_fe), - [ff_pb_80]"f"(ff_pb_80), [ff_pb_04]"f"(ff_pb_04), - [ff_pb_03]"f"(ff_pb_03) + [src_pixel_step_x4]"r"((mips_reg)(src_pixel_step<<2)) : "memory" ); + /* clang-format on */ } void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, @@ -206,11 +204,23 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, const unsigned char *blimit, const unsigned char *limit, const unsigned char *thresh, int count) { - uint32_t tmp[1]; + uint64_t tmp[1]; mips_reg addr[2]; double ftmp[13]; + double ff_pb_fe, ff_ph_01, ff_pb_03, ff_pb_04, ff_pb_80; + /* clang-format off */ 
__asm__ volatile ( + "dli %[tmp0], 0xfefefefefefefefe \n\t" + "dmtc1 %[tmp0], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ff_ph_01] \n\t" + "dli %[tmp0], 0x0303030303030303 \n\t" + "dmtc1 %[tmp0], %[ff_pb_03] \n\t" + "dli %[tmp0], 0x0404040404040404 \n\t" + "dmtc1 %[tmp0], %[ff_pb_04] \n\t" + "dli %[tmp0], 0x8080808080808080 \n\t" + "dmtc1 %[tmp0], %[ff_pb_80] \n\t" MMI_SLL(%[tmp0], %[src_pixel_step], 0x02) MMI_ADDU(%[src_ptr], %[src_ptr], %[tmp0]) MMI_SUBU(%[src_ptr], %[src_ptr], 0x04) @@ -288,23 +298,23 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, /* abs (q2-q1) */ "pasubub %[ftmp7], %[ftmp11], %[ftmp10] \n\t" "psubusb %[ftmp7], %[ftmp7], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* ftmp3: abs(q1-q0) */ "pasubub %[ftmp3], %[ftmp10], %[ftmp9] \n\t" "psubusb %[ftmp7], %[ftmp3], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* ftmp4: abs(p1-p0) */ "pasubub %[ftmp4], %[ftmp5], %[ftmp6] \n\t" "psubusb %[ftmp7], %[ftmp4], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* abs (p2-p1) */ "pasubub %[ftmp7], %[ftmp2], %[ftmp5] \n\t" "psubusb %[ftmp7], %[ftmp7], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* abs (p3-p2) */ "pasubub %[ftmp7], %[ftmp1], %[ftmp2] \n\t" "psubusb %[ftmp7], %[ftmp7], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" "gsldlc1 %[ftmp8], 0x07(%[blimit]) \n\t" "gsldrc1 %[ftmp8], 0x00(%[blimit]) \n\t" @@ -314,14 +324,14 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, "paddusb %[ftmp11], %[ftmp11], %[ftmp11] \n\t" /* abs (p1-q1) */ "pasubub %[ftmp12], %[ftmp10], %[ftmp5] \n\t" - "and %[ftmp12], %[ftmp12], %[ff_pb_fe] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 %[tmp0], %[ftmp1] \n\t" + "pand %[ftmp12], %[ftmp12], %[ff_pb_fe] 
\n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp1] \n\t" "psrlh %[ftmp12], %[ftmp12], %[ftmp1] \n\t" "paddusb %[ftmp1], %[ftmp11], %[ftmp12] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" - "xor %[ftmp1], %[ftmp1], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "pxor %[ftmp1], %[ftmp1], %[ftmp1] \n\t" /* ftmp0:mask */ "pcmpeqb %[ftmp0], %[ftmp0], %[ftmp1] \n\t" @@ -331,41 +341,41 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, /* ftmp3: abs(q1-q0) ftmp4: abs(p1-p0) */ "psubusb %[ftmp4], %[ftmp4], %[ftmp8] \n\t" "psubusb %[ftmp3], %[ftmp3], %[ftmp8] \n\t" - "or %[ftmp2], %[ftmp4], %[ftmp3] \n\t" + "por %[ftmp2], %[ftmp4], %[ftmp3] \n\t" "pcmpeqb %[ftmp2], %[ftmp2], %[ftmp1] \n\t" "pcmpeqb %[ftmp1], %[ftmp1], %[ftmp1] \n\t" /* ftmp1:hev */ - "xor %[ftmp1], %[ftmp2], %[ftmp1] \n\t" + "pxor %[ftmp1], %[ftmp2], %[ftmp1] \n\t" - "xor %[ftmp10], %[ftmp10], %[ff_pb_80] \n\t" - "xor %[ftmp9], %[ftmp9], %[ff_pb_80] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" - "xor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ff_pb_80] \n\t" + "pxor %[ftmp9], %[ftmp9], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" "psubsb %[ftmp2], %[ftmp5], %[ftmp10] \n\t" - "and %[ftmp2], %[ftmp2], %[ftmp1] \n\t" + "pand %[ftmp2], %[ftmp2], %[ftmp1] \n\t" "psubsb %[ftmp3], %[ftmp9], %[ftmp6] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp3] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp3] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp3] \n\t" /* ftmp2:filter_value */ - "and %[ftmp2], %[ftmp2], %[ftmp0] \n\t" + "pand %[ftmp2], %[ftmp2], %[ftmp0] \n\t" "paddsb %[ftmp11], %[ftmp2], %[ff_pb_04] \n\t" "paddsb %[ftmp12], %[ftmp2], %[ff_pb_03] \n\t" - "li %[tmp0], 0x0b \n\t" - "mtc1 %[tmp0], %[ftmp7] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp7] \n\t" + "pxor 
%[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" "punpcklbh %[ftmp0], %[ftmp0], %[ftmp12] \n\t" "punpckhbh %[ftmp8], %[ftmp8], %[ftmp12] \n\t" "psrah %[ftmp0], %[ftmp0], %[ftmp7] \n\t" "psrah %[ftmp8], %[ftmp8], %[ftmp7] \n\t" "packsshb %[ftmp12], %[ftmp0], %[ftmp8] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" "punpcklbh %[ftmp0], %[ftmp0], %[ftmp11] \n\t" "punpckhbh %[ftmp8], %[ftmp8], %[ftmp11] \n\t" "psrah %[ftmp0], %[ftmp0], %[ftmp7] \n\t" @@ -373,22 +383,22 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, "packsshb %[ftmp11], %[ftmp0], %[ftmp8] \n\t" "psubsb %[ftmp9], %[ftmp9], %[ftmp11] \n\t" - "xor %[ftmp9], %[ftmp9], %[ff_pb_80] \n\t" + "pxor %[ftmp9], %[ftmp9], %[ff_pb_80] \n\t" "paddsb %[ftmp6], %[ftmp6], %[ftmp12] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" "paddsh %[ftmp0], %[ftmp0], %[ff_ph_01] \n\t" "paddsh %[ftmp8], %[ftmp8], %[ff_ph_01] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 %[tmp0], %[ftmp7] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp7] \n\t" "psrah %[ftmp0], %[ftmp0], %[ftmp7] \n\t" "psrah %[ftmp8], %[ftmp8], %[ftmp7] \n\t" "packsshb %[ftmp2], %[ftmp0], %[ftmp8] \n\t" "pandn %[ftmp2], %[ftmp1], %[ftmp2] \n\t" "psubsb %[ftmp10], %[ftmp10], %[ftmp2] \n\t" - "xor %[ftmp10], %[ftmp10], %[ff_pb_80] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ff_pb_80] \n\t" "paddsb %[ftmp5], %[ftmp5], %[ftmp2] \n\t" - "xor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" /* ftmp5: *op1 ; ftmp6: *op0 */ "punpcklbh %[ftmp2], %[ftmp5], %[ftmp6] \n\t" @@ -408,7 +418,7 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp9] \n\t" - "dsrl %[ftmp2], %[ftmp2], %[ftmp9] \n\t" + "ssrld %[ftmp2], %[ftmp2], %[ftmp9] \n\t" MMI_SLL(%[tmp0], 
%[src_pixel_step], 0x02) MMI_SUBU(%[addr1], %[addr0], %[tmp0]) "gsswlc1 %[ftmp2], 0x05(%[addr1]) \n\t" @@ -419,21 +429,21 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, "gsswlc1 %[ftmp6], 0x05(%[addr1]) \n\t" "gsswrc1 %[ftmp6], 0x02(%[addr1]) \n\t" - "dsrl %[ftmp6], %[ftmp6], %[ftmp9] \n\t" + "ssrld %[ftmp6], %[ftmp6], %[ftmp9] \n\t" MMI_SUBU(%[addr1], %[src_ptr], %[src_pixel_step]) "gsswlc1 %[ftmp6], 0x05(%[addr1]) \n\t" "gsswrc1 %[ftmp6], 0x02(%[addr1]) \n\t" "gsswlc1 %[ftmp1], 0x05(%[src_ptr]) \n\t" "gsswrc1 %[ftmp1], 0x02(%[src_ptr]) \n\t" - "dsrl %[ftmp1], %[ftmp1], %[ftmp9] \n\t" + "ssrld %[ftmp1], %[ftmp1], %[ftmp9] \n\t" "gsswlc1 %[ftmp1], 0x05(%[addr0]) \n\t" "gsswrc1 %[ftmp1], 0x02(%[addr0]) \n\t" MMI_ADDU(%[addr1], %[addr0], %[src_pixel_step]) "gsswlc1 %[ftmp5], 0x05(%[addr1]) \n\t" "gsswrc1 %[ftmp5], 0x02(%[addr1]) \n\t" - "dsrl %[ftmp5], %[ftmp5], %[ftmp9] \n\t" + "ssrld %[ftmp5], %[ftmp5], %[ftmp9] \n\t" MMI_ADDU(%[addr1], %[addr0], %[tmp0]) "gsswlc1 %[ftmp5], 0x05(%[addr1]) \n\t" "gsswrc1 %[ftmp5], 0x02(%[addr1]) \n\t" @@ -450,15 +460,16 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), [ftmp12]"=&f"(ftmp[12]), [tmp0]"=&r"(tmp[0]), [addr0]"=&r"(addr[0]), [addr1]"=&r"(addr[1]), - [src_ptr]"+&r"(src_ptr), [count]"+&r"(count) + [src_ptr]"+&r"(src_ptr), [count]"+&r"(count), + [ff_ph_01]"=&f"(ff_ph_01), [ff_pb_03]"=&f"(ff_pb_03), + [ff_pb_04]"=&f"(ff_pb_04), [ff_pb_80]"=&f"(ff_pb_80), + [ff_pb_fe]"=&f"(ff_pb_fe) : [limit]"r"(limit), [blimit]"r"(blimit), [thresh]"r"(thresh), - [src_pixel_step]"r"((mips_reg)src_pixel_step), - [ff_ph_01]"f"(ff_ph_01), [ff_pb_03]"f"(ff_pb_03), - [ff_pb_04]"f"(ff_pb_04), [ff_pb_80]"f"(ff_pb_80), - [ff_pb_fe]"f"(ff_pb_fe) + [src_pixel_step]"r"((mips_reg)src_pixel_step) : "memory" ); + /* clang-format on */ } /* clang-format off */ @@ -484,10 +495,29 @@ void vp8_loop_filter_vertical_edge_mmi(unsigned char *src_ptr, void 
vp8_mbloop_filter_horizontal_edge_mmi( unsigned char *src_ptr, int src_pixel_step, const unsigned char *blimit, const unsigned char *limit, const unsigned char *thresh, int count) { - uint32_t tmp[1]; + uint64_t tmp[1]; double ftmp[13]; + double ff_pb_fe, ff_pb_80, ff_pb_04, ff_pb_03, ff_ph_003f, ff_ph_0900, + ff_ph_1200, ff_ph_1b00; + /* clang-format off */ __asm__ volatile ( + "dli %[tmp0], 0xfefefefefefefefe \n\t" + "dmtc1 %[tmp0], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x8080808080808080 \n\t" + "dmtc1 %[tmp0], %[ff_pb_80] \n\t" + "dli %[tmp0], 0x0404040404040404 \n\t" + "dmtc1 %[tmp0], %[ff_pb_04] \n\t" + "dli %[tmp0], 0x0303030303030303 \n\t" + "dmtc1 %[tmp0], %[ff_pb_03] \n\t" + "dli %[tmp0], 0x003f003f003f003f \n\t" + "dmtc1 %[tmp0], %[ff_ph_003f] \n\t" + "dli %[tmp0], 0x0900090009000900 \n\t" + "dmtc1 %[tmp0], %[ff_ph_0900] \n\t" + "dli %[tmp0], 0x1200120012001200 \n\t" + "dmtc1 %[tmp0], %[ff_ph_1200] \n\t" + "dli %[tmp0], 0x1b001b001b001b00 \n\t" + "dmtc1 %[tmp0], %[ff_ph_1b00] \n\t" MMI_SLL(%[tmp0], %[src_pixel_step], 0x02) MMI_SUBU(%[src_ptr], %[src_ptr], %[tmp0]) "1: \n\t" @@ -532,31 +562,31 @@ void vp8_mbloop_filter_horizontal_edge_mmi( "psubusb %[ftmp0], %[ftmp0], %[ftmp9] \n\t" "pasubub %[ftmp1], %[ftmp3], %[ftmp4] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp9] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" "pasubub %[ftmp10], %[ftmp4], %[ftmp5] \n\t" "psubusb %[ftmp1], %[ftmp10], %[ftmp9] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" "pasubub %[ftmp11], %[ftmp7], %[ftmp6] \n\t" "psubusb %[ftmp1], %[ftmp11], %[ftmp9] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" "pasubub %[ftmp1], %[ftmp8], %[ftmp7] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp9] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" "pasubub %[ftmp1], %[ftmp2], %[ftmp8] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp9] \n\t" - "or %[ftmp0], 
%[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" "pasubub %[ftmp1], %[ftmp5], %[ftmp6] \n\t" "paddusb %[ftmp1], %[ftmp1], %[ftmp1] \n\t" "pasubub %[ftmp2], %[ftmp4], %[ftmp7] \n\t" - "and %[ftmp2], %[ftmp2], %[ff_pb_fe] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "pand %[ftmp2], %[ftmp2], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "psrlh %[ftmp2], %[ftmp2], %[ftmp9] \n\t" "paddusb %[ftmp1], %[ftmp1], %[ftmp2] \n\t" "psubusb %[ftmp1], %[ftmp1], %[ftmp12] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" - "xor %[ftmp9], %[ftmp9], %[ftmp9] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "pxor %[ftmp9], %[ftmp9], %[ftmp9] \n\t" /* ftmp0: mask */ "pcmpeqb %[ftmp0], %[ftmp0], %[ftmp9] \n\t" @@ -565,27 +595,27 @@ void vp8_mbloop_filter_horizontal_edge_mmi( "psubusb %[ftmp1], %[ftmp10], %[ftmp9] \n\t" "psubusb %[ftmp2], %[ftmp11], %[ftmp9] \n\t" "paddb %[ftmp1], %[ftmp1], %[ftmp2] \n\t" - "xor %[ftmp2], %[ftmp2], %[ftmp2] \n\t" + "pxor %[ftmp2], %[ftmp2], %[ftmp2] \n\t" "pcmpeqb %[ftmp1], %[ftmp1], %[ftmp2] \n\t" "pcmpeqb %[ftmp2], %[ftmp2], %[ftmp2] \n\t" /* ftmp1: hev */ - "xor %[ftmp1], %[ftmp1], %[ftmp2] \n\t" + "pxor %[ftmp1], %[ftmp1], %[ftmp2] \n\t" - "xor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" - "xor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" - "xor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" + "pxor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" "psubsb %[ftmp2], %[ftmp4], %[ftmp7] \n\t" "psubsb %[ftmp9], %[ftmp6], %[ftmp5] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp9] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp9] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp9] \n\t" - "and %[ftmp2], %[ftmp2], %[ftmp0] \n\t" + "pand %[ftmp2], %[ftmp2], %[ftmp0] \n\t" "pandn %[ftmp12], %[ftmp1], %[ftmp2] \n\t" - "and %[ftmp2], %[ftmp2], %[ftmp1] \n\t" + "pand 
%[ftmp2], %[ftmp2], %[ftmp1] \n\t" - "li %[tmp0], 0x0b \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "paddsb %[ftmp0], %[ftmp2], %[ff_pb_03] \n\t" VP8_MBLOOP_HPSRAB "paddsb %[ftmp5], %[ftmp5], %[ftmp0] \n\t" @@ -593,15 +623,15 @@ void vp8_mbloop_filter_horizontal_edge_mmi( VP8_MBLOOP_HPSRAB "psubsb %[ftmp6], %[ftmp6], %[ftmp0] \n\t" - "li %[tmp0], 0x07 \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "dli %[tmp0], 0x07 \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" VP8_MBLOOP_HPSRAB_ADD(%[ff_ph_1b00]) "psubsb %[ftmp6], %[ftmp6], %[ftmp1] \n\t" "paddsb %[ftmp5], %[ftmp5], %[ftmp1] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" - "xor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" MMI_SLL(%[tmp0], %[src_pixel_step], 0x02) MMI_SUBU(%[src_ptr], %[src_ptr], %[tmp0]) "gssdlc1 %[ftmp5], 0x07(%[src_ptr]) \n\t" @@ -613,8 +643,8 @@ void vp8_mbloop_filter_horizontal_edge_mmi( VP8_MBLOOP_HPSRAB_ADD(%[ff_ph_1200]) "paddsb %[ftmp4], %[ftmp4], %[ftmp1] \n\t" "psubsb %[ftmp7], %[ftmp7], %[ftmp1] \n\t" - "xor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" - "xor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" + "pxor %[ftmp4], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" MMI_ADDU(%[src_ptr], %[src_ptr], %[src_pixel_step]) "gssdlc1 %[ftmp7], 0x07(%[src_ptr]) \n\t" "gssdrc1 %[ftmp7], 0x00(%[src_ptr]) \n\t" @@ -624,12 +654,12 @@ void vp8_mbloop_filter_horizontal_edge_mmi( "gssdrc1 %[ftmp4], 0x00(%[src_ptr]) \n\t" VP8_MBLOOP_HPSRAB_ADD(%[ff_ph_0900]) - "xor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" - "xor %[ftmp8], %[ftmp8], %[ff_pb_80] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ff_pb_80] \n\t" "paddsb %[ftmp3], %[ftmp3], %[ftmp1] \n\t" "psubsb %[ftmp8], %[ftmp8], %[ftmp1] \n\t" - "xor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" - "xor 
%[ftmp8], %[ftmp8], %[ff_pb_80] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ff_pb_80] \n\t" MMI_ADDU(%[src_ptr], %[src_ptr], %[tmp0]) "gssdlc1 %[ftmp8], 0x07(%[src_ptr]) \n\t" "gssdrc1 %[ftmp8], 0x00(%[src_ptr]) \n\t" @@ -649,21 +679,23 @@ void vp8_mbloop_filter_horizontal_edge_mmi( [ftmp8]"=&f"(ftmp[8]), [ftmp9]"=&f"(ftmp[9]), [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), [ftmp12]"=&f"(ftmp[12]), [tmp0]"=&r"(tmp[0]), - [src_ptr]"+&r"(src_ptr), [count]"+&r"(count) + [src_ptr]"+&r"(src_ptr), [count]"+&r"(count), + [ff_pb_fe]"=&f"(ff_pb_fe), [ff_pb_80]"=&f"(ff_pb_80), + [ff_pb_04]"=&f"(ff_pb_04), [ff_pb_03]"=&f"(ff_pb_03), + [ff_ph_0900]"=&f"(ff_ph_0900), [ff_ph_1b00]"=&f"(ff_ph_1b00), + [ff_ph_1200]"=&f"(ff_ph_1200), [ff_ph_003f]"=&f"(ff_ph_003f) : [limit]"r"(limit), [blimit]"r"(blimit), [thresh]"r"(thresh), - [src_pixel_step]"r"((mips_reg)src_pixel_step), - [ff_pb_fe]"f"(ff_pb_fe), [ff_pb_80]"f"(ff_pb_80), - [ff_pb_04]"f"(ff_pb_04), [ff_pb_03]"f"(ff_pb_03), - [ff_ph_0900]"f"(ff_ph_0900), [ff_ph_1b00]"f"(ff_ph_1b00), - [ff_ph_1200]"f"(ff_ph_1200), [ff_ph_003f]"f"(ff_ph_003f) + [src_pixel_step]"r"((mips_reg)src_pixel_step) : "memory" ); + /* clang-format on */ } +/* clang-format off */ #define VP8_MBLOOP_VPSRAB_ADDH \ - "xor %[ftmp7], %[ftmp7], %[ftmp7] \n\t" \ - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" \ + "pxor %[ftmp7], %[ftmp7], %[ftmp7] \n\t" \ + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" \ "punpcklbh %[ftmp7], %[ftmp7], %[ftmp0] \n\t" \ "punpckhbh %[ftmp8], %[ftmp8], %[ftmp0] \n\t" @@ -673,15 +705,30 @@ void vp8_mbloop_filter_horizontal_edge_mmi( "psrah %[ftmp7], %[ftmp7], %[ftmp12] \n\t" \ "psrah %[ftmp8], %[ftmp8], %[ftmp12] \n\t" \ "packsshb %[ftmp3], %[ftmp7], %[ftmp8] \n\t" +/* clang-format on */ void vp8_mbloop_filter_vertical_edge_mmi( unsigned char *src_ptr, int src_pixel_step, const unsigned char *blimit, const unsigned char *limit, const unsigned char *thresh, int count) { mips_reg tmp[1]; - DECLARE_ALIGNED(8, 
const uint64_t, srct[1]); + DECLARE_ALIGNED(8, const uint64_t, srct[2]); double ftmp[14]; + double ff_ph_003f, ff_ph_0900, ff_pb_fe, ff_pb_80, ff_pb_04, ff_pb_03; + /* clang-format off */ __asm__ volatile ( + "dli %[tmp0], 0x003f003f003f003f \n\t" + "dmtc1 %[tmp0], %[ff_ph_003f] \n\t" + "dli %[tmp0], 0x0900090009000900 \n\t" + "dmtc1 %[tmp0], %[ff_ph_0900] \n\t" + "dli %[tmp0], 0xfefefefefefefefe \n\t" + "dmtc1 %[tmp0], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x8080808080808080 \n\t" + "dmtc1 %[tmp0], %[ff_pb_80] \n\t" + "dli %[tmp0], 0x0404040404040404 \n\t" + "dmtc1 %[tmp0], %[ff_pb_04] \n\t" + "dli %[tmp0], 0x0303030303030303 \n\t" + "dmtc1 %[tmp0], %[ff_pb_03] \n\t" MMI_SUBU(%[src_ptr], %[src_ptr], 0x04) "1: \n\t" @@ -755,23 +802,23 @@ void vp8_mbloop_filter_vertical_edge_mmi( /* abs (q2-q1) */ "pasubub %[ftmp7], %[ftmp11], %[ftmp10] \n\t" "psubusb %[ftmp7], %[ftmp7], %[ftmp13] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* ftmp3: abs(q1-q0) */ "pasubub %[ftmp3], %[ftmp10], %[ftmp9] \n\t" "psubusb %[ftmp7], %[ftmp3], %[ftmp13] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* ftmp4: abs(p1-p0) */ "pasubub %[ftmp4], %[ftmp5], %[ftmp6] \n\t" "psubusb %[ftmp7], %[ftmp4], %[ftmp13] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* abs (p2-p1) */ "pasubub %[ftmp7], %[ftmp2], %[ftmp5] \n\t" "psubusb %[ftmp7], %[ftmp7], %[ftmp13] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" /* abs (p3-p2) */ "pasubub %[ftmp7], %[ftmp1], %[ftmp2] \n\t" "psubusb %[ftmp7], %[ftmp7], %[ftmp13] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" "gsldlc1 %[ftmp13], 0x07(%[blimit]) \n\t" "gsldrc1 %[ftmp13], 0x00(%[blimit]) \n\t" @@ -782,14 +829,14 @@ void vp8_mbloop_filter_vertical_edge_mmi( "paddusb %[ftmp1], %[ftmp1], %[ftmp1] \n\t" /* abs (p1-q1) / 2 */ "pasubub %[ftmp12], %[ftmp10], 
%[ftmp5] \n\t" - "and %[ftmp12], %[ftmp12], %[ff_pb_fe] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 %[tmp0], %[ftmp8] \n\t" + "pand %[ftmp12], %[ftmp12], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp8] \n\t" "psrlh %[ftmp12], %[ftmp12], %[ftmp8] \n\t" "paddusb %[ftmp12], %[ftmp1], %[ftmp12] \n\t" "psubusb %[ftmp12], %[ftmp12], %[ftmp13] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp12] \n\t" - "xor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp12] \n\t" + "pxor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" /* ftmp0: mask */ "pcmpeqb %[ftmp0], %[ftmp0], %[ftmp12] \n\t" @@ -797,19 +844,19 @@ void vp8_mbloop_filter_vertical_edge_mmi( "psubusb %[ftmp4], %[ftmp4], %[ftmp7] \n\t" /* abs(q1-q0) - thresh */ "psubusb %[ftmp3], %[ftmp3], %[ftmp7] \n\t" - "or %[ftmp3], %[ftmp4], %[ftmp3] \n\t" + "por %[ftmp3], %[ftmp4], %[ftmp3] \n\t" "pcmpeqb %[ftmp3], %[ftmp3], %[ftmp12] \n\t" "pcmpeqb %[ftmp1], %[ftmp1], %[ftmp1] \n\t" /* ftmp1: hev */ - "xor %[ftmp1], %[ftmp3], %[ftmp1] \n\t" + "pxor %[ftmp1], %[ftmp3], %[ftmp1] \n\t" /* ftmp2:ps2, ftmp5:ps1, ftmp6:ps0, ftmp9:qs0, ftmp10:qs1, ftmp11:qs2 */ - "xor %[ftmp11], %[ftmp11], %[ff_pb_80] \n\t" - "xor %[ftmp10], %[ftmp10], %[ff_pb_80] \n\t" - "xor %[ftmp9], %[ftmp9], %[ff_pb_80] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" - "xor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" - "xor %[ftmp2], %[ftmp2], %[ff_pb_80] \n\t" + "pxor %[ftmp11], %[ftmp11], %[ff_pb_80] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ff_pb_80] \n\t" + "pxor %[ftmp9], %[ftmp9], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ff_pb_80] \n\t" + "pxor %[ftmp2], %[ftmp2], %[ff_pb_80] \n\t" "psubsb %[ftmp3], %[ftmp5], %[ftmp10] \n\t" "psubsb %[ftmp4], %[ftmp9], %[ftmp6] \n\t" @@ -817,15 +864,15 @@ void vp8_mbloop_filter_vertical_edge_mmi( "paddsb %[ftmp3], %[ftmp3], %[ftmp4] \n\t" "paddsb %[ftmp3], %[ftmp3], %[ftmp4] \n\t" /* filter_value &= mask */ - "and %[ftmp0], %[ftmp0], %[ftmp3] \n\t" + "pand 
%[ftmp0], %[ftmp0], %[ftmp3] \n\t" /* Filter2 = filter_value & hev */ - "and %[ftmp3], %[ftmp1], %[ftmp0] \n\t" + "pand %[ftmp3], %[ftmp1], %[ftmp0] \n\t" /* filter_value &= ~hev */ "pandn %[ftmp0], %[ftmp1], %[ftmp0] \n\t" "paddsb %[ftmp4], %[ftmp3], %[ff_pb_04] \n\t" - "li %[tmp0], 0x0b \n\t" - "mtc1 %[tmp0], %[ftmp12] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" "punpcklbh %[ftmp7], %[ftmp7], %[ftmp4] \n\t" "punpckhbh %[ftmp8], %[ftmp8], %[ftmp4] \n\t" "psrah %[ftmp7], %[ftmp7], %[ftmp12] \n\t" @@ -842,8 +889,8 @@ void vp8_mbloop_filter_vertical_edge_mmi( /* ftmp6: ps0 */ "paddsb %[ftmp6], %[ftmp6], %[ftmp3] \n\t" - "li %[tmp0], 0x07 \n\t" - "mtc1 %[tmp0], %[ftmp12] \n\t" + "dli %[tmp0], 0x07 \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" VP8_MBLOOP_VPSRAB_ADDH "paddh %[ftmp1], %[ff_ph_0900], %[ff_ph_0900] \n\t" "paddh %[ftmp1], %[ftmp1], %[ff_ph_0900] \n\t" @@ -852,10 +899,10 @@ void vp8_mbloop_filter_vertical_edge_mmi( VP8_MBLOOP_VPSRAB_ADDT "psubsb %[ftmp4], %[ftmp9], %[ftmp3] \n\t" /* ftmp9: oq0 */ - "xor %[ftmp9], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp9], %[ftmp4], %[ff_pb_80] \n\t" "paddsb %[ftmp4], %[ftmp6], %[ftmp3] \n\t" /* ftmp6: op0 */ - "xor %[ftmp6], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp4], %[ff_pb_80] \n\t" VP8_MBLOOP_VPSRAB_ADDH "paddh %[ftmp1], %[ff_ph_0900], %[ff_ph_0900] \n\t" @@ -864,10 +911,10 @@ void vp8_mbloop_filter_vertical_edge_mmi( VP8_MBLOOP_VPSRAB_ADDT "psubsb %[ftmp4], %[ftmp10], %[ftmp3] \n\t" /* ftmp10: oq1 */ - "xor %[ftmp10], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp10], %[ftmp4], %[ff_pb_80] \n\t" "paddsb %[ftmp4], %[ftmp5], %[ftmp3] \n\t" /* ftmp5: op1 */ - "xor %[ftmp5], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp5], %[ftmp4], %[ff_pb_80] \n\t" VP8_MBLOOP_VPSRAB_ADDH "pmulhh %[ftmp7], %[ftmp7], %[ff_ph_0900] \n\t" @@ -875,10 +922,10 @@ void vp8_mbloop_filter_vertical_edge_mmi( VP8_MBLOOP_VPSRAB_ADDT "psubsb %[ftmp4], %[ftmp11], %[ftmp3] \n\t" /* ftmp11: oq2 */ - "xor %[ftmp11], %[ftmp4], 
%[ff_pb_80] \n\t" + "pxor %[ftmp11], %[ftmp4], %[ff_pb_80] \n\t" "paddsb %[ftmp4], %[ftmp2], %[ftmp3] \n\t" /* ftmp2: op2 */ - "xor %[ftmp2], %[ftmp4], %[ff_pb_80] \n\t" + "pxor %[ftmp2], %[ftmp4], %[ff_pb_80] \n\t" "ldc1 %[ftmp12], 0x00(%[srct]) \n\t" "ldc1 %[ftmp8], 0x08(%[srct]) \n\t" @@ -948,41 +995,58 @@ void vp8_mbloop_filter_vertical_edge_mmi( [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), [ftmp12]"=&f"(ftmp[12]), [ftmp13]"=&f"(ftmp[13]), [tmp0]"=&r"(tmp[0]), [src_ptr]"+&r"(src_ptr), - [count]"+&r"(count) + [count]"+&r"(count), + [ff_ph_003f]"=&f"(ff_ph_003f), [ff_ph_0900]"=&f"(ff_ph_0900), + [ff_pb_03]"=&f"(ff_pb_03), [ff_pb_04]"=&f"(ff_pb_04), + [ff_pb_80]"=&f"(ff_pb_80), [ff_pb_fe]"=&f"(ff_pb_fe) : [limit]"r"(limit), [blimit]"r"(blimit), [srct]"r"(srct), [thresh]"r"(thresh), - [src_pixel_step]"r"((mips_reg)src_pixel_step), - [ff_ph_003f]"f"(ff_ph_003f), [ff_ph_0900]"f"(ff_ph_0900), - [ff_pb_03]"f"(ff_pb_03), [ff_pb_04]"f"(ff_pb_04), - [ff_pb_80]"f"(ff_pb_80), [ff_pb_fe]"f"(ff_pb_fe) + [src_pixel_step]"r"((mips_reg)src_pixel_step) : "memory" ); + /* clang-format on */ } +/* clang-format off */ #define VP8_SIMPLE_HPSRAB \ "psllh %[ftmp0], %[ftmp5], %[ftmp8] \n\t" \ "psrah %[ftmp0], %[ftmp0], %[ftmp9] \n\t" \ "psrlh %[ftmp0], %[ftmp0], %[ftmp8] \n\t" \ "psrah %[ftmp1], %[ftmp5], %[ftmp10] \n\t" \ "psllh %[ftmp1], %[ftmp1], %[ftmp8] \n\t" \ - "or %[ftmp0], %[ftmp0], %[ftmp1] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp1] \n\t" +/* clang-format on */ void vp8_loop_filter_simple_horizontal_edge_mmi(unsigned char *src_ptr, int src_pixel_step, const unsigned char *blimit) { - uint32_t tmp[1], count = 2; + uint64_t tmp[1], count = 2; mips_reg addr[2]; double ftmp[12]; + double ff_pb_fe, ff_pb_80, ff_pb_04, ff_pb_01; + /* clang-format off */ __asm__ volatile ( - "li %[tmp0], 0x08 \n\t" - "mtc1 %[tmp0], %[ftmp8] \n\t" - "li %[tmp0], 0x03 \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" - "li %[tmp0], 0x0b \n\t" - "mtc1 %[tmp0], %[ftmp10] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 
%[tmp0], %[ftmp11] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp10] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" + "dli %[tmp0], 0x08 \n\t" + "dmtc1 %[tmp0], %[ftmp8] \n\t" + "dli %[tmp0], 0x03 \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp10] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" + "dli %[tmp0], 0xfefefefefefefefe \n\t" + "dmtc1 %[tmp0], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x8080808080808080 \n\t" + "dmtc1 %[tmp0], %[ff_pb_80] \n\t" + "dli %[tmp0], 0x0404040404040404 \n\t" + "dmtc1 %[tmp0], %[ff_pb_04] \n\t" + "dli %[tmp0], 0x0101010101010101 \n\t" + "dmtc1 %[tmp0], %[ff_pb_01] \n\t" "1: \n\t" "gsldlc1 %[ftmp3], 0x07(%[blimit]) \n\t" @@ -996,7 +1060,7 @@ void vp8_loop_filter_simple_horizontal_edge_mmi(unsigned char *src_ptr, "gsldlc1 %[ftmp7], 0x07(%[addr0]) \n\t" "gsldrc1 %[ftmp7], 0x00(%[addr0]) \n\t" "pasubub %[ftmp1], %[ftmp7], %[ftmp2] \n\t" - "and %[ftmp1], %[ftmp1], %[ff_pb_fe] \n\t" + "pand %[ftmp1], %[ftmp1], %[ff_pb_fe] \n\t" "psrlh %[ftmp1], %[ftmp1], %[ftmp11] \n\t" MMI_SUBU(%[addr1], %[src_ptr], %[src_pixel_step]) @@ -1008,31 +1072,31 @@ void vp8_loop_filter_simple_horizontal_edge_mmi(unsigned char *src_ptr, "paddusb %[ftmp5], %[ftmp5], %[ftmp5] \n\t" "paddusb %[ftmp5], %[ftmp5], %[ftmp1] \n\t" "psubusb %[ftmp5], %[ftmp5], %[ftmp3] \n\t" - "xor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" "pcmpeqb %[ftmp5], %[ftmp5], %[ftmp3] \n\t" - "xor %[ftmp2], %[ftmp2], %[ff_pb_80] \n\t" - "xor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" + "pxor %[ftmp2], %[ftmp2], %[ff_pb_80] \n\t" + "pxor %[ftmp7], %[ftmp7], %[ff_pb_80] \n\t" "psubsb %[ftmp2], %[ftmp2], %[ftmp7] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" - "xor %[ftmp3], %[ftmp0], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp3], %[ftmp0], %[ff_pb_80] \n\t" "psubsb %[ftmp0], %[ftmp3], %[ftmp6] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp0] \n\t" 
"paddsb %[ftmp2], %[ftmp2], %[ftmp0] \n\t" "paddsb %[ftmp2], %[ftmp2], %[ftmp0] \n\t" - "and %[ftmp5], %[ftmp5], %[ftmp2] \n\t" + "pand %[ftmp5], %[ftmp5], %[ftmp2] \n\t" "paddsb %[ftmp5], %[ftmp5], %[ff_pb_04] \n\t" VP8_SIMPLE_HPSRAB "psubsb %[ftmp3], %[ftmp3], %[ftmp0] \n\t" - "xor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" "gssdlc1 %[ftmp3], 0x07(%[src_ptr]) \n\t" "gssdrc1 %[ftmp3], 0x00(%[src_ptr]) \n\t" "psubsb %[ftmp5], %[ftmp5], %[ff_pb_01] \n\t" VP8_SIMPLE_HPSRAB "paddsb %[ftmp6], %[ftmp6], %[ftmp0] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" MMI_SUBU(%[addr1], %[src_ptr], %[src_pixel_step]) "gssdlc1 %[ftmp6], 0x07(%[addr1]) \n\t" "gssdrc1 %[ftmp6], 0x00(%[addr1]) \n\t" @@ -1048,30 +1112,43 @@ void vp8_loop_filter_simple_horizontal_edge_mmi(unsigned char *src_ptr, [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), [tmp0]"=&r"(tmp[0]), [addr0]"=&r"(addr[0]), [addr1]"=&r"(addr[1]), - [src_ptr]"+&r"(src_ptr), [count]"+&r"(count) + [src_ptr]"+&r"(src_ptr), [count]"+&r"(count), + [ff_pb_fe]"=&f"(ff_pb_fe), [ff_pb_80]"=&f"(ff_pb_80), + [ff_pb_04]"=&f"(ff_pb_04), [ff_pb_01]"=&f"(ff_pb_01) : [blimit]"r"(blimit), [src_pixel_step]"r"((mips_reg)src_pixel_step), - [src_pixel_step_x2]"r"((mips_reg)(src_pixel_step<<1)), - [ff_pb_fe]"f"(ff_pb_fe), [ff_pb_80]"f"(ff_pb_80), - [ff_pb_04]"f"(ff_pb_04), [ff_pb_01]"f"(ff_pb_01) + [src_pixel_step_x2]"r"((mips_reg)(src_pixel_step<<1)) : "memory" ); + /* clang-format on */ } void vp8_loop_filter_simple_vertical_edge_mmi(unsigned char *src_ptr, int src_pixel_step, const unsigned char *blimit) { - uint32_t tmp[1], count = 2; + uint64_t tmp[1], count = 2; mips_reg addr[2]; - DECLARE_ALIGNED(8, const uint64_t, srct[1]); - double ftmp[12]; + DECLARE_ALIGNED(8, const uint64_t, srct[2]); + double ftmp[12], ff_pb_fe, ff_pb_80, ff_pb_04, ff_pb_01; + /* clang-format off */ __asm__ volatile ( - "li %[tmp0], 0x08 \n\t" - "mtc1 %[tmp0], %[ftmp8] \n\t" 
- "li %[tmp0], 0x20 \n\t" - "mtc1 %[tmp0], %[ftmp10] \n\t" - + "dli %[tmp0], 0x08 \n\t" + "dmtc1 %[tmp0], %[ftmp8] \n\t" + "dli %[tmp0], 0x20 \n\t" + "dmtc1 %[tmp0], %[ftmp10] \n\t" + "dli %[tmp0], 0x08 \n\t" + "dmtc1 %[tmp0], %[ftmp8] \n\t" + "dli %[tmp0], 0x20 \n\t" + "dmtc1 %[tmp0], %[ftmp10] \n\t" + "dli %[tmp0], 0xfefefefefefefefe \n\t" + "dmtc1 %[tmp0], %[ff_pb_fe] \n\t" + "dli %[tmp0], 0x8080808080808080 \n\t" + "dmtc1 %[tmp0], %[ff_pb_80] \n\t" + "dli %[tmp0], 0x0404040404040404 \n\t" + "dmtc1 %[tmp0], %[ff_pb_04] \n\t" + "dli %[tmp0], 0x0101010101010101 \n\t" + "dmtc1 %[tmp0], %[ff_pb_01] \n\t" MMI_ADDU(%[src_ptr], %[src_ptr], %[src_pixel_step_x4]) MMI_SUBU(%[src_ptr], %[src_ptr], 0x02) @@ -1118,10 +1195,10 @@ void vp8_loop_filter_simple_vertical_edge_mmi(unsigned char *src_ptr, "punpckhwd %[ftmp3], %[ftmp2], %[ftmp5] \n\t" "punpcklwd %[ftmp2], %[ftmp2], %[ftmp5] \n\t" - "li %[tmp0], 0x01 \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x01 \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "pasubub %[ftmp6], %[ftmp3], %[ftmp0] \n\t" - "and %[ftmp6], %[ftmp6], %[ff_pb_fe] \n\t" + "pand %[ftmp6], %[ftmp6], %[ff_pb_fe] \n\t" "psrlh %[ftmp6], %[ftmp6], %[ftmp9] \n\t" "pasubub %[ftmp5], %[ftmp1], %[ftmp2] \n\t" "paddusb %[ftmp5], %[ftmp5], %[ftmp5] \n\t" @@ -1130,53 +1207,53 @@ void vp8_loop_filter_simple_vertical_edge_mmi(unsigned char *src_ptr, "gsldlc1 %[ftmp7], 0x07(%[blimit]) \n\t" "gsldrc1 %[ftmp7], 0x00(%[blimit]) \n\t" "psubusb %[ftmp5], %[ftmp5], %[ftmp7] \n\t" - "xor %[ftmp7], %[ftmp7], %[ftmp7] \n\t" + "pxor %[ftmp7], %[ftmp7], %[ftmp7] \n\t" "pcmpeqb %[ftmp5], %[ftmp5], %[ftmp7] \n\t" "sdc1 %[ftmp0], 0x00(%[srct]) \n\t" "sdc1 %[ftmp3], 0x08(%[srct]) \n\t" - "xor %[ftmp0], %[ftmp0], %[ff_pb_80] \n\t" - "xor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ff_pb_80] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" "psubsb %[ftmp0], %[ftmp0], %[ftmp3] \n\t" - "xor %[ftmp6], %[ftmp1], %[ff_pb_80] \n\t" - "xor %[ftmp3], %[ftmp2], 
%[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp1], %[ff_pb_80] \n\t" + "pxor %[ftmp3], %[ftmp2], %[ff_pb_80] \n\t" "psubsb %[ftmp7], %[ftmp3], %[ftmp6] \n\t" "paddsb %[ftmp0], %[ftmp0], %[ftmp7] \n\t" "paddsb %[ftmp0], %[ftmp0], %[ftmp7] \n\t" "paddsb %[ftmp0], %[ftmp0], %[ftmp7] \n\t" - "and %[ftmp5], %[ftmp5], %[ftmp0] \n\t" + "pand %[ftmp5], %[ftmp5], %[ftmp0] \n\t" "paddsb %[ftmp5], %[ftmp5], %[ff_pb_04] \n\t" - "li %[tmp0], 0x03 \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x03 \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "psllh %[ftmp0], %[ftmp5], %[ftmp8] \n\t" "psrah %[ftmp0], %[ftmp0], %[ftmp9] \n\t" "psrlh %[ftmp0], %[ftmp0], %[ftmp8] \n\t" - "li %[tmp0], 0x0b \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "psrah %[ftmp7], %[ftmp5], %[ftmp9] \n\t" "psllh %[ftmp7], %[ftmp7], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp7] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp7] \n\t" "psubsb %[ftmp3], %[ftmp3], %[ftmp0] \n\t" - "xor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ff_pb_80] \n\t" "psubsb %[ftmp5], %[ftmp5], %[ff_pb_01] \n\t" - "li %[tmp0], 0x03 \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x03 \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "psllh %[ftmp0], %[ftmp5], %[ftmp8] \n\t" "psrah %[ftmp0], %[ftmp0], %[ftmp9] \n\t" "psrlh %[ftmp0], %[ftmp0], %[ftmp8] \n\t" - "li %[tmp0], 0x0b \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x0b \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "psrah %[ftmp5], %[ftmp5], %[ftmp9] \n\t" "psllh %[ftmp5], %[ftmp5], %[ftmp8] \n\t" - "or %[ftmp0], %[ftmp0], %[ftmp5] \n\t" + "por %[ftmp0], %[ftmp0], %[ftmp5] \n\t" "paddsb %[ftmp6], %[ftmp6], %[ftmp0] \n\t" - "xor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ff_pb_80] \n\t" "ldc1 %[ftmp0], 0x00(%[srct]) \n\t" "ldc1 %[ftmp4], 0x08(%[srct]) \n\t" @@ -1195,7 +1272,7 @@ void vp8_loop_filter_simple_vertical_edge_mmi(unsigned char *src_ptr, "punpckhhw %[ftmp5], %[ftmp1], %[ftmp3] \n\t" 
"punpcklhw %[ftmp1], %[ftmp1], %[ftmp3] \n\t" - "dsrl %[ftmp0], %[ftmp0], %[ftmp10] \n\t" + "ssrld %[ftmp0], %[ftmp0], %[ftmp10] \n\t" MMI_SUBU(%[addr1], %[addr0], %[src_pixel_step_x4]) "gsswlc1 %[ftmp0], 0x03(%[addr1]) \n\t" "gsswrc1 %[ftmp0], 0x00(%[addr1]) \n\t" @@ -1203,7 +1280,7 @@ void vp8_loop_filter_simple_vertical_edge_mmi(unsigned char *src_ptr, "gsswlc1 %[ftmp6], 0x03(%[addr1]) \n\t" "gsswrc1 %[ftmp6], 0x00(%[addr1]) \n\t" - "dsrl %[ftmp6], %[ftmp6], %[ftmp10] \n\t" + "ssrld %[ftmp6], %[ftmp6], %[ftmp10] \n\t" "gsswlc1 %[ftmp1], 0x03(%[src_ptr]) \n\t" "gsswrc1 %[ftmp1], 0x00(%[src_ptr]) \n\t" @@ -1215,11 +1292,11 @@ void vp8_loop_filter_simple_vertical_edge_mmi(unsigned char *src_ptr, "gsswlc1 %[ftmp5], 0x03(%[addr1]) \n\t" "gsswrc1 %[ftmp5], 0x00(%[addr1]) \n\t" - "dsrl %[ftmp1], %[ftmp1], %[ftmp10] \n\t" + "ssrld %[ftmp1], %[ftmp1], %[ftmp10] \n\t" "gsswlc1 %[ftmp1], 0x03(%[addr0]) \n\t" "gsswrc1 %[ftmp1], 0x00(%[addr0]) \n\t" - "dsrl %[ftmp5], %[ftmp5], %[ftmp10] \n\t" + "ssrld %[ftmp5], %[ftmp5], %[ftmp10] \n\t" MMI_ADDU(%[addr1], %[addr0], %[src_pixel_step_x2]) "gsswlc1 %[ftmp5], 0x03(%[addr1]) \n\t" "gsswrc1 %[ftmp5], 0x00(%[addr1]) \n\t" @@ -1235,16 +1312,17 @@ void vp8_loop_filter_simple_vertical_edge_mmi(unsigned char *src_ptr, [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), [tmp0]"=&r"(tmp[0]), [addr0]"=&r"(addr[0]), [addr1]"=&r"(addr[1]), - [src_ptr]"+&r"(src_ptr), [count]"+&r"(count) + [src_ptr]"+&r"(src_ptr), [count]"+&r"(count), + [ff_pb_fe]"=&f"(ff_pb_fe), [ff_pb_80]"=&f"(ff_pb_80), + [ff_pb_04]"=&f"(ff_pb_04), [ff_pb_01]"=&f"(ff_pb_01) : [blimit]"r"(blimit), [srct]"r"(srct), [src_pixel_step]"r"((mips_reg)src_pixel_step), [src_pixel_step_x2]"r"((mips_reg)(src_pixel_step<<1)), [src_pixel_step_x4]"r"((mips_reg)(src_pixel_step<<2)), - [src_pixel_step_x8]"r"((mips_reg)(src_pixel_step<<3)), - [ff_pb_fe]"f"(ff_pb_fe), [ff_pb_80]"f"(ff_pb_80), - [ff_pb_04]"f"(ff_pb_04), [ff_pb_01]"f"(ff_pb_01) + 
[src_pixel_step_x8]"r"((mips_reg)(src_pixel_step<<3)) : "memory" ); + /* clang-format on */ } /* Horizontal MB filtering */ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/sixtap_filter_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/sixtap_filter_mmi.c index 77d665d45..b85f73fdf 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/sixtap_filter_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/mmi/sixtap_filter_mmi.c @@ -70,9 +70,8 @@ static INLINE void vp8_filter_block1d_h6_mmi(unsigned char *src_ptr, unsigned int output_height, unsigned int output_width, const int16_t *vp8_filter) { - uint32_t tmp[1]; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_40) = { 0x0040004000400040ULL }; - + uint64_t tmp[1]; + double ff_ph_40; #if _MIPS_SIM == _ABIO32 register double fzero asm("$f0"); register double ftmp0 asm("$f2"); @@ -103,18 +102,21 @@ static INLINE void vp8_filter_block1d_h6_mmi(unsigned char *src_ptr, register double ftmp11 asm("$f12"); #endif // _MIPS_SIM == _ABIO32 + /* clang-format off */ __asm__ volatile ( + "dli %[tmp0], 0x0040004000400040 \n\t" + "dmtc1 %[tmp0], %[ff_ph_40] \n\t" "ldc1 %[ftmp0], 0x00(%[vp8_filter]) \n\t" "ldc1 %[ftmp1], 0x10(%[vp8_filter]) \n\t" "ldc1 %[ftmp2], 0x20(%[vp8_filter]) \n\t" "ldc1 %[ftmp3], 0x30(%[vp8_filter]) \n\t" "ldc1 %[ftmp4], 0x40(%[vp8_filter]) \n\t" "ldc1 %[ftmp5], 0x50(%[vp8_filter]) \n\t" - "xor %[fzero], %[fzero], %[fzero] \n\t" - "li %[tmp0], 0x07 \n\t" - "mtc1 %[tmp0], %[ftmp7] \n\t" - "li %[tmp0], 0x08 \n\t" - "mtc1 %[tmp0], %[ftmp11] \n\t" + "pxor %[fzero], %[fzero], %[fzero] \n\t" + "dli %[tmp0], 0x07 \n\t" + "dmtc1 %[tmp0], %[ftmp7] \n\t" + "dli %[tmp0], 0x08 \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" "1: \n\t" "gsldlc1 %[ftmp9], 0x05(%[src_ptr]) \n\t" @@ -137,12 +139,12 @@ static INLINE void vp8_filter_block1d_h6_mmi(unsigned char *src_ptr, "pmullh %[ftmp6], %[ftmp6], %[ftmp5] \n\t" "paddsh 
%[ftmp8], %[ftmp8], %[ftmp6] \n\t" - "dsrl %[ftmp10], %[ftmp10], %[ftmp11] \n\t" + "ssrld %[ftmp10], %[ftmp10], %[ftmp11] \n\t" "punpcklbh %[ftmp6], %[ftmp10], %[fzero] \n\t" "pmullh %[ftmp6], %[ftmp6], %[ftmp2] \n\t" "paddsh %[ftmp8], %[ftmp8], %[ftmp6] \n\t" - "dsrl %[ftmp10], %[ftmp10], %[ftmp11] \n\t" + "ssrld %[ftmp10], %[ftmp10], %[ftmp11] \n\t" "punpcklbh %[ftmp6], %[ftmp10], %[fzero] \n\t" "pmullh %[ftmp6], %[ftmp6], %[ftmp3] \n\t" "paddsh %[ftmp8], %[ftmp8], %[ftmp6] \n\t" @@ -166,21 +168,22 @@ static INLINE void vp8_filter_block1d_h6_mmi(unsigned char *src_ptr, [ftmp9]"=&f"(ftmp9), [ftmp10]"=&f"(ftmp10), [ftmp11]"=&f"(ftmp11), [tmp0]"=&r"(tmp[0]), [output_ptr]"+&r"(output_ptr), [output_height]"+&r"(output_height), - [src_ptr]"+&r"(src_ptr) + [src_ptr]"+&r"(src_ptr), [ff_ph_40]"=&f"(ff_ph_40) : [src_pixels_per_line]"r"((mips_reg)src_pixels_per_line), - [vp8_filter]"r"(vp8_filter), [output_width]"r"(output_width), - [ff_ph_40]"f"(ff_ph_40) + [vp8_filter]"r"(vp8_filter), [output_width]"r"(output_width) : "memory" ); + /* clang-format on */ } /* Horizontal filter: pixel_step is always W */ static INLINE void vp8_filter_block1dc_v6_mmi( uint16_t *src_ptr, unsigned char *output_ptr, unsigned int output_height, int output_pitch, unsigned int pixels_per_line, const int16_t *vp8_filter) { - DECLARE_ALIGNED(8, const uint64_t, ff_ph_40) = { 0x0040004000400040ULL }; - uint32_t tmp[1]; + double ff_ph_40; + uint64_t tmp[1]; mips_reg addr[1]; + #if _MIPS_SIM == _ABIO32 register double fzero asm("$f0"); register double ftmp0 asm("$f2"); @@ -215,16 +218,19 @@ static INLINE void vp8_filter_block1dc_v6_mmi( register double ftmp13 asm("$f14"); #endif // _MIPS_SIM == _ABIO32 + /* clang-format off */ __asm__ volatile ( + "dli %[tmp0], 0x0040004000400040 \n\t" + "dmtc1 %[tmp0], %[ff_ph_40] \n\t" "ldc1 %[ftmp0], 0x00(%[vp8_filter]) \n\t" "ldc1 %[ftmp1], 0x10(%[vp8_filter]) \n\t" "ldc1 %[ftmp2], 0x20(%[vp8_filter]) \n\t" "ldc1 %[ftmp3], 0x30(%[vp8_filter]) \n\t" "ldc1 %[ftmp4], 
0x40(%[vp8_filter]) \n\t" "ldc1 %[ftmp5], 0x50(%[vp8_filter]) \n\t" - "xor %[fzero], %[fzero], %[fzero] \n\t" - "li %[tmp0], 0x07 \n\t" - "mtc1 %[tmp0], %[ftmp13] \n\t" + "pxor %[fzero], %[fzero], %[fzero] \n\t" + "dli %[tmp0], 0x07 \n\t" + "dmtc1 %[tmp0], %[ftmp13] \n\t" /* In order to make full use of memory load delay slot, * Operation of memory loading and calculating has been rearranged. @@ -285,15 +291,16 @@ static INLINE void vp8_filter_block1dc_v6_mmi( [ftmp11]"=&f"(ftmp11), [ftmp12]"=&f"(ftmp12), [ftmp13]"=&f"(ftmp13), [tmp0]"=&r"(tmp[0]), [addr0]"=&r"(addr[0]), [src_ptr]"+&r"(src_ptr), - [output_ptr]"+&r"(output_ptr), [output_height]"+&r"(output_height) + [output_ptr]"+&r"(output_ptr), [output_height]"+&r"(output_height), + [ff_ph_40]"=&f"(ff_ph_40) : [pixels_per_line]"r"((mips_reg)pixels_per_line), [pixels_per_line_x2]"r"((mips_reg)(pixels_per_line<<1)), [pixels_per_line_x4]"r"((mips_reg)(pixels_per_line<<2)), [vp8_filter]"r"(vp8_filter), - [output_pitch]"r"((mips_reg)output_pitch), - [ff_ph_40]"f"(ff_ph_40) + [output_pitch]"r"((mips_reg)output_pitch) : "memory" ); + /* clang-format on */ } /* When xoffset == 0, vp8_filter= {0,0,128,0,0,0}, @@ -313,8 +320,9 @@ static INLINE void vp8_filter_block1d_h6_filter0_mmi( register double ftmp1 asm("$f2"); #endif // _MIPS_SIM == _ABIO32 + /* clang-format off */ __asm__ volatile ( - "xor %[fzero], %[fzero], %[fzero] \n\t" + "pxor %[fzero], %[fzero], %[fzero] \n\t" "1: \n\t" "gsldlc1 %[ftmp0], 0x07(%[src_ptr]) \n\t" @@ -335,6 +343,7 @@ static INLINE void vp8_filter_block1d_h6_filter0_mmi( [output_width]"r"(output_width) : "memory" ); + /* clang-format on */ } static INLINE void vp8_filter_block1dc_v6_filter0_mmi( @@ -350,8 +359,9 @@ static INLINE void vp8_filter_block1dc_v6_filter0_mmi( register double ftmp1 asm("$f2"); #endif // _MIPS_SIM == _ABIO32 + /* clang-format on */ __asm__ volatile ( - "xor %[fzero], %[fzero], %[fzero] \n\t" + "pxor %[fzero], %[fzero], %[fzero] \n\t" "1: \n\t" "gsldlc1 %[ftmp0], 
0x07(%[src_ptr]) \n\t" @@ -371,6 +381,7 @@ static INLINE void vp8_filter_block1dc_v6_filter0_mmi( [output_pitch]"r"((mips_reg)output_pitch) : "memory" ); + /* clang-format on */ } #define sixtapNxM(n, m) \ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/msa/vp8_macros_msa.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/msa/vp8_macros_msa.h index 14f83799f..ddc881a7f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/msa/vp8_macros_msa.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/mips/msa/vp8_macros_msa.h @@ -122,10 +122,10 @@ const uint8_t *psrc_m = (const uint8_t *)(psrc); \ uint32_t val_m; \ \ - asm volatile("ulw %[val_m], %[psrc_m] \n\t" \ - \ - : [val_m] "=r"(val_m) \ - : [psrc_m] "m"(*psrc_m)); \ + asm volatile("lwr %[val_m], 0(%[psrc_m]) \n\t" \ + "lwl %[val_m], 3(%[psrc_m]) \n\t" \ + : [val_m] "=&r"(val_m) \ + : [psrc_m] "r"(psrc_m)); \ \ val_m; \ }) @@ -136,10 +136,10 @@ const uint8_t *psrc_m = (const uint8_t *)(psrc); \ uint64_t val_m = 0; \ \ - asm volatile("uld %[val_m], %[psrc_m] \n\t" \ - \ - : [val_m] "=r"(val_m) \ - : [psrc_m] "m"(*psrc_m)); \ + asm volatile("ldr %[val_m], 0(%[psrc_m]) \n\t" \ + "ldl %[val_m], 7(%[psrc_m]) \n\t" \ + : [val_m] "=&r"(val_m) \ + : [psrc_m] "r"(psrc_m)); \ \ val_m; \ }) diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/threading.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/threading.h index f92136938..1cfb9fec5 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/threading.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/threading.h @@ -171,17 +171,20 @@ static inline int sem_destroy(sem_t *sem) { #define sem_wait(sem) (semaphore_wait(*sem)) #define sem_post(sem) semaphore_signal(*sem) #define sem_destroy(sem) semaphore_destroy(mach_task_self(), *sem) -#define thread_sleep(nms) -/* { struct timespec ts;ts.tv_sec=0; ts.tv_nsec 
= - 1000*nms;nanosleep(&ts, NULL);} */ #else #include #include -#define thread_sleep(nms) sched_yield(); +#endif /* __APPLE__ */ +/* Not Windows. Assume pthreads */ + +/* thread_sleep implementation: yield unless Linux/Unix. */ +#if defined(__unix__) || defined(__APPLE__) +#define thread_sleep(nms) /* {struct timespec ts;ts.tv_sec=0; ts.tv_nsec = 1000*nms;nanosleep(&ts, NULL);} */ -#endif -/* Not Windows. Assume pthreads */ +#else +#define thread_sleep(nms) sched_yield(); +#endif /* __unix__ || __APPLE__ */ #endif diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/dequantize_mmx.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/dequantize_mmx.asm index bfdd99778..0a269e15f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/dequantize_mmx.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/dequantize_mmx.asm @@ -14,7 +14,7 @@ SECTION .text ;void vp8_dequantize_b_impl_mmx(short *sq, short *dq, short *q) -global sym(vp8_dequantize_b_impl_mmx) PRIVATE +globalsym(vp8_dequantize_b_impl_mmx) sym(vp8_dequantize_b_impl_mmx): push rbp mov rbp, rsp @@ -56,7 +56,7 @@ sym(vp8_dequantize_b_impl_mmx): ;short *dq, 1 ;unsigned char *dest, 2 ;int stride) 3 -global sym(vp8_dequant_idct_add_mmx) PRIVATE +globalsym(vp8_dequant_idct_add_mmx) sym(vp8_dequant_idct_add_mmx): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_mmx.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_mmx.asm index 5773d9d84..6cea86fe0 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_mmx.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_mmx.asm @@ -35,7 +35,7 @@ SECTION .text ;void vp8_short_idct4x4llm_mmx(short *input, unsigned char *pred, ;int pitch, unsigned char *dest,int stride) -global sym(vp8_short_idct4x4llm_mmx) PRIVATE 
+globalsym(vp8_short_idct4x4llm_mmx) sym(vp8_short_idct4x4llm_mmx): push rbp mov rbp, rsp @@ -225,7 +225,7 @@ sym(vp8_short_idct4x4llm_mmx): ;int pred_stride, ;unsigned char *dst_ptr, ;int stride) -global sym(vp8_dc_only_idct_add_mmx) PRIVATE +globalsym(vp8_dc_only_idct_add_mmx) sym(vp8_dc_only_idct_add_mmx): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_sse2.asm index 560faba00..bb79d2da3 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/idctllm_sse2.asm @@ -21,7 +21,7 @@ SECTION .text -global sym(vp8_idct_dequant_0_2x_sse2) PRIVATE +globalsym(vp8_idct_dequant_0_2x_sse2) sym(vp8_idct_dequant_0_2x_sse2): push rbp mov rbp, rsp @@ -103,7 +103,7 @@ sym(vp8_idct_dequant_0_2x_sse2): ; unsigned char *dst - 2 ; int dst_stride - 3 ; ) -global sym(vp8_idct_dequant_full_2x_sse2) PRIVATE +globalsym(vp8_idct_dequant_full_2x_sse2) sym(vp8_idct_dequant_full_2x_sse2): push rbp mov rbp, rsp @@ -360,7 +360,7 @@ sym(vp8_idct_dequant_full_2x_sse2): ; int dst_stride - 3 ; short *dc - 4 ; ) -global sym(vp8_idct_dequant_dc_0_2x_sse2) PRIVATE +globalsym(vp8_idct_dequant_dc_0_2x_sse2) sym(vp8_idct_dequant_dc_0_2x_sse2): push rbp mov rbp, rsp @@ -436,7 +436,7 @@ sym(vp8_idct_dequant_dc_0_2x_sse2): ; int dst_stride - 3 ; short *dc - 4 ; ) -global sym(vp8_idct_dequant_dc_full_2x_sse2) PRIVATE +globalsym(vp8_idct_dequant_dc_full_2x_sse2) sym(vp8_idct_dequant_dc_full_2x_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/iwalsh_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/iwalsh_sse2.asm index 0043e93b0..56f37c3e0 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/iwalsh_sse2.asm +++ 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/iwalsh_sse2.asm @@ -14,7 +14,7 @@ SECTION .text ;void vp8_short_inv_walsh4x4_sse2(short *input, short *mb_dqcoeff) -global sym(vp8_short_inv_walsh4x4_sse2) PRIVATE +globalsym(vp8_short_inv_walsh4x4_sse2) sym(vp8_short_inv_walsh4x4_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_block_sse2_x86_64.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_block_sse2_x86_64.asm index 6a3d05290..8d12f5385 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_block_sse2_x86_64.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_block_sse2_x86_64.asm @@ -135,7 +135,7 @@ SECTION .text ; const char *limit, ; const char *thresh ;) -global sym(vp8_loop_filter_bh_y_sse2) PRIVATE +globalsym(vp8_loop_filter_bh_y_sse2) sym(vp8_loop_filter_bh_y_sse2): %if LIBVPX_YASM_WIN64 @@ -277,7 +277,7 @@ LF_FILTER xmm0, xmm1, xmm3, xmm8, xmm4, xmm2 ; const char *thresh ;) -global sym(vp8_loop_filter_bv_y_sse2) PRIVATE +globalsym(vp8_loop_filter_bv_y_sse2) sym(vp8_loop_filter_bv_y_sse2): %if LIBVPX_YASM_WIN64 diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_sse2.asm index 2ae028fea..ce5c31313 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/loopfilter_sse2.asm @@ -288,7 +288,7 @@ SECTION .text ; const char *limit, ; const char *thresh, ;) -global sym(vp8_loop_filter_horizontal_edge_sse2) PRIVATE +globalsym(vp8_loop_filter_horizontal_edge_sse2) sym(vp8_loop_filter_horizontal_edge_sse2): push rbp mov rbp, rsp @@ -336,7 +336,7 @@ sym(vp8_loop_filter_horizontal_edge_sse2): ; const char *thresh, ; int count ;) -global 
sym(vp8_loop_filter_horizontal_edge_uv_sse2) PRIVATE +globalsym(vp8_loop_filter_horizontal_edge_uv_sse2) sym(vp8_loop_filter_horizontal_edge_uv_sse2): push rbp mov rbp, rsp @@ -563,7 +563,7 @@ sym(vp8_loop_filter_horizontal_edge_uv_sse2): ; const char *limit, ; const char *thresh, ;) -global sym(vp8_mbloop_filter_horizontal_edge_sse2) PRIVATE +globalsym(vp8_mbloop_filter_horizontal_edge_sse2) sym(vp8_mbloop_filter_horizontal_edge_sse2): push rbp mov rbp, rsp @@ -609,7 +609,7 @@ sym(vp8_mbloop_filter_horizontal_edge_sse2): ; const char *thresh, ; unsigned char *v ;) -global sym(vp8_mbloop_filter_horizontal_edge_uv_sse2) PRIVATE +globalsym(vp8_mbloop_filter_horizontal_edge_uv_sse2) sym(vp8_mbloop_filter_horizontal_edge_uv_sse2): push rbp mov rbp, rsp @@ -930,7 +930,7 @@ sym(vp8_mbloop_filter_horizontal_edge_uv_sse2): ; const char *limit, ; const char *thresh, ;) -global sym(vp8_loop_filter_vertical_edge_sse2) PRIVATE +globalsym(vp8_loop_filter_vertical_edge_sse2) sym(vp8_loop_filter_vertical_edge_sse2): push rbp mov rbp, rsp @@ -995,7 +995,7 @@ sym(vp8_loop_filter_vertical_edge_sse2): ; const char *thresh, ; unsigned char *v ;) -global sym(vp8_loop_filter_vertical_edge_uv_sse2) PRIVATE +globalsym(vp8_loop_filter_vertical_edge_uv_sse2) sym(vp8_loop_filter_vertical_edge_uv_sse2): push rbp mov rbp, rsp @@ -1144,7 +1144,7 @@ sym(vp8_loop_filter_vertical_edge_uv_sse2): ; const char *limit, ; const char *thresh, ;) -global sym(vp8_mbloop_filter_vertical_edge_sse2) PRIVATE +globalsym(vp8_mbloop_filter_vertical_edge_sse2) sym(vp8_mbloop_filter_vertical_edge_sse2): push rbp mov rbp, rsp @@ -1211,7 +1211,7 @@ sym(vp8_mbloop_filter_vertical_edge_sse2): ; const char *thresh, ; unsigned char *v ;) -global sym(vp8_mbloop_filter_vertical_edge_uv_sse2) PRIVATE +globalsym(vp8_mbloop_filter_vertical_edge_uv_sse2) sym(vp8_mbloop_filter_vertical_edge_uv_sse2): push rbp mov rbp, rsp @@ -1271,7 +1271,7 @@ sym(vp8_mbloop_filter_vertical_edge_uv_sse2): ; int src_pixel_step, ; const char 
*blimit, ;) -global sym(vp8_loop_filter_simple_horizontal_edge_sse2) PRIVATE +globalsym(vp8_loop_filter_simple_horizontal_edge_sse2) sym(vp8_loop_filter_simple_horizontal_edge_sse2): push rbp mov rbp, rsp @@ -1376,7 +1376,7 @@ sym(vp8_loop_filter_simple_horizontal_edge_sse2): ; int src_pixel_step, ; const char *blimit, ;) -global sym(vp8_loop_filter_simple_vertical_edge_sse2) PRIVATE +globalsym(vp8_loop_filter_simple_vertical_edge_sse2) sym(vp8_loop_filter_simple_vertical_edge_sse2): push rbp ; save old base pointer value. mov rbp, rsp ; set new base pointer value. diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/mfqe_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/mfqe_sse2.asm index 3fde973ad..3ec2a99ec 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/mfqe_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/mfqe_sse2.asm @@ -21,7 +21,7 @@ SECTION .text ; int dst_stride, ; int src_weight ;) -global sym(vp8_filter_by_weight16x16_sse2) PRIVATE +globalsym(vp8_filter_by_weight16x16_sse2) sym(vp8_filter_by_weight16x16_sse2): push rbp mov rbp, rsp @@ -99,7 +99,7 @@ sym(vp8_filter_by_weight16x16_sse2): ; int dst_stride, ; int src_weight ;) -global sym(vp8_filter_by_weight8x8_sse2) PRIVATE +globalsym(vp8_filter_by_weight8x8_sse2) sym(vp8_filter_by_weight8x8_sse2): push rbp mov rbp, rsp @@ -167,7 +167,7 @@ sym(vp8_filter_by_weight8x8_sse2): ; unsigned int *variance, 4 ; unsigned int *sad, 5 ;) -global sym(vp8_variance_and_sad_16x16_sse2) PRIVATE +globalsym(vp8_variance_and_sad_16x16_sse2) sym(vp8_variance_and_sad_16x16_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_mmx.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_mmx.asm index e6a48f6b0..01cf06683 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_mmx.asm +++ 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_mmx.asm @@ -19,7 +19,7 @@ SECTION .text ; unsigned char *dst, ; int dst_stride ; ) -global sym(vp8_copy_mem8x8_mmx) PRIVATE +globalsym(vp8_copy_mem8x8_mmx) sym(vp8_copy_mem8x8_mmx): push rbp mov rbp, rsp @@ -82,7 +82,7 @@ sym(vp8_copy_mem8x8_mmx): ; unsigned char *dst, ; int dst_stride ; ) -global sym(vp8_copy_mem8x4_mmx) PRIVATE +globalsym(vp8_copy_mem8x4_mmx) sym(vp8_copy_mem8x4_mmx): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_sse2.asm index 57f8899c7..17baf094e 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/recon_sse2.asm @@ -19,7 +19,7 @@ SECTION .text ; unsigned char *dst, ; int dst_stride ; ) -global sym(vp8_copy_mem16x16_sse2) PRIVATE +globalsym(vp8_copy_mem16x16_sse2) sym(vp8_copy_mem16x16_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_mmx.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_mmx.asm index 67bcd0cbd..8f0f6fcc8 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_mmx.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_mmx.asm @@ -27,7 +27,7 @@ SECTION .text ; unsigned int output_width, ; short * vp8_filter ;) -global sym(vp8_filter_block1d_h6_mmx) PRIVATE +globalsym(vp8_filter_block1d_h6_mmx) sym(vp8_filter_block1d_h6_mmx): push rbp mov rbp, rsp @@ -124,7 +124,7 @@ sym(vp8_filter_block1d_h6_mmx): ; unsigned int output_width, ; short * vp8_filter ;) -global sym(vp8_filter_block1dc_v6_mmx) PRIVATE +globalsym(vp8_filter_block1dc_v6_mmx) sym(vp8_filter_block1dc_v6_mmx): push rbp mov rbp, rsp diff --git 
a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_sse2.asm index 51c015e3d..94e14aed6 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_sse2.asm @@ -33,7 +33,7 @@ SECTION .text ; unsigned int output_width, ; short *vp8_filter ;) -global sym(vp8_filter_block1d8_h6_sse2) PRIVATE +globalsym(vp8_filter_block1d8_h6_sse2) sym(vp8_filter_block1d8_h6_sse2): push rbp mov rbp, rsp @@ -153,7 +153,7 @@ sym(vp8_filter_block1d8_h6_sse2): ; even number. This function handles 8 pixels in horizontal direction, calculating ONE ; rows each iteration to take advantage of the 128 bits operations. ;*************************************************************************************/ -global sym(vp8_filter_block1d16_h6_sse2) PRIVATE +globalsym(vp8_filter_block1d16_h6_sse2) sym(vp8_filter_block1d16_h6_sse2): push rbp mov rbp, rsp @@ -333,7 +333,7 @@ sym(vp8_filter_block1d16_h6_sse2): ; Notes: filter_block1d8_v6 applies a 6 tap filter vertically to the input pixels. The ; input pixel array has output_height rows. ;*************************************************************************************/ -global sym(vp8_filter_block1d8_v6_sse2) PRIVATE +globalsym(vp8_filter_block1d8_v6_sse2) sym(vp8_filter_block1d8_v6_sse2): push rbp mov rbp, rsp @@ -428,7 +428,7 @@ sym(vp8_filter_block1d8_v6_sse2): ; Notes: filter_block1d16_v6 applies a 6 tap filter vertically to the input pixels. The ; input pixel array has output_height rows. 
;*************************************************************************************/ -global sym(vp8_filter_block1d16_v6_sse2) PRIVATE +globalsym(vp8_filter_block1d16_v6_sse2) sym(vp8_filter_block1d16_v6_sse2): push rbp mov rbp, rsp @@ -538,7 +538,7 @@ sym(vp8_filter_block1d16_v6_sse2): ; const short *vp8_filter ;) ; First-pass filter only when yoffset==0 -global sym(vp8_filter_block1d8_h6_only_sse2) PRIVATE +globalsym(vp8_filter_block1d8_h6_only_sse2) sym(vp8_filter_block1d8_h6_only_sse2): push rbp mov rbp, rsp @@ -651,7 +651,7 @@ sym(vp8_filter_block1d8_h6_only_sse2): ; const short *vp8_filter ;) ; First-pass filter only when yoffset==0 -global sym(vp8_filter_block1d16_h6_only_sse2) PRIVATE +globalsym(vp8_filter_block1d16_h6_only_sse2) sym(vp8_filter_block1d16_h6_only_sse2): push rbp mov rbp, rsp @@ -816,7 +816,7 @@ sym(vp8_filter_block1d16_h6_only_sse2): ; const short *vp8_filter ;) ; Second-pass filter only when xoffset==0 -global sym(vp8_filter_block1d8_v6_only_sse2) PRIVATE +globalsym(vp8_filter_block1d8_v6_only_sse2) sym(vp8_filter_block1d8_v6_only_sse2): push rbp mov rbp, rsp @@ -908,7 +908,7 @@ sym(vp8_filter_block1d8_v6_only_sse2): ; unsigned int output_height, ; unsigned int output_width ;) -global sym(vp8_unpack_block1d16_h6_sse2) PRIVATE +globalsym(vp8_unpack_block1d16_h6_sse2) sym(vp8_unpack_block1d16_h6_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_ssse3.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_ssse3.asm index 8d55c9320..17247227d 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_ssse3.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/common/x86/subpixel_ssse3.asm @@ -35,7 +35,7 @@ SECTION .text ; unsigned int output_height, ; unsigned int vp8_filter_index ;) -global sym(vp8_filter_block1d8_h6_ssse3) PRIVATE +globalsym(vp8_filter_block1d8_h6_ssse3) sym(vp8_filter_block1d8_h6_ssse3): 
push rbp mov rbp, rsp @@ -178,7 +178,7 @@ vp8_filter_block1d8_h4_ssse3: ; unsigned int output_height, ; unsigned int vp8_filter_index ;) -global sym(vp8_filter_block1d16_h6_ssse3) PRIVATE +globalsym(vp8_filter_block1d16_h6_ssse3) sym(vp8_filter_block1d16_h6_ssse3): push rbp mov rbp, rsp @@ -285,7 +285,7 @@ sym(vp8_filter_block1d16_h6_ssse3): ; unsigned int output_height, ; unsigned int vp8_filter_index ;) -global sym(vp8_filter_block1d4_h6_ssse3) PRIVATE +globalsym(vp8_filter_block1d4_h6_ssse3) sym(vp8_filter_block1d4_h6_ssse3): push rbp mov rbp, rsp @@ -415,7 +415,7 @@ sym(vp8_filter_block1d4_h6_ssse3): ; unsigned int output_height, ; unsigned int vp8_filter_index ;) -global sym(vp8_filter_block1d16_v6_ssse3) PRIVATE +globalsym(vp8_filter_block1d16_v6_ssse3) sym(vp8_filter_block1d16_v6_ssse3): push rbp mov rbp, rsp @@ -603,7 +603,7 @@ sym(vp8_filter_block1d16_v6_ssse3): ; unsigned int output_height, ; unsigned int vp8_filter_index ;) -global sym(vp8_filter_block1d8_v6_ssse3) PRIVATE +globalsym(vp8_filter_block1d8_v6_ssse3) sym(vp8_filter_block1d8_v6_ssse3): push rbp mov rbp, rsp @@ -743,7 +743,7 @@ sym(vp8_filter_block1d8_v6_ssse3): ; unsigned int output_height, ; unsigned int vp8_filter_index ;) -global sym(vp8_filter_block1d4_v6_ssse3) PRIVATE +globalsym(vp8_filter_block1d4_v6_ssse3) sym(vp8_filter_block1d4_v6_ssse3): push rbp mov rbp, rsp @@ -882,7 +882,7 @@ sym(vp8_filter_block1d4_v6_ssse3): ; unsigned char *dst_ptr, ; int dst_pitch ;) -global sym(vp8_bilinear_predict16x16_ssse3) PRIVATE +globalsym(vp8_bilinear_predict16x16_ssse3) sym(vp8_bilinear_predict16x16_ssse3): push rbp mov rbp, rsp @@ -1145,7 +1145,7 @@ sym(vp8_bilinear_predict16x16_ssse3): ; unsigned char *dst_ptr, ; int dst_pitch ;) -global sym(vp8_bilinear_predict8x8_ssse3) PRIVATE +globalsym(vp8_bilinear_predict8x8_ssse3) sym(vp8_bilinear_predict8x8_ssse3): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/bitstream.c 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/bitstream.c index 3daa4e2c2..80cbb882f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/bitstream.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/bitstream.c @@ -222,7 +222,7 @@ void vp8_pack_tokens(vp8_writer *w, const TOKENEXTRA *p, int xcount) { validate_buffer(w->buffer + w->pos, 1, w->buffer_end, w->error); - w->buffer[w->pos++] = (lowvalue >> (24 - offset)); + w->buffer[w->pos++] = (lowvalue >> (24 - offset)) & 0xff; lowvalue <<= offset; shift = count; lowvalue &= 0xffffff; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/dct_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/dct_mmi.c index 1f60a692d..0fd25fcda 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/dct_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/dct_mmi.c @@ -24,19 +24,19 @@ "punpcklhw %[ftmp5], %[ftmp1], %[ftmp0] \n\t" \ "punpcklhw %[ftmp9], %[ftmp2], %[ftmp0] \n\t" \ "pshufh %[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp5], %[ftmp5], %[ftmp9] \n\t" \ + "por %[ftmp5], %[ftmp5], %[ftmp9] \n\t" \ "punpckhhw %[ftmp6], %[ftmp1], %[ftmp0] \n\t" \ "punpckhhw %[ftmp9], %[ftmp2], %[ftmp0] \n\t" \ "pshufh %[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp6], %[ftmp6], %[ftmp9] \n\t" \ + "por %[ftmp6], %[ftmp6], %[ftmp9] \n\t" \ "punpcklhw %[ftmp7], %[ftmp3], %[ftmp0] \n\t" \ "punpcklhw %[ftmp9], %[ftmp4], %[ftmp0] \n\t" \ "pshufh %[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp7], %[ftmp7], %[ftmp9] \n\t" \ + "por %[ftmp7], %[ftmp7], %[ftmp9] \n\t" \ "punpckhhw %[ftmp8], %[ftmp3], %[ftmp0] \n\t" \ "punpckhhw %[ftmp9], %[ftmp4], %[ftmp0] \n\t" \ "pshufh %[ftmp9], %[ftmp9], %[ftmp10] \n\t" \ - "or %[ftmp8], %[ftmp8], %[ftmp9] \n\t" \ + "por %[ftmp8], %[ftmp8], %[ftmp9] \n\t" \ "punpcklwd %[ftmp1], %[ftmp5], %[ftmp7] \n\t" \ "punpckhwd %[ftmp2], 
%[ftmp5], %[ftmp7] \n\t" \ "punpcklwd %[ftmp3], %[ftmp6], %[ftmp8] \n\t" \ @@ -46,6 +46,7 @@ void vp8_short_fdct4x4_mmi(int16_t *input, int16_t *output, int pitch) { uint64_t tmp[1]; int16_t *ip = input; + double ff_ph_op1, ff_ph_op3; #if _MIPS_SIM == _ABIO32 register double ftmp0 asm("$f0"); @@ -83,14 +84,17 @@ void vp8_short_fdct4x4_mmi(int16_t *input, int16_t *output, int pitch) { DECLARE_ALIGNED(8, const uint64_t, ff_pw_51000) = { 0x0000c7380000c738ULL }; DECLARE_ALIGNED(8, const uint64_t, ff_pw_14500) = { 0x000038a4000038a4ULL }; DECLARE_ALIGNED(8, const uint64_t, ff_pw_7500) = { 0x00001d4c00001d4cULL }; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_op1) = { 0x14e808a914e808a9ULL }; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_op3) = { 0xeb1808a9eb1808a9ULL }; DECLARE_ALIGNED(8, const uint64_t, ff_pw_5352) = { 0x000014e8000014e8ULL }; DECLARE_ALIGNED(8, const uint64_t, ff_pw_2217) = { 0x000008a9000008a9ULL }; DECLARE_ALIGNED(8, const uint64_t, ff_ph_8) = { 0x0008000800080008ULL }; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "dli %[tmp0], 0x14e808a914e808a9 \n\t" + "dmtc1 %[tmp0], %[ff_ph_op1] \n\t" + "dli %[tmp0], 0xeb1808a9eb1808a9 \n\t" + "dmtc1 %[tmp0], %[ff_ph_op3] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gsldlc1 %[ftmp1], 0x07(%[ip]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[ip]) \n\t" MMI_ADDU(%[ip], %[ip], %[pitch]) @@ -129,7 +133,7 @@ void vp8_short_fdct4x4_mmi(int16_t *input, int16_t *output, int pitch) { // op[1] = (c1 * 2217 + d1 * 5352 + 14500) >> 12 MMI_LI(%[tmp0], 0x0c) - "mtc1 %[tmp0], %[ftmp11] \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" "ldc1 %[ftmp12], %[ff_pw_14500] \n\t" "punpcklhw %[ftmp9], %[ftmp7], %[ftmp8] \n\t" "pmaddhw %[ftmp5], %[ftmp9], %[ff_ph_op1] \n\t" @@ -169,7 +173,7 @@ void vp8_short_fdct4x4_mmi(int16_t *input, int16_t *output, int pitch) { "paddh %[ftmp1], %[ftmp1], %[ftmp9] \n\t" "paddh %[ftmp2], %[ftmp2], %[ftmp9] \n\t" MMI_LI(%[tmp0], 0x04) - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dmtc1 %[tmp0], 
%[ftmp9] \n\t" "psrah %[ftmp1], %[ftmp1], %[ftmp9] \n\t" "psrah %[ftmp2], %[ftmp2], %[ftmp9] \n\t" @@ -211,15 +215,16 @@ void vp8_short_fdct4x4_mmi(int16_t *input, int16_t *output, int pitch) { [ftmp3] "=&f"(ftmp3), [ftmp4] "=&f"(ftmp4), [ftmp5] "=&f"(ftmp5), [ftmp6] "=&f"(ftmp6), [ftmp7] "=&f"(ftmp7), [ftmp8] "=&f"(ftmp8), [ftmp9] "=&f"(ftmp9), [ftmp10] "=&f"(ftmp10), [ftmp11] "=&f"(ftmp11), - [ftmp12] "=&f"(ftmp12), [tmp0] "=&r"(tmp[0]), [ip]"+&r"(ip) + [ftmp12] "=&f"(ftmp12), [tmp0] "=&r"(tmp[0]), [ip]"+&r"(ip), + [ff_ph_op1] "=&f"(ff_ph_op1), [ff_ph_op3] "=&f"(ff_ph_op3) : [ff_ph_01] "m"(ff_ph_01), [ff_ph_07] "m"(ff_ph_07), - [ff_ph_op1] "f"(ff_ph_op1), [ff_ph_op3] "f"(ff_ph_op3), [ff_pw_14500] "m"(ff_pw_14500), [ff_pw_7500] "m"(ff_pw_7500), [ff_pw_12000] "m"(ff_pw_12000), [ff_pw_51000] "m"(ff_pw_51000), [ff_pw_5352]"m"(ff_pw_5352), [ff_pw_2217]"m"(ff_pw_2217), [ff_ph_8]"m"(ff_ph_8), [pitch]"r"(pitch), [output] "r"(output) : "memory" ); + /* clang-format on */ } void vp8_short_fdct8x4_mmi(int16_t *input, int16_t *output, int pitch) { @@ -228,17 +233,22 @@ void vp8_short_fdct8x4_mmi(int16_t *input, int16_t *output, int pitch) { } void vp8_short_walsh4x4_mmi(int16_t *input, int16_t *output, int pitch) { - double ftmp[13]; - uint32_t tmp[1]; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_01) = { 0x0001000100010001ULL }; - DECLARE_ALIGNED(8, const uint64_t, ff_pw_01) = { 0x0000000100000001ULL }; - DECLARE_ALIGNED(8, const uint64_t, ff_pw_03) = { 0x0000000300000003ULL }; - DECLARE_ALIGNED(8, const uint64_t, ff_pw_mask) = { 0x0001000000010000ULL }; + double ftmp[13], ff_ph_01, ff_pw_01, ff_pw_03, ff_pw_mask; + uint64_t tmp[1]; + /* clang-format off */ __asm__ volatile ( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ff_ph_01] \n\t" + "dli %[tmp0], 0x0000000100000001 \n\t" + "dmtc1 %[tmp0], %[ff_pw_01] \n\t" + "dli %[tmp0], 0x0000000300000003 \n\t" + "dmtc1 %[tmp0], %[ff_pw_03] \n\t" + "dli %[tmp0], 0x0001000000010000 \n\t" + "dmtc1 %[tmp0], %[ff_pw_mask] 
\n\t" MMI_LI(%[tmp0], 0x02) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "mtc1 %[tmp0], %[ftmp11] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" "gsldlc1 %[ftmp1], 0x07(%[ip]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[ip]) \n\t" @@ -337,52 +347,52 @@ void vp8_short_walsh4x4_mmi(int16_t *input, int16_t *output, int pitch) { "psubw %[ftmp4], %[ftmp9], %[ftmp10] \n\t" MMI_LI(%[tmp0], 0x03) - "mtc1 %[tmp0], %[ftmp11] \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp1] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp1], %[ftmp1], %[ftmp9] \n\t" "paddw %[ftmp1], %[ftmp1], %[ff_pw_03] \n\t" "psraw %[ftmp1], %[ftmp1], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp2] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp2], %[ftmp2], %[ftmp9] \n\t" "paddw %[ftmp2], %[ftmp2], %[ff_pw_03] \n\t" "psraw %[ftmp2], %[ftmp2], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp3] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp3], %[ftmp3], %[ftmp9] \n\t" "paddw %[ftmp3], %[ftmp3], %[ff_pw_03] \n\t" "psraw %[ftmp3], %[ftmp3], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp4] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp4], %[ftmp4], %[ftmp9] \n\t" "paddw %[ftmp4], %[ftmp4], %[ff_pw_03] \n\t" "psraw %[ftmp4], %[ftmp4], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp5] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp5], %[ftmp5], %[ftmp9] \n\t" "paddw %[ftmp5], %[ftmp5], %[ff_pw_03] \n\t" "psraw %[ftmp5], %[ftmp5], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp6] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp6], %[ftmp6], %[ftmp9] \n\t" "paddw 
%[ftmp6], %[ftmp6], %[ff_pw_03] \n\t" "psraw %[ftmp6], %[ftmp6], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp7] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp7], %[ftmp7], %[ftmp9] \n\t" "paddw %[ftmp7], %[ftmp7], %[ff_pw_03] \n\t" "psraw %[ftmp7], %[ftmp7], %[ftmp11] \n\t" "pcmpgtw %[ftmp9], %[ftmp0], %[ftmp8] \n\t" - "and %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" + "pand %[ftmp9], %[ftmp9], %[ff_pw_01] \n\t" "paddw %[ftmp8], %[ftmp8], %[ftmp9] \n\t" "paddw %[ftmp8], %[ftmp8], %[ff_pw_03] \n\t" "psraw %[ftmp8], %[ftmp8], %[ftmp11] \n\t" @@ -393,7 +403,7 @@ void vp8_short_walsh4x4_mmi(int16_t *input, int16_t *output, int pitch) { "packsswh %[ftmp4], %[ftmp4], %[ftmp8] \n\t" MMI_LI(%[tmp0], 0x72) - "mtc1 %[tmp0], %[ftmp11] \n\t" + "dmtc1 %[tmp0], %[ftmp11] \n\t" "pshufh %[ftmp1], %[ftmp1], %[ftmp11] \n\t" "pshufh %[ftmp2], %[ftmp2], %[ftmp11] \n\t" "pshufh %[ftmp3], %[ftmp3], %[ftmp11] \n\t" @@ -413,13 +423,12 @@ void vp8_short_walsh4x4_mmi(int16_t *input, int16_t *output, int pitch) { [ftmp6]"=&f"(ftmp[6]), [ftmp7]"=&f"(ftmp[7]), [ftmp8]"=&f"(ftmp[8]), [ftmp9]"=&f"(ftmp[9]), [ftmp10]"=&f"(ftmp[10]), [ftmp11]"=&f"(ftmp[11]), - [ftmp12]"=&f"(ftmp[12]), - [tmp0]"=&r"(tmp[0]), - [ip]"+&r"(input) - : [op]"r"(output), - [ff_pw_01]"f"(ff_pw_01), [pitch]"r"((mips_reg)pitch), - [ff_pw_03]"f"(ff_pw_03), [ff_pw_mask]"f"(ff_pw_mask), - [ff_ph_01]"f"(ff_ph_01) + [ftmp12]"=&f"(ftmp[12]), [ff_pw_mask]"=&f"(ff_pw_mask), + [tmp0]"=&r"(tmp[0]), [ff_pw_01]"=&f"(ff_pw_01), + [ip]"+&r"(input), [ff_pw_03]"=&f"(ff_pw_03), + [ff_ph_01]"=&f"(ff_ph_01) + : [op]"r"(output), [pitch]"r"((mips_reg)pitch) : "memory" ); + /* clang-format on */ } diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/vp8_quantize_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/vp8_quantize_mmi.c index 3ccb196ff..1986444aa 100644 --- 
a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/vp8_quantize_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/mips/mmi/vp8_quantize_mmi.c @@ -42,24 +42,25 @@ void vp8_fast_quantize_b_mmi(BLOCK *b, BLOCKD *d) { double ftmp[13]; uint64_t tmp[1]; - DECLARE_ALIGNED(8, const uint64_t, ones) = { 0xffffffffffffffffULL }; - int eob = 0; + int64_t eob = 0; + double ones; __asm__ volatile( // loop 0 ~ 7 - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pcmpeqh %[ones], %[ones], %[ones] \n\t" "gsldlc1 %[ftmp1], 0x07(%[coeff_ptr]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[coeff_ptr]) \n\t" - "li %[tmp0], 0x0f \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x0f \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "gsldlc1 %[ftmp2], 0x0f(%[coeff_ptr]) \n\t" "gsldrc1 %[ftmp2], 0x08(%[coeff_ptr]) \n\t" "psrah %[ftmp3], %[ftmp1], %[ftmp9] \n\t" - "xor %[ftmp1], %[ftmp3], %[ftmp1] \n\t" + "pxor %[ftmp1], %[ftmp3], %[ftmp1] \n\t" "psubh %[ftmp1], %[ftmp1], %[ftmp3] \n\t" "psrah %[ftmp4], %[ftmp2], %[ftmp9] \n\t" - "xor %[ftmp2], %[ftmp4], %[ftmp2] \n\t" + "pxor %[ftmp2], %[ftmp4], %[ftmp2] \n\t" "psubh %[ftmp2], %[ftmp2], %[ftmp4] \n\t" "gsldlc1 %[ftmp5], 0x07(%[round_ptr]) \n\t" @@ -75,8 +76,8 @@ void vp8_fast_quantize_b_mmi(BLOCK *b, BLOCKD *d) { "pmulhuh %[ftmp5], %[ftmp5], %[ftmp7] \n\t" "pmulhuh %[ftmp6], %[ftmp6], %[ftmp8] \n\t" - "xor %[ftmp7], %[ftmp5], %[ftmp3] \n\t" - "xor %[ftmp8], %[ftmp6], %[ftmp4] \n\t" + "pxor %[ftmp7], %[ftmp5], %[ftmp3] \n\t" + "pxor %[ftmp8], %[ftmp6], %[ftmp4] \n\t" "psubh %[ftmp7], %[ftmp7], %[ftmp3] \n\t" "psubh %[ftmp8], %[ftmp8], %[ftmp4] \n\t" "gssdlc1 %[ftmp7], 0x07(%[qcoeff_ptr]) \n\t" @@ -90,10 +91,10 @@ void vp8_fast_quantize_b_mmi(BLOCK *b, BLOCKD *d) { "gsldrc1 %[ftmp2], 0x08(%[inv_zig_zag]) \n\t" "pcmpeqh %[ftmp5], %[ftmp5], %[ftmp0] \n\t" "pcmpeqh %[ftmp6], %[ftmp6], %[ftmp0] \n\t" - "xor %[ftmp5], %[ftmp5], %[ones] \n\t" - "xor %[ftmp6], %[ftmp6], %[ones] \n\t" - 
"and %[ftmp5], %[ftmp5], %[ftmp1] \n\t" - "and %[ftmp6], %[ftmp6], %[ftmp2] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ones] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ones] \n\t" + "pand %[ftmp5], %[ftmp5], %[ftmp1] \n\t" + "pand %[ftmp6], %[ftmp6], %[ftmp2] \n\t" "pmaxsh %[ftmp10], %[ftmp5], %[ftmp6] \n\t" "gsldlc1 %[ftmp5], 0x07(%[dequant_ptr]) \n\t" @@ -114,10 +115,10 @@ void vp8_fast_quantize_b_mmi(BLOCK *b, BLOCKD *d) { "gsldrc1 %[ftmp2], 0x18(%[coeff_ptr]) \n\t" "psrah %[ftmp3], %[ftmp1], %[ftmp9] \n\t" - "xor %[ftmp1], %[ftmp3], %[ftmp1] \n\t" + "pxor %[ftmp1], %[ftmp3], %[ftmp1] \n\t" "psubh %[ftmp1], %[ftmp1], %[ftmp3] \n\t" "psrah %[ftmp4], %[ftmp2], %[ftmp9] \n\t" - "xor %[ftmp2], %[ftmp4], %[ftmp2] \n\t" + "pxor %[ftmp2], %[ftmp4], %[ftmp2] \n\t" "psubh %[ftmp2], %[ftmp2], %[ftmp4] \n\t" "gsldlc1 %[ftmp5], 0x17(%[round_ptr]) \n\t" @@ -133,8 +134,8 @@ void vp8_fast_quantize_b_mmi(BLOCK *b, BLOCKD *d) { "pmulhuh %[ftmp5], %[ftmp5], %[ftmp7] \n\t" "pmulhuh %[ftmp6], %[ftmp6], %[ftmp8] \n\t" - "xor %[ftmp7], %[ftmp5], %[ftmp3] \n\t" - "xor %[ftmp8], %[ftmp6], %[ftmp4] \n\t" + "pxor %[ftmp7], %[ftmp5], %[ftmp3] \n\t" + "pxor %[ftmp8], %[ftmp6], %[ftmp4] \n\t" "psubh %[ftmp7], %[ftmp7], %[ftmp3] \n\t" "psubh %[ftmp8], %[ftmp8], %[ftmp4] \n\t" "gssdlc1 %[ftmp7], 0x17(%[qcoeff_ptr]) \n\t" @@ -148,10 +149,10 @@ void vp8_fast_quantize_b_mmi(BLOCK *b, BLOCKD *d) { "gsldrc1 %[ftmp2], 0x18(%[inv_zig_zag]) \n\t" "pcmpeqh %[ftmp5], %[ftmp5], %[ftmp0] \n\t" "pcmpeqh %[ftmp6], %[ftmp6], %[ftmp0] \n\t" - "xor %[ftmp5], %[ftmp5], %[ones] \n\t" - "xor %[ftmp6], %[ftmp6], %[ones] \n\t" - "and %[ftmp5], %[ftmp5], %[ftmp1] \n\t" - "and %[ftmp6], %[ftmp6], %[ftmp2] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ones] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ones] \n\t" + "pand %[ftmp5], %[ftmp5], %[ftmp1] \n\t" + "pand %[ftmp6], %[ftmp6], %[ftmp2] \n\t" "pmaxsh %[ftmp11], %[ftmp5], %[ftmp6] \n\t" "gsldlc1 %[ftmp5], 0x17(%[dequant_ptr]) \n\t" @@ -165,34 +166,34 @@ void vp8_fast_quantize_b_mmi(BLOCK *b, 
BLOCKD *d) { "gssdlc1 %[ftmp6], 0x1f(%[dqcoeff_ptr]) \n\t" "gssdrc1 %[ftmp6], 0x18(%[dqcoeff_ptr]) \n\t" - "li %[tmp0], 0x10 \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0x10 \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "pmaxsh %[ftmp10], %[ftmp10], %[ftmp11] \n\t" "psrlw %[ftmp11], %[ftmp10], %[ftmp9] \n\t" "pmaxsh %[ftmp10], %[ftmp10], %[ftmp11] \n\t" - "li %[tmp0], 0xaa \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" + "dli %[tmp0], 0xaa \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" "pshufh %[ftmp11], %[ftmp10], %[ftmp9] \n\t" "pmaxsh %[ftmp10], %[ftmp10], %[ftmp11] \n\t" - "li %[tmp0], 0xffff \n\t" - "mtc1 %[tmp0], %[ftmp9] \n\t" - "and %[ftmp10], %[ftmp10], %[ftmp9] \n\t" + "dli %[tmp0], 0xffff \n\t" + "dmtc1 %[tmp0], %[ftmp9] \n\t" + "pand %[ftmp10], %[ftmp10], %[ftmp9] \n\t" "gssdlc1 %[ftmp10], 0x07(%[eob]) \n\t" "gssdrc1 %[ftmp10], 0x00(%[eob]) \n\t" : [ftmp0] "=&f"(ftmp[0]), [ftmp1] "=&f"(ftmp[1]), [ftmp2] "=&f"(ftmp[2]), [ftmp3] "=&f"(ftmp[3]), [ftmp4] "=&f"(ftmp[4]), [ftmp5] "=&f"(ftmp[5]), [ftmp6] "=&f"(ftmp[6]), [ftmp7] "=&f"(ftmp[7]), [ftmp8] "=&f"(ftmp[8]), [ftmp9] "=&f"(ftmp[9]), [ftmp10] "=&f"(ftmp[10]), - [ftmp11] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) + [ftmp11] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), + [tmp0] "=&r"(tmp[0]), [ones] "=&f"(ones) : [coeff_ptr] "r"((mips_reg)coeff_ptr), [qcoeff_ptr] "r"((mips_reg)qcoeff_ptr), [dequant_ptr] "r"((mips_reg)dequant_ptr), [round_ptr] "r"((mips_reg)round_ptr), [quant_ptr] "r"((mips_reg)quant_ptr), [dqcoeff_ptr] "r"((mips_reg)dqcoeff_ptr), - [inv_zig_zag] "r"((mips_reg)inv_zig_zag), [eob] "r"((mips_reg)&eob), - [ones] "f"(ones) + [inv_zig_zag] "r"((mips_reg)inv_zig_zag), [eob] "r"((mips_reg)&eob) : "memory"); *d->eob = eob; @@ -217,7 +218,7 @@ void vp8_regular_quantize_b_mmi(BLOCK *b, BLOCKD *d) { // memset(dqcoeff_ptr, 0, 32); /* clang-format off */ __asm__ volatile ( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gssdlc1 %[ftmp0], 0x07(%[qcoeff_ptr]) 
\n\t" "gssdrc1 %[ftmp0], 0x00(%[qcoeff_ptr]) \n\t" "gssdlc1 %[ftmp0], 0x0f(%[qcoeff_ptr]) \n\t" diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/onyx_if.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/onyx_if.c index 3f5b9816d..aeed719d1 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/onyx_if.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/onyx_if.c @@ -1430,6 +1430,7 @@ void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf) { VP8_COMMON *cm = &cpi->common; int last_w, last_h; unsigned int prev_number_of_layers; + unsigned int raw_target_rate; if (!cpi) return; @@ -1570,6 +1571,10 @@ void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf) { cpi->oxcf.maximum_buffer_size_in_ms = 240000; } + raw_target_rate = (unsigned int)((int64_t)cpi->oxcf.Width * cpi->oxcf.Height * + 8 * 3 * cpi->framerate / 1000); + if (cpi->oxcf.target_bandwidth > raw_target_rate) + cpi->oxcf.target_bandwidth = raw_target_rate; /* Convert target bandwidth from Kbit/s to Bit/s */ cpi->oxcf.target_bandwidth *= 1000; @@ -3615,7 +3620,7 @@ static void encode_frame_to_data_rate(VP8_COMP *cpi, size_t *size, if (cpi->this_key_frame_forced) { if (cpi->active_best_quality > cpi->avg_frame_qindex * 7 / 8) { cpi->active_best_quality = cpi->avg_frame_qindex * 7 / 8; - } else if (cpi->active_best_qualityavg_frame_qindex>> 2) { + } else if (cpi->active_best_quality < (cpi->avg_frame_qindex >> 2)) { cpi->active_best_quality = cpi->avg_frame_qindex >> 2; } } @@ -4533,9 +4538,11 @@ static void encode_frame_to_data_rate(VP8_COMP *cpi, size_t *size, /* Actual bits spent */ cpi->total_actual_bits += cpi->projected_frame_size; +#if 0 && CONFIG_INTERNAL_STATS /* Debug stats */ cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size); +#endif cpi->buffer_level = cpi->bits_off_target; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/treewriter.h 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/treewriter.h index c02683a58..4e9ed6af1 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/treewriter.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/treewriter.h @@ -14,6 +14,8 @@ /* Trees map alphabets into huffman-like codes suitable for an arithmetic bit coder. Timothy S Murphy 11 October 2004 */ +#include + #include "./vpx_config.h" #include "vp8/common/treecoder.h" @@ -48,7 +50,9 @@ static INLINE unsigned int vp8_cost_branch(const unsigned int ct[2], vp8_prob p) { /* Imitate existing calculation */ - return ((ct[0] * vp8_cost_zero(p)) + (ct[1] * vp8_cost_one(p))) >> 8; + return (unsigned int)(((((uint64_t)ct[0]) * vp8_cost_zero(p)) + + (((uint64_t)ct[1]) * vp8_cost_one(p))) >> + 8); } /* Small functions to write explicit values and tokens, as well as diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/block_error_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/block_error_sse2.asm index f6c6aeae7..200b4ccfe 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/block_error_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/block_error_sse2.asm @@ -14,7 +14,7 @@ SECTION .text ;int vp8_block_error_sse2(short *coeff_ptr, short *dcoef_ptr) -global sym(vp8_block_error_sse2) PRIVATE +globalsym(vp8_block_error_sse2) sym(vp8_block_error_sse2): push rbp mov rbp, rsp @@ -62,7 +62,7 @@ sym(vp8_block_error_sse2): ret ;int vp8_mbblock_error_sse2_impl(short *coeff_ptr, short *dcoef_ptr, int dc); -global sym(vp8_mbblock_error_sse2_impl) PRIVATE +globalsym(vp8_mbblock_error_sse2_impl) sym(vp8_mbblock_error_sse2_impl): push rbp mov rbp, rsp @@ -132,7 +132,7 @@ sym(vp8_mbblock_error_sse2_impl): ;int vp8_mbuverror_sse2_impl(short *s_ptr, short *d_ptr); -global sym(vp8_mbuverror_sse2_impl) PRIVATE +globalsym(vp8_mbuverror_sse2_impl) 
sym(vp8_mbuverror_sse2_impl): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse2.asm index 480faa255..fe78da398 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse2.asm @@ -19,7 +19,7 @@ SECTION .text ; unsigned char *dst_ptr, ; int dst_stride, ; int height); -global sym(vp8_copy32xn_sse2) PRIVATE +globalsym(vp8_copy32xn_sse2) sym(vp8_copy32xn_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse3.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse3.asm index 31ea898a3..c40b2d8bf 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse3.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/copy_sse3.asm @@ -91,7 +91,7 @@ SECTION .text ; unsigned char *dst_ptr, ; int dst_stride, ; int height); -global sym(vp8_copy32xn_sse3) PRIVATE +globalsym(vp8_copy32xn_sse3) sym(vp8_copy32xn_sse3): STACK_FRAME_CREATE_X3 diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/dct_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/dct_sse2.asm index 4d92f0341..3c28cb902 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/dct_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/dct_sse2.asm @@ -63,7 +63,7 @@ SECTION .text ;void vp8_short_fdct4x4_sse2(short *input, short *output, int pitch) -global sym(vp8_short_fdct4x4_sse2) PRIVATE +globalsym(vp8_short_fdct4x4_sse2) sym(vp8_short_fdct4x4_sse2): STACK_FRAME_CREATE @@ -168,7 +168,7 @@ sym(vp8_short_fdct4x4_sse2): STACK_FRAME_DESTROY ;void vp8_short_fdct8x4_sse2(short *input, short *output, int pitch) -global 
sym(vp8_short_fdct8x4_sse2) PRIVATE +globalsym(vp8_short_fdct8x4_sse2) sym(vp8_short_fdct8x4_sse2): STACK_FRAME_CREATE diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/fwalsh_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/fwalsh_sse2.asm index b5d5de4a5..938fc173f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/fwalsh_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/fwalsh_sse2.asm @@ -14,7 +14,7 @@ SECTION .text ;void vp8_short_walsh4x4_sse2(short *input, short *output, int pitch) -global sym(vp8_short_walsh4x4_sse2) PRIVATE +globalsym(vp8_short_walsh4x4_sse2) sym(vp8_short_walsh4x4_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/temporal_filter_apply_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/temporal_filter_apply_sse2.asm index d2b4711b8..67102064a 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/temporal_filter_apply_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/encoder/x86/temporal_filter_apply_sse2.asm @@ -22,7 +22,7 @@ SECTION .text ; int filter_weight, | 5 ; unsigned int *accumulator, | 6 ; unsigned short *count) | 7 -global sym(vp8_temporal_filter_apply_sse2) PRIVATE +globalsym(vp8_temporal_filter_apply_sse2) sym(vp8_temporal_filter_apply_sse2): push rbp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_cx_iface.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_cx_iface.c index 8f7617abf..1160f51d6 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_cx_iface.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_cx_iface.c @@ -264,9 +264,12 @@ static vpx_codec_err_t validate_img(vpx_codec_alg_priv_t *ctx, const vpx_image_t *img) { switch (img->fmt) { case VPX_IMG_FMT_YV12: - case VPX_IMG_FMT_I420: break; + case 
VPX_IMG_FMT_I420: + case VPX_IMG_FMT_NV12: break; default: - ERROR("Invalid image format. Only YV12 and I420 images are supported"); + ERROR( + "Invalid image format. Only YV12, I420 and NV12 images are " + "supported"); } if ((img->d_w != ctx->cfg.g_w) || (img->d_h != ctx->cfg.g_h)) diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_dx_iface.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_dx_iface.c index 43156a078..ba0714abe 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_dx_iface.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp8/vp8_dx_iface.c @@ -687,7 +687,7 @@ static vpx_codec_err_t vp8_set_decryptor(vpx_codec_alg_priv_t *ctx, return VPX_CODEC_OK; } -vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] = { +static vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] = { { VP8_SET_REFERENCE, vp8_set_reference }, { VP8_COPY_REFERENCE, vp8_get_reference }, { VP8_SET_POSTPROC, vp8_set_postproc }, diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/vp9_onyxc_int.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/vp9_onyxc_int.h index 6f9c6985f..1cfc12f6f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/vp9_onyxc_int.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/vp9_onyxc_int.h @@ -75,12 +75,10 @@ typedef struct { // TODO(angiebird): Set frame_index/frame_coding_index on the decoder side // properly. - int frame_index; // Display order in the video, it's equivalent to the - // show_idx defined in EncodeFrameInfo. -#if CONFIG_RATE_CTRL + int frame_index; // Display order in the video, it's equivalent to the + // show_idx defined in EncodeFrameInfo. int frame_coding_index; // The coding order (starting from zero) of this // frame. 
-#endif // CONFIG_RATE_CTRL vpx_codec_frame_buffer_t raw_frame_buffer; YV12_BUFFER_CONFIG buf; } RefCntBuffer; @@ -240,13 +238,11 @@ typedef struct VP9Common { // TODO(angiebird): current_video_frame/current_frame_coding_index into a // structure unsigned int current_video_frame; -#if CONFIG_RATE_CTRL // Each show or no show frame is assigned with a coding index based on its // coding order (starting from zero). // Current frame's coding index. int current_frame_coding_index; -#endif BITSTREAM_PROFILE profile; // VPX_BITS_8 in profile 0 or 1, VPX_BITS_10 or VPX_BITS_12 in profile 2 or 3. @@ -276,9 +272,7 @@ typedef struct VP9Common { static INLINE void init_frame_indexes(VP9_COMMON *cm) { cm->current_video_frame = 0; -#if CONFIG_RATE_CTRL cm->current_frame_coding_index = 0; -#endif // CONFIG_RATE_CTRL } static INLINE void update_frame_indexes(VP9_COMMON *cm, int show_frame) { @@ -287,9 +281,7 @@ static INLINE void update_frame_indexes(VP9_COMMON *cm, int show_frame) { // update not a real frame ++cm->current_video_frame; } -#if CONFIG_RATE_CTRL ++cm->current_frame_coding_index; -#endif // CONFIG_RATE_CTRL } typedef struct { diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/x86/vp9_mfqe_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/x86/vp9_mfqe_sse2.asm index ca0897ab9..ae7c94ea3 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/x86/vp9_mfqe_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/common/x86/vp9_mfqe_sse2.asm @@ -22,7 +22,7 @@ SECTION .text ; int dst_stride, ; int src_weight ;) -global sym(vp9_filter_by_weight16x16_sse2) PRIVATE +globalsym(vp9_filter_by_weight16x16_sse2) sym(vp9_filter_by_weight16x16_sse2): push rbp mov rbp, rsp @@ -100,7 +100,7 @@ sym(vp9_filter_by_weight16x16_sse2): ; int dst_stride, ; int src_weight ;) -global sym(vp9_filter_by_weight8x8_sse2) PRIVATE +globalsym(vp9_filter_by_weight8x8_sse2) sym(vp9_filter_by_weight8x8_sse2): push rbp mov 
rbp, rsp @@ -168,7 +168,7 @@ sym(vp9_filter_by_weight8x8_sse2): ; unsigned int *variance, 4 ; unsigned int *sad, 5 ;) -global sym(vp9_variance_and_sad_16x16_sse2) PRIVATE +globalsym(vp9_variance_and_sad_16x16_sse2) sym(vp9_variance_and_sad_16x16_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/decoder/vp9_decoder.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/decoder/vp9_decoder.c index bcade52c4..7db8ed72d 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/decoder/vp9_decoder.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/decoder/vp9_decoder.c @@ -153,6 +153,11 @@ static int vp9_dec_alloc_mi(VP9_COMMON *cm, int mi_size) { } static void vp9_dec_free_mi(VP9_COMMON *cm) { +#if CONFIG_VP9_POSTPROC + // MFQE allocates an additional mip and swaps it with cm->mip. + vpx_free(cm->postproc_state.prev_mip); + cm->postproc_state.prev_mip = NULL; +#endif vpx_free(cm->mip); cm->mip = NULL; vpx_free(cm->mi_grid_base); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encodeframe.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encodeframe.c index 13f9a1fbd..dcd647658 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encodeframe.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encodeframe.c @@ -3766,9 +3766,6 @@ static int wiener_var_segment(VP9_COMP *cpi, BLOCK_SIZE bsize, int mi_row, static int get_rdmult_delta(VP9_COMP *cpi, BLOCK_SIZE bsize, int mi_row, int mi_col, int orig_rdmult) { const int gf_group_index = cpi->twopass.gf_group.index; - TplDepFrame *tpl_frame = &cpi->tpl_stats[gf_group_index]; - TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr; - int tpl_stride = tpl_frame->stride; int64_t intra_cost = 0; int64_t mc_dep_cost = 0; int mi_wide = num_8x8_blocks_wide_lookup[bsize]; @@ -3779,11 +3776,18 @@ static int get_rdmult_delta(VP9_COMP *cpi, BLOCK_SIZE bsize, int mi_row, 
int count = 0; double r0, rk, beta; - if (tpl_frame->is_valid == 0) return orig_rdmult; - - if (cpi->twopass.gf_group.layer_depth[gf_group_index] > 1) return orig_rdmult; + TplDepFrame *tpl_frame; + TplDepStats *tpl_stats; + int tpl_stride; if (gf_group_index >= MAX_ARF_GOP_SIZE) return orig_rdmult; + tpl_frame = &cpi->tpl_stats[gf_group_index]; + + if (tpl_frame->is_valid == 0) return orig_rdmult; + tpl_stats = tpl_frame->tpl_stats_ptr; + tpl_stride = tpl_frame->stride; + + if (cpi->twopass.gf_group.layer_depth[gf_group_index] > 1) return orig_rdmult; for (row = mi_row; row < mi_row + mi_high; ++row) { for (col = mi_col; col < mi_col + mi_wide; ++col) { @@ -5086,8 +5090,8 @@ static void nonrd_pick_partition(VP9_COMP *cpi, ThreadData *td, (void)*tp_orig; - // Avoid checking for rectangular partitions for speed >= 6. - if (cpi->oxcf.speed >= 6) do_rect = 0; + // Avoid checking for rectangular partitions for speed >= 5. + if (cpi->oxcf.speed >= 5) do_rect = 0; assert(num_8x8_blocks_wide_lookup[bsize] == num_8x8_blocks_high_lookup[bsize]); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.c index b15d5f59c..8d60a0c00 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.c @@ -1024,6 +1024,8 @@ static void dealloc_compressor_data(VP9_COMP *cpi) { #if CONFIG_RATE_CTRL free_partition_info(cpi); free_motion_vector_info(cpi); + free_fp_motion_vector_info(cpi); + free_tpl_stats_info(cpi); #endif vp9_free_ref_frame_buffers(cm->buffer_pool); @@ -1523,8 +1525,29 @@ static void init_config(struct VP9_COMP *cpi, const VP9EncoderConfig *oxcf) { vp9_noise_estimate_init(&cpi->noise_estimate, cm->width, cm->height); } -static void set_rc_buffer_sizes(RATE_CONTROL *rc, - const VP9EncoderConfig *oxcf) { +void vp9_check_reset_rc_flag(VP9_COMP *cpi) { + 
RATE_CONTROL *rc = &cpi->rc; + + if (cpi->common.current_video_frame > + (unsigned int)cpi->svc.number_spatial_layers) { + if (cpi->use_svc) { + vp9_svc_check_reset_layer_rc_flag(cpi); + } else { + if (rc->avg_frame_bandwidth > (3 * rc->last_avg_frame_bandwidth >> 1) || + rc->avg_frame_bandwidth < (rc->last_avg_frame_bandwidth >> 1)) { + rc->rc_1_frame = 0; + rc->rc_2_frame = 0; + rc->bits_off_target = rc->optimal_buffer_level; + rc->buffer_level = rc->optimal_buffer_level; + } + } + } +} + +void vp9_set_rc_buffer_sizes(VP9_COMP *cpi) { + RATE_CONTROL *rc = &cpi->rc; + const VP9EncoderConfig *oxcf = &cpi->oxcf; + const int64_t bandwidth = oxcf->target_bandwidth; const int64_t starting = oxcf->starting_buffer_level_ms; const int64_t optimal = oxcf->optimal_buffer_level_ms; @@ -1535,6 +1558,11 @@ static void set_rc_buffer_sizes(RATE_CONTROL *rc, (optimal == 0) ? bandwidth / 8 : optimal * bandwidth / 1000; rc->maximum_buffer_size = (maximum == 0) ? bandwidth / 8 : maximum * bandwidth / 1000; + + // Under a configuration change, where maximum_buffer_size may change, + // keep buffer level clipped to the maximum allowed buffer size. + rc->bits_off_target = VPXMIN(rc->bits_off_target, rc->maximum_buffer_size); + rc->buffer_level = VPXMIN(rc->buffer_level, rc->maximum_buffer_size); } #if CONFIG_VP9_HIGHBITDEPTH @@ -1991,12 +2019,7 @@ void vp9_change_config(struct VP9_COMP *cpi, const VP9EncoderConfig *oxcf) { } cpi->encode_breakout = cpi->oxcf.encode_breakout; - set_rc_buffer_sizes(rc, &cpi->oxcf); - - // Under a configuration change, where maximum_buffer_size may change, - // keep buffer level clipped to the maximum allowed buffer size. - rc->bits_off_target = VPXMIN(rc->bits_off_target, rc->maximum_buffer_size); - rc->buffer_level = VPXMIN(rc->buffer_level, rc->maximum_buffer_size); + vp9_set_rc_buffer_sizes(cpi); // Set up frame rate and related parameters rate control values. 
vp9_new_framerate(cpi, cpi->framerate); @@ -2057,23 +2080,7 @@ void vp9_change_config(struct VP9_COMP *cpi, const VP9EncoderConfig *oxcf) { (int)cpi->oxcf.target_bandwidth); } - // Check for resetting the rc flags (rc_1_frame, rc_2_frame) if the - // configuration change has a large change in avg_frame_bandwidth. - // For SVC check for resetting based on spatial layer average bandwidth. - // Also reset buffer level to optimal level. - if (cm->current_video_frame > (unsigned int)cpi->svc.number_spatial_layers) { - if (cpi->use_svc) { - vp9_svc_check_reset_layer_rc_flag(cpi); - } else { - if (rc->avg_frame_bandwidth > (3 * rc->last_avg_frame_bandwidth >> 1) || - rc->avg_frame_bandwidth < (rc->last_avg_frame_bandwidth >> 1)) { - rc->rc_1_frame = 0; - rc->rc_2_frame = 0; - rc->bits_off_target = rc->optimal_buffer_level; - rc->buffer_level = rc->optimal_buffer_level; - } - } - } + vp9_check_reset_rc_flag(cpi); cpi->alt_ref_source = NULL; rc->is_src_frame_alt_ref = 0; @@ -2457,6 +2464,8 @@ VP9_COMP *vp9_create_compressor(const VP9EncoderConfig *oxcf, cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED; + vp9_extrc_init(&cpi->ext_ratectrl); + #if !CONFIG_REALTIME_ONLY if (oxcf->pass == 1) { vp9_init_first_pass(cpi); @@ -2656,6 +2665,8 @@ VP9_COMP *vp9_create_compressor(const VP9EncoderConfig *oxcf, encode_command_init(&cpi->encode_command); partition_info_init(cpi); motion_vector_info_init(cpi); + fp_motion_vector_info_init(cpi); + tpl_stats_info_init(cpi); #endif return cpi; @@ -2827,6 +2838,8 @@ void vp9_remove_compressor(VP9_COMP *cpi) { } #endif + vp9_extrc_delete(&cpi->ext_ratectrl); + vp9_remove_common(cm); vp9_free_ref_frame_buffers(cm->buffer_pool); #if CONFIG_VP9_POSTPROC @@ -3309,6 +3322,13 @@ static void loopfilter_frame(VP9_COMP *cpi, VP9_COMMON *cm) { return; } + if (cpi->loopfilter_ctrl == NO_LOOPFILTER || + (!is_reference_frame && cpi->loopfilter_ctrl == LOOPFILTER_REFERENCE)) { + lf->filter_level = 0; + vpx_extend_frame_inner_borders(cm->frame_to_show); + 
return; + } + if (xd->lossless) { lf->filter_level = 0; lf->last_filt_level = 0; @@ -3742,15 +3762,19 @@ static void set_frame_size(VP9_COMP *cpi) { } #endif // !CONFIG_REALTIME_ONLY - if (oxcf->pass == 0 && oxcf->rc_mode == VPX_CBR && !cpi->use_svc && + if (oxcf->pass == 0 && oxcf->rc_mode == VPX_CBR && oxcf->resize_mode == RESIZE_DYNAMIC && cpi->resize_pending != 0) { - oxcf->scaled_frame_width = - (oxcf->width * cpi->resize_scale_num) / cpi->resize_scale_den; - oxcf->scaled_frame_height = - (oxcf->height * cpi->resize_scale_num) / cpi->resize_scale_den; - // There has been a change in frame size. - vp9_set_size_literal(cpi, oxcf->scaled_frame_width, - oxcf->scaled_frame_height); + // For SVC scaled width/height will have been set (svc->resize_set=1) + // in get_svc_params based on the layer width/height. + if (!cpi->use_svc || !cpi->svc.resize_set) { + oxcf->scaled_frame_width = + (oxcf->width * cpi->resize_scale_num) / cpi->resize_scale_den; + oxcf->scaled_frame_height = + (oxcf->height * cpi->resize_scale_num) / cpi->resize_scale_den; + // There has been a change in frame size. + vp9_set_size_literal(cpi, oxcf->scaled_frame_width, + oxcf->scaled_frame_height); + } // TODO(agrange) Scale cpi->max_mv_magnitude if frame-size has changed. set_mv_search_params(cpi); @@ -4035,8 +4059,11 @@ static int encode_without_recode_loop(VP9_COMP *cpi, size_t *size, // For 1 pass CBR SVC, only ZEROMV is allowed for spatial reference frame // when svc->force_zero_mode_spatial_ref = 1. Under those conditions we can // avoid this frame-level upsampling (for non intra_only frames). + // For SVC single_layer mode, dynamic resize is allowed and we need to + // scale references for this case. 
if (frame_is_intra_only(cm) == 0 && - !(is_one_pass_cbr_svc(cpi) && svc->force_zero_mode_spatial_ref)) { + ((svc->single_layer_svc && cpi->oxcf.resize_mode == RESIZE_DYNAMIC) || + !(is_one_pass_cbr_svc(cpi) && svc->force_zero_mode_spatial_ref))) { vp9_scale_references(cpi); } @@ -4181,6 +4208,27 @@ static int encode_without_recode_loop(VP9_COMP *cpi, size_t *size, return 1; } +static int get_ref_frame_flags(const VP9_COMP *cpi) { + const int *const map = cpi->common.ref_frame_map; + const int gold_is_last = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idx]; + const int alt_is_last = map[cpi->alt_fb_idx] == map[cpi->lst_fb_idx]; + const int gold_is_alt = map[cpi->gld_fb_idx] == map[cpi->alt_fb_idx]; + int flags = VP9_ALT_FLAG | VP9_GOLD_FLAG | VP9_LAST_FLAG; + + if (gold_is_last) flags &= ~VP9_GOLD_FLAG; + + if (cpi->rc.frames_till_gf_update_due == INT_MAX && + (cpi->svc.number_temporal_layers == 1 && + cpi->svc.number_spatial_layers == 1)) + flags &= ~VP9_GOLD_FLAG; + + if (alt_is_last) flags &= ~VP9_ALT_FLAG; + + if (gold_is_alt) flags &= ~VP9_ALT_FLAG; + + return flags; +} + #if !CONFIG_REALTIME_ONLY #define MAX_QSTEP_ADJ 4 static int get_qstep_adj(int rate_excess, int rate_limit) { @@ -4189,8 +4237,149 @@ static int get_qstep_adj(int rate_excess, int rate_limit) { return VPXMIN(qstep, MAX_QSTEP_ADJ); } -static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, - uint8_t *dest) { +#if CONFIG_RATE_CTRL +static void init_rq_history(RATE_QINDEX_HISTORY *rq_history) { + rq_history->recode_count = 0; + rq_history->q_index_high = 255; + rq_history->q_index_low = 0; +} + +static void update_rq_history(RATE_QINDEX_HISTORY *rq_history, int target_bits, + int actual_bits, int q_index) { + rq_history->q_index_history[rq_history->recode_count] = q_index; + rq_history->rate_history[rq_history->recode_count] = actual_bits; + if (actual_bits <= target_bits) { + rq_history->q_index_high = q_index; + } + if (actual_bits >= target_bits) { + rq_history->q_index_low = q_index; + } 
+ rq_history->recode_count += 1; +} + +static int guess_q_index_from_model(const RATE_QSTEP_MODEL *rq_model, + int target_bits) { + // The model predicts bits as follows. + // target_bits = bias - ratio * log2(q_step) + // Given the target_bits, we compute the q_step as follows. + double q_step; + assert(rq_model->ratio > 0); + q_step = pow(2.0, (rq_model->bias - target_bits) / rq_model->ratio); + // TODO(angiebird): Make this function support highbitdepth. + return vp9_convert_q_to_qindex(q_step, VPX_BITS_8); +} + +static int guess_q_index_linear(int prev_q_index, int target_bits, + int actual_bits, int gap) { + int q_index = prev_q_index; + if (actual_bits < target_bits) { + q_index -= gap; + q_index = VPXMAX(q_index, 0); + } else { + q_index += gap; + q_index = VPXMIN(q_index, 255); + } + return q_index; +} + +static double get_bits_percent_diff(int target_bits, int actual_bits) { + double diff; + target_bits = VPXMAX(target_bits, 1); + diff = abs(target_bits - actual_bits) * 1. / target_bits; + return diff * 100; +} + +static int rq_model_predict_q_index(const RATE_QSTEP_MODEL *rq_model, + const RATE_QINDEX_HISTORY *rq_history, + int target_bits) { + int q_index = 128; + if (rq_history->recode_count > 0) { + const int actual_bits = + rq_history->rate_history[rq_history->recode_count - 1]; + const int prev_q_index = + rq_history->q_index_history[rq_history->recode_count - 1]; + const double percent_diff = get_bits_percent_diff(target_bits, actual_bits); + if (percent_diff > 50) { + // Binary search. + // When the actual_bits and target_bits are far apart, binary search + // q_index is faster. + q_index = (rq_history->q_index_low + rq_history->q_index_high) / 2; + } else { + if (rq_model->ready) { + q_index = guess_q_index_from_model(rq_model, target_bits); + } else { + // TODO(angiebird): Find a better way to set the gap. 
+ q_index = + guess_q_index_linear(prev_q_index, target_bits, actual_bits, 20); + } + } + } else { + if (rq_model->ready) { + q_index = guess_q_index_from_model(rq_model, target_bits); + } + } + + assert(rq_history->q_index_low <= rq_history->q_index_high); + if (q_index <= rq_history->q_index_low) { + q_index = rq_history->q_index_low + 1; + } + if (q_index >= rq_history->q_index_high) { + q_index = rq_history->q_index_high - 1; + } + return q_index; +} + +static void rq_model_update(const RATE_QINDEX_HISTORY *rq_history, + int target_bits, RATE_QSTEP_MODEL *rq_model) { + const int recode_count = rq_history->recode_count; + const double delta = 0.00001; + if (recode_count >= 2) { + const int q_index1 = rq_history->q_index_history[recode_count - 2]; + const int q_index2 = rq_history->q_index_history[recode_count - 1]; + const int r1 = rq_history->rate_history[recode_count - 2]; + const int r2 = rq_history->rate_history[recode_count - 1]; + int valid = 0; + // lower q_index should yield higher bit rate + if (q_index1 < q_index2) { + valid = r1 > r2; + } else if (q_index1 > q_index2) { + valid = r1 < r2; + } + // Only update the model when the q_index and rate behave normally. + if (valid) { + // Fit the ratio and bias of rq_model based on last two recode histories. + const double s1 = vp9_convert_qindex_to_q(q_index1, VPX_BITS_8); + const double s2 = vp9_convert_qindex_to_q(q_index2, VPX_BITS_8); + if (fabs(log2(s1) - log2(s2)) > delta) { + rq_model->ratio = (r2 - r1) / (log2(s1) - log2(s2)); + rq_model->bias = r1 + (rq_model->ratio) * log2(s1); + if (rq_model->ratio > delta && rq_model->bias > delta) { + rq_model->ready = 1; + } + } + } + } else if (recode_count == 1) { + if (rq_model->ready) { + // Update the ratio only when the initial model exists and we only have + // one recode history. 
+ const int prev_q = rq_history->q_index_history[recode_count - 1]; + const double prev_q_step = vp9_convert_qindex_to_q(prev_q, VPX_BITS_8); + if (fabs(log2(prev_q_step)) > delta) { + const int actual_bits = rq_history->rate_history[recode_count - 1]; + rq_model->ratio = + rq_model->ratio + (target_bits - actual_bits) / log2(prev_q_step); + } + } + } +} +#endif // CONFIG_RATE_CTRL + +static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, uint8_t *dest +#if CONFIG_RATE_CTRL + , + RATE_QINDEX_HISTORY *rq_history +#endif // CONFIG_RATE_CTRL +) { const VP9EncoderConfig *const oxcf = &cpi->oxcf; VP9_COMMON *const cm = &cpi->common; RATE_CONTROL *const rc = &cpi->rc; @@ -4208,6 +4397,14 @@ static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, int qrange_adj = 1; #endif +#if CONFIG_RATE_CTRL + const FRAME_UPDATE_TYPE update_type = + cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index]; + const ENCODE_FRAME_TYPE frame_type = get_encode_frame_type(update_type); + RATE_QSTEP_MODEL *rq_model = &cpi->rq_model[frame_type]; + init_rq_history(rq_history); +#endif // CONFIG_RATE_CTRL + if (cm->show_existing_frame) { rc->this_frame_target = 0; if (is_psnr_calc_enabled(cpi)) set_raw_source_frame(cpi); @@ -4254,6 +4451,11 @@ static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, loop_at_this_size = 0; } +#if CONFIG_RATE_CTRL + if (cpi->encode_command.use_external_target_frame_bits) { + q = rq_model_predict_q_index(rq_model, rq_history, rc->this_frame_target); + } +#endif // CONFIG_RATE_CTRL // Decide frame size bounds first time through. 
if (loop_count == 0) { vp9_rc_compute_frame_size_bounds(cpi, rc->this_frame_target, @@ -4300,6 +4502,19 @@ static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, q = cpi->encode_command.external_quantize_index; } #endif + if (cpi->ext_ratectrl.ready) { + const GF_GROUP *gf_group = &cpi->twopass.gf_group; + vpx_rc_encodeframe_decision_t encode_frame_decision; + FRAME_UPDATE_TYPE update_type = gf_group->update_type[gf_group->index]; + const int ref_frame_flags = get_ref_frame_flags(cpi); + RefCntBuffer *ref_frame_bufs[MAX_INTER_REF_FRAMES]; + get_ref_frame_bufs(cpi, ref_frame_bufs); + vp9_extrc_get_encodeframe_decision( + &cpi->ext_ratectrl, cm->current_video_frame, + cm->current_frame_coding_index, update_type, ref_frame_bufs, + ref_frame_flags, &encode_frame_decision); + q = encode_frame_decision.q_index; + } vp9_set_quantizer(cpi, q); @@ -4339,6 +4554,9 @@ static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, if (frame_over_shoot_limit == 0) frame_over_shoot_limit = 1; } + if (cpi->ext_ratectrl.ready) { + break; + } #if CONFIG_RATE_CTRL // This part needs to be after save_coding_context() because // restore_coding_context will be called in the end of this function. @@ -4347,7 +4565,28 @@ static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, if (cpi->encode_command.use_external_quantize_index) { break; } -#endif + + if (cpi->encode_command.use_external_target_frame_bits) { + const double percent_diff = get_bits_percent_diff( + rc->this_frame_target, rc->projected_frame_size); + update_rq_history(rq_history, rc->this_frame_target, + rc->projected_frame_size, q); + loop_count += 1; + + rq_model_update(rq_history, rc->this_frame_target, rq_model); + + // Check if we hit the target bitrate. 
+ if (percent_diff <= cpi->encode_command.target_frame_bits_error_percent || + rq_history->recode_count >= RATE_CTRL_MAX_RECODE_NUM || + rq_history->q_index_low >= rq_history->q_index_high) { + break; + } + + loop = 1; + restore_coding_context(cpi); + continue; + } +#endif // CONFIG_RATE_CTRL if (oxcf->rc_mode == VPX_Q) { loop = 0; @@ -4562,27 +4801,6 @@ static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size, } #endif // !CONFIG_REALTIME_ONLY -static int get_ref_frame_flags(const VP9_COMP *cpi) { - const int *const map = cpi->common.ref_frame_map; - const int gold_is_last = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idx]; - const int alt_is_last = map[cpi->alt_fb_idx] == map[cpi->lst_fb_idx]; - const int gold_is_alt = map[cpi->gld_fb_idx] == map[cpi->alt_fb_idx]; - int flags = VP9_ALT_FLAG | VP9_GOLD_FLAG | VP9_LAST_FLAG; - - if (gold_is_last) flags &= ~VP9_GOLD_FLAG; - - if (cpi->rc.frames_till_gf_update_due == INT_MAX && - (cpi->svc.number_temporal_layers == 1 && - cpi->svc.number_spatial_layers == 1)) - flags &= ~VP9_GOLD_FLAG; - - if (alt_is_last) flags &= ~VP9_ALT_FLAG; - - if (gold_is_alt) flags &= ~VP9_ALT_FLAG; - - return flags; -} - static void set_ext_overrides(VP9_COMP *cpi) { // Overrides the defaults with the externally supplied values with // vp9_update_reference() and vp9_update_entropy() calls @@ -4887,9 +5105,7 @@ static void set_frame_index(VP9_COMP *cpi, VP9_COMMON *cm) { const GF_GROUP *const gf_group = &cpi->twopass.gf_group; ref_buffer->frame_index = cm->current_video_frame + gf_group->arf_src_offset[gf_group->index]; -#if CONFIG_RATE_CTRL ref_buffer->frame_coding_index = cm->current_frame_coding_index; -#endif // CONFIG_RATE_CTRL } } @@ -5092,6 +5308,7 @@ static void update_encode_frame_result( #if CONFIG_RATE_CTRL const PARTITION_INFO *partition_info, const MOTION_VECTOR_INFO *motion_vector_info, + const TplDepStats *tpl_stats_info, #endif // CONFIG_RATE_CTRL ENCODE_FRAME_RESULT *encode_frame_result); #endif // !CONFIG_REALTIME_ONLY @@ 
-5197,8 +5414,12 @@ static void encode_frame_to_data_rate( if (!encode_without_recode_loop(cpi, size, dest)) return; } else { #if !CONFIG_REALTIME_ONLY +#if CONFIG_RATE_CTRL + encode_with_recode_loop(cpi, size, dest, &encode_frame_result->rq_history); +#else // CONFIG_RATE_CTRL encode_with_recode_loop(cpi, size, dest); -#endif +#endif // CONFIG_RATE_CTRL +#endif // !CONFIG_REALTIME_ONLY } // TODO(jingning): When using show existing frame mode, we assume that the @@ -5263,6 +5484,13 @@ static void encode_frame_to_data_rate( // build the bitstream vp9_pack_bitstream(cpi, dest, size); + { + const RefCntBuffer *coded_frame_buf = + get_ref_cnt_buffer(cm, cm->new_fb_idx); + vp9_extrc_update_encodeframe_result( + &cpi->ext_ratectrl, (*size) << 3, cpi->Source, &coded_frame_buf->buf, + cm->bit_depth, cpi->oxcf.input_bit_depth); + } #if CONFIG_REALTIME_ONLY (void)encode_frame_result; assert(encode_frame_result == NULL); @@ -5293,9 +5521,9 @@ static void encode_frame_to_data_rate( ref_frame_flags, cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index], cpi->Source, coded_frame_buf, ref_frame_bufs, vp9_get_quantizer(cpi), - cpi->oxcf.input_bit_depth, cm->bit_depth, cpi->td.counts, + cm->bit_depth, cpi->oxcf.input_bit_depth, cpi->td.counts, #if CONFIG_RATE_CTRL - cpi->partition_info, cpi->motion_vector_info, + cpi->partition_info, cpi->motion_vector_info, cpi->tpl_stats_info, #endif // CONFIG_RATE_CTRL encode_frame_result); } @@ -5450,6 +5678,11 @@ static void Pass2Encode(VP9_COMP *cpi, size_t *size, uint8_t *dest, unsigned int *frame_flags, ENCODE_FRAME_RESULT *encode_frame_result) { cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED; + + if (cpi->common.current_frame_coding_index == 0) { + vp9_extrc_send_firstpass_stats(&cpi->ext_ratectrl, + &cpi->twopass.first_pass_info); + } #if CONFIG_MISMATCH_DEBUG mismatch_move_frame_idx_w(); #endif @@ -7141,6 +7374,48 @@ static void free_tpl_buffer(VP9_COMP *cpi) { } } +#if CONFIG_RATE_CTRL +static void 
accumulate_frame_tpl_stats(VP9_COMP *cpi) { + VP9_COMMON *const cm = &cpi->common; + const GF_GROUP *gf_group = &cpi->twopass.gf_group; + int show_frame_count = 0; + int frame_idx; + // Accumulate tpl stats for each frame in the current group of picture. + for (frame_idx = 1; frame_idx < gf_group->gf_group_size; ++frame_idx) { + TplDepFrame *tpl_frame = &cpi->tpl_stats[frame_idx]; + TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr; + const int tpl_stride = tpl_frame->stride; + int64_t intra_cost_base = 0; + int64_t inter_cost_base = 0; + int64_t mc_dep_cost_base = 0; + int64_t mc_ref_cost_base = 0; + int64_t mc_flow_base = 0; + int row, col; + + if (!tpl_frame->is_valid) continue; + + for (row = 0; row < cm->mi_rows && tpl_frame->is_valid; ++row) { + for (col = 0; col < cm->mi_cols; ++col) { + TplDepStats *this_stats = &tpl_stats[row * tpl_stride + col]; + intra_cost_base += this_stats->intra_cost; + inter_cost_base += this_stats->inter_cost; + mc_dep_cost_base += this_stats->mc_dep_cost; + mc_ref_cost_base += this_stats->mc_ref_cost; + mc_flow_base += this_stats->mc_flow; + } + } + + cpi->tpl_stats_info[show_frame_count].intra_cost = intra_cost_base; + cpi->tpl_stats_info[show_frame_count].inter_cost = inter_cost_base; + cpi->tpl_stats_info[show_frame_count].mc_dep_cost = mc_dep_cost_base; + cpi->tpl_stats_info[show_frame_count].mc_ref_cost = mc_ref_cost_base; + cpi->tpl_stats_info[show_frame_count].mc_flow = mc_flow_base; + + ++show_frame_count; + } +} +#endif // CONFIG_RATE_CTRL + static void setup_tpl_stats(VP9_COMP *cpi) { GF_PICTURE gf_picture[MAX_ARF_GOP_SIZE]; const GF_GROUP *gf_group = &cpi->twopass.gf_group; @@ -7163,6 +7438,34 @@ static void setup_tpl_stats(VP9_COMP *cpi) { dump_tpl_stats(cpi, tpl_group_frames, gf_group, gf_picture, cpi->tpl_bsize); #endif // DUMP_TPL_STATS #endif // CONFIG_NON_GREEDY_MV + +#if CONFIG_RATE_CTRL + accumulate_frame_tpl_stats(cpi); +#endif // CONFIG_RATE_CTRL +} + +void vp9_get_ref_frame_info(FRAME_UPDATE_TYPE update_type, 
int ref_frame_flags, + RefCntBuffer *ref_frame_bufs[MAX_INTER_REF_FRAMES], + int *ref_frame_coding_indexes, + int *ref_frame_valid_list) { + if (update_type != KF_UPDATE) { + const VP9_REFFRAME inter_ref_flags[MAX_INTER_REF_FRAMES] = { VP9_LAST_FLAG, + VP9_GOLD_FLAG, + VP9_ALT_FLAG }; + int i; + for (i = 0; i < MAX_INTER_REF_FRAMES; ++i) { + assert(ref_frame_bufs[i] != NULL); + ref_frame_coding_indexes[i] = ref_frame_bufs[i]->frame_coding_index; + ref_frame_valid_list[i] = (ref_frame_flags & inter_ref_flags[i]) != 0; + } + } else { + // No reference frame is available when this is a key frame. + int i; + for (i = 0; i < MAX_INTER_REF_FRAMES; ++i) { + ref_frame_coding_indexes[i] = -1; + ref_frame_valid_list[i] = 0; + } + } } #if !CONFIG_REALTIME_ONLY @@ -7312,6 +7615,7 @@ static void yv12_buffer_to_image_buffer(const YV12_BUFFER_CONFIG *yv12_buffer, } } #endif // CONFIG_RATE_CTRL + static void update_encode_frame_result( int ref_frame_flags, FRAME_UPDATE_TYPE update_type, const YV12_BUFFER_CONFIG *source_frame, const RefCntBuffer *coded_frame_buf, @@ -7320,12 +7624,13 @@ static void update_encode_frame_result( #if CONFIG_RATE_CTRL const PARTITION_INFO *partition_info, const MOTION_VECTOR_INFO *motion_vector_info, + const TplDepStats *tpl_stats_info, #endif // CONFIG_RATE_CTRL ENCODE_FRAME_RESULT *encode_frame_result) { #if CONFIG_RATE_CTRL PSNR_STATS psnr; #if CONFIG_VP9_HIGHBITDEPTH - vpx_calc_highbd_psnr(source_frame, coded_frame_buf->buf, &psnr, bit_depth, + vpx_calc_highbd_psnr(source_frame, &coded_frame_buf->buf, &psnr, bit_depth, input_bit_depth); #else // CONFIG_VP9_HIGHBITDEPTH (void)bit_depth; @@ -7334,31 +7639,16 @@ static void update_encode_frame_result( #endif // CONFIG_VP9_HIGHBITDEPTH encode_frame_result->frame_coding_index = coded_frame_buf->frame_coding_index; - if (update_type != KF_UPDATE) { - const VP9_REFFRAME inter_ref_flags[MAX_INTER_REF_FRAMES] = { VP9_LAST_FLAG, - VP9_GOLD_FLAG, - VP9_ALT_FLAG }; - int i; - for (i = 0; i < 
MAX_INTER_REF_FRAMES; ++i) { - assert(ref_frame_bufs[i] != NULL); - encode_frame_result->ref_frame_coding_indexes[i] = - ref_frame_bufs[i]->frame_coding_index; - encode_frame_result->ref_frame_valid_list[i] = - (ref_frame_flags & inter_ref_flags[i]) != 0; - } - } else { - // No reference frame is available when this is a key frame. - int i; - for (i = 0; i < MAX_INTER_REF_FRAMES; ++i) { - encode_frame_result->ref_frame_coding_indexes[i] = -1; - encode_frame_result->ref_frame_valid_list[i] = 0; - } - } + vp9_get_ref_frame_info(update_type, ref_frame_flags, ref_frame_bufs, + encode_frame_result->ref_frame_coding_indexes, + encode_frame_result->ref_frame_valid_list); + encode_frame_result->psnr = psnr.psnr[0]; encode_frame_result->sse = psnr.sse[0]; copy_frame_counts(counts, &encode_frame_result->frame_counts); encode_frame_result->partition_info = partition_info; encode_frame_result->motion_vector_info = motion_vector_info; + encode_frame_result->tpl_stats_info = tpl_stats_info; if (encode_frame_result->coded_frame.allocated) { yv12_buffer_to_image_buffer(&coded_frame_buf->buf, &encode_frame_result->coded_frame); @@ -7384,6 +7674,7 @@ void vp9_init_encode_frame_result(ENCODE_FRAME_RESULT *encode_frame_result) { encode_frame_result->frame_coding_index = -1; vp9_zero(encode_frame_result->coded_frame); encode_frame_result->coded_frame.allocated = 0; + init_rq_history(&encode_frame_result->rq_history); #endif // CONFIG_RATE_CTRL } diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.h index a237b74f9..8763a5e78 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.h @@ -15,6 +15,7 @@ #include "./vpx_config.h" #include "vpx/internal/vpx_codec_internal.h" +#include "vpx/vpx_ext_ratectrl.h" #include "vpx/vp8cx.h" #if CONFIG_INTERNAL_STATS #include 
"vpx_dsp/ssim.h" @@ -38,6 +39,7 @@ #include "vp9/encoder/vp9_context_tree.h" #include "vp9/encoder/vp9_encodemb.h" #include "vp9/encoder/vp9_ethread.h" +#include "vp9/encoder/vp9_ext_ratectrl.h" #include "vp9/encoder/vp9_firstpass.h" #include "vp9/encoder/vp9_job_queue.h" #include "vp9/encoder/vp9_lookahead.h" @@ -147,6 +149,12 @@ typedef enum { kVeryHighSad = 6, } CONTENT_STATE_SB; +typedef enum { + LOOPFILTER_ALL = 0, + LOOPFILTER_REFERENCE = 1, // Disable loopfilter on non reference frames. + NO_LOOPFILTER = 2, // Disable loopfilter on all frames. +} LOOPFILTER_CONTROL; + typedef struct VP9EncoderConfig { BITSTREAM_PROFILE profile; vpx_bit_depth_t bit_depth; // Codec bit-depth. @@ -532,24 +540,83 @@ typedef struct MOTION_VECTOR_INFO { int_mv mv[2]; } MOTION_VECTOR_INFO; +typedef struct GOP_COMMAND { + int use; // use this command to set gop or not. If not, use vp9's decision. + int show_frame_count; + int use_alt_ref; +} GOP_COMMAND; + +static INLINE void gop_command_on(GOP_COMMAND *gop_command, + int show_frame_count, int use_alt_ref) { + gop_command->use = 1; + gop_command->show_frame_count = show_frame_count; + gop_command->use_alt_ref = use_alt_ref; +} + +static INLINE void gop_command_off(GOP_COMMAND *gop_command) { + gop_command->use = 0; + gop_command->show_frame_count = 0; + gop_command->use_alt_ref = 0; +} + +static INLINE int gop_command_coding_frame_count( + const GOP_COMMAND *gop_command) { + if (gop_command->use == 0) { + assert(0); + return -1; + } + return gop_command->show_frame_count + gop_command->use_alt_ref; +} + +// TODO(angiebird): See if we can merge this one with FrameType in +// simple_encode.h +typedef enum ENCODE_FRAME_TYPE { + ENCODE_FRAME_TYPE_KEY, + ENCODE_FRAME_TYPE_INTER, + ENCODE_FRAME_TYPE_ALTREF, + ENCODE_FRAME_TYPE_OVERLAY, + ENCODE_FRAME_TYPE_GOLDEN, + ENCODE_FRAME_TYPES, +} ENCODE_FRAME_TYPE; + +// TODO(angiebird): Merge this function with get_frame_type_from_update_type() +static INLINE ENCODE_FRAME_TYPE 
+get_encode_frame_type(FRAME_UPDATE_TYPE update_type) { + switch (update_type) { + case KF_UPDATE: return ENCODE_FRAME_TYPE_KEY; + case ARF_UPDATE: return ENCODE_FRAME_TYPE_ALTREF; + case GF_UPDATE: return ENCODE_FRAME_TYPE_GOLDEN; + case OVERLAY_UPDATE: return ENCODE_FRAME_TYPE_OVERLAY; + case LF_UPDATE: return ENCODE_FRAME_TYPE_INTER; + default: + fprintf(stderr, "Unsupported update_type %d\n", update_type); + abort(); + return ENCODE_FRAME_TYPE_INTER; + } +} + +typedef struct RATE_QSTEP_MODEL { + // The rq model predicts the bit usage as follows. + // rate = bias - ratio * log2(q_step) + int ready; + double bias; + double ratio; +} RATE_QSTEP_MODEL; + typedef struct ENCODE_COMMAND { int use_external_quantize_index; int external_quantize_index; - // A list of binary flags set from the external controller. - // Each binary flag indicates whether the frame is an arf or not. - const int *external_arf_indexes; + + int use_external_target_frame_bits; + int target_frame_bits; + double target_frame_bits_error_percent; + + GOP_COMMAND gop_command; } ENCODE_COMMAND; -static INLINE void encode_command_init(ENCODE_COMMAND *encode_command) { - vp9_zero(*encode_command); - encode_command->use_external_quantize_index = 0; - encode_command->external_quantize_index = -1; - encode_command->external_arf_indexes = NULL; -} - -static INLINE void encode_command_set_external_arf_indexes( - ENCODE_COMMAND *encode_command, const int *external_arf_indexes) { - encode_command->external_arf_indexes = external_arf_indexes; +static INLINE void encode_command_set_gop_command( + ENCODE_COMMAND *encode_command, GOP_COMMAND gop_command) { + encode_command->gop_command = gop_command; } static INLINE void encode_command_set_external_quantize_index( @@ -564,9 +631,35 @@ static INLINE void encode_command_reset_external_quantize_index( encode_command->external_quantize_index = -1; } +static INLINE void encode_command_set_target_frame_bits( + ENCODE_COMMAND *encode_command, int target_frame_bits, + 
double target_frame_bits_error_percent) { + encode_command->use_external_target_frame_bits = 1; + encode_command->target_frame_bits = target_frame_bits; + encode_command->target_frame_bits_error_percent = + target_frame_bits_error_percent; +} + +static INLINE void encode_command_reset_target_frame_bits( + ENCODE_COMMAND *encode_command) { + encode_command->use_external_target_frame_bits = 0; + encode_command->target_frame_bits = -1; + encode_command->target_frame_bits_error_percent = 0; +} + +static INLINE void encode_command_init(ENCODE_COMMAND *encode_command) { + vp9_zero(*encode_command); + encode_command_reset_external_quantize_index(encode_command); + encode_command_reset_target_frame_bits(encode_command); + gop_command_off(&encode_command->gop_command); +} + // Returns number of units in size of 4, if not multiple not a multiple of 4, // round it up. For example, size is 7, return 2. static INLINE int get_num_unit_4x4(int size) { return (size + 3) >> 2; } +// Returns number of units in size of 16, if not multiple not a multiple of 16, +// round it up. For example, size is 17, return 2. 
+static INLINE int get_num_unit_16x16(int size) { return (size + 15) >> 4; } #endif // CONFIG_RATE_CTRL typedef struct VP9_COMP { @@ -873,11 +966,18 @@ typedef struct VP9_COMP { int multi_layer_arf; vpx_roi_map_t roi; + + LOOPFILTER_CONTROL loopfilter_ctrl; #if CONFIG_RATE_CTRL ENCODE_COMMAND encode_command; PARTITION_INFO *partition_info; MOTION_VECTOR_INFO *motion_vector_info; + MOTION_VECTOR_INFO *fp_motion_vector_info; + TplDepStats *tpl_stats_info; + + RATE_QSTEP_MODEL rq_model[ENCODE_FRAME_TYPES]; #endif + EXT_RATECTRL ext_ratectrl; } VP9_COMP; #if CONFIG_RATE_CTRL @@ -902,6 +1002,13 @@ static INLINE void free_partition_info(struct VP9_COMP *cpi) { cpi->partition_info = NULL; } +static INLINE void reset_mv_info(MOTION_VECTOR_INFO *mv_info) { + mv_info->ref_frame[0] = NONE; + mv_info->ref_frame[1] = NONE; + mv_info->mv[0].as_int = INVALID_MV; + mv_info->mv[1].as_int = INVALID_MV; +} + // Allocates memory for the motion vector information. // The unit size is each 4x4 block. // Only called once in vp9_create_compressor(). @@ -923,6 +1030,53 @@ static INLINE void free_motion_vector_info(struct VP9_COMP *cpi) { cpi->motion_vector_info = NULL; } +// Allocates memory for the tpl stats information. +// Only called once in vp9_create_compressor(). +static INLINE void tpl_stats_info_init(struct VP9_COMP *cpi) { + VP9_COMMON *const cm = &cpi->common; + CHECK_MEM_ERROR( + cm, cpi->tpl_stats_info, + (TplDepStats *)vpx_calloc(MAX_LAG_BUFFERS, sizeof(TplDepStats))); + memset(cpi->tpl_stats_info, 0, MAX_LAG_BUFFERS * sizeof(TplDepStats)); +} + +// Frees memory of the tpl stats information. +// Only called once in dealloc_compressor_data(). +static INLINE void free_tpl_stats_info(struct VP9_COMP *cpi) { + vpx_free(cpi->tpl_stats_info); + cpi->tpl_stats_info = NULL; +} + +// Allocates memory for the first pass motion vector information. +// The unit size is each 16x16 block. +// Only called once in vp9_create_compressor(). 
+static INLINE void fp_motion_vector_info_init(struct VP9_COMP *cpi) { + VP9_COMMON *const cm = &cpi->common; + const int unit_width = get_num_unit_16x16(cpi->frame_info.frame_width); + const int unit_height = get_num_unit_16x16(cpi->frame_info.frame_height); + CHECK_MEM_ERROR(cm, cpi->fp_motion_vector_info, + (MOTION_VECTOR_INFO *)vpx_calloc(unit_width * unit_height, + sizeof(MOTION_VECTOR_INFO))); +} + +static INLINE void fp_motion_vector_info_reset( + int frame_width, int frame_height, + MOTION_VECTOR_INFO *fp_motion_vector_info) { + const int unit_width = get_num_unit_16x16(frame_width); + const int unit_height = get_num_unit_16x16(frame_height); + int i; + for (i = 0; i < unit_width * unit_height; ++i) { + reset_mv_info(fp_motion_vector_info + i); + } +} + +// Frees memory of the first pass motion vector information. +// Only called once in dealloc_compressor_data(). +static INLINE void free_fp_motion_vector_info(struct VP9_COMP *cpi) { + vpx_free(cpi->fp_motion_vector_info); + cpi->fp_motion_vector_info = NULL; +} + // This is the c-version counter part of ImageBuffer typedef struct IMAGE_BUFFER { int allocated; @@ -930,6 +1084,17 @@ typedef struct IMAGE_BUFFER { int plane_height[3]; uint8_t *plane_buffer[3]; } IMAGE_BUFFER; + +#define RATE_CTRL_MAX_RECODE_NUM 7 + +typedef struct RATE_QINDEX_HISTORY { + int recode_count; + int q_index_history[RATE_CTRL_MAX_RECODE_NUM]; + int rate_history[RATE_CTRL_MAX_RECODE_NUM]; + int q_index_high; + int q_index_low; +} RATE_QINDEX_HISTORY; + #endif // CONFIG_RATE_CTRL typedef struct ENCODE_FRAME_RESULT { @@ -944,7 +1109,9 @@ typedef struct ENCODE_FRAME_RESULT { FRAME_COUNTS frame_counts; const PARTITION_INFO *partition_info; const MOTION_VECTOR_INFO *motion_vector_info; + const TplDepStats *tpl_stats_info; IMAGE_BUFFER coded_frame; + RATE_QINDEX_HISTORY rq_history; #endif // CONFIG_RATE_CTRL int quantize_index; } ENCODE_FRAME_RESULT; @@ -1000,6 +1167,14 @@ int vp9_set_size_literal(VP9_COMP *cpi, unsigned int width, void 
vp9_set_svc(VP9_COMP *cpi, int use_svc); +// Check for resetting the rc flags (rc_1_frame, rc_2_frame) if the +// configuration change has a large change in avg_frame_bandwidth. +// For SVC check for resetting based on spatial layer average bandwidth. +// Also reset buffer level to optimal level. +void vp9_check_reset_rc_flag(VP9_COMP *cpi); + +void vp9_set_rc_buffer_sizes(VP9_COMP *cpi); + static INLINE int stack_pop(int *stack, int stack_size) { int idx; const int r = stack[0]; @@ -1112,6 +1287,11 @@ void vp9_scale_references(VP9_COMP *cpi); void vp9_update_reference_frames(VP9_COMP *cpi); +void vp9_get_ref_frame_info(FRAME_UPDATE_TYPE update_type, int ref_frame_flags, + RefCntBuffer *ref_frame_bufs[MAX_INTER_REF_FRAMES], + int *ref_frame_coding_indexes, + int *ref_frame_valid_list); + void vp9_set_high_precision_mv(VP9_COMP *cpi, int allow_high_precision_mv); YV12_BUFFER_CONFIG *vp9_svc_twostage_scale( diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ext_ratectrl.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ext_ratectrl.c new file mode 100644 index 000000000..94c2addd2 --- /dev/null +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ext_ratectrl.c @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2020 The WebM project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "vp9/encoder/vp9_ext_ratectrl.h" +#include "vp9/encoder/vp9_encoder.h" +#include "vp9/common/vp9_common.h" +#include "vpx_dsp/psnr.h" + +void vp9_extrc_init(EXT_RATECTRL *ext_ratectrl) { vp9_zero(*ext_ratectrl); } + +void vp9_extrc_create(vpx_rc_funcs_t funcs, vpx_rc_config_t ratectrl_config, + EXT_RATECTRL *ext_ratectrl) { + vpx_rc_firstpass_stats_t *rc_firstpass_stats; + vp9_extrc_delete(ext_ratectrl); + ext_ratectrl->funcs = funcs; + ext_ratectrl->ratectrl_config = ratectrl_config; + ext_ratectrl->funcs.create_model(ext_ratectrl->funcs.priv, + &ext_ratectrl->ratectrl_config, + &ext_ratectrl->model); + rc_firstpass_stats = &ext_ratectrl->rc_firstpass_stats; + rc_firstpass_stats->num_frames = ratectrl_config.show_frame_count; + rc_firstpass_stats->frame_stats = + vpx_malloc(sizeof(*rc_firstpass_stats->frame_stats) * + rc_firstpass_stats->num_frames); + ext_ratectrl->ready = 1; +} + +void vp9_extrc_delete(EXT_RATECTRL *ext_ratectrl) { + if (ext_ratectrl->ready) { + ext_ratectrl->funcs.delete_model(ext_ratectrl->model); + vpx_free(ext_ratectrl->rc_firstpass_stats.frame_stats); + } + vp9_extrc_init(ext_ratectrl); +} + +static void gen_rc_firstpass_stats(const FIRSTPASS_STATS *stats, + vpx_rc_frame_stats_t *rc_frame_stats) { + rc_frame_stats->frame = stats->frame; + rc_frame_stats->weight = stats->weight; + rc_frame_stats->intra_error = stats->intra_error; + rc_frame_stats->coded_error = stats->coded_error; + rc_frame_stats->sr_coded_error = stats->sr_coded_error; + rc_frame_stats->frame_noise_energy = stats->frame_noise_energy; + rc_frame_stats->pcnt_inter = stats->pcnt_inter; + rc_frame_stats->pcnt_motion = stats->pcnt_motion; + rc_frame_stats->pcnt_second_ref = stats->pcnt_second_ref; + rc_frame_stats->pcnt_neutral = stats->pcnt_neutral; + rc_frame_stats->pcnt_intra_low = stats->pcnt_intra_low; + rc_frame_stats->pcnt_intra_high = stats->pcnt_intra_high; + rc_frame_stats->intra_skip_pct = stats->intra_skip_pct; + rc_frame_stats->intra_smooth_pct = 
stats->intra_smooth_pct; + rc_frame_stats->inactive_zone_rows = stats->inactive_zone_rows; + rc_frame_stats->inactive_zone_cols = stats->inactive_zone_cols; + rc_frame_stats->MVr = stats->MVr; + rc_frame_stats->mvr_abs = stats->mvr_abs; + rc_frame_stats->MVc = stats->MVc; + rc_frame_stats->mvc_abs = stats->mvc_abs; + rc_frame_stats->MVrv = stats->MVrv; + rc_frame_stats->MVcv = stats->MVcv; + rc_frame_stats->mv_in_out_count = stats->mv_in_out_count; + rc_frame_stats->duration = stats->duration; + rc_frame_stats->count = stats->count; +} + +void vp9_extrc_send_firstpass_stats(EXT_RATECTRL *ext_ratectrl, + const FIRST_PASS_INFO *first_pass_info) { + if (ext_ratectrl->ready) { + vpx_rc_firstpass_stats_t *rc_firstpass_stats = + &ext_ratectrl->rc_firstpass_stats; + int i; + assert(rc_firstpass_stats->num_frames == first_pass_info->num_frames); + for (i = 0; i < rc_firstpass_stats->num_frames; ++i) { + gen_rc_firstpass_stats(&first_pass_info->stats[i], + &rc_firstpass_stats->frame_stats[i]); + } + ext_ratectrl->funcs.send_firstpass_stats(ext_ratectrl->model, + rc_firstpass_stats); + } +} + +static int extrc_get_frame_type(FRAME_UPDATE_TYPE update_type) { + // TODO(angiebird): Add unit test to make sure this function behaves like + // get_frame_type_from_update_type() + // TODO(angiebird): Merge this function with get_frame_type_from_update_type() + switch (update_type) { + case KF_UPDATE: return 0; // kFrameTypeKey; + case ARF_UPDATE: return 2; // kFrameTypeAltRef; + case GF_UPDATE: return 4; // kFrameTypeGolden; + case OVERLAY_UPDATE: return 3; // kFrameTypeOverlay; + case LF_UPDATE: return 1; // kFrameTypeInter; + default: + fprintf(stderr, "Unsupported update_type %d\n", update_type); + abort(); + return 1; + } +} + +void vp9_extrc_get_encodeframe_decision( + EXT_RATECTRL *ext_ratectrl, int show_index, int coding_index, + FRAME_UPDATE_TYPE update_type, + RefCntBuffer *ref_frame_bufs[MAX_INTER_REF_FRAMES], int ref_frame_flags, + vpx_rc_encodeframe_decision_t 
*encode_frame_decision) { + if (ext_ratectrl->ready) { + vpx_rc_encodeframe_info_t encode_frame_info; + encode_frame_info.show_index = show_index; + encode_frame_info.coding_index = coding_index; + encode_frame_info.frame_type = extrc_get_frame_type(update_type); + + vp9_get_ref_frame_info(update_type, ref_frame_flags, ref_frame_bufs, + encode_frame_info.ref_frame_coding_indexes, + encode_frame_info.ref_frame_valid_list); + + ext_ratectrl->funcs.get_encodeframe_decision( + ext_ratectrl->model, &encode_frame_info, encode_frame_decision); + } +} + +void vp9_extrc_update_encodeframe_result(EXT_RATECTRL *ext_ratectrl, + int64_t bit_count, + const YV12_BUFFER_CONFIG *source_frame, + const YV12_BUFFER_CONFIG *coded_frame, + uint32_t bit_depth, + uint32_t input_bit_depth) { + if (ext_ratectrl->ready) { + PSNR_STATS psnr; + vpx_rc_encodeframe_result_t encode_frame_result; + encode_frame_result.bit_count = bit_count; + encode_frame_result.pixel_count = + source_frame->y_width * source_frame->y_height + + 2 * source_frame->uv_width * source_frame->uv_height; +#if CONFIG_VP9_HIGHBITDEPTH + vpx_calc_highbd_psnr(source_frame, coded_frame, &psnr, bit_depth, + input_bit_depth); +#else + (void)bit_depth; + (void)input_bit_depth; + vpx_calc_psnr(source_frame, coded_frame, &psnr); +#endif + encode_frame_result.sse = psnr.sse[0]; + ext_ratectrl->funcs.update_encodeframe_result(ext_ratectrl->model, + &encode_frame_result); + } +} diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ext_ratectrl.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ext_ratectrl.h new file mode 100644 index 000000000..fb6cfe1ac --- /dev/null +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ext_ratectrl.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2020 The WebM project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VPX_VP9_ENCODER_VP9_EXT_RATECTRL_H_ +#define VPX_VP9_ENCODER_VP9_EXT_RATECTRL_H_ + +#include "vpx/vpx_ext_ratectrl.h" +#include "vp9/encoder/vp9_firstpass.h" + +typedef struct EXT_RATECTRL { + int ready; + vpx_rc_model_t model; + vpx_rc_funcs_t funcs; + vpx_rc_config_t ratectrl_config; + vpx_rc_firstpass_stats_t rc_firstpass_stats; +} EXT_RATECTRL; + +void vp9_extrc_init(EXT_RATECTRL *ext_ratectrl); + +void vp9_extrc_create(vpx_rc_funcs_t funcs, vpx_rc_config_t ratectrl_config, + EXT_RATECTRL *ext_ratectrl); + +void vp9_extrc_delete(EXT_RATECTRL *ext_ratectrl); + +void vp9_extrc_send_firstpass_stats(EXT_RATECTRL *ext_ratectrl, + const FIRST_PASS_INFO *first_pass_info); + +void vp9_extrc_get_encodeframe_decision( + EXT_RATECTRL *ext_ratectrl, int show_index, int coding_index, + FRAME_UPDATE_TYPE update_type, + RefCntBuffer *ref_frame_bufs[MAX_INTER_REF_FRAMES], int ref_frame_flags, + vpx_rc_encodeframe_decision_t *encode_frame_decision); + +void vp9_extrc_update_encodeframe_result(EXT_RATECTRL *ext_ratectrl, + int64_t bit_count, + const YV12_BUFFER_CONFIG *source_frame, + const YV12_BUFFER_CONFIG *coded_frame, + uint32_t bit_depth, + uint32_t input_bit_depth); + +#endif // VPX_VP9_ENCODER_VP9_EXT_RATECTRL_H_ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_extend.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_extend.c index f8e24610a..dcb62e876 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_extend.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_extend.c @@ -18,18 +18,26 @@ static void copy_and_extend_plane(const uint8_t *src, int src_pitch, uint8_t *dst, int dst_pitch, int w, int h, int extend_top, int extend_left, - int extend_bottom, int 
extend_right) { - int i, linesize; + int extend_bottom, int extend_right, + int interleave_step) { + int i, j, linesize; + const int step = interleave_step < 1 ? 1 : interleave_step; // copy the left and right most columns out const uint8_t *src_ptr1 = src; - const uint8_t *src_ptr2 = src + w - 1; + const uint8_t *src_ptr2 = src + (w - 1) * step; uint8_t *dst_ptr1 = dst - extend_left; uint8_t *dst_ptr2 = dst + w; for (i = 0; i < h; i++) { memset(dst_ptr1, src_ptr1[0], extend_left); - memcpy(dst_ptr1 + extend_left, src_ptr1, w); + if (step == 1) { + memcpy(dst_ptr1 + extend_left, src_ptr1, w); + } else { + for (j = 0; j < w; j++) { + dst_ptr1[extend_left + j] = src_ptr1[step * j]; + } + } memset(dst_ptr2, src_ptr2[0], extend_right); src_ptr1 += src_pitch; src_ptr2 += src_pitch; @@ -122,6 +130,8 @@ void vp9_copy_and_extend_frame(const YV12_BUFFER_CONFIG *src, const int el_uv = el_y >> uv_width_subsampling; const int eb_uv = eb_y >> uv_height_subsampling; const int er_uv = er_y >> uv_width_subsampling; + // detect nv12 colorspace + const int chroma_step = src->v_buffer - src->u_buffer == 1 ? 
2 : 1; #if CONFIG_VP9_HIGHBITDEPTH if (src->flags & YV12_FLAG_HIGHBITDEPTH) { @@ -142,15 +152,15 @@ void vp9_copy_and_extend_frame(const YV12_BUFFER_CONFIG *src, copy_and_extend_plane(src->y_buffer, src->y_stride, dst->y_buffer, dst->y_stride, src->y_crop_width, src->y_crop_height, - et_y, el_y, eb_y, er_y); + et_y, el_y, eb_y, er_y, 1); copy_and_extend_plane(src->u_buffer, src->uv_stride, dst->u_buffer, dst->uv_stride, src->uv_crop_width, src->uv_crop_height, - et_uv, el_uv, eb_uv, er_uv); + et_uv, el_uv, eb_uv, er_uv, chroma_step); copy_and_extend_plane(src->v_buffer, src->uv_stride, dst->v_buffer, dst->uv_stride, src->uv_crop_width, src->uv_crop_height, - et_uv, el_uv, eb_uv, er_uv); + et_uv, el_uv, eb_uv, er_uv, chroma_step); } void vp9_copy_and_extend_frame_with_rect(const YV12_BUFFER_CONFIG *src, @@ -176,16 +186,18 @@ void vp9_copy_and_extend_frame_with_rect(const YV12_BUFFER_CONFIG *src, const int dst_uv_offset = ((srcy * dst->uv_stride) >> 1) + (srcx >> 1); const int srch_uv = ROUND_POWER_OF_TWO(srch, 1); const int srcw_uv = ROUND_POWER_OF_TWO(srcw, 1); + // detect nv12 colorspace + const int chroma_step = src->v_buffer - src->u_buffer == 1 ? 
2 : 1; copy_and_extend_plane(src->y_buffer + src_y_offset, src->y_stride, dst->y_buffer + dst_y_offset, dst->y_stride, srcw, srch, - et_y, el_y, eb_y, er_y); + et_y, el_y, eb_y, er_y, 1); copy_and_extend_plane(src->u_buffer + src_uv_offset, src->uv_stride, dst->u_buffer + dst_uv_offset, dst->uv_stride, srcw_uv, - srch_uv, et_uv, el_uv, eb_uv, er_uv); + srch_uv, et_uv, el_uv, eb_uv, er_uv, chroma_step); copy_and_extend_plane(src->v_buffer + src_uv_offset, src->uv_stride, dst->v_buffer + dst_uv_offset, dst->uv_stride, srcw_uv, - srch_uv, et_uv, el_uv, eb_uv, er_uv); + srch_uv, et_uv, el_uv, eb_uv, er_uv, chroma_step); } diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.c index 0bda4b7d6..de954f757 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.c @@ -389,6 +389,29 @@ static int get_search_range(const VP9_COMP *cpi) { return sr; } +// Reduce limits to keep the motion search within MV_MAX of ref_mv. Not doing +// this can be problematic for big videos (8K) and may cause assert failure +// (or memory violation) in mv_cost. Limits are only modified if they would +// be non-empty. Returns 1 if limits are non-empty. 
+static int intersect_limits_with_mv_max(MvLimits *mv_limits, const MV *ref_mv) { + const int row_min = + VPXMAX(mv_limits->row_min, (ref_mv->row + 7 - MV_MAX) >> 3); + const int row_max = + VPXMIN(mv_limits->row_max, (ref_mv->row - 1 + MV_MAX) >> 3); + const int col_min = + VPXMAX(mv_limits->col_min, (ref_mv->col + 7 - MV_MAX) >> 3); + const int col_max = + VPXMIN(mv_limits->col_max, (ref_mv->col - 1 + MV_MAX) >> 3); + if (row_min > row_max || col_min > col_max) { + return 0; + } + mv_limits->row_min = row_min; + mv_limits->row_max = row_max; + mv_limits->col_min = col_min; + mv_limits->col_max = col_max; + return 1; +} + static void first_pass_motion_search(VP9_COMP *cpi, MACROBLOCK *x, const MV *ref_mv, MV *best_mv, int *best_motion_err) { @@ -403,9 +426,14 @@ static void first_pass_motion_search(VP9_COMP *cpi, MACROBLOCK *x, int step_param = 3; int further_steps = (MAX_MVSEARCH_STEPS - 1) - step_param; const int sr = get_search_range(cpi); + const MvLimits tmp_mv_limits = x->mv_limits; step_param += sr; further_steps -= sr; + if (!intersect_limits_with_mv_max(&x->mv_limits, ref_mv)) { + return; + } + // Override the default variance function to use MSE. 
v_fn_ptr.vf = get_block_variance_fn(bsize); #if CONFIG_VP9_HIGHBITDEPTH @@ -451,6 +479,7 @@ static void first_pass_motion_search(VP9_COMP *cpi, MACROBLOCK *x, } } } + x->mv_limits = tmp_mv_limits; } static BLOCK_SIZE get_bsize(const VP9_COMMON *cm, int mb_row, int mb_col) { @@ -810,6 +839,22 @@ static void accumulate_fp_mb_row_stat(TileDataEnc *this_tile, fp_acc_data->image_data_start_row); } +#if CONFIG_RATE_CTRL +static void store_fp_motion_vector(VP9_COMP *cpi, const MV *mv, + const int mb_row, const int mb_col, + MV_REFERENCE_FRAME frame_type, + const int mv_idx) { + VP9_COMMON *const cm = &cpi->common; + const int mb_index = mb_row * cm->mb_cols + mb_col; + MOTION_VECTOR_INFO *this_motion_vector_info = + &cpi->fp_motion_vector_info[mb_index]; + this_motion_vector_info->ref_frame[mv_idx] = frame_type; + if (frame_type != INTRA_FRAME) { + this_motion_vector_info->mv[mv_idx].as_mv = *mv; + } +} +#endif // CONFIG_RATE_CTRL + #define NZ_MOTION_PENALTY 128 #define INTRA_MODE_PENALTY 1024 void vp9_first_pass_encode_tile_mb_row(VP9_COMP *cpi, ThreadData *td, @@ -1044,6 +1089,11 @@ void vp9_first_pass_encode_tile_mb_row(VP9_COMP *cpi, ThreadData *td, struct buf_2d unscaled_last_source_buf_2d; vp9_variance_fn_ptr_t v_fn_ptr = cpi->fn_ptr[bsize]; +#if CONFIG_RATE_CTRL + // Store zero mv as default + store_fp_motion_vector(cpi, &mv, mb_row, mb_col, LAST_FRAME, 0); +#endif // CONFIG_RAGE_CTRL + xd->plane[0].pre[0].buf = first_ref_buf->y_buffer + recon_yoffset; #if CONFIG_VP9_HIGHBITDEPTH if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) { @@ -1108,6 +1158,9 @@ void vp9_first_pass_encode_tile_mb_row(VP9_COMP *cpi, ThreadData *td, vp9_get_mvpred_var(x, &tmp_mv, &zero_mv, &v_fn_ptr, 0); } } +#if CONFIG_RATE_CTRL + store_fp_motion_vector(cpi, &mv, mb_row, mb_col, LAST_FRAME, 0); +#endif // CONFIG_RAGE_CTRL // Search in an older reference frame. 
if ((cm->current_video_frame > 1) && gld_yv12 != NULL) { @@ -1129,6 +1182,9 @@ void vp9_first_pass_encode_tile_mb_row(VP9_COMP *cpi, ThreadData *td, #endif // CONFIG_VP9_HIGHBITDEPTH first_pass_motion_search(cpi, x, &zero_mv, &tmp_mv, &gf_motion_error); +#if CONFIG_RATE_CTRL + store_fp_motion_vector(cpi, &tmp_mv, mb_row, mb_col, GOLDEN_FRAME, 1); +#endif // CONFIG_RAGE_CTRL if (gf_motion_error < motion_error && gf_motion_error < this_error) ++(fp_acc_data->second_ref_count); @@ -1302,6 +1358,9 @@ void vp9_first_pass_encode_tile_mb_row(VP9_COMP *cpi, ThreadData *td, } } else { fp_acc_data->sr_coded_error += (int64_t)this_error; +#if CONFIG_RATE_CTRL + store_fp_motion_vector(cpi, NULL, mb_row, mb_col, INTRA_FRAME, 0); +#endif // CONFIG_RAGE_CTRL } fp_acc_data->coded_error += (int64_t)this_error; @@ -1328,6 +1387,12 @@ static void first_pass_encode(VP9_COMP *cpi, FIRSTPASS_DATA *fp_acc_data) { // Tiling is ignored in the first pass. vp9_tile_init(tile, cm, 0, 0); +#if CONFIG_RATE_CTRL + fp_motion_vector_info_reset(cpi->frame_info.frame_width, + cpi->frame_info.frame_height, + cpi->fp_motion_vector_info); +#endif + for (mb_row = 0; mb_row < cm->mb_rows; ++mb_row) { best_ref_mv = zero_mv; vp9_first_pass_encode_tile_mb_row(cpi, &cpi->td, fp_acc_data, &tile_data, @@ -2479,9 +2544,6 @@ typedef struct RANGE { * structs. 
*/ static int get_gop_coding_frame_num( -#if CONFIG_RATE_CTRL - const int *external_arf_indexes, -#endif int *use_alt_ref, const FRAME_INFO *frame_info, const FIRST_PASS_INFO *first_pass_info, const RATE_CONTROL *rc, int gf_start_show_idx, const RANGE *active_gf_interval, @@ -2497,24 +2559,6 @@ static int get_gop_coding_frame_num( (frame_info->frame_height + frame_info->frame_width) / 4.0; double zero_motion_accumulator = 1.0; int gop_coding_frames; -#if CONFIG_RATE_CTRL - (void)mv_ratio_accumulator_thresh; - (void)active_gf_interval; - (void)gop_intra_factor; - - if (external_arf_indexes != NULL && rc->frames_to_key > 1) { - // gop_coding_frames = 1 is necessary to filter out the overlay frame, - // since the arf is in this group of picture and its overlay is in the next. - gop_coding_frames = 1; - *use_alt_ref = 1; - while (gop_coding_frames < rc->frames_to_key) { - const int frame_index = gf_start_show_idx + gop_coding_frames; - ++gop_coding_frames; - if (external_arf_indexes[frame_index] == 1) break; - } - return gop_coding_frames; - } -#endif // CONFIG_RATE_CTRL *use_alt_ref = 1; gop_coding_frames = 0; @@ -2741,15 +2785,26 @@ static void define_gf_group(VP9_COMP *cpi, int gf_start_show_idx) { gop_intra_factor = 1.0; } - { - gop_coding_frames = get_gop_coding_frame_num( #if CONFIG_RATE_CTRL - cpi->encode_command.external_arf_indexes, -#endif - &use_alt_ref, frame_info, first_pass_info, rc, gf_start_show_idx, - &active_gf_interval, gop_intra_factor, cpi->oxcf.lag_in_frames); - use_alt_ref &= allow_alt_ref; + { + const GOP_COMMAND *gop_command = &cpi->encode_command.gop_command; + assert(allow_alt_ref == 1); + if (gop_command->use) { + gop_coding_frames = gop_command_coding_frame_count(gop_command); + use_alt_ref = gop_command->use_alt_ref; + } else { + gop_coding_frames = get_gop_coding_frame_num( + &use_alt_ref, frame_info, first_pass_info, rc, gf_start_show_idx, + &active_gf_interval, gop_intra_factor, cpi->oxcf.lag_in_frames); + use_alt_ref &= allow_alt_ref; 
+ } } +#else + gop_coding_frames = get_gop_coding_frame_num( + &use_alt_ref, frame_info, first_pass_info, rc, gf_start_show_idx, + &active_gf_interval, gop_intra_factor, cpi->oxcf.lag_in_frames); + use_alt_ref &= allow_alt_ref; +#endif // Was the group length constrained by the requirement for a new KF? rc->constrained_gf_group = (gop_coding_frames >= rc->frames_to_key) ? 1 : 0; @@ -3675,6 +3730,7 @@ void vp9_get_next_group_of_picture(const VP9_COMP *cpi, int *first_is_key_frame, int *use_alt_ref, int *coding_frame_count, int *first_show_idx, int *last_gop_use_alt_ref) { + const GOP_COMMAND *gop_command = &cpi->encode_command.gop_command; // We make a copy of rc here because we want to get information from the // encoder without changing its state. // TODO(angiebird): Avoid copying rc here. @@ -3697,14 +3753,19 @@ void vp9_get_next_group_of_picture(const VP9_COMP *cpi, int *first_is_key_frame, *first_is_key_frame = 1; } - *coding_frame_count = vp9_get_gop_coding_frame_count( - cpi->encode_command.external_arf_indexes, &cpi->oxcf, &cpi->frame_info, - &cpi->twopass.first_pass_info, &rc, *first_show_idx, multi_layer_arf, - allow_alt_ref, *first_is_key_frame, *last_gop_use_alt_ref, use_alt_ref); + if (gop_command->use) { + *coding_frame_count = gop_command_coding_frame_count(gop_command); + *use_alt_ref = gop_command->use_alt_ref; + assert(*coding_frame_count < rc.frames_to_key); + } else { + *coding_frame_count = vp9_get_gop_coding_frame_count( + &cpi->oxcf, &cpi->frame_info, &cpi->twopass.first_pass_info, &rc, + *first_show_idx, multi_layer_arf, allow_alt_ref, *first_is_key_frame, + *last_gop_use_alt_ref, use_alt_ref); + } } -int vp9_get_gop_coding_frame_count(const int *external_arf_indexes, - const VP9EncoderConfig *oxcf, +int vp9_get_gop_coding_frame_count(const VP9EncoderConfig *oxcf, const FRAME_INFO *frame_info, const FIRST_PASS_INFO *first_pass_info, const RATE_CONTROL *rc, int show_idx, @@ -3727,9 +3788,6 @@ int vp9_get_gop_coding_frame_count(const int 
*external_arf_indexes, } frame_count = get_gop_coding_frame_num( -#if CONFIG_RATE_CTRL - external_arf_indexes, -#endif use_alt_ref, frame_info, first_pass_info, rc, show_idx, &active_gf_interval, gop_intra_factor, oxcf->lag_in_frames); *use_alt_ref &= allow_alt_ref; @@ -3738,8 +3796,7 @@ int vp9_get_gop_coding_frame_count(const int *external_arf_indexes, // Under CONFIG_RATE_CTRL, once the first_pass_info is ready, the number of // coding frames (including show frame and alt ref) can be determined. -int vp9_get_coding_frame_num(const int *external_arf_indexes, - const VP9EncoderConfig *oxcf, +int vp9_get_coding_frame_num(const VP9EncoderConfig *oxcf, const FRAME_INFO *frame_info, const FIRST_PASS_INFO *first_pass_info, int multi_layer_arf, int allow_alt_ref) { @@ -3750,7 +3807,6 @@ int vp9_get_coding_frame_num(const int *external_arf_indexes, int show_idx = 0; int last_gop_use_alt_ref = 0; vp9_rc_init(oxcf, 1, &rc); - rc.static_scene_max_gf_interval = 250; while (show_idx < first_pass_info->num_frames) { int use_alt_ref; @@ -3763,9 +3819,8 @@ int vp9_get_coding_frame_num(const int *external_arf_indexes, } gop_coding_frame_count = vp9_get_gop_coding_frame_count( - external_arf_indexes, oxcf, frame_info, first_pass_info, &rc, show_idx, - multi_layer_arf, allow_alt_ref, first_is_key_frame, - last_gop_use_alt_ref, &use_alt_ref); + oxcf, frame_info, first_pass_info, &rc, show_idx, multi_layer_arf, + allow_alt_ref, first_is_key_frame, last_gop_use_alt_ref, &use_alt_ref); rc.source_alt_ref_active = use_alt_ref; last_gop_use_alt_ref = use_alt_ref; @@ -3777,6 +3832,30 @@ int vp9_get_coding_frame_num(const int *external_arf_indexes, } return coding_frame_num; } + +void vp9_get_key_frame_map(const VP9EncoderConfig *oxcf, + const FRAME_INFO *frame_info, + const FIRST_PASS_INFO *first_pass_info, + int *key_frame_map) { + int show_idx = 0; + RATE_CONTROL rc; + vp9_rc_init(oxcf, 1, &rc); + + // key_frame_map points to an int array with size equal to + // 
first_pass_info->num_frames, which is also the number of show frames in the + // video. + memset(key_frame_map, 0, + sizeof(*key_frame_map) * first_pass_info->num_frames); + while (show_idx < first_pass_info->num_frames) { + int key_frame_group_size; + key_frame_map[show_idx] = 1; + key_frame_group_size = vp9_get_frames_to_next_key( + oxcf, frame_info, first_pass_info, show_idx, rc.min_gf_interval); + assert(key_frame_group_size > 0); + show_idx += key_frame_group_size; + } + assert(show_idx == first_pass_info->num_frames); +} #endif // CONFIG_RATE_CTRL FIRSTPASS_STATS vp9_get_frame_stats(const TWO_PASS *twopass) { diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.h index dcaf2eec6..b1047eab2 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_firstpass.h @@ -264,7 +264,6 @@ void vp9_get_next_group_of_picture(const struct VP9_COMP *cpi, /*!\brief Call this function before coding a new group of pictures to get * information about it. 
- * \param[in] external_arf_indexes External arf indexs passed in * \param[in] oxcf Encoder config * \param[in] frame_info Frame info * \param[in] first_pass_info First pass stats @@ -279,8 +278,7 @@ void vp9_get_next_group_of_picture(const struct VP9_COMP *cpi, * * \return Returns coding frame count */ -int vp9_get_gop_coding_frame_count(const int *external_arf_indexes, - const struct VP9EncoderConfig *oxcf, +int vp9_get_gop_coding_frame_count(const struct VP9EncoderConfig *oxcf, const FRAME_INFO *frame_info, const FIRST_PASS_INFO *first_pass_info, const RATE_CONTROL *rc, int show_idx, @@ -288,11 +286,20 @@ int vp9_get_gop_coding_frame_count(const int *external_arf_indexes, int first_is_key_frame, int last_gop_use_alt_ref, int *use_alt_ref); -int vp9_get_coding_frame_num(const int *external_arf_indexes, - const struct VP9EncoderConfig *oxcf, +int vp9_get_coding_frame_num(const struct VP9EncoderConfig *oxcf, const FRAME_INFO *frame_info, const FIRST_PASS_INFO *first_pass_info, int multi_layer_arf, int allow_alt_ref); + +/*!\brief Compute a key frame binary map indicates whether key frames appear + * in the corresponding positions. The passed in key_frame_map must point to an + * integer array with length equal to first_pass_info->num_frames, which is the + * number of show frames in the video. 
+ */ +void vp9_get_key_frame_map(const struct VP9EncoderConfig *oxcf, + const FRAME_INFO *frame_info, + const FIRST_PASS_INFO *first_pass_info, + int *key_frame_map); #endif // CONFIG_RATE_CTRL FIRSTPASS_STATS vp9_get_frame_stats(const TWO_PASS *twopass); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_pickmode.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_pickmode.c index 23c943c21..695fd484f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_pickmode.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_pickmode.c @@ -1127,7 +1127,7 @@ static INLINE void update_thresh_freq_fact_row_mt( } static INLINE void update_thresh_freq_fact( - VP9_COMP *cpi, TileDataEnc *tile_data, int source_variance, + VP9_COMP *cpi, TileDataEnc *tile_data, unsigned int source_variance, BLOCK_SIZE bsize, MV_REFERENCE_FRAME ref_frame, THR_MODES best_mode_idx, PREDICTION_MODE mode) { THR_MODES thr_mode_idx = mode_idx[ref_frame][mode_offset(mode)]; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.c index ef64cc6c5..4b87ff2f0 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.c @@ -249,7 +249,7 @@ int vp9_rc_clamp_iframe_target_size(const VP9_COMP *const cpi, int target) { // way for CBR mode, for the buffering updates below. Look into removing one // of these (i.e., bits_off_target). // Update the buffer level before encoding with the per-frame-bandwidth, -static void update_buffer_level_preencode(VP9_COMP *cpi) { +void vp9_update_buffer_level_preencode(VP9_COMP *cpi) { RATE_CONTROL *const rc = &cpi->rc; rc->bits_off_target += rc->avg_frame_bandwidth; // Clip the buffer level to the maximum specified buffer size. 
@@ -431,11 +431,17 @@ void vp9_rc_init(const VP9EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) { rc->max_gf_interval = vp9_rc_get_default_max_gf_interval( oxcf->init_framerate, rc->min_gf_interval); rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2; + if ((oxcf->pass == 0) && (oxcf->rc_mode == VPX_Q)) { + rc->static_scene_max_gf_interval = FIXED_GF_INTERVAL; + } else { + rc->static_scene_max_gf_interval = MAX_STATIC_GF_GROUP_LENGTH; + } rc->force_max_q = 0; rc->last_post_encode_dropped_scene_change = 0; rc->use_post_encode_drop = 0; rc->ext_use_post_encode_drop = 0; + rc->disable_overshoot_maxq_cbr = 0; rc->arf_active_best_quality_adjustment_factor = 1.0; rc->arf_increase_active_best_quality = 0; rc->preserve_arf_as_gld = 0; @@ -1690,8 +1696,10 @@ void vp9_rc_compute_frame_size_bounds(const VP9_COMP *cpi, int frame_target, } else { // For very small rate targets where the fractional adjustment // may be tiny make sure there is at least a minimum range. - const int tol_low = (cpi->sf.recode_tolerance_low * frame_target) / 100; - const int tol_high = (cpi->sf.recode_tolerance_high * frame_target) / 100; + const int tol_low = + (int)(((int64_t)cpi->sf.recode_tolerance_low * frame_target) / 100); + const int tol_high = + (int)(((int64_t)cpi->sf.recode_tolerance_high * frame_target) / 100); *frame_under_shoot_limit = VPXMAX(frame_target - tol_low - 100, 0); *frame_over_shoot_limit = VPXMIN(frame_target + tol_high + 100, cpi->rc.max_frame_bandwidth); @@ -1706,9 +1714,16 @@ void vp9_rc_set_frame_target(VP9_COMP *cpi, int target) { // Modify frame size target when down-scaling. 
if (cpi->oxcf.resize_mode == RESIZE_DYNAMIC && - rc->frame_size_selector != UNSCALED) + rc->frame_size_selector != UNSCALED) { rc->this_frame_target = (int)(rc->this_frame_target * rate_thresh_mult[rc->frame_size_selector]); + } + +#if CONFIG_RATE_CTRL + if (cpi->encode_command.use_external_target_frame_bits) { + rc->this_frame_target = cpi->encode_command.target_frame_bits; + } +#endif // Target rate per SB64 (including partial SB64s. rc->sb64_target_rate = (int)(((int64_t)rc->this_frame_target * 64 * 64) / @@ -1981,6 +1996,7 @@ void vp9_rc_postencode_update_drop_frame(VP9_COMP *cpi) { cpi->rc.rc_2_frame = 0; cpi->rc.rc_1_frame = 0; cpi->rc.last_avg_frame_bandwidth = cpi->rc.avg_frame_bandwidth; + cpi->rc.last_q[INTER_FRAME] = cpi->common.base_qindex; // For SVC on dropped frame when framedrop_mode != LAYER_DROP: // in this mode the whole superframe may be dropped if only a single layer // has buffer underflow (below threshold). Since this can then lead to @@ -2098,7 +2114,7 @@ void vp9_rc_get_one_pass_vbr_params(VP9_COMP *cpi) { vp9_cyclic_refresh_update_parameters(cpi); } -static int calc_pframe_target_size_one_pass_cbr(const VP9_COMP *cpi) { +int vp9_calc_pframe_target_size_one_pass_cbr(const VP9_COMP *cpi) { const VP9EncoderConfig *oxcf = &cpi->oxcf; const RATE_CONTROL *rc = &cpi->rc; const SVC *const svc = &cpi->svc; @@ -2147,7 +2163,7 @@ static int calc_pframe_target_size_one_pass_cbr(const VP9_COMP *cpi) { return VPXMAX(min_frame_target, target); } -static int calc_iframe_target_size_one_pass_cbr(const VP9_COMP *cpi) { +int vp9_calc_iframe_target_size_one_pass_cbr(const VP9_COMP *cpi) { const RATE_CONTROL *rc = &cpi->rc; const VP9EncoderConfig *oxcf = &cpi->oxcf; const SVC *const svc = &cpi->svc; @@ -2253,7 +2269,7 @@ void vp9_rc_get_svc_params(VP9_COMP *cpi) { cpi->ref_frame_flags &= (~VP9_LAST_FLAG & ~VP9_GOLD_FLAG & ~VP9_ALT_FLAG); // Assumption here is that LAST_FRAME is being updated for a keyframe. // Thus no change in update flags. 
- target = calc_iframe_target_size_one_pass_cbr(cpi); + target = vp9_calc_iframe_target_size_one_pass_cbr(cpi); } } else { cm->frame_type = INTER_FRAME; @@ -2266,7 +2282,7 @@ void vp9_rc_get_svc_params(VP9_COMP *cpi) { (svc->spatial_layer_id == 0 && cm->current_video_frame > 0) ? 0 : svc->layer_context[svc->temporal_layer_id].is_key_frame; - target = calc_pframe_target_size_one_pass_cbr(cpi); + target = vp9_calc_pframe_target_size_one_pass_cbr(cpi); } } @@ -2275,7 +2291,7 @@ void vp9_rc_get_svc_params(VP9_COMP *cpi) { svc->layer_context[layer].is_key_frame == 1) { cm->frame_type = KEY_FRAME; cpi->ref_frame_flags &= (~VP9_LAST_FLAG & ~VP9_GOLD_FLAG & ~VP9_ALT_FLAG); - target = calc_iframe_target_size_one_pass_cbr(cpi); + target = vp9_calc_iframe_target_size_one_pass_cbr(cpi); } // Set the buffer idx and refresh flags for key frames in simulcast mode. // Note the buffer slot for long-term reference is set below (line 2255), @@ -2360,7 +2376,7 @@ void vp9_rc_get_svc_params(VP9_COMP *cpi) { } if (svc->set_intra_only_frame) { set_intra_only_frame(cpi); - target = calc_iframe_target_size_one_pass_cbr(cpi); + target = vp9_calc_iframe_target_size_one_pass_cbr(cpi); } // Any update/change of global cyclic refresh parameters (amount/delta-qp) // should be done here, before the frame qp is selected. 
@@ -2371,7 +2387,8 @@ void vp9_rc_get_svc_params(VP9_COMP *cpi) { if (cm->show_frame) update_buffer_level_svc_preencode(cpi); if (cpi->oxcf.resize_mode == RESIZE_DYNAMIC && svc->single_layer_svc == 1 && - svc->spatial_layer_id == svc->first_spatial_layer_to_encode) { + svc->spatial_layer_id == svc->first_spatial_layer_to_encode && + svc->temporal_layer_id == 0) { LAYER_CONTEXT *lc = NULL; cpi->resize_pending = vp9_resize_one_pass_cbr(cpi); if (cpi->resize_pending) { @@ -2385,6 +2402,11 @@ void vp9_rc_get_svc_params(VP9_COMP *cpi) { cpi->resize_scale_num * lc->scaling_factor_num; lc->scaling_factor_den_resize = cpi->resize_scale_den * lc->scaling_factor_den; + // Reset rate control for all temporal layers. + lc->rc.buffer_level = lc->rc.optimal_buffer_level; + lc->rc.bits_off_target = lc->rc.optimal_buffer_level; + lc->rc.rate_correction_factors[INTER_FRAME] = + rc->rate_correction_factors[INTER_FRAME]; } // Set the size for this current temporal layer. lc = &svc->layer_context[svc->spatial_layer_id * @@ -2394,9 +2416,11 @@ void vp9_rc_get_svc_params(VP9_COMP *cpi) { lc->scaling_factor_num_resize, lc->scaling_factor_den_resize, &width, &height); vp9_set_size_literal(cpi, width, height); + svc->resize_set = 1; } } else { cpi->resize_pending = 0; + svc->resize_set = 0; } } @@ -2433,13 +2457,13 @@ void vp9_rc_get_one_pass_cbr_params(VP9_COMP *cpi) { vp9_cyclic_refresh_update_parameters(cpi); if (frame_is_intra_only(cm)) - target = calc_iframe_target_size_one_pass_cbr(cpi); + target = vp9_calc_iframe_target_size_one_pass_cbr(cpi); else - target = calc_pframe_target_size_one_pass_cbr(cpi); + target = vp9_calc_pframe_target_size_one_pass_cbr(cpi); vp9_rc_set_frame_target(cpi, target); - if (cm->show_frame) update_buffer_level_preencode(cpi); + if (cm->show_frame) vp9_update_buffer_level_preencode(cpi); if (cpi->oxcf.resize_mode == RESIZE_DYNAMIC) cpi->resize_pending = vp9_resize_one_pass_cbr(cpi); @@ -2657,6 +2681,7 @@ int vp9_resize_one_pass_cbr(VP9_COMP *cpi) { int 
min_width = (320 * 4) / 3; int min_height = (180 * 4) / 3; int down_size_on = 1; + int force_downsize_rate = 0; cpi->resize_scale_num = 1; cpi->resize_scale_den = 1; // Don't resize on key frame; reset the counters on key frame. @@ -2677,11 +2702,32 @@ int vp9_resize_one_pass_cbr(VP9_COMP *cpi) { } #endif + // Force downsize based on per-frame-bandwidth, for extreme case, + // for HD input. + if (cpi->resize_state == ORIG && cm->width * cm->height >= 1280 * 720) { + if (rc->avg_frame_bandwidth < 300000 / 30) { + resize_action = DOWN_ONEHALF; + cpi->resize_state = ONE_HALF; + force_downsize_rate = 1; + } else if (rc->avg_frame_bandwidth < 400000 / 30) { + resize_action = ONEHALFONLY_RESIZE ? DOWN_ONEHALF : DOWN_THREEFOUR; + cpi->resize_state = ONEHALFONLY_RESIZE ? ONE_HALF : THREE_QUARTER; + force_downsize_rate = 1; + } + } else if (cpi->resize_state == THREE_QUARTER && + cm->width * cm->height >= 960 * 540) { + if (rc->avg_frame_bandwidth < 300000 / 30) { + resize_action = DOWN_ONEHALF; + cpi->resize_state = ONE_HALF; + force_downsize_rate = 1; + } + } + // Resize based on average buffer underflow and QP over some window. // Ignore samples close to key frame, since QP is usually high after key. - if (cpi->rc.frames_since_key > 2 * cpi->framerate) { - const int window = (int)(4 * cpi->framerate); - cpi->resize_avg_qp += cm->base_qindex; + if (!force_downsize_rate && cpi->rc.frames_since_key > cpi->framerate) { + const int window = VPXMIN(30, (int)(2 * cpi->framerate)); + cpi->resize_avg_qp += rc->last_q[INTER_FRAME]; if (cpi->rc.buffer_level < (int)(30 * rc->optimal_buffer_level / 100)) ++cpi->resize_buffer_underflow; ++cpi->resize_count; @@ -2742,7 +2788,7 @@ int vp9_resize_one_pass_cbr(VP9_COMP *cpi) { // Reset buffer level to optimal, update target size. 
rc->buffer_level = rc->optimal_buffer_level; rc->bits_off_target = rc->optimal_buffer_level; - rc->this_frame_target = calc_pframe_target_size_one_pass_cbr(cpi); + rc->this_frame_target = vp9_calc_pframe_target_size_one_pass_cbr(cpi); // Get the projected qindex, based on the scaled target frame size (scaled // so target_bits_per_mb in vp9_rc_regulate_q will be correct target). target_bits_per_frame = (resize_action >= 0) @@ -2960,7 +3006,7 @@ void vp9_scene_detection_onepass(VP9_COMP *cpi) { int scene_cut_force_key_frame = 0; int num_zero_temp_sad = 0; uint64_t avg_sad_current = 0; - uint32_t min_thresh = 10000; + uint32_t min_thresh = 20000; // ~5 * 64 * 64 float thresh = 8.0f; uint32_t thresh_key = 140000; if (cpi->oxcf.speed <= 5) thresh_key = 240000; @@ -3217,7 +3263,7 @@ int vp9_encodedframe_overshoot(VP9_COMP *cpi, int frame_size, int *q) { int tl = 0; int sl = 0; SVC *svc = &cpi->svc; - for (sl = 0; sl < svc->first_spatial_layer_to_encode; ++sl) { + for (sl = 0; sl < VPXMAX(1, svc->first_spatial_layer_to_encode); ++sl) { for (tl = 0; tl < svc->number_temporal_layers; ++tl) { const int layer = LAYER_IDS_TO_IDX(sl, tl, svc->number_temporal_layers); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.h index fa070f9be..0120f90a0 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.h @@ -195,7 +195,8 @@ typedef struct { int use_post_encode_drop; // External flag to enable post encode frame dropping, controlled by user. int ext_use_post_encode_drop; - + // Flag to disable CBR feature to increase Q on overshoot detection. 
+ int disable_overshoot_maxq_cbr; int damped_adjustment[RATE_FACTOR_LEVELS]; double arf_active_best_quality_adjustment_factor; int arf_increase_active_best_quality; @@ -252,6 +253,9 @@ int vp9_rc_get_default_max_gf_interval(double framerate, int min_gf_interval); // encode_frame_to_data_rate() function. void vp9_rc_get_one_pass_vbr_params(struct VP9_COMP *cpi); void vp9_rc_get_one_pass_cbr_params(struct VP9_COMP *cpi); +int vp9_calc_pframe_target_size_one_pass_cbr(const struct VP9_COMP *cpi); +int vp9_calc_iframe_target_size_one_pass_cbr(const struct VP9_COMP *cpi); +void vp9_update_buffer_level_preencode(struct VP9_COMP *cpi); void vp9_rc_get_svc_params(struct VP9_COMP *cpi); // Post encode update of the rate control parameters based diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_rdopt.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_rdopt.c index 39b99d50c..37de4e483 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_rdopt.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_rdopt.c @@ -4443,6 +4443,7 @@ void vp9_rd_pick_inter_mode_sub8x8(VP9_COMP *cpi, TileDataEnc *tile_data, tmp_best_sse = total_sse; tmp_best_skippable = skippable; tmp_best_mbmode = *mi; + x->sum_y_eobs[TX_4X4] = 0; for (i = 0; i < 4; i++) { tmp_best_bmodes[i] = xd->mi[0]->bmi[i]; x->zcoeff_blk[TX_4X4][i] = !x->plane[0].eobs[i]; @@ -4476,6 +4477,11 @@ void vp9_rd_pick_inter_mode_sub8x8(VP9_COMP *cpi, TileDataEnc *tile_data, &rate, &rate_y, &distortion, &skippable, &total_sse, (int)this_rd_thresh, seg_mvs, bsi, 0, mi_row, mi_col); if (tmp_rd == INT64_MAX) continue; + x->sum_y_eobs[TX_4X4] = 0; + for (i = 0; i < 4; i++) { + x->zcoeff_blk[TX_4X4][i] = !x->plane[0].eobs[i]; + x->sum_y_eobs[TX_4X4] += x->plane[0].eobs[i]; + } } else { total_sse = tmp_best_sse; rate = tmp_best_rate; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_speed_features.c 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_speed_features.c index 7a26c4176..585c9604c 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_speed_features.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_speed_features.c @@ -621,7 +621,7 @@ static void set_rt_speed_feature_framesize_independent( // increase in encoding time. if (cpi->use_svc && svc->spatial_layer_id > 0) sf->nonrd_keyframe = 1; if (cm->frame_type != KEY_FRAME && cpi->resize_state == ORIG && - cpi->oxcf.rc_mode == VPX_CBR) { + cpi->oxcf.rc_mode == VPX_CBR && !cpi->rc.disable_overshoot_maxq_cbr) { if (cm->width * cm->height <= 352 * 288 && !cpi->use_svc && cpi->oxcf.content != VP9E_CONTENT_SCREEN) sf->overshoot_detection_cbr_rt = RE_ENCODE_MAXQ; @@ -634,6 +634,7 @@ static void set_rt_speed_feature_framesize_independent( sf->use_compound_nonrd_pickmode = 1; } if (cm->width * cm->height > 1280 * 720) sf->cb_pred_filter_search = 1; + if (!cpi->external_resize) sf->use_source_sad = 1; } if (speed >= 6) { @@ -646,8 +647,6 @@ static void set_rt_speed_feature_framesize_independent( sf->mv.reduce_first_step_size = 1; sf->skip_encode_sb = 0; - if (!cpi->external_resize) sf->use_source_sad = 1; - if (sf->use_source_sad) { sf->adapt_partition_source_sad = 1; sf->adapt_partition_thresh = @@ -669,7 +668,7 @@ static void set_rt_speed_feature_framesize_independent( sf->base_mv_aggressive = 1; } if (cm->frame_type != KEY_FRAME && cpi->resize_state == ORIG && - cpi->oxcf.rc_mode == VPX_CBR) + cpi->oxcf.rc_mode == VPX_CBR && !cpi->rc.disable_overshoot_maxq_cbr) sf->overshoot_detection_cbr_rt = FAST_DETECTION_MAXQ; } @@ -728,7 +727,10 @@ static void set_rt_speed_feature_framesize_independent( if (speed >= 8) { sf->adaptive_rd_thresh = 4; sf->skip_encode_sb = 1; - sf->nonrd_keyframe = 1; + if (cpi->svc.number_spatial_layers > 1 && !cpi->svc.simulcast_mode) + sf->nonrd_keyframe = 0; + else + sf->nonrd_keyframe = 1; if (!cpi->use_svc) 
cpi->max_copied_frame = 4; if (cpi->row_mt && cpi->oxcf.max_threads > 1) sf->adaptive_rd_thresh_row_mt = 1; @@ -787,6 +789,15 @@ static void set_rt_speed_feature_framesize_independent( if (cm->width * cm->height >= 640 * 360) sf->variance_part_thresh_mult = 2; } + // Disable split to 8x8 for low-resolution at very high Q. + // For variance partition (speed >= 6). Ignore the first few frames + // as avg_frame_qindex starts at max_q (worst_quality). + if (cm->frame_type != KEY_FRAME && cm->width * cm->height <= 320 * 240 && + sf->partition_search_type == VAR_BASED_PARTITION && + cpi->rc.avg_frame_qindex[INTER_FRAME] > 208 && + cpi->common.current_video_frame > 8) + sf->disable_16x16part_nonkey = 1; + if (sf->nonrd_use_ml_partition) sf->partition_search_type = ML_BASED_PARTITION; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.c index 1466d3a2b..b6c7c74e1 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.c @@ -56,6 +56,7 @@ void vp9_init_layer_context(VP9_COMP *const cpi) { svc->num_encoded_top_layer = 0; svc->simulcast_mode = 0; svc->single_layer_svc = 0; + svc->resize_set = 0; for (i = 0; i < REF_FRAMES; ++i) { svc->fb_idx_spatial_layer_id[i] = 0xff; @@ -356,6 +357,7 @@ void vp9_restore_layer_context(VP9_COMP *const cpi) { if (is_one_pass_cbr_svc(cpi) && lc->speed > 0) { cpi->oxcf.speed = lc->speed; } + cpi->loopfilter_ctrl = lc->loopfilter_ctrl; // Reset the frames_since_key and frames_to_key counters to their values // before the layer restore. Keep these defined for the stream (not layer). 
if (cpi->svc.number_temporal_layers > 1 || @@ -770,9 +772,7 @@ int vp9_one_pass_cbr_svc_start_layer(VP9_COMP *const cpi) { if (svc->disable_inter_layer_pred == INTER_LAYER_PRED_OFF && svc->number_spatial_layers > 1 && svc->number_spatial_layers <= 3 && - svc->number_temporal_layers <= 3 && - !(svc->temporal_layering_mode == VP9E_TEMPORAL_LAYERING_MODE_BYPASS && - svc->use_set_ref_frame_config)) + svc->number_temporal_layers <= 3) svc->simulcast_mode = 1; else svc->simulcast_mode = 0; @@ -866,8 +866,9 @@ int vp9_one_pass_cbr_svc_start_layer(VP9_COMP *const cpi) { } } - // Reset the drop flags for all spatial layers, on the base layer. - if (svc->spatial_layer_id == 0) { + // Reset the drop flags for all spatial layers, on the + // first_spatial_layer_to_encode. + if (svc->spatial_layer_id == svc->first_spatial_layer_to_encode) { vp9_zero(svc->drop_spatial_layer); // TODO(jianj/marpan): Investigate why setting svc->lst/gld/alt_fb_idx // causes an issue with frame dropping and temporal layers, when the frame @@ -1261,7 +1262,7 @@ static void vp9_svc_update_ref_frame_bypass_mode(VP9_COMP *const cpi) { BufferPool *const pool = cm->buffer_pool; int i; for (i = 0; i < REF_FRAMES; i++) { - if (cm->frame_type == KEY_FRAME || + if ((cm->frame_type == KEY_FRAME && !svc->simulcast_mode) || svc->update_buffer_slot[svc->spatial_layer_id] & (1 << i)) { ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[i], cm->new_fb_idx); svc->fb_idx_spatial_layer_id[i] = svc->spatial_layer_id; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.h index 7e46500b5..b12e7e01a 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.h @@ -71,6 +71,7 @@ typedef struct { int actual_num_seg2_blocks; int counter_encode_maxq_scene_change; uint8_t 
speed; + int loopfilter_ctrl; } LAYER_CONTEXT; typedef struct SVC { @@ -198,6 +199,7 @@ typedef struct SVC { // Flag to indicate SVC is dynamically switched to a single layer. int single_layer_svc; + int resize_set; } SVC; struct VP9_COMP; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.cc b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.cc new file mode 100644 index 000000000..47f9f3ba3 --- /dev/null +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.cc @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2020 The WebM project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "vp9/ratectrl_rtc.h" + +#include + +#include "vp9/encoder/vp9_encoder.h" +#include "vp9/encoder/vp9_picklpf.h" +#include "vpx/vp8cx.h" +#include "vpx/vpx_codec.h" + +namespace libvpx { + +std::unique_ptr VP9RateControlRTC::Create( + const VP9RateControlRtcConfig &cfg) { + std::unique_ptr rc_api(new (std::nothrow) + VP9RateControlRTC()); + if (!rc_api) return nullptr; + rc_api->cpi_ = static_cast(vpx_memalign(32, sizeof(*cpi_))); + if (rc_api->cpi_ == nullptr) { + return nullptr; + } + rc_api->InitRateControl(cfg); + return rc_api; +} + +void VP9RateControlRTC::InitRateControl(const VP9RateControlRtcConfig &rc_cfg) { + VP9_COMMON *cm = &cpi_->common; + VP9EncoderConfig *oxcf = &cpi_->oxcf; + RATE_CONTROL *const rc = &cpi_->rc; + cm->profile = PROFILE_0; + cm->bit_depth = VPX_BITS_8; + cm->show_frame = 1; + oxcf->rc_mode = VPX_CBR; + oxcf->pass = 0; + oxcf->aq_mode = NO_AQ; + oxcf->content = VP9E_CONTENT_DEFAULT; + oxcf->drop_frames_water_mark = 0; + + UpdateRateControl(rc_cfg); + + cpi_->use_svc = 
(cpi_->svc.number_spatial_layers > 1 || + cpi_->svc.number_temporal_layers > 1) + ? 1 + : 0; + + rc->rc_1_frame = 0; + rc->rc_2_frame = 0; + vp9_rc_init_minq_luts(); + vp9_rc_init(oxcf, 0, rc); + cpi_->sf.use_nonrd_pick_mode = 1; + cm->current_video_frame = 0; +} + +void VP9RateControlRTC::UpdateRateControl( + const VP9RateControlRtcConfig &rc_cfg) { + VP9_COMMON *cm = &cpi_->common; + VP9EncoderConfig *oxcf = &cpi_->oxcf; + RATE_CONTROL *const rc = &cpi_->rc; + + cm->width = rc_cfg.width; + cm->height = rc_cfg.height; + oxcf->width = rc_cfg.width; + oxcf->height = rc_cfg.height; + oxcf->worst_allowed_q = vp9_quantizer_to_qindex(rc_cfg.max_quantizer); + oxcf->best_allowed_q = vp9_quantizer_to_qindex(rc_cfg.min_quantizer); + rc->worst_quality = oxcf->worst_allowed_q; + rc->best_quality = oxcf->best_allowed_q; + oxcf->target_bandwidth = 1000 * rc_cfg.target_bandwidth; + oxcf->starting_buffer_level_ms = rc_cfg.buf_initial_sz; + oxcf->optimal_buffer_level_ms = rc_cfg.buf_optimal_sz; + oxcf->maximum_buffer_size_ms = rc_cfg.buf_sz; + oxcf->under_shoot_pct = rc_cfg.undershoot_pct; + oxcf->over_shoot_pct = rc_cfg.overshoot_pct; + oxcf->ss_number_layers = rc_cfg.ss_number_layers; + oxcf->ts_number_layers = rc_cfg.ts_number_layers; + oxcf->temporal_layering_mode = (VP9E_TEMPORAL_LAYERING_MODE)( + (rc_cfg.ts_number_layers > 1) ? 
rc_cfg.ts_number_layers : 0); + + cpi_->oxcf.rc_max_intra_bitrate_pct = rc_cfg.max_intra_bitrate_pct; + cpi_->framerate = rc_cfg.framerate; + cpi_->svc.number_spatial_layers = rc_cfg.ss_number_layers; + cpi_->svc.number_temporal_layers = rc_cfg.ts_number_layers; + + for (int sl = 0; sl < cpi_->svc.number_spatial_layers; ++sl) { + for (int tl = 0; tl < cpi_->svc.number_temporal_layers; ++tl) { + const int layer = + LAYER_IDS_TO_IDX(sl, tl, cpi_->svc.number_temporal_layers); + LAYER_CONTEXT *lc = &cpi_->svc.layer_context[layer]; + RATE_CONTROL *const lrc = &lc->rc; + oxcf->layer_target_bitrate[layer] = + 1000 * rc_cfg.layer_target_bitrate[layer]; + lrc->worst_quality = + vp9_quantizer_to_qindex(rc_cfg.max_quantizers[layer]); + lrc->best_quality = vp9_quantizer_to_qindex(rc_cfg.min_quantizers[layer]); + lc->scaling_factor_num = rc_cfg.scaling_factor_num[sl]; + lc->scaling_factor_den = rc_cfg.scaling_factor_den[sl]; + oxcf->ts_rate_decimator[tl] = rc_cfg.ts_rate_decimator[tl]; + } + } + vp9_set_rc_buffer_sizes(cpi_); + vp9_new_framerate(cpi_, cpi_->framerate); + if (cpi_->svc.number_temporal_layers > 1 || + cpi_->svc.number_spatial_layers > 1) { + if (cm->current_video_frame == 0) vp9_init_layer_context(cpi_); + vp9_update_layer_context_change_config(cpi_, + (int)cpi_->oxcf.target_bandwidth); + } + vp9_check_reset_rc_flag(cpi_); +} + +void VP9RateControlRTC::ComputeQP(const VP9FrameParamsQpRTC &frame_params) { + VP9_COMMON *const cm = &cpi_->common; + int width, height; + cpi_->svc.spatial_layer_id = frame_params.spatial_layer_id; + cpi_->svc.temporal_layer_id = frame_params.temporal_layer_id; + if (cpi_->svc.number_spatial_layers > 1) { + const int layer = LAYER_IDS_TO_IDX(cpi_->svc.spatial_layer_id, + cpi_->svc.temporal_layer_id, + cpi_->svc.number_temporal_layers); + LAYER_CONTEXT *lc = &cpi_->svc.layer_context[layer]; + get_layer_resolution(cpi_->oxcf.width, cpi_->oxcf.height, + lc->scaling_factor_num, lc->scaling_factor_den, &width, + &height); + cm->width = 
width; + cm->height = height; + } + vp9_set_mb_mi(cm, cm->width, cm->height); + cm->frame_type = frame_params.frame_type; + cpi_->refresh_golden_frame = (cm->frame_type == KEY_FRAME) ? 1 : 0; + cpi_->sf.use_nonrd_pick_mode = 1; + if (cpi_->svc.number_spatial_layers == 1 && + cpi_->svc.number_temporal_layers == 1) { + int target; + if (frame_is_intra_only(cm)) + target = vp9_calc_iframe_target_size_one_pass_cbr(cpi_); + else + target = vp9_calc_pframe_target_size_one_pass_cbr(cpi_); + vp9_rc_set_frame_target(cpi_, target); + vp9_update_buffer_level_preencode(cpi_); + } else { + vp9_update_temporal_layer_framerate(cpi_); + vp9_restore_layer_context(cpi_); + vp9_rc_get_svc_params(cpi_); + } + int bottom_index, top_index; + cpi_->common.base_qindex = + vp9_rc_pick_q_and_bounds(cpi_, &bottom_index, &top_index); +} + +int VP9RateControlRTC::GetQP() const { return cpi_->common.base_qindex; } + +int VP9RateControlRTC::GetLoopfilterLevel() const { + struct loopfilter *const lf = &cpi_->common.lf; + vp9_pick_filter_level(nullptr, cpi_, LPF_PICK_FROM_Q); + return lf->filter_level; +} + +void VP9RateControlRTC::PostEncodeUpdate(uint64_t encoded_frame_size) { + vp9_rc_postencode_update(cpi_, encoded_frame_size); + if (cpi_->svc.number_spatial_layers > 1 || + cpi_->svc.number_temporal_layers > 1) + vp9_save_layer_context(cpi_); + cpi_->common.current_video_frame++; +} + +} // namespace libvpx diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h new file mode 100644 index 000000000..72ea40fd6 --- /dev/null +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2020 The WebM project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VPX_VP9_RATECTRL_RTC_H_ +#define VPX_VP9_RATECTRL_RTC_H_ + +#include +#include + +#include "vp9/common/vp9_entropymode.h" +#include "vp9/common/vp9_enums.h" +#include "vp9/common/vp9_onyxc_int.h" +#include "vp9/vp9_iface_common.h" +#include "vp9/encoder/vp9_encoder.h" +#include "vp9/encoder/vp9_firstpass.h" +#include "vp9/vp9_cx_iface.h" +#include "vpx_mem/vpx_mem.h" + +namespace libvpx { + +struct VP9RateControlRtcConfig { + int width; + int height; + // 0-63 + int max_quantizer; + int min_quantizer; + int64_t target_bandwidth; + int64_t buf_initial_sz; + int64_t buf_optimal_sz; + int64_t buf_sz; + int undershoot_pct; + int overshoot_pct; + int max_intra_bitrate_pct; + double framerate; + // Number of spatial layers + int ss_number_layers; + // Number of temporal layers + int ts_number_layers; + int max_quantizers[VPX_MAX_LAYERS]; + int min_quantizers[VPX_MAX_LAYERS]; + int scaling_factor_num[VPX_SS_MAX_LAYERS]; + int scaling_factor_den[VPX_SS_MAX_LAYERS]; + int layer_target_bitrate[VPX_MAX_LAYERS]; + int ts_rate_decimator[VPX_TS_MAX_LAYERS]; +}; + +struct VP9FrameParamsQpRTC { + FRAME_TYPE frame_type; + int spatial_layer_id; + int temporal_layer_id; +}; + +// This interface allows using VP9 real-time rate control without initializing +// the encoder. To use this interface, you need to link with libvp9rc.a. 
+// +// #include "vp9/ratectrl_rtc.h" +// VP9RateControlRTC rc_api; +// VP9RateControlRtcConfig cfg; +// VP9FrameParamsQpRTC frame_params; +// +// YourFunctionToInitializeConfig(cfg); +// rc_api.InitRateControl(cfg); +// // start encoding +// while (frame_to_encode) { +// if (config_changed) +// rc_api.UpdateRateControl(cfg); +// YourFunctionToFillFrameParams(frame_params); +// rc_api.ComputeQP(frame_params); +// YourFunctionToUseQP(rc_api.GetQP()); +// YourFunctionToUseLoopfilter(rc_api.GetLoopfilterLevel()); +// // After encoding +// rc_api.PostEncode(encoded_frame_size); +// } +class VP9RateControlRTC { + public: + static std::unique_ptr Create( + const VP9RateControlRtcConfig &cfg); + ~VP9RateControlRTC() { + if (cpi_) { + for (int sl = 0; sl < cpi_->svc.number_spatial_layers; sl++) { + for (int tl = 0; tl < cpi_->svc.number_temporal_layers; tl++) { + int layer = LAYER_IDS_TO_IDX(sl, tl, cpi_->oxcf.ts_number_layers); + LAYER_CONTEXT *const lc = &cpi_->svc.layer_context[layer]; + vpx_free(lc->map); + vpx_free(lc->last_coded_q_map); + vpx_free(lc->consec_zero_mv); + } + } + vpx_free(cpi_); + } + } + + void UpdateRateControl(const VP9RateControlRtcConfig &rc_cfg); + // GetQP() needs to be called after ComputeQP() to get the latest QP + int GetQP() const; + int GetLoopfilterLevel() const; + void ComputeQP(const VP9FrameParamsQpRTC &frame_params); + // Feedback to rate control with the size of current encoded frame + void PostEncodeUpdate(uint64_t encoded_frame_size); + + private: + VP9RateControlRTC() {} + void InitRateControl(const VP9RateControlRtcConfig &cfg); + VP9_COMP *cpi_; +}; + +} // namespace libvpx + +#endif // VPX_VP9_RATECTRL_RTC_H_ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.cc b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.cc index c417a2589..afda6e203 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.cc +++ 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.cc @@ -90,12 +90,20 @@ static int img_read(vpx_image_t *img, FILE *file) { return 1; } +// Assume every config in VP9EncoderConfig is less than 100 characters. +#define ENCODE_CONFIG_BUF_SIZE 100 +struct EncodeConfig { + char name[ENCODE_CONFIG_BUF_SIZE]; + char value[ENCODE_CONFIG_BUF_SIZE]; +}; + class SimpleEncode::EncodeImpl { public: VP9_COMP *cpi; vpx_img_fmt_t img_fmt; vpx_image_t tmp_img; std::vector first_pass_stats; + std::vector encode_config_list; }; static VP9_COMP *init_encoder(const VP9EncoderConfig *oxcf, @@ -167,7 +175,8 @@ static RefFrameType mv_ref_frame_to_ref_frame_type( static void update_motion_vector_info( const MOTION_VECTOR_INFO *input_motion_vector_info, const int num_rows_4x4, - const int num_cols_4x4, MotionVectorInfo *output_motion_vector_info) { + const int num_cols_4x4, MotionVectorInfo *output_motion_vector_info, + int motion_vector_scale) { const int num_units_4x4 = num_rows_4x4 * num_cols_4x4; for (int i = 0; i < num_units_4x4; ++i) { const MV_REFERENCE_FRAME *in_ref_frame = @@ -185,16 +194,34 @@ static void update_motion_vector_info( mv_ref_frame_to_ref_frame_type(in_ref_frame[1]); output_motion_vector_info[i].mv_row[0] = (double)input_motion_vector_info[i].mv[0].as_mv.row / - kMotionVectorPrecision; + motion_vector_scale; output_motion_vector_info[i].mv_column[0] = (double)input_motion_vector_info[i].mv[0].as_mv.col / - kMotionVectorPrecision; + motion_vector_scale; output_motion_vector_info[i].mv_row[1] = (double)input_motion_vector_info[i].mv[1].as_mv.row / - kMotionVectorPrecision; + motion_vector_scale; output_motion_vector_info[i].mv_column[1] = (double)input_motion_vector_info[i].mv[1].as_mv.col / - kMotionVectorPrecision; + motion_vector_scale; + } +} + +static void update_tpl_stats_info(const TplDepStats *input_tpl_stats_info, + const int show_frame_count, + TplStatsInfo *output_tpl_stats_info) { + int frame_idx; + for (frame_idx = 0; frame_idx < 
show_frame_count; ++frame_idx) { + output_tpl_stats_info[frame_idx].intra_cost = + input_tpl_stats_info[frame_idx].intra_cost; + output_tpl_stats_info[frame_idx].inter_cost = + input_tpl_stats_info[frame_idx].inter_cost; + output_tpl_stats_info[frame_idx].mc_flow = + input_tpl_stats_info[frame_idx].mc_flow; + output_tpl_stats_info[frame_idx].mc_dep_cost = + input_tpl_stats_info[frame_idx].mc_dep_cost; + output_tpl_stats_info[frame_idx].mc_ref_cost = + input_tpl_stats_info[frame_idx].mc_ref_cost; } } @@ -471,12 +498,13 @@ static bool init_encode_frame_result(EncodeFrameResult *encode_frame_result, encode_frame_result->coding_data.reset( new (std::nothrow) uint8_t[max_coding_data_byte_size]); - encode_frame_result->num_rows_4x4 = get_num_unit_4x4(frame_width); - encode_frame_result->num_cols_4x4 = get_num_unit_4x4(frame_height); + encode_frame_result->num_rows_4x4 = get_num_unit_4x4(frame_height); + encode_frame_result->num_cols_4x4 = get_num_unit_4x4(frame_width); encode_frame_result->partition_info.resize(encode_frame_result->num_rows_4x4 * encode_frame_result->num_cols_4x4); encode_frame_result->motion_vector_info.resize( encode_frame_result->num_rows_4x4 * encode_frame_result->num_cols_4x4); + encode_frame_result->tpl_stats_info.resize(MAX_LAG_BUFFERS); if (encode_frame_result->coding_data.get() == nullptr) { return false; @@ -485,8 +513,20 @@ static bool init_encode_frame_result(EncodeFrameResult *encode_frame_result, frame_height, img_fmt); } +static void encode_frame_result_update_rq_history( + const RATE_QINDEX_HISTORY *rq_history, + EncodeFrameResult *encode_frame_result) { + encode_frame_result->recode_count = rq_history->recode_count; + for (int i = 0; i < encode_frame_result->recode_count; ++i) { + const int q_index = rq_history->q_index_history[i]; + const int rate = rq_history->rate_history[i]; + encode_frame_result->q_index_history.push_back(q_index); + encode_frame_result->rate_history.push_back(rate); + } +} + static void update_encode_frame_result( 
- EncodeFrameResult *encode_frame_result, + EncodeFrameResult *encode_frame_result, const int show_frame_count, const ENCODE_FRAME_RESULT *encode_frame_info) { encode_frame_result->coding_data_bit_size = encode_frame_result->coding_data_byte_size * 8; @@ -511,9 +551,16 @@ static void update_encode_frame_result( update_motion_vector_info(encode_frame_info->motion_vector_info, encode_frame_result->num_rows_4x4, encode_frame_result->num_cols_4x4, - &encode_frame_result->motion_vector_info[0]); + &encode_frame_result->motion_vector_info[0], + kMotionVectorSubPixelPrecision); update_frame_counts(&encode_frame_info->frame_counts, &encode_frame_result->frame_counts); + if (encode_frame_result->frame_type == kFrameTypeAltRef) { + update_tpl_stats_info(encode_frame_info->tpl_stats_info, show_frame_count, + &encode_frame_result->tpl_stats_info[0]); + } + encode_frame_result_update_rq_history(&encode_frame_info->rq_history, + encode_frame_result); } static void IncreaseGroupOfPictureIndex(GroupOfPicture *group_of_picture) { @@ -612,6 +659,9 @@ static void SetGroupOfPicture(int first_is_key_frame, int use_alt_ref, group_of_picture->show_frame_count = coding_frame_count - use_alt_ref; group_of_picture->start_show_index = first_show_idx; group_of_picture->start_coding_index = start_coding_index; + group_of_picture->first_is_key_frame = first_is_key_frame; + group_of_picture->use_alt_ref = use_alt_ref; + group_of_picture->last_gop_use_alt_ref = last_gop_use_alt_ref; // We need to make a copy of start reference frame info because we // use it to simulate the ref frame update. 
@@ -692,6 +742,50 @@ static void UpdateGroupOfPicture(const VP9_COMP *cpi, int start_coding_index, start_ref_frame_info, group_of_picture); } +#define SET_STRUCT_VALUE(config, structure, ret, field) \ + if (strcmp(config.name, #field) == 0) { \ + structure->field = atoi(config.value); \ + ret = 1; \ + } + +static void UpdateEncodeConfig(const EncodeConfig &config, + VP9EncoderConfig *oxcf) { + int ret = 0; + SET_STRUCT_VALUE(config, oxcf, ret, key_freq); + SET_STRUCT_VALUE(config, oxcf, ret, two_pass_vbrmin_section); + SET_STRUCT_VALUE(config, oxcf, ret, two_pass_vbrmax_section); + SET_STRUCT_VALUE(config, oxcf, ret, under_shoot_pct); + SET_STRUCT_VALUE(config, oxcf, ret, over_shoot_pct); + SET_STRUCT_VALUE(config, oxcf, ret, max_threads); + SET_STRUCT_VALUE(config, oxcf, ret, frame_parallel_decoding_mode); + SET_STRUCT_VALUE(config, oxcf, ret, tile_columns); + SET_STRUCT_VALUE(config, oxcf, ret, arnr_max_frames); + SET_STRUCT_VALUE(config, oxcf, ret, arnr_strength); + SET_STRUCT_VALUE(config, oxcf, ret, lag_in_frames); + SET_STRUCT_VALUE(config, oxcf, ret, encode_breakout); + SET_STRUCT_VALUE(config, oxcf, ret, enable_tpl_model); + SET_STRUCT_VALUE(config, oxcf, ret, enable_auto_arf); + if (ret == 0) { + fprintf(stderr, "Ignored unsupported encode_config %s\n", config.name); + } +} + +static VP9EncoderConfig GetEncodeConfig( + int frame_width, int frame_height, vpx_rational_t frame_rate, + int target_bitrate, int encode_speed, vpx_enc_pass enc_pass, + const std::vector &encode_config_list) { + VP9EncoderConfig oxcf = + vp9_get_encoder_config(frame_width, frame_height, frame_rate, + target_bitrate, encode_speed, enc_pass); + for (const auto &config : encode_config_list) { + UpdateEncodeConfig(config, &oxcf); + } + if (enc_pass == VPX_RC_FIRST_PASS) { + oxcf.lag_in_frames = 0; + } + return oxcf; +} + SimpleEncode::SimpleEncode(int frame_width, int frame_height, int frame_rate_num, int frame_rate_den, int target_bitrate, int num_frames, @@ -703,6 +797,7 @@ 
SimpleEncode::SimpleEncode(int frame_width, int frame_height, frame_rate_den_ = frame_rate_den; target_bitrate_ = target_bitrate; num_frames_ = num_frames; + encode_speed_ = 0; frame_coding_index_ = 0; show_frame_count_ = 0; @@ -724,16 +819,55 @@ SimpleEncode::SimpleEncode(int frame_width, int frame_height, InitRefFrameInfo(&ref_frame_info_); } +void SimpleEncode::SetEncodeSpeed(int encode_speed) { + encode_speed_ = encode_speed; +} + +StatusCode SimpleEncode::SetEncodeConfig(const char *name, const char *value) { + if (name == nullptr || value == nullptr) { + fprintf(stderr, "SetEncodeConfig: null pointer, name %p value %p\n", name, + value); + return StatusError; + } + EncodeConfig config; + snprintf(config.name, ENCODE_CONFIG_BUF_SIZE, "%s", name); + snprintf(config.value, ENCODE_CONFIG_BUF_SIZE, "%s", value); + impl_ptr_->encode_config_list.push_back(config); + return StatusOk; +} + +StatusCode SimpleEncode::DumpEncodeConfigs(int pass, FILE *fp) { + if (fp == nullptr) { + fprintf(stderr, "DumpEncodeConfigs: null pointer, fp %p\n", fp); + return StatusError; + } + vpx_enc_pass enc_pass; + if (pass == 1) { + enc_pass = VPX_RC_FIRST_PASS; + } else { + enc_pass = VPX_RC_LAST_PASS; + } + const vpx_rational_t frame_rate = + make_vpx_rational(frame_rate_num_, frame_rate_den_); + const VP9EncoderConfig oxcf = + GetEncodeConfig(frame_width_, frame_height_, frame_rate, target_bitrate_, + encode_speed_, enc_pass, impl_ptr_->encode_config_list); + vp9_dump_encoder_config(&oxcf, fp); + return StatusOk; +} + void SimpleEncode::ComputeFirstPassStats() { vpx_rational_t frame_rate = make_vpx_rational(frame_rate_num_, frame_rate_den_); - const VP9EncoderConfig oxcf = - vp9_get_encoder_config(frame_width_, frame_height_, frame_rate, - target_bitrate_, VPX_RC_FIRST_PASS); + const VP9EncoderConfig oxcf = GetEncodeConfig( + frame_width_, frame_height_, frame_rate, target_bitrate_, encode_speed_, + VPX_RC_FIRST_PASS, impl_ptr_->encode_config_list); VP9_COMP *cpi = init_encoder(&oxcf, 
impl_ptr_->img_fmt); struct lookahead_ctx *lookahead = cpi->lookahead; int i; int use_highbitdepth = 0; + const int num_rows_16x16 = get_num_unit_16x16(frame_height_); + const int num_cols_16x16 = get_num_unit_16x16(frame_width_); #if CONFIG_VP9_HIGHBITDEPTH use_highbitdepth = cpi->common.use_highbitdepth; #endif @@ -766,6 +900,12 @@ void SimpleEncode::ComputeFirstPassStats() { // vp9_get_compressed_data only generates first pass stats not // compresses data assert(size == 0); + // Get vp9 first pass motion vector info. + std::vector mv_info(num_rows_16x16 * num_cols_16x16); + update_motion_vector_info(cpi->fp_motion_vector_info, num_rows_16x16, + num_cols_16x16, mv_info.data(), + kMotionVectorFullPixelPrecision); + fp_motion_vector_info_.push_back(mv_info); } impl_ptr_->first_pass_stats.push_back(vp9_get_frame_stats(&cpi->twopass)); } @@ -776,6 +916,9 @@ void SimpleEncode::ComputeFirstPassStats() { free_encoder(cpi); rewind(in_file_); vpx_img_free(&img); + + // Generate key_frame_map based on impl_ptr_->first_pass_stats. + key_frame_map_ = ComputeKeyFrameMap(); } std::vector> SimpleEncode::ObserveFirstPassStats() { @@ -800,9 +943,44 @@ std::vector> SimpleEncode::ObserveFirstPassStats() { return output_stats; } -void SimpleEncode::SetExternalGroupOfPicture( - std::vector external_arf_indexes) { - external_arf_indexes_ = external_arf_indexes; +std::vector> +SimpleEncode::ObserveFirstPassMotionVectors() { + return fp_motion_vector_info_; +} + +void SimpleEncode::SetExternalGroupOfPicturesMap(int *gop_map, + int gop_map_size) { + for (int i = 0; i < gop_map_size; ++i) { + gop_map_.push_back(gop_map[i]); + } + // The following will check and modify gop_map_ to make sure the + // gop_map_ satisfies the constraints. + // 1) Each key frame position should be at the start of a gop. + // 2) The last gop should not use an alt ref. 
+ assert(gop_map_.size() == key_frame_map_.size()); + int last_gop_start = 0; + for (int i = 0; static_cast(i) < gop_map_.size(); ++i) { + if (key_frame_map_[i] == 1 && gop_map_[i] == 0) { + fprintf(stderr, "Add an extra gop start at show_idx %d\n", i); + // Insert a gop start at key frame location. + gop_map_[i] |= kGopMapFlagStart; + gop_map_[i] |= kGopMapFlagUseAltRef; + } + if (gop_map_[i] & kGopMapFlagStart) { + last_gop_start = i; + } + } + if (gop_map_[last_gop_start] & kGopMapFlagUseAltRef) { + fprintf(stderr, + "Last group of pictures starting at show_idx %d shouldn't use alt " + "ref\n", + last_gop_start); + gop_map_[last_gop_start] &= ~kGopMapFlagUseAltRef; + } +} + +std::vector SimpleEncode::ObserveExternalGroupOfPicturesMap() { + return gop_map_; } template @@ -813,13 +991,40 @@ T *GetVectorData(const std::vector &v) { return const_cast(v.data()); } +static GOP_COMMAND GetGopCommand(const std::vector &gop_map, + int start_show_index) { + GOP_COMMAND gop_command; + if (gop_map.size() > 0) { + assert(static_cast(start_show_index) < gop_map.size()); + assert((gop_map[start_show_index] & kGopMapFlagStart) != 0); + int end_show_index = start_show_index + 1; + // gop_map[end_show_index] & kGopMapFlagStart == 0 means this is + // the start of a gop. + while (static_cast(end_show_index) < gop_map.size() && + (gop_map[end_show_index] & kGopMapFlagStart) == 0) { + ++end_show_index; + } + const int show_frame_count = end_show_index - start_show_index; + int use_alt_ref = (gop_map[start_show_index] & kGopMapFlagUseAltRef) != 0; + if (static_cast(end_show_index) == gop_map.size()) { + // This is the last gop group, there must be no altref. 
+ use_alt_ref = 0; + } + gop_command_on(&gop_command, show_frame_count, use_alt_ref); + } else { + gop_command_off(&gop_command); + } + return gop_command; +} + void SimpleEncode::StartEncode() { assert(impl_ptr_->first_pass_stats.size() > 0); vpx_rational_t frame_rate = make_vpx_rational(frame_rate_num_, frame_rate_den_); - VP9EncoderConfig oxcf = - vp9_get_encoder_config(frame_width_, frame_height_, frame_rate, - target_bitrate_, VPX_RC_LAST_PASS); + VP9EncoderConfig oxcf = GetEncodeConfig( + frame_width_, frame_height_, frame_rate, target_bitrate_, encode_speed_, + VPX_RC_LAST_PASS, impl_ptr_->encode_config_list); + vpx_fixed_buf_t stats; stats.buf = GetVectorData(impl_ptr_->first_pass_stats); stats.sz = sizeof(impl_ptr_->first_pass_stats[0]) * @@ -834,11 +1039,10 @@ void SimpleEncode::StartEncode() { frame_coding_index_ = 0; show_frame_count_ = 0; - encode_command_set_external_arf_indexes(&impl_ptr_->cpi->encode_command, - GetVectorData(external_arf_indexes_)); - UpdateKeyFrameGroup(show_frame_count_); + const GOP_COMMAND gop_command = GetGopCommand(gop_map_, show_frame_count_); + encode_command_set_gop_command(&impl_ptr_->cpi->encode_command, gop_command); UpdateGroupOfPicture(impl_ptr_->cpi, frame_coding_index_, ref_frame_info_, &group_of_picture_); rewind(in_file_); @@ -914,6 +1118,9 @@ void SimpleEncode::PostUpdateState( IncreaseGroupOfPictureIndex(&group_of_picture_); if (IsGroupOfPictureFinished(group_of_picture_)) { + const GOP_COMMAND gop_command = GetGopCommand(gop_map_, show_frame_count_); + encode_command_set_gop_command(&impl_ptr_->cpi->encode_command, + gop_command); // This function needs to be called after ref_frame_info_ is updated // properly in PostUpdateRefFrameInfo() and UpdateKeyFrameGroup(). 
UpdateGroupOfPicture(impl_ptr_->cpi, frame_coding_index_, ref_frame_info_, @@ -985,7 +1192,10 @@ void SimpleEncode::EncodeFrame(EncodeFrameResult *encode_frame_result) { abort(); } - update_encode_frame_result(encode_frame_result, &encode_frame_info); + const GroupOfPicture group_of_picture = this->ObserveGroupOfPicture(); + const int show_frame_count = group_of_picture.show_frame_count; + update_encode_frame_result(encode_frame_result, show_frame_count, + &encode_frame_info); PostUpdateState(*encode_frame_result); } else { // TODO(angiebird): Clean up encode_frame_result. @@ -1002,26 +1212,73 @@ void SimpleEncode::EncodeFrameWithQuantizeIndex( encode_command_reset_external_quantize_index(&impl_ptr_->cpi->encode_command); } +void SimpleEncode::EncodeFrameWithTargetFrameBits( + EncodeFrameResult *encode_frame_result, int target_frame_bits, + double percent_diff) { + encode_command_set_target_frame_bits(&impl_ptr_->cpi->encode_command, + target_frame_bits, percent_diff); + EncodeFrame(encode_frame_result); + encode_command_reset_target_frame_bits(&impl_ptr_->cpi->encode_command); +} + +static int GetCodingFrameNumFromGopMap(const std::vector &gop_map) { + int start_show_index = 0; + int coding_frame_count = 0; + while (static_cast(start_show_index) < gop_map.size()) { + const GOP_COMMAND gop_command = GetGopCommand(gop_map, start_show_index); + start_show_index += gop_command.show_frame_count; + coding_frame_count += gop_command_coding_frame_count(&gop_command); + } + assert(start_show_index == gop_map.size()); + return coding_frame_count; +} + int SimpleEncode::GetCodingFrameNum() const { - assert(impl_ptr_->first_pass_stats.size() - 1 > 0); + assert(impl_ptr_->first_pass_stats.size() > 0); + if (gop_map_.size() > 0) { + return GetCodingFrameNumFromGopMap(gop_map_); + } + // These are the default settings for now. 
const int multi_layer_arf = 0; const int allow_alt_ref = 1; vpx_rational_t frame_rate = make_vpx_rational(frame_rate_num_, frame_rate_den_); - const VP9EncoderConfig oxcf = - vp9_get_encoder_config(frame_width_, frame_height_, frame_rate, - target_bitrate_, VPX_RC_LAST_PASS); + const VP9EncoderConfig oxcf = GetEncodeConfig( + frame_width_, frame_height_, frame_rate, target_bitrate_, encode_speed_, + VPX_RC_LAST_PASS, impl_ptr_->encode_config_list); FRAME_INFO frame_info = vp9_get_frame_info(&oxcf); FIRST_PASS_INFO first_pass_info; fps_init_first_pass_info(&first_pass_info, GetVectorData(impl_ptr_->first_pass_stats), num_frames_); - return vp9_get_coding_frame_num(external_arf_indexes_.data(), &oxcf, - &frame_info, &first_pass_info, + return vp9_get_coding_frame_num(&oxcf, &frame_info, &first_pass_info, multi_layer_arf, allow_alt_ref); } +std::vector SimpleEncode::ComputeKeyFrameMap() const { + // The last entry of first_pass_stats is the overall stats. + assert(impl_ptr_->first_pass_stats.size() == num_frames_ + 1); + vpx_rational_t frame_rate = + make_vpx_rational(frame_rate_num_, frame_rate_den_); + const VP9EncoderConfig oxcf = GetEncodeConfig( + frame_width_, frame_height_, frame_rate, target_bitrate_, encode_speed_, + VPX_RC_LAST_PASS, impl_ptr_->encode_config_list); + FRAME_INFO frame_info = vp9_get_frame_info(&oxcf); + FIRST_PASS_INFO first_pass_info; + fps_init_first_pass_info(&first_pass_info, + GetVectorData(impl_ptr_->first_pass_stats), + num_frames_); + std::vector key_frame_map(num_frames_, 0); + vp9_get_key_frame_map(&oxcf, &frame_info, &first_pass_info, + GetVectorData(key_frame_map)); + return key_frame_map; +} + +std::vector SimpleEncode::ObserveKeyFrameMap() const { + return key_frame_map_; +} + uint64_t SimpleEncode::GetFramePixelCount() const { assert(frame_width_ % 2 == 0); assert(frame_height_ % 2 == 0); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.h 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.h index 4221a7015..380e8118f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/simple_encode.h @@ -19,13 +19,18 @@ namespace vp9 { +enum StatusCode { + StatusOk = 0, + StatusError, +}; + // TODO(angiebird): Add description for each frame type. enum FrameType { kFrameTypeKey = 0, - kFrameTypeInter, - kFrameTypeAltRef, - kFrameTypeOverlay, - kFrameTypeGolden, + kFrameTypeInter = 1, + kFrameTypeAltRef = 2, + kFrameTypeOverlay = 3, + kFrameTypeGolden = 4, }; // TODO(angiebird): Add description for each reference frame type. @@ -39,6 +44,14 @@ enum RefFrameType { kRefFrameTypeNone = -1, }; +enum GopMapFlag { + kGopMapFlagStart = + 1 << 0, // Indicate this location is the start of a group of pictures. + kGopMapFlagUseAltRef = + 1 << 1, // Indicate this group of pictures will use an alt ref. Only set + // this flag when kGopMapFlagStart is set. +}; + // The frame is split to 4x4 blocks. // This structure contains the information of each 4x4 block. struct PartitionInfo { @@ -50,9 +63,12 @@ struct PartitionInfo { int height; // prediction block height }; -constexpr int kMotionVectorPrecision = 8; +constexpr int kMotionVectorSubPixelPrecision = 8; +constexpr int kMotionVectorFullPixelPrecision = 1; -// The frame is split to 4x4 blocks. +// In the first pass. The frame is split to 16x16 blocks. +// This structure contains the information of each 16x16 block. +// In the second pass. The frame is split to 4x4 blocks. // This structure contains the information of each 4x4 block. struct MotionVectorInfo { // Number of valid motion vectors, always 0 if this block is in the key frame. @@ -60,8 +76,8 @@ struct MotionVectorInfo { int mv_count; // The reference frame for motion vectors. If the second motion vector does // not exist (mv_count = 1), the reference frame is kNoneRefFrame. 
- // Otherwise, the reference frame is either kLastFrame, or kGoldenFrame, - // or kAltRefFrame. + // Otherwise, the reference frame is either kRefFrameTypeLast, or + // kRefFrameTypePast, or kRefFrameTypeFuture. RefFrameType ref_frame[2]; // The row offset of motion vectors in the unit of pixel. // If the second motion vector does not exist, the value is 0. @@ -71,6 +87,24 @@ struct MotionVectorInfo { double mv_column[2]; }; +// Accumulated tpl stats of all blocks in one frame. +// For each frame, the tpl stats are computed per 32x32 block. +struct TplStatsInfo { + // Intra complexity: the sum of absolute transform difference (SATD) of + // intra predicted residuals. + int64_t intra_cost; + // Inter complexity: the SATD of inter predicted residuals. + int64_t inter_cost; + // Motion compensated information flow. It measures how much information + // is propagated from the current frame to other frames. + int64_t mc_flow; + // Motion compensated dependency cost. It equals to its own intra_cost + // plus the mc_flow. + int64_t mc_dep_cost; + // Motion compensated reference cost. + int64_t mc_ref_cost; +}; + struct RefFrameInfo { int coding_indexes[kRefFrameTypeMax]; @@ -237,7 +271,7 @@ struct EncodeFrameResult { std::vector partition_info; // A vector of the motion vector information of the frame. // The number of elements is |num_rows_4x4| * |num_cols_4x4|. - // The frame is divided 4x4 blocks of |num_rows_4x4| rows and + // The frame is divided into 4x4 blocks of |num_rows_4x4| rows and // |num_cols_4x4| columns. // Each 4x4 block contains 0 motion vector if this is an intra predicted // frame (for example, the key frame). If the frame is inter predicted, @@ -245,7 +279,25 @@ struct EncodeFrameResult { // Similar to partition info, all 4x4 blocks inside the same partition block // share the same motion vector information. std::vector motion_vector_info; + // A vector of the tpl stats information. 
+ // The tpl stats measure the complexity of a frame, as well as the + // informatioin propagated along the motion trajactory between frames, in + // the reference frame structure. + // The tpl stats could be used as a more accurate spatial and temporal + // complexity measure in addition to the first pass stats. + // The vector contains tpl stats for all show frames in a GOP. + // The tpl stats stored in the vector is according to the encoding order. + // For example, suppose there are N show frames for the current GOP. + // Then tpl_stats_info[0] stores the information of the first frame to be + // encoded for this GOP, i.e, the AltRef frame. + std::vector tpl_stats_info; ImageBuffer coded_frame; + + // recode_count, q_index_history and rate_history are only available when + // EncodeFrameWithTargetFrameBits() is used. + int recode_count; + std::vector q_index_history; + std::vector rate_history; }; struct GroupOfPicture { @@ -255,6 +307,7 @@ struct GroupOfPicture { // triggered when the coded frame is the last one in the previous group of // pictures. std::vector encode_frame_list; + // Indicates the index of the next coding frame in encode_frame_list. // In other words, EncodeFrameInfo of the next coding frame can be // obtained with encode_frame_list[next_encode_frame_index]. @@ -263,13 +316,25 @@ struct GroupOfPicture { // will be increased after each EncodeFrame()/EncodeFrameWithQuantizeIndex() // call. int next_encode_frame_index; + // Number of show frames in this group of pictures. int show_frame_count; + // The show index/timestamp of the earliest show frame in the group of // pictures. int start_show_index; - // The coding index of the first coding frame in the group of picture. + + // The coding index of the first coding frame in the group of pictures. int start_coding_index; + + // Indicates whether this group of pictures starts with a key frame. + int first_is_key_frame; + + // Indicates whether this group of pictures uses an alt ref. 
+ int use_alt_ref; + + // Indicates whether previous group of pictures used an alt ref. + int last_gop_use_alt_ref; }; class SimpleEncode { @@ -283,8 +348,44 @@ class SimpleEncode { SimpleEncode(SimpleEncode &) = delete; SimpleEncode &operator=(const SimpleEncode &) = delete; - // Makes encoder compute the first pass stats and store it internally for - // future encode. + // Adjusts the encoder's coding speed. + // If this function is not called, the encoder will use default encode_speed + // 0. Call this function before ComputeFirstPassStats() if needed. + // The encode_speed is equivalent to --cpu-used of the vpxenc command. + // The encode_speed's range should be [0, 9]. + // Setting the encode_speed to a higher level will yield faster coding + // at the cost of lower compression efficiency. + void SetEncodeSpeed(int encode_speed); + + // Set encoder config + // The following configs in VP9EncoderConfig are allowed to change in this + // function. See https://ffmpeg.org/ffmpeg-codecs.html#libvpx for each + // config's meaning. 
+ // Configs in VP9EncoderConfig: Equivalent configs in ffmpeg: + // 1 key_freq -g + // 2 two_pass_vbrmin_section -minrate * 100LL / bit_rate + // 3 two_pass_vbrmax_section -maxrate * 100LL / bit_rate + // 4 under_shoot_pct -undershoot-pct + // 5 over_shoot_pct -overshoot-pct + // 6 max_threads -threads + // 7 frame_parallel_decoding_mode -frame-parallel + // 8 tile_column -tile-columns + // 9 arnr_max_frames -arnr-maxframes + // 10 arnr_strength -arnr-strength + // 11 lag_in_frames -rc_lookahead + // 12 encode_breakout -static-thresh + // 13 enable_tpl_model -enable-tpl + // 14 enable_auto_arf -auto-alt-ref + StatusCode SetEncodeConfig(const char *name, const char *value); + + // A debug function that dumps configs from VP9EncoderConfig + // pass = 1: first pass, pass = 2: second pass + // fp: file pointer for dumping config + StatusCode DumpEncodeConfigs(int pass, FILE *fp); + + // Makes encoder compute the first pass stats and store it at + // impl_ptr_->first_pass_stats. key_frame_map_ is also computed based on the + // first pass stats. void ComputeFirstPassStats(); // Outputs the first pass stats represented by a 2-D vector. @@ -293,13 +394,38 @@ class SimpleEncode { // values. For details, please check FIRSTPASS_STATS in vp9_firstpass.h std::vector> ObserveFirstPassStats(); - // Sets arf indexes for the video from external input. - // The arf index determines whether a frame is arf or not. - // Therefore it also determines the group of picture size. - // If set, VP9 will use the external arf index to make decision. + // Outputs the first pass motion vectors represented by a 2-D vector. + // One can use the frame index at first dimension to retrieve the mvs for + // each video frame. The frame is divided into 16x16 blocks. The number of + // elements is round_up(|num_rows_4x4| / 4) * round_up(|num_cols_4x4| / 4). 
+ std::vector> ObserveFirstPassMotionVectors(); + + // Ouputs a copy of key_frame_map_, a binary vector with size equal to the + // number of show frames in the video. For each entry in the vector, 1 + // indicates the position is a key frame and 0 indicates it's not a key frame. + // This function should be called after ComputeFirstPassStats() + std::vector ObserveKeyFrameMap() const; + + // Sets group of pictures map for coding the entire video. + // Each entry in the gop_map corresponds to a show frame in the video. + // Therefore, the size of gop_map should equal to the number of show frames in + // the entire video. + // If a given entry's kGopMapFlagStart is set, it means this is the start of a + // gop. Once kGopMapFlagStart is set, one can set kGopMapFlagUseAltRef to + // indicate whether this gop use altref. + // If a given entry is zero, it means it's in the middle of a gop. // This function should be called only once after ComputeFirstPassStats(), // before StartEncode(). - void SetExternalGroupOfPicture(std::vector external_arf_indexes); + // This API will check and modify the gop_map to satisfy the following + // constraints. + // 1) Each key frame position should be at the start of a gop. + // 2) The last gop should not use an alt ref. + void SetExternalGroupOfPicturesMap(int *gop_map, int gop_map_size); + + // Observe the group of pictures map set through + // SetExternalGroupOfPicturesMap(). This function should be called after + // SetExternalGroupOfPicturesMap(). + std::vector ObserveExternalGroupOfPicturesMap(); // Initializes the encoder for actual encoding. // This function should be called after ComputeFirstPassStats(). @@ -332,6 +458,17 @@ class SimpleEncode { void EncodeFrameWithQuantizeIndex(EncodeFrameResult *encode_frame_result, int quantize_index); + // Encode a frame with target frame bits usage. + // The encoder will find a quantize index to make the actual frame bits usage + // match the target. 
EncodeFrameWithTargetFrameBits() will recode the frame + // up to 7 times to find a q_index to make the actual_frame_bits satisfy the + // following inequality. |actual_frame_bits - target_frame_bits| * 100 / + // target_frame_bits + // <= percent_diff. + void EncodeFrameWithTargetFrameBits(EncodeFrameResult *encode_frame_result, + int target_frame_bits, + double percent_diff); + // Gets the number of coding frames for the video. The coding frames include // show frame and no show frame. // This function should be called after ComputeFirstPassStats(). @@ -341,6 +478,12 @@ class SimpleEncode { uint64_t GetFramePixelCount() const; private: + // Compute the key frame locations of the video based on first pass stats. + // The results are returned as a binary vector with 1s indicating keyframes + // and 0s indicating non keyframes. + // It has to be called after impl_ptr_->first_pass_stats is computed. + std::vector ComputeKeyFrameMap() const; + // Updates key_frame_group_size_, reset key_frame_group_index_ and init // ref_frame_info_. void UpdateKeyFrameGroup(int key_frame_show_index); @@ -358,12 +501,14 @@ class SimpleEncode { int frame_rate_den_; int target_bitrate_; int num_frames_; + int encode_speed_; std::FILE *in_file_; std::FILE *out_file_; std::unique_ptr impl_ptr_; - std::vector external_arf_indexes_; + std::vector key_frame_map_; + std::vector gop_map_; GroupOfPicture group_of_picture_; // The key frame group size includes one key frame plus the number of @@ -387,6 +532,17 @@ class SimpleEncode { // frame appears? // Reference frames info of the to-be-coded frame. RefFrameInfo ref_frame_info_; + + // A 2-D vector of motion vector information of the frame collected + // from the first pass. The first dimension is the frame index. + // Each frame is divided into 16x16 blocks. The number of elements is + // round_up(|num_rows_4x4| / 4) * round_up(|num_cols_4x4| / 4). 
+ // Each 16x16 block contains 0 motion vector if this is an intra predicted + // frame (for example, the key frame). If the frame is inter predicted, + // each 16x16 block contains either 1 or 2 motion vectors. + // The first motion vector is always from the LAST_FRAME. + // The second motion vector is always from the GOLDEN_FRAME. + std::vector> fp_motion_vector_info_; }; } // namespace vp9 diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.c index 2ca2114ec..a73683dfe 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.c @@ -13,6 +13,7 @@ #include "./vpx_config.h" #include "vpx/vpx_encoder.h" +#include "vpx/vpx_ext_ratectrl.h" #include "vpx_dsp/psnr.h" #include "vpx_ports/vpx_once.h" #include "vpx_ports/static_assert.h" @@ -355,13 +356,14 @@ static vpx_codec_err_t validate_img(vpx_codec_alg_priv_t *ctx, switch (img->fmt) { case VPX_IMG_FMT_YV12: case VPX_IMG_FMT_I420: - case VPX_IMG_FMT_I42016: break; + case VPX_IMG_FMT_I42016: + case VPX_IMG_FMT_NV12: break; case VPX_IMG_FMT_I422: case VPX_IMG_FMT_I444: case VPX_IMG_FMT_I440: if (ctx->cfg.g_profile != (unsigned int)PROFILE_1) { ERROR( - "Invalid image format. I422, I444, I440 images are " + "Invalid image format. 
I422, I444, I440, NV12 images are " "not supported in profile."); } break; @@ -391,6 +393,7 @@ static vpx_codec_err_t validate_img(vpx_codec_alg_priv_t *ctx, static int get_image_bps(const vpx_image_t *img) { switch (img->fmt) { case VPX_IMG_FMT_YV12: + case VPX_IMG_FMT_NV12: case VPX_IMG_FMT_I420: return 12; case VPX_IMG_FMT_I422: return 16; case VPX_IMG_FMT_I444: return 24; @@ -468,10 +471,11 @@ static vpx_rational64_t get_g_timebase_in_ts(vpx_rational_t g_timebase) { } static vpx_codec_err_t set_encoder_config( - VP9EncoderConfig *oxcf, const vpx_codec_enc_cfg_t *cfg, + VP9EncoderConfig *oxcf, vpx_codec_enc_cfg_t *cfg, const struct vp9_extracfg *extra_cfg) { const int is_vbr = cfg->rc_end_usage == VPX_VBR; int sl, tl; + unsigned int raw_target_rate; oxcf->profile = cfg->g_profile; oxcf->max_threads = (int)cfg->g_threads; oxcf->width = cfg->g_w; @@ -498,8 +502,14 @@ static vpx_codec_err_t set_encoder_config( cfg->g_pass == VPX_RC_FIRST_PASS ? 0 : cfg->g_lag_in_frames; oxcf->rc_mode = cfg->rc_end_usage; + raw_target_rate = + (unsigned int)((int64_t)oxcf->width * oxcf->height * oxcf->bit_depth * 3 * + oxcf->init_framerate / 1000); + // Cap target bitrate to raw rate + cfg->rc_target_bitrate = VPXMIN(raw_target_rate, cfg->rc_target_bitrate); + // Convert target bandwidth from Kbit/s to Bit/s - oxcf->target_bandwidth = 1000 * cfg->rc_target_bitrate; + oxcf->target_bandwidth = 1000 * (int64_t)cfg->rc_target_bitrate; oxcf->rc_max_intra_bitrate_pct = extra_cfg->rc_max_intra_bitrate_pct; oxcf->rc_max_inter_bitrate_pct = extra_cfg->rc_max_inter_bitrate_pct; oxcf->gf_cbr_boost_pct = extra_cfg->gf_cbr_boost_pct; @@ -624,7 +634,7 @@ static vpx_codec_err_t set_encoder_config( } if (get_level_index(oxcf->target_level) >= 0) config_target_level(oxcf); - // vp9_dump_encoder_config(oxcf); + // vp9_dump_encoder_config(oxcf, stderr); return VPX_CODEC_OK; } @@ -698,6 +708,10 @@ static vpx_codec_err_t ctrl_set_cpuused(vpx_codec_alg_priv_t *ctx, extra_cfg.cpu_used = 
CAST(VP8E_SET_CPUUSED, args); extra_cfg.cpu_used = VPXMIN(9, extra_cfg.cpu_used); extra_cfg.cpu_used = VPXMAX(-9, extra_cfg.cpu_used); +#if CONFIG_REALTIME_ONLY + if (extra_cfg.cpu_used > -5 && extra_cfg.cpu_used < 5) + extra_cfg.cpu_used = (extra_cfg.cpu_used > 0) ? 5 : -5; +#endif return update_extra_cfg(ctx, &extra_cfg); } @@ -1559,6 +1573,7 @@ static vpx_codec_err_t ctrl_set_svc_parameters(vpx_codec_alg_priv_t *ctx, lc->scaling_factor_num = params->scaling_factor_num[sl]; lc->scaling_factor_den = params->scaling_factor_den[sl]; lc->speed = params->speed_per_layer[sl]; + lc->loopfilter_ctrl = params->loopfilter_ctrl[sl]; } } @@ -1703,6 +1718,48 @@ static vpx_codec_err_t ctrl_set_postencode_drop(vpx_codec_alg_priv_t *ctx, return VPX_CODEC_OK; } +static vpx_codec_err_t ctrl_set_disable_overshoot_maxq_cbr( + vpx_codec_alg_priv_t *ctx, va_list args) { + VP9_COMP *const cpi = ctx->cpi; + const unsigned int data = va_arg(args, unsigned int); + cpi->rc.disable_overshoot_maxq_cbr = data; + return VPX_CODEC_OK; +} + +static vpx_codec_err_t ctrl_set_disable_loopfilter(vpx_codec_alg_priv_t *ctx, + va_list args) { + VP9_COMP *const cpi = ctx->cpi; + const unsigned int data = va_arg(args, unsigned int); + cpi->loopfilter_ctrl = data; + return VPX_CODEC_OK; +} + +static vpx_codec_err_t ctrl_set_external_rate_control(vpx_codec_alg_priv_t *ctx, + va_list args) { + vpx_rc_funcs_t funcs = *CAST(VP9E_SET_EXTERNAL_RATE_CONTROL, args); + VP9_COMP *cpi = ctx->cpi; + EXT_RATECTRL *ext_ratectrl = &cpi->ext_ratectrl; + const VP9EncoderConfig *oxcf = &cpi->oxcf; + // TODO(angiebird): Check the possibility of this flag being set at pass == 1 + if (oxcf->pass == 2) { + const FRAME_INFO *frame_info = &cpi->frame_info; + vpx_rc_config_t ratectrl_config; + + ratectrl_config.frame_width = frame_info->frame_width; + ratectrl_config.frame_height = frame_info->frame_height; + ratectrl_config.show_frame_count = cpi->twopass.first_pass_info.num_frames; + + // TODO(angiebird): Double check whether 
this is the proper way to set up + // target_bitrate and frame_rate. + ratectrl_config.target_bitrate_kbps = (int)(oxcf->target_bandwidth / 1000); + ratectrl_config.frame_rate_num = oxcf->g_timebase.den; + ratectrl_config.frame_rate_den = oxcf->g_timebase.num; + + vp9_extrc_create(funcs, ratectrl_config, ext_ratectrl); + } + return VPX_CODEC_OK; +} + static vpx_codec_ctrl_fn_map_t encoder_ctrl_maps[] = { { VP8_COPY_REFERENCE, ctrl_copy_reference }, @@ -1747,12 +1804,15 @@ static vpx_codec_ctrl_fn_map_t encoder_ctrl_maps[] = { { VP9E_SET_TARGET_LEVEL, ctrl_set_target_level }, { VP9E_SET_ROW_MT, ctrl_set_row_mt }, { VP9E_SET_POSTENCODE_DROP, ctrl_set_postencode_drop }, + { VP9E_SET_DISABLE_OVERSHOOT_MAXQ_CBR, ctrl_set_disable_overshoot_maxq_cbr }, { VP9E_ENABLE_MOTION_VECTOR_UNIT_TEST, ctrl_enable_motion_vector_unit_test }, { VP9E_SET_SVC_INTER_LAYER_PRED, ctrl_set_svc_inter_layer_pred }, { VP9E_SET_SVC_FRAME_DROP_LAYER, ctrl_set_svc_frame_drop_layer }, { VP9E_SET_SVC_GF_TEMPORAL_REF, ctrl_set_svc_gf_temporal_ref }, { VP9E_SET_SVC_SPATIAL_LAYER_SYNC, ctrl_set_svc_spatial_layer_sync }, { VP9E_SET_DELTA_Q_UV, ctrl_set_delta_q_uv }, + { VP9E_SET_DISABLE_LOOPFILTER, ctrl_set_disable_loopfilter }, + { VP9E_SET_EXTERNAL_RATE_CONTROL, ctrl_set_external_rate_control }, // Getters { VP8E_GET_LAST_QUANTIZER, ctrl_get_quantizer }, @@ -1886,7 +1946,7 @@ static vp9_extracfg get_extra_cfg() { VP9EncoderConfig vp9_get_encoder_config(int frame_width, int frame_height, vpx_rational_t frame_rate, - int target_bitrate, + int target_bitrate, int encode_speed, vpx_enc_pass enc_pass) { /* This function will generate the same VP9EncoderConfig used by the * vpxenc command given below. 
@@ -1897,6 +1957,7 @@ VP9EncoderConfig vp9_get_encoder_config(int frame_width, int frame_height, * HEIGHT: frame_height * FPS: frame_rate * BITRATE: target_bitrate + * CPU_USED:encode_speed * * INPUT, OUTPUT, LIMIT will not affect VP9EncoderConfig * @@ -1908,9 +1969,10 @@ VP9EncoderConfig vp9_get_encoder_config(int frame_width, int frame_height, * BITRATE=600 * FPS=30/1 * LIMIT=150 + * CPU_USED=0 * ./vpxenc --limit=$LIMIT --width=$WIDTH --height=$HEIGHT --fps=$FPS * --lag-in-frames=25 \ - * --codec=vp9 --good --cpu-used=0 --threads=0 --profile=0 \ + * --codec=vp9 --good --cpu-used=CPU_USED --threads=0 --profile=0 \ * --min-q=0 --max-q=63 --auto-alt-ref=1 --passes=2 --kf-max-dist=150 \ * --kf-min-dist=0 --drop-frame=0 --static-thresh=0 --bias-pct=50 \ * --minsection-pct=0 --maxsection-pct=150 --arnr-maxframes=7 --psnr \ @@ -1933,49 +1995,50 @@ VP9EncoderConfig vp9_get_encoder_config(int frame_width, int frame_height, oxcf.tile_columns = 0; oxcf.frame_parallel_decoding_mode = 0; oxcf.two_pass_vbrmax_section = 150; + oxcf.speed = abs(encode_speed); return oxcf; } -#define DUMP_STRUCT_VALUE(struct, value) \ - printf(#value " %" PRId64 "\n", (int64_t)(struct)->value) +#define DUMP_STRUCT_VALUE(fp, structure, value) \ + fprintf(fp, #value " %" PRId64 "\n", (int64_t)(structure)->value) -void vp9_dump_encoder_config(const VP9EncoderConfig *oxcf) { - DUMP_STRUCT_VALUE(oxcf, profile); - DUMP_STRUCT_VALUE(oxcf, bit_depth); - DUMP_STRUCT_VALUE(oxcf, width); - DUMP_STRUCT_VALUE(oxcf, height); - DUMP_STRUCT_VALUE(oxcf, input_bit_depth); - DUMP_STRUCT_VALUE(oxcf, init_framerate); +void vp9_dump_encoder_config(const VP9EncoderConfig *oxcf, FILE *fp) { + DUMP_STRUCT_VALUE(fp, oxcf, profile); + DUMP_STRUCT_VALUE(fp, oxcf, bit_depth); + DUMP_STRUCT_VALUE(fp, oxcf, width); + DUMP_STRUCT_VALUE(fp, oxcf, height); + DUMP_STRUCT_VALUE(fp, oxcf, input_bit_depth); + DUMP_STRUCT_VALUE(fp, oxcf, init_framerate); // TODO(angiebird): dump g_timebase // TODO(angiebird): dump g_timebase_in_ts - 
DUMP_STRUCT_VALUE(oxcf, target_bandwidth); + DUMP_STRUCT_VALUE(fp, oxcf, target_bandwidth); - DUMP_STRUCT_VALUE(oxcf, noise_sensitivity); - DUMP_STRUCT_VALUE(oxcf, sharpness); - DUMP_STRUCT_VALUE(oxcf, speed); - DUMP_STRUCT_VALUE(oxcf, rc_max_intra_bitrate_pct); - DUMP_STRUCT_VALUE(oxcf, rc_max_inter_bitrate_pct); - DUMP_STRUCT_VALUE(oxcf, gf_cbr_boost_pct); + DUMP_STRUCT_VALUE(fp, oxcf, noise_sensitivity); + DUMP_STRUCT_VALUE(fp, oxcf, sharpness); + DUMP_STRUCT_VALUE(fp, oxcf, speed); + DUMP_STRUCT_VALUE(fp, oxcf, rc_max_intra_bitrate_pct); + DUMP_STRUCT_VALUE(fp, oxcf, rc_max_inter_bitrate_pct); + DUMP_STRUCT_VALUE(fp, oxcf, gf_cbr_boost_pct); - DUMP_STRUCT_VALUE(oxcf, mode); - DUMP_STRUCT_VALUE(oxcf, pass); + DUMP_STRUCT_VALUE(fp, oxcf, mode); + DUMP_STRUCT_VALUE(fp, oxcf, pass); // Key Framing Operations - DUMP_STRUCT_VALUE(oxcf, auto_key); - DUMP_STRUCT_VALUE(oxcf, key_freq); + DUMP_STRUCT_VALUE(fp, oxcf, auto_key); + DUMP_STRUCT_VALUE(fp, oxcf, key_freq); - DUMP_STRUCT_VALUE(oxcf, lag_in_frames); + DUMP_STRUCT_VALUE(fp, oxcf, lag_in_frames); // ---------------------------------------------------------------- // DATARATE CONTROL OPTIONS // vbr, cbr, constrained quality or constant quality - DUMP_STRUCT_VALUE(oxcf, rc_mode); + DUMP_STRUCT_VALUE(fp, oxcf, rc_mode); // buffer targeting aggressiveness - DUMP_STRUCT_VALUE(oxcf, under_shoot_pct); - DUMP_STRUCT_VALUE(oxcf, over_shoot_pct); + DUMP_STRUCT_VALUE(fp, oxcf, under_shoot_pct); + DUMP_STRUCT_VALUE(fp, oxcf, over_shoot_pct); // buffering parameters // TODO(angiebird): dump tarting_buffer_level_ms @@ -1983,37 +2046,37 @@ void vp9_dump_encoder_config(const VP9EncoderConfig *oxcf) { // TODO(angiebird): dump maximum_buffer_size_ms // Frame drop threshold. 
- DUMP_STRUCT_VALUE(oxcf, drop_frames_water_mark); + DUMP_STRUCT_VALUE(fp, oxcf, drop_frames_water_mark); // controlling quality - DUMP_STRUCT_VALUE(oxcf, fixed_q); - DUMP_STRUCT_VALUE(oxcf, worst_allowed_q); - DUMP_STRUCT_VALUE(oxcf, best_allowed_q); - DUMP_STRUCT_VALUE(oxcf, cq_level); - DUMP_STRUCT_VALUE(oxcf, aq_mode); + DUMP_STRUCT_VALUE(fp, oxcf, fixed_q); + DUMP_STRUCT_VALUE(fp, oxcf, worst_allowed_q); + DUMP_STRUCT_VALUE(fp, oxcf, best_allowed_q); + DUMP_STRUCT_VALUE(fp, oxcf, cq_level); + DUMP_STRUCT_VALUE(fp, oxcf, aq_mode); // Special handling of Adaptive Quantization for AltRef frames - DUMP_STRUCT_VALUE(oxcf, alt_ref_aq); + DUMP_STRUCT_VALUE(fp, oxcf, alt_ref_aq); // Internal frame size scaling. - DUMP_STRUCT_VALUE(oxcf, resize_mode); - DUMP_STRUCT_VALUE(oxcf, scaled_frame_width); - DUMP_STRUCT_VALUE(oxcf, scaled_frame_height); + DUMP_STRUCT_VALUE(fp, oxcf, resize_mode); + DUMP_STRUCT_VALUE(fp, oxcf, scaled_frame_width); + DUMP_STRUCT_VALUE(fp, oxcf, scaled_frame_height); // Enable feature to reduce the frame quantization every x frames. - DUMP_STRUCT_VALUE(oxcf, frame_periodic_boost); + DUMP_STRUCT_VALUE(fp, oxcf, frame_periodic_boost); // two pass datarate control - DUMP_STRUCT_VALUE(oxcf, two_pass_vbrbias); - DUMP_STRUCT_VALUE(oxcf, two_pass_vbrmin_section); - DUMP_STRUCT_VALUE(oxcf, two_pass_vbrmax_section); - DUMP_STRUCT_VALUE(oxcf, vbr_corpus_complexity); + DUMP_STRUCT_VALUE(fp, oxcf, two_pass_vbrbias); + DUMP_STRUCT_VALUE(fp, oxcf, two_pass_vbrmin_section); + DUMP_STRUCT_VALUE(fp, oxcf, two_pass_vbrmax_section); + DUMP_STRUCT_VALUE(fp, oxcf, vbr_corpus_complexity); // END DATARATE CONTROL OPTIONS // ---------------------------------------------------------------- // Spatial and temporal scalability. - DUMP_STRUCT_VALUE(oxcf, ss_number_layers); - DUMP_STRUCT_VALUE(oxcf, ts_number_layers); + DUMP_STRUCT_VALUE(fp, oxcf, ss_number_layers); + DUMP_STRUCT_VALUE(fp, oxcf, ts_number_layers); // Bitrate allocation for spatial layers. 
// TODO(angiebird): dump layer_target_bitrate[VPX_MAX_LAYERS] @@ -2021,25 +2084,25 @@ void vp9_dump_encoder_config(const VP9EncoderConfig *oxcf) { // TODO(angiebird): dump ss_enable_auto_arf[VPX_SS_MAX_LAYERS] // TODO(angiebird): dump ts_rate_decimator[VPX_TS_MAX_LAYERS] - DUMP_STRUCT_VALUE(oxcf, enable_auto_arf); - DUMP_STRUCT_VALUE(oxcf, encode_breakout); - DUMP_STRUCT_VALUE(oxcf, error_resilient_mode); - DUMP_STRUCT_VALUE(oxcf, frame_parallel_decoding_mode); + DUMP_STRUCT_VALUE(fp, oxcf, enable_auto_arf); + DUMP_STRUCT_VALUE(fp, oxcf, encode_breakout); + DUMP_STRUCT_VALUE(fp, oxcf, error_resilient_mode); + DUMP_STRUCT_VALUE(fp, oxcf, frame_parallel_decoding_mode); - DUMP_STRUCT_VALUE(oxcf, arnr_max_frames); - DUMP_STRUCT_VALUE(oxcf, arnr_strength); + DUMP_STRUCT_VALUE(fp, oxcf, arnr_max_frames); + DUMP_STRUCT_VALUE(fp, oxcf, arnr_strength); - DUMP_STRUCT_VALUE(oxcf, min_gf_interval); - DUMP_STRUCT_VALUE(oxcf, max_gf_interval); + DUMP_STRUCT_VALUE(fp, oxcf, min_gf_interval); + DUMP_STRUCT_VALUE(fp, oxcf, max_gf_interval); - DUMP_STRUCT_VALUE(oxcf, tile_columns); - DUMP_STRUCT_VALUE(oxcf, tile_rows); + DUMP_STRUCT_VALUE(fp, oxcf, tile_columns); + DUMP_STRUCT_VALUE(fp, oxcf, tile_rows); - DUMP_STRUCT_VALUE(oxcf, enable_tpl_model); + DUMP_STRUCT_VALUE(fp, oxcf, enable_tpl_model); - DUMP_STRUCT_VALUE(oxcf, max_threads); + DUMP_STRUCT_VALUE(fp, oxcf, max_threads); - DUMP_STRUCT_VALUE(oxcf, target_level); + DUMP_STRUCT_VALUE(fp, oxcf, target_level); // TODO(angiebird): dump two_pass_stats_in @@ -2047,19 +2110,19 @@ void vp9_dump_encoder_config(const VP9EncoderConfig *oxcf) { // TODO(angiebird): dump firstpass_mb_stats_in #endif - DUMP_STRUCT_VALUE(oxcf, tuning); - DUMP_STRUCT_VALUE(oxcf, content); + DUMP_STRUCT_VALUE(fp, oxcf, tuning); + DUMP_STRUCT_VALUE(fp, oxcf, content); #if CONFIG_VP9_HIGHBITDEPTH - DUMP_STRUCT_VALUE(oxcf, use_highbitdepth); + DUMP_STRUCT_VALUE(fp, oxcf, use_highbitdepth); #endif - DUMP_STRUCT_VALUE(oxcf, color_space); - DUMP_STRUCT_VALUE(oxcf, 
color_range); - DUMP_STRUCT_VALUE(oxcf, render_width); - DUMP_STRUCT_VALUE(oxcf, render_height); - DUMP_STRUCT_VALUE(oxcf, temporal_layering_mode); + DUMP_STRUCT_VALUE(fp, oxcf, color_space); + DUMP_STRUCT_VALUE(fp, oxcf, color_range); + DUMP_STRUCT_VALUE(fp, oxcf, render_width); + DUMP_STRUCT_VALUE(fp, oxcf, render_height); + DUMP_STRUCT_VALUE(fp, oxcf, temporal_layering_mode); - DUMP_STRUCT_VALUE(oxcf, row_mt); - DUMP_STRUCT_VALUE(oxcf, motion_vector_unit_test); + DUMP_STRUCT_VALUE(fp, oxcf, row_mt); + DUMP_STRUCT_VALUE(fp, oxcf, motion_vector_unit_test); } FRAME_INFO vp9_get_frame_info(const VP9EncoderConfig *oxcf) { diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.h index 08569fcc9..01338adb4 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.h @@ -19,10 +19,10 @@ extern "C" { VP9EncoderConfig vp9_get_encoder_config(int frame_width, int frame_height, vpx_rational_t frame_rate, - int target_bitrate, + int target_bitrate, int encode_speed, vpx_enc_pass enc_pass); -void vp9_dump_encoder_config(const VP9EncoderConfig *oxcf); +void vp9_dump_encoder_config(const VP9EncoderConfig *oxcf, FILE *fp); FRAME_INFO vp9_get_frame_info(const VP9EncoderConfig *oxcf); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_iface_common.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_iface_common.c index 74d08a587..8d031694d 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_iface_common.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9_iface_common.c @@ -88,8 +88,9 @@ vpx_codec_err_t image2yuvconfig(const vpx_image_t *img, yv12->y_width = img->d_w; yv12->y_height = img->d_h; - yv12->uv_width = - img->x_chroma_shift == 1 ? 
(1 + yv12->y_width) / 2 : yv12->y_width; + yv12->uv_width = img->x_chroma_shift == 1 || img->fmt == VPX_IMG_FMT_NV12 + ? (1 + yv12->y_width) / 2 + : yv12->y_width; yv12->uv_height = img->y_chroma_shift == 1 ? (1 + yv12->y_height) / 2 : yv12->y_height; yv12->uv_crop_width = yv12->uv_width; @@ -127,5 +128,9 @@ vpx_codec_err_t image2yuvconfig(const vpx_image_t *img, #endif // CONFIG_VP9_HIGHBITDEPTH yv12->subsampling_x = img->x_chroma_shift; yv12->subsampling_y = img->y_chroma_shift; + // When reading the data, UV are in one plane for NV12 format, thus + // x_chroma_shift is 0. After converting, UV are in separate planes, and + // subsampling_x should be set to 1. + if (img->fmt == VPX_IMG_FMT_NV12) yv12->subsampling_x = 1; return VPX_CODEC_OK; } diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9cx.mk b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9cx.mk index ad774505c..38e99165a 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9cx.mk +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vp9/vp9cx.mk @@ -18,9 +18,6 @@ VP9_CX_SRCS_REMOVE-no += $(VP9_COMMON_SRCS_REMOVE-no) VP9_CX_SRCS-yes += vp9_cx_iface.c VP9_CX_SRCS-yes += vp9_cx_iface.h -VP9_CX_SRCS-$(CONFIG_RATE_CTRL) += simple_encode.cc -VP9_CX_SRCS-$(CONFIG_RATE_CTRL) += simple_encode.h - VP9_CX_SRCS-yes += encoder/vp9_bitstream.c VP9_CX_SRCS-yes += encoder/vp9_context_tree.c VP9_CX_SRCS-yes += encoder/vp9_context_tree.h @@ -99,6 +96,8 @@ VP9_CX_SRCS-yes += encoder/vp9_skin_detection.c VP9_CX_SRCS-yes += encoder/vp9_skin_detection.h VP9_CX_SRCS-yes += encoder/vp9_noise_estimate.c VP9_CX_SRCS-yes += encoder/vp9_noise_estimate.h +VP9_CX_SRCS-yes += encoder/vp9_ext_ratectrl.c +VP9_CX_SRCS-yes += encoder/vp9_ext_ratectrl.h ifeq ($(CONFIG_VP9_POSTPROC),yes) VP9_CX_SRCS-$(CONFIG_INTERNAL_STATS) += common/vp9_postproc.h VP9_CX_SRCS-$(CONFIG_INTERNAL_STATS) += common/vp9_postproc.c diff --git 
a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/internal/vpx_codec_internal.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/internal/vpx_codec_internal.h index 9eed85e5d..4ef93057f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/internal/vpx_codec_internal.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/internal/vpx_codec_internal.h @@ -27,13 +27,15 @@ * * * An application instantiates a specific decoder instance by using - * vpx_codec_init() and a pointer to the algorithm's interface structure: + * vpx_codec_dec_init() and a pointer to the algorithm's interface structure: *
  *     my_app.c:
  *       extern vpx_codec_iface_t my_codec;
  *       {
  *           vpx_codec_ctx_t algo;
- *           res = vpx_codec_init(&algo, &my_codec);
+ *           int threads = 4;
+ *           vpx_codec_dec_cfg_t cfg = { threads, 0, 0 };
+ *           res = vpx_codec_dec_init(&algo, &my_codec, &cfg, 0);
  *       }
  *     
* @@ -66,7 +68,7 @@ typedef struct vpx_codec_priv_enc_mr_cfg vpx_codec_priv_enc_mr_cfg_t; /*!\brief init function pointer prototype * * Performs algorithm-specific initialization of the decoder context. This - * function is called by the generic vpx_codec_init() wrapper function, so + * function is called by vpx_codec_dec_init() and vpx_codec_enc_init(), so * plugins implementing this interface may trust the input parameters to be * properly initialized. * @@ -175,16 +177,15 @@ typedef const struct vpx_codec_ctrl_fn_map { /*!\brief decode data function pointer prototype * * Processes a buffer of coded data. If the processing results in a new - * decoded frame becoming available, #VPX_CODEC_CB_PUT_SLICE and - * #VPX_CODEC_CB_PUT_FRAME events are generated as appropriate. This - * function is called by the generic vpx_codec_decode() wrapper function, - * so plugins implementing this interface may trust the input parameters - * to be properly initialized. + * decoded frame becoming available, put_slice and put_frame callbacks + * are invoked as appropriate. This function is called by the generic + * vpx_codec_decode() wrapper function, so plugins implementing this + * interface may trust the input parameters to be properly initialized. * * \param[in] ctx Pointer to this instance's context * \param[in] data Pointer to this block of new coded data. If - * NULL, a #VPX_CODEC_CB_PUT_FRAME event is posted - * for the previously decoded frame. + * NULL, the put_frame callback is invoked for + * the previously decoded frame. * \param[in] data_sz Size of the coded data, in bytes. 
* * \return Returns #VPX_CODEC_OK if the coded data was processed completely diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_codec.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_codec.c index 10331aa21..114b94e19 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_codec.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_codec.c @@ -97,7 +97,7 @@ vpx_codec_err_t vpx_codec_control_(vpx_codec_ctx_t *ctx, int ctrl_id, ...) { res = VPX_CODEC_INCAPABLE; - for (entry = ctx->iface->ctrl_maps; entry && entry->fn; entry++) { + for (entry = ctx->iface->ctrl_maps; entry->fn; entry++) { if (!entry->ctrl_id || entry->ctrl_id == ctrl_id) { va_list ap; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_decoder.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_decoder.c index fc1c2bcca..427cd1bf4 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_decoder.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_decoder.c @@ -138,9 +138,10 @@ vpx_codec_err_t vpx_codec_register_put_frame_cb(vpx_codec_ctx_t *ctx, if (!ctx || !cb) res = VPX_CODEC_INVALID_PARAM; - else if (!ctx->iface || !ctx->priv || - !(ctx->iface->caps & VPX_CODEC_CAP_PUT_FRAME)) + else if (!ctx->iface || !ctx->priv) res = VPX_CODEC_ERROR; + else if (!(ctx->iface->caps & VPX_CODEC_CAP_PUT_FRAME)) + res = VPX_CODEC_INCAPABLE; else { ctx->priv->dec.put_frame_cb.u.put_frame = cb; ctx->priv->dec.put_frame_cb.user_priv = user_priv; @@ -157,9 +158,10 @@ vpx_codec_err_t vpx_codec_register_put_slice_cb(vpx_codec_ctx_t *ctx, if (!ctx || !cb) res = VPX_CODEC_INVALID_PARAM; - else if (!ctx->iface || !ctx->priv || - !(ctx->iface->caps & VPX_CODEC_CAP_PUT_SLICE)) + else if (!ctx->iface || !ctx->priv) res = VPX_CODEC_ERROR; + else if (!(ctx->iface->caps & VPX_CODEC_CAP_PUT_SLICE)) + res = VPX_CODEC_INCAPABLE; else { ctx->priv->dec.put_slice_cb.u.put_slice = cb; 
ctx->priv->dec.put_slice_cb.user_priv = user_priv; @@ -176,9 +178,10 @@ vpx_codec_err_t vpx_codec_set_frame_buffer_functions( if (!ctx || !cb_get || !cb_release) { res = VPX_CODEC_INVALID_PARAM; - } else if (!ctx->iface || !ctx->priv || - !(ctx->iface->caps & VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER)) { + } else if (!ctx->iface || !ctx->priv) { res = VPX_CODEC_ERROR; + } else if (!(ctx->iface->caps & VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER)) { + res = VPX_CODEC_INCAPABLE; } else { res = ctx->iface->dec.set_fb_fn(get_alg_priv(ctx), cb_get, cb_release, cb_priv); diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_image.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_image.c index a7c6ec0ce..ff496b5d3 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_image.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/src/vpx_image.c @@ -39,7 +39,8 @@ static vpx_image_t *img_alloc_helper(vpx_image_t *img, vpx_img_fmt_t fmt, /* Get sample size for this format */ switch (fmt) { case VPX_IMG_FMT_I420: - case VPX_IMG_FMT_YV12: bps = 12; break; + case VPX_IMG_FMT_YV12: + case VPX_IMG_FMT_NV12: bps = 12; break; case VPX_IMG_FMT_I422: case VPX_IMG_FMT_I440: bps = 16; break; case VPX_IMG_FMT_I444: bps = 24; break; @@ -51,6 +52,8 @@ static vpx_image_t *img_alloc_helper(vpx_image_t *img, vpx_img_fmt_t fmt, } /* Get chroma shift values for this format */ + // For VPX_IMG_FMT_NV12, xcs needs to be 0 such that UV data is all read at + // one time. 
switch (fmt) { case VPX_IMG_FMT_I420: case VPX_IMG_FMT_YV12: @@ -62,6 +65,7 @@ static vpx_image_t *img_alloc_helper(vpx_image_t *img, vpx_img_fmt_t fmt, switch (fmt) { case VPX_IMG_FMT_I420: + case VPX_IMG_FMT_NV12: case VPX_IMG_FMT_I440: case VPX_IMG_FMT_YV12: case VPX_IMG_FMT_I42016: @@ -173,7 +177,12 @@ int vpx_img_set_rect(vpx_image_t *img, unsigned int x, unsigned int y, data + x * bytes_per_sample + y * img->stride[VPX_PLANE_Y]; data += img->h * img->stride[VPX_PLANE_Y]; - if (!(img->fmt & VPX_IMG_FMT_UV_FLIP)) { + if (img->fmt == VPX_IMG_FMT_NV12) { + img->planes[VPX_PLANE_U] = + data + (x >> img->x_chroma_shift) + + (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_U]; + img->planes[VPX_PLANE_V] = img->planes[VPX_PLANE_U] + 1; + } else if (!(img->fmt & VPX_IMG_FMT_UV_FLIP)) { img->planes[VPX_PLANE_U] = data + (x >> img->x_chroma_shift) * bytes_per_sample + (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_U]; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vp8cx.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vp8cx.h index dcdd710c0..37ad07d33 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vp8cx.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vp8cx.h @@ -17,6 +17,7 @@ */ #include "./vp8.h" #include "./vpx_encoder.h" +#include "./vpx_ext_ratectrl.h" /*!\file * \brief Provides definitions for using VP8 or VP9 encoder algorithm within the @@ -684,6 +685,33 @@ enum vp8e_enc_control_id { * Supported in codecs: VP9 */ VP9E_SET_DELTA_Q_UV, + + /*!\brief Codec control function to disable increase Q on overshoot in CBR. + * + * 0: On (default), 1: Disable. + * + * Supported in codecs: VP9 + */ + VP9E_SET_DISABLE_OVERSHOOT_MAXQ_CBR, + + /*!\brief Codec control function to disable loopfilter. + * + * 0: Loopfilter on all frames, 1: Disable on non reference frames. + * 2: Disable on all frames. 
+ * + * Supported in codecs: VP9 + */ + VP9E_SET_DISABLE_LOOPFILTER, + + /*!\brief Codec control function to enable external rate control library. + * + * args[0]: path of the rate control library + * + * args[1]: private config of the rate control library + * + * Supported in codecs: VP9 + */ + VP9E_SET_EXTERNAL_RATE_CONTROL, }; /*!\brief vpx 1-D scaling mode @@ -1034,6 +1062,15 @@ VPX_CTRL_USE_TYPE(VP9E_SET_POSTENCODE_DROP, unsigned int) VPX_CTRL_USE_TYPE(VP9E_SET_DELTA_Q_UV, int) #define VPX_CTRL_VP9E_SET_DELTA_Q_UV +VPX_CTRL_USE_TYPE(VP9E_SET_DISABLE_OVERSHOOT_MAXQ_CBR, int) +#define VPX_CTRL_VP9E_SET_DISABLE_OVERSHOOT_MAXQ_CBR + +VPX_CTRL_USE_TYPE(VP9E_SET_DISABLE_LOOPFILTER, int) +#define VPX_CTRL_VP9E_SET_DISABLE_LOOPFILTER + +VPX_CTRL_USE_TYPE(VP9E_SET_EXTERNAL_RATE_CONTROL, vpx_rc_funcs_t *) +#define VPX_CTRL_VP9E_SET_EXTERNAL_RATE_CONTROL + /*!\endcond */ /*! @} - end defgroup vp8_encoder */ #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.h index 6371a6ca2..b0a931e01 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.h @@ -22,13 +22,16 @@ * video codec algorithm. * * An application instantiates a specific codec instance by using - * vpx_codec_init() and a pointer to the algorithm's interface structure: + * vpx_codec_dec_init() or vpx_codec_enc_init() and a pointer to the + * algorithm's interface structure: *
  *     my_app.c:
  *       extern vpx_codec_iface_t my_codec;
  *       {
  *           vpx_codec_ctx_t algo;
- *           res = vpx_codec_init(&algo, &my_codec);
+ *           int threads = 4;
+ *           vpx_codec_dec_cfg_t cfg = { threads, 0, 0 };
+ *           res = vpx_codec_dec_init(&algo, &my_codec, &cfg, 0);
  *       }
  *     
* diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.mk b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.mk index 4ed77ad6d..350dc247b 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.mk +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_codec.mk @@ -24,6 +24,7 @@ API_DOC_SRCS-$(CONFIG_VP8_DECODER) += vp8dx.h API_DOC_SRCS-yes += vpx_codec.h API_DOC_SRCS-yes += vpx_decoder.h API_DOC_SRCS-yes += vpx_encoder.h +API_DOC_SRCS-yes += vpx_ext_ratectrl.h API_DOC_SRCS-yes += vpx_frame_buffer.h API_DOC_SRCS-yes += vpx_image.h @@ -39,3 +40,4 @@ API_SRCS-yes += vpx_codec.mk API_SRCS-yes += vpx_frame_buffer.h API_SRCS-yes += vpx_image.h API_SRCS-yes += vpx_integer.h +API_SRCS-yes += vpx_ext_ratectrl.h diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_decoder.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_decoder.h index f113f7196..39e5f585f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_decoder.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_decoder.h @@ -58,6 +58,10 @@ extern "C" { #define VPX_CODEC_CAP_ERROR_CONCEALMENT 0x80000 /*!\brief Can receive encoded frames one fragment at a time */ #define VPX_CODEC_CAP_INPUT_FRAGMENTS 0x100000 +/*!\brief Can support frame-based multi-threading */ +#define VPX_CODEC_CAP_FRAME_THREADING 0x200000 +/*!brief Can support external frame buffers */ +#define VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER 0x400000 /*! \brief Initialization-time Feature Enabling * @@ -66,11 +70,6 @@ extern "C" { * * The available flags are specified by VPX_CODEC_USE_* defines. 
*/ -/*!\brief Can support frame-based multi-threading */ -#define VPX_CODEC_CAP_FRAME_THREADING 0x200000 -/*!brief Can support external frame buffers */ -#define VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER 0x400000 - #define VPX_CODEC_USE_POSTPROC 0x10000 /**< Postprocess decoded frame */ /*!\brief Conceal errors in decoded frames */ #define VPX_CODEC_USE_ERROR_CONCEALMENT 0x20000 @@ -185,8 +184,8 @@ vpx_codec_err_t vpx_codec_get_stream_info(vpx_codec_ctx_t *ctx, /*!\brief Decode data * * Processes a buffer of coded data. If the processing results in a new - * decoded frame becoming available, PUT_SLICE and PUT_FRAME events may be - * generated, as appropriate. Encoded data \ref MUST be passed in DTS (decode + * decoded frame becoming available, put_slice and put_frame callbacks may be + * invoked, as appropriate. Encoded data \ref MUST be passed in DTS (decode * time stamp) order. Frames produced will always be in PTS (presentation * time stamp) order. * If the decoder is configured with VPX_CODEC_USE_INPUT_FRAGMENTS enabled, @@ -199,8 +198,8 @@ vpx_codec_err_t vpx_codec_get_stream_info(vpx_codec_ctx_t *ctx, * * \param[in] ctx Pointer to this instance's context * \param[in] data Pointer to this block of new coded data. If - * NULL, a VPX_CODEC_CB_PUT_FRAME event is posted - * for the previously decoded frame. + * NULL, the put_frame callback is invoked for + * the previously decoded frame. * \param[in] data_sz Size of the coded data, in bytes. * \param[in] user_priv Application specific data to associate with * this frame. @@ -236,11 +235,10 @@ vpx_image_t *vpx_codec_get_frame(vpx_codec_ctx_t *ctx, vpx_codec_iter_t *iter); /*!\defgroup cap_put_frame Frame-Based Decoding Functions * - * The following functions are required to be implemented for all decoders - * that advertise the VPX_CODEC_CAP_PUT_FRAME capability. 
Calling these - * functions - * for codecs that don't advertise this capability will result in an error - * code being returned, usually VPX_CODEC_ERROR + * The following function is required to be implemented for all decoders + * that advertise the VPX_CODEC_CAP_PUT_FRAME capability. Calling this + * function for codecs that don't advertise this capability will result in + * an error code being returned, usually VPX_CODEC_INCAPABLE. * @{ */ @@ -264,8 +262,9 @@ typedef void (*vpx_codec_put_frame_cb_fn_t)(void *user_priv, * \retval #VPX_CODEC_OK * Callback successfully registered. * \retval #VPX_CODEC_ERROR - * Decoder context not initialized, or algorithm not capable of - * posting slice completion. + * Decoder context not initialized. + * \retval #VPX_CODEC_INCAPABLE + * Algorithm not capable of posting frame completion. */ vpx_codec_err_t vpx_codec_register_put_frame_cb(vpx_codec_ctx_t *ctx, vpx_codec_put_frame_cb_fn_t cb, @@ -275,18 +274,17 @@ vpx_codec_err_t vpx_codec_register_put_frame_cb(vpx_codec_ctx_t *ctx, /*!\defgroup cap_put_slice Slice-Based Decoding Functions * - * The following functions are required to be implemented for all decoders - * that advertise the VPX_CODEC_CAP_PUT_SLICE capability. Calling these - * functions - * for codecs that don't advertise this capability will result in an error - * code being returned, usually VPX_CODEC_ERROR + * The following function is required to be implemented for all decoders + * that advertise the VPX_CODEC_CAP_PUT_SLICE capability. Calling this + * function for codecs that don't advertise this capability will result in + * an error code being returned, usually VPX_CODEC_INCAPABLE. * @{ */ /*!\brief put slice callback prototype * * This callback is invoked by the decoder to notify the application of - * the availability of partially decoded image data. The + * the availability of partially decoded image data. 
*/ typedef void (*vpx_codec_put_slice_cb_fn_t)(void *user_priv, const vpx_image_t *img, @@ -305,8 +303,9 @@ typedef void (*vpx_codec_put_slice_cb_fn_t)(void *user_priv, * \retval #VPX_CODEC_OK * Callback successfully registered. * \retval #VPX_CODEC_ERROR - * Decoder context not initialized, or algorithm not capable of - * posting slice completion. + * Decoder context not initialized. + * \retval #VPX_CODEC_INCAPABLE + * Algorithm not capable of posting slice completion. */ vpx_codec_err_t vpx_codec_register_put_slice_cb(vpx_codec_ctx_t *ctx, vpx_codec_put_slice_cb_fn_t cb, @@ -316,10 +315,10 @@ vpx_codec_err_t vpx_codec_register_put_slice_cb(vpx_codec_ctx_t *ctx, /*!\defgroup cap_external_frame_buffer External Frame Buffer Functions * - * The following section is required to be implemented for all decoders + * The following function is required to be implemented for all decoders * that advertise the VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER capability. * Calling this function for codecs that don't advertise this capability - * will result in an error code being returned, usually VPX_CODEC_ERROR. + * will result in an error code being returned, usually VPX_CODEC_INCAPABLE. * * \note * Currently this only works with VP9. @@ -344,8 +343,9 @@ vpx_codec_err_t vpx_codec_register_put_slice_cb(vpx_codec_ctx_t *ctx, * \retval #VPX_CODEC_INVALID_PARAM * One or more of the callbacks were NULL. * \retval #VPX_CODEC_ERROR - * Decoder context not initialized, or algorithm not capable of - * using external frame buffers. + * Decoder context not initialized. + * \retval #VPX_CODEC_INCAPABLE + * Algorithm not capable of using external frame buffers. 
* * \note * When decoding VP9, the application may be required to pass in at least diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_encoder.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_encoder.h index c84d40f7f..39b2aef62 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_encoder.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_encoder.h @@ -705,6 +705,7 @@ typedef struct vpx_svc_parameters { int scaling_factor_den[VPX_MAX_LAYERS]; /**< Scaling factor-denominator */ int speed_per_layer[VPX_MAX_LAYERS]; /**< Speed setting for each sl */ int temporal_layering_mode; /**< Temporal layering mode */ + int loopfilter_ctrl[VPX_MAX_LAYERS]; /**< Loopfilter ctrl for each sl */ } vpx_svc_extra_cfg_t; /*!\brief Initialize an encoder instance diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_ext_ratectrl.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_ext_ratectrl.h new file mode 100644 index 000000000..bb3caa614 --- /dev/null +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_ext_ratectrl.h @@ -0,0 +1,337 @@ +/* + * Copyright (c) 2020 The WebM project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VPX_VPX_VPX_EXT_RATECTRL_H_ +#define VPX_VPX_VPX_EXT_RATECTRL_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +#include "./vpx_integer.h" + +/*!\brief Abstract rate control model handler + * + * The encoder will receive the model handler from create_model() defined in + * vpx_rc_funcs_t. 
+ */ +typedef void *vpx_rc_model_t; + +/*!\brief Encode frame decision made by the external rate control model + * + * The encoder will receive the decision from the external rate control model + * through get_encodeframe_decision() defined in vpx_rc_funcs_t. + */ +typedef struct vpx_rc_encodeframe_decision { + int q_index; /**< Quantizer step index [0..255]*/ +} vpx_rc_encodeframe_decision_t; + +/*!\brief Information for the frame to be encoded. + * + * The encoder will send the information to external rate control model through + * get_encodeframe_decision() defined in vpx_rc_funcs_t. + * + */ +typedef struct vpx_rc_encodeframe_info { + /*! + * 0: Key frame + * 1: Inter frame + * 2: Alternate reference frame + * 3: Overlay frame + * 4: Golden frame + */ + int frame_type; + int show_index; /**< display index, starts from zero*/ + int coding_index; /**< coding index, starts from zero*/ + int ref_frame_coding_indexes[3]; /**< three reference frames' coding indices*/ + /*! + * The validity of the three reference frames. + * 0: Invalid + * 1: Valid + */ + int ref_frame_valid_list[3]; +} vpx_rc_encodeframe_info_t; + +/*!\brief Frame coding result + * + * The encoder will send the result to the external rate control model through + * update_encodeframe_result() defined in vpx_rc_funcs_t. + */ +typedef struct vpx_rc_encodeframe_result { + int64_t sse; /**< sum of squared error of the reconstructed frame */ + int64_t bit_count; /**< number of bits spent on coding the frame*/ + int64_t pixel_count; /**< number of pixels in YUV planes of the frame*/ +} vpx_rc_encodeframe_result_t; + +/*!\brief Status returned by rate control callback functions. + */ +typedef enum vpx_rc_status { + VPX_RC_OK = 0, + VPX_RC_ERROR = 1, +} vpx_rc_status_t; + +/*!\brief First pass frame stats + * This is a mirror of vp9's FIRSTPASS_STATS except that spatial_layer_id is + * omitted + */ +typedef struct vpx_rc_frame_stats { + /*! + * Frame number in display order, if stats are for a single frame. 
+ * No real meaning for a collection of frames. + */ + double frame; + /*! + * Weight assigned to this frame (or total weight for the collection of + * frames) currently based on intra factor and brightness factor. This is used + * to distribute bits between easier and harder frames. + */ + double weight; + /*! + * Intra prediction error. + */ + double intra_error; + /*! + * Best of intra pred error and inter pred error using last frame as ref. + */ + double coded_error; + /*! + * Best of intra pred error and inter pred error using golden frame as ref. + */ + double sr_coded_error; + /*! + * Estimate the noise energy of the current frame. + */ + double frame_noise_energy; + /*! + * Percentage of blocks with inter pred error < intra pred error. + */ + double pcnt_inter; + /*! + * Percentage of blocks using (inter prediction and) non-zero motion vectors. + */ + double pcnt_motion; + /*! + * Percentage of blocks where golden frame was better than last or intra: + * inter pred error using golden frame < inter pred error using last frame and + * inter pred error using golden frame < intra pred error + */ + double pcnt_second_ref; + /*! + * Percentage of blocks where intra and inter prediction errors were very + * close. Note that this is a 'weighted count', that is, the so blocks may be + * weighted by how close the two errors were. + */ + double pcnt_neutral; + /*! + * Percentage of blocks that have intra error < inter error and inter error < + * LOW_I_THRESH LOW_I_THRESH = 24000 using bit_depth 8 LOW_I_THRESH = 24000 << + * 4 using bit_depth 10 LOW_I_THRESH = 24000 << 8 using bit_depth 12 + */ + double pcnt_intra_low; + /*! + * Percentage of blocks that have intra error < inter error and intra error < + * LOW_I_THRESH but inter error >= LOW_I_THRESH LOW_I_THRESH = 24000 using + * bit_depth 8 LOW_I_THRESH = 24000 << 4 using bit_depth 10 LOW_I_THRESH = + * 24000 << 8 using bit_depth 12 + */ + double pcnt_intra_high; + /*! 
+ * Percentage of blocks that have almost no intra error residual + * (i.e. are in effect completely flat and untextured in the intra + * domain). In natural videos this is uncommon, but it is much more + * common in animations, graphics and screen content, so may be used + * as a signal to detect these types of content. + */ + double intra_skip_pct; + /*! + * Percentage of blocks that have intra error < SMOOTH_INTRA_THRESH + * SMOOTH_INTRA_THRESH = 4000 using bit_depth 8 + * SMOOTH_INTRA_THRESH = 4000 << 4 using bit_depth 10 + * SMOOTH_INTRA_THRESH = 4000 << 8 using bit_depth 12 + */ + double intra_smooth_pct; + /*! + * Image mask rows top and bottom. + */ + double inactive_zone_rows; + /*! + * Image mask columns at left and right edges. + */ + double inactive_zone_cols; + /*! + * Average of row motion vectors. + */ + double MVr; + /*! + * Mean of absolute value of row motion vectors. + */ + double mvr_abs; + /*! + * Mean of column motion vectors. + */ + double MVc; + /*! + * Mean of absolute value of column motion vectors. + */ + double mvc_abs; + /*! + * Variance of row motion vectors. + */ + double MVrv; + /*! + * Variance of column motion vectors. + */ + double MVcv; + /*! + * Value in range [-1,1] indicating fraction of row and column motion vectors + * that point inwards (negative MV value) or outwards (positive MV value). + * For example, value of 1 indicates, all row/column MVs are inwards. + */ + double mv_in_out_count; + /*! + * Duration of the frame / collection of frames. + */ + double duration; + /*! + * 1.0 if stats are for a single frame, OR + * Number of frames in this collection for which the stats are accumulated. + */ + double count; +} vpx_rc_frame_stats_t; + +/*!\brief Collection of first pass frame stats + */ +typedef struct vpx_rc_firstpass_stats { + /*! + * Pointer to first pass frame stats. + * The pointed array of vpx_rc_frame_stats_t should have length equal to + * number of show frames in the video. 
+ */ + vpx_rc_frame_stats_t *frame_stats; + /*! + * Number of show frames in the video. + */ + int num_frames; +} vpx_rc_firstpass_stats_t; + +/*!\brief Encode config sent to external rate control model + */ +typedef struct vpx_rc_config { + int frame_width; /**< frame width */ + int frame_height; /**< frame height */ + int show_frame_count; /**< number of visible frames in the video */ + /*! + * Target bitrate in kilobytes per second + */ + int target_bitrate_kbps; + int frame_rate_num; /**< numerator of frame rate */ + int frame_rate_den; /**< denominator of frame rate */ +} vpx_rc_config_t; + +/*!\brief Create an external rate control model callback prototype + * + * This callback is invoked by the encoder to create an external rate control + * model. + * + * \param[in] priv Callback's private data + * \param[in] ratectrl_config Pointer to vpx_rc_config_t + * \param[out] rate_ctrl_model_pt Pointer to vpx_rc_model_t + */ +typedef vpx_rc_status_t (*vpx_rc_create_model_cb_fn_t)( + void *priv, const vpx_rc_config_t *ratectrl_config, + vpx_rc_model_t *rate_ctrl_model_pt); + +/*!\brief Send first pass stats to the external rate control model callback + * prototype + * + * This callback is invoked by the encoder to send first pass stats to the + * external rate control model. + * + * \param[in] rate_ctrl_model rate control model + * \param[in] first_pass_stats first pass stats + */ +typedef vpx_rc_status_t (*vpx_rc_send_firstpass_stats_cb_fn_t)( + vpx_rc_model_t rate_ctrl_model, + const vpx_rc_firstpass_stats_t *first_pass_stats); + +/*!\brief Receive encode frame decision callback prototype + * + * This callback is invoked by the encoder to receive encode frame decision from + * the external rate control model. 
+ * + * \param[in] rate_ctrl_model rate control model + * \param[in] encode_frame_info information of the coding frame + * \param[out] frame_decision encode decision of the coding frame + */ +typedef vpx_rc_status_t (*vpx_rc_get_encodeframe_decision_cb_fn_t)( + vpx_rc_model_t rate_ctrl_model, + const vpx_rc_encodeframe_info_t *encode_frame_info, + vpx_rc_encodeframe_decision_t *frame_decision); + +/*!\brief Update encode frame result callback prototype + * + * This callback is invoked by the encoder to update encode frame result to the + * external rate control model. + * + * \param[in] rate_ctrl_model rate control model + * \param[out] encode_frame_result encode result of the coding frame + */ +typedef vpx_rc_status_t (*vpx_rc_update_encodeframe_result_cb_fn_t)( + vpx_rc_model_t rate_ctrl_model, + const vpx_rc_encodeframe_result_t *encode_frame_result); + +/*!\brief Delete the external rate control model callback prototype + * + * This callback is invoked by the encoder to delete the external rate control + * model. + * + * \param[in] rate_ctrl_model rate control model + */ +typedef vpx_rc_status_t (*vpx_rc_delete_model_cb_fn_t)( + vpx_rc_model_t rate_ctrl_model); + +/*!\brief Callback function set for external rate control. + * + * The user can enable external rate control by registering + * a set of callback functions with the codec control flag + * VP9E_SET_EXTERNAL_RATE_CONTROL. + */ +typedef struct vpx_rc_funcs { + /*! + * Create an external rate control model. + */ + vpx_rc_create_model_cb_fn_t create_model; + /*! + * Send first pass stats to the external rate control model. + */ + vpx_rc_send_firstpass_stats_cb_fn_t send_firstpass_stats; + /*! + * Get encodeframe decision from the external rate control model. + */ + vpx_rc_get_encodeframe_decision_cb_fn_t get_encodeframe_decision; + /*! + * Update encodeframe result to the external rate control model. + */ + vpx_rc_update_encodeframe_result_cb_fn_t update_encodeframe_result; + /*! 
+ * Delete the external rate control model. + */ + vpx_rc_delete_model_cb_fn_t delete_model; + /*! + * Private data for the external rate control model. + */ + void *priv; +} vpx_rc_funcs_t; + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // VPX_VPX_VPX_EXT_RATECTRL_H_ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_image.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_image.h index 98be5966a..bc23be50c 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_image.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx/vpx_image.h @@ -43,6 +43,7 @@ typedef enum vpx_img_fmt { VPX_IMG_FMT_I422 = VPX_IMG_FMT_PLANAR | 5, VPX_IMG_FMT_I444 = VPX_IMG_FMT_PLANAR | 6, VPX_IMG_FMT_I440 = VPX_IMG_FMT_PLANAR | 7, + VPX_IMG_FMT_NV12 = VPX_IMG_FMT_PLANAR | 9, VPX_IMG_FMT_I42016 = VPX_IMG_FMT_I420 | VPX_IMG_FMT_HIGHBITDEPTH, VPX_IMG_FMT_I42216 = VPX_IMG_FMT_I422 | VPX_IMG_FMT_HIGHBITDEPTH, VPX_IMG_FMT_I44416 = VPX_IMG_FMT_I444 | VPX_IMG_FMT_HIGHBITDEPTH, diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/macros_msa.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/macros_msa.h index a3a5a4dfe..3c2f50c79 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/macros_msa.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/macros_msa.h @@ -88,10 +88,10 @@ const uint8_t *psrc_lw_m = (const uint8_t *)(psrc); \ uint32_t val_lw_m; \ \ - __asm__ __volatile__("ulw %[val_lw_m], %[psrc_lw_m] \n\t" \ - \ - : [val_lw_m] "=r"(val_lw_m) \ - : [psrc_lw_m] "m"(*psrc_lw_m)); \ + __asm__ __volatile__("lwr %[val_lw_m], 0(%[psrc_lw_m]) \n\t" \ + "lwl %[val_lw_m], 3(%[psrc_lw_m]) \n\t" \ + : [val_lw_m] "=&r"(val_lw_m) \ + : [psrc_lw_m] "r"(psrc_lw_m)); \ \ val_lw_m; \ }) @@ -102,10 +102,10 @@ const uint8_t *psrc_ld_m = (const uint8_t *)(psrc); \ uint64_t val_ld_m = 0; \ \ - __asm__ __volatile__("uld %[val_ld_m], %[psrc_ld_m] \n\t" \ - \ - : 
[val_ld_m] "=r"(val_ld_m) \ - : [psrc_ld_m] "m"(*psrc_ld_m)); \ + __asm__ __volatile__("ldr %[val_ld_m], 0(%[psrc_ld_m]) \n\t" \ + "ldl %[val_ld_m], 7(%[psrc_ld_m]) \n\t" \ + : [val_ld_m] "=&r"(val_ld_m) \ + : [psrc_ld_m] "r"(psrc_ld_m)); \ \ val_ld_m; \ }) diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/sad_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/sad_mmi.c index 4368db5fd..eaca4773f 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/sad_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/sad_mmi.c @@ -364,8 +364,9 @@ static inline unsigned int vpx_sad64x(const uint8_t *src, int src_stride, double ftmp1, ftmp2, ftmp3, ftmp4, ftmp5; mips_reg l_counter = counter; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. SAD_SRC_REF_ABS_SUB_64 @@ -383,6 +384,7 @@ static inline unsigned int vpx_sad64x(const uint8_t *src, int src_stride, : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -405,9 +407,11 @@ static inline unsigned int vpx_sad_avg64x(const uint8_t *src, int src_stride, unsigned int sad; double ftmp1, ftmp2, ftmp3, ftmp4, ftmp5; mips_reg l_counter = counter; + mips_reg l_second_pred = (mips_reg)second_pred; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. 
SAD_SRC_AVGREF_ABS_SUB_64 @@ -424,11 +428,12 @@ static inline unsigned int vpx_sad_avg64x(const uint8_t *src, int src_stride, : [ftmp1]"=&f"(ftmp1), [ftmp2]"=&f"(ftmp2), [ftmp3]"=&f"(ftmp3), [ftmp4]"=&f"(ftmp4), [ftmp5]"=&f"(ftmp5), [counter]"+&r"(l_counter), [src]"+&r"(src), [ref]"+&r"(ref), - [second_pred]"+&r"((mips_reg)second_pred), + [second_pred]"+&r"(l_second_pred), [sad]"=&r"(sad) : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -450,8 +455,9 @@ static inline unsigned int vpx_sad32x(const uint8_t *src, int src_stride, double ftmp1, ftmp2, ftmp3, ftmp4, ftmp5; mips_reg l_counter = counter; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. SAD_SRC_REF_ABS_SUB_32 @@ -469,6 +475,7 @@ static inline unsigned int vpx_sad32x(const uint8_t *src, int src_stride, : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -493,9 +500,11 @@ static inline unsigned int vpx_sad_avg32x(const uint8_t *src, int src_stride, unsigned int sad; double ftmp1, ftmp2, ftmp3, ftmp4, ftmp5; mips_reg l_counter = counter; + mips_reg l_second_pred = (mips_reg)second_pred; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. 
SAD_SRC_AVGREF_ABS_SUB_32 @@ -512,11 +521,12 @@ static inline unsigned int vpx_sad_avg32x(const uint8_t *src, int src_stride, : [ftmp1]"=&f"(ftmp1), [ftmp2]"=&f"(ftmp2), [ftmp3]"=&f"(ftmp3), [ftmp4]"=&f"(ftmp4), [ftmp5]"=&f"(ftmp5), [counter]"+&r"(l_counter), [src]"+&r"(src), [ref]"+&r"(ref), - [second_pred]"+&r"((mips_reg)second_pred), + [second_pred]"+&r"(l_second_pred), [sad]"=&r"(sad) : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -539,8 +549,9 @@ static inline unsigned int vpx_sad16x(const uint8_t *src, int src_stride, double ftmp1, ftmp2, ftmp3, ftmp4, ftmp5; mips_reg l_counter = counter; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. SAD_SRC_REF_ABS_SUB_16 @@ -558,6 +569,7 @@ static inline unsigned int vpx_sad16x(const uint8_t *src, int src_stride, : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -586,9 +598,11 @@ static inline unsigned int vpx_sad_avg16x(const uint8_t *src, int src_stride, unsigned int sad; double ftmp1, ftmp2, ftmp3, ftmp4, ftmp5; mips_reg l_counter = counter; + mips_reg l_second_pred = (mips_reg)second_pred; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" + "pxor %[ftmp5], %[ftmp5], %[ftmp5] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. 
SAD_SRC_AVGREF_ABS_SUB_16 @@ -605,11 +619,12 @@ static inline unsigned int vpx_sad_avg16x(const uint8_t *src, int src_stride, : [ftmp1]"=&f"(ftmp1), [ftmp2]"=&f"(ftmp2), [ftmp3]"=&f"(ftmp3), [ftmp4]"=&f"(ftmp4), [ftmp5]"=&f"(ftmp5), [counter]"+&r"(l_counter), [src]"+&r"(src), [ref]"+&r"(ref), - [second_pred]"+&r"((mips_reg)second_pred), + [second_pred]"+&r"(l_second_pred), [sad]"=&r"(sad) : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -632,8 +647,9 @@ static inline unsigned int vpx_sad8x(const uint8_t *src, int src_stride, double ftmp1, ftmp2, ftmp3; mips_reg l_counter = counter; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. SAD_SRC_REF_ABS_SUB_8 @@ -651,6 +667,7 @@ static inline unsigned int vpx_sad8x(const uint8_t *src, int src_stride, : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -679,9 +696,11 @@ static inline unsigned int vpx_sad_avg8x(const uint8_t *src, int src_stride, unsigned int sad; double ftmp1, ftmp2, ftmp3; mips_reg l_counter = counter; + mips_reg l_second_pred = (mips_reg)second_pred; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. 
SAD_SRC_AVGREF_ABS_SUB_8 @@ -697,11 +716,12 @@ static inline unsigned int vpx_sad_avg8x(const uint8_t *src, int src_stride, "mfc1 %[sad], %[ftmp3] \n\t" : [ftmp1]"=&f"(ftmp1), [ftmp2]"=&f"(ftmp2), [ftmp3]"=&f"(ftmp3), [counter]"+&r"(l_counter), [src]"+&r"(src), [ref]"+&r"(ref), - [second_pred]"+&r"((mips_reg)second_pred), + [second_pred]"+&r"(l_second_pred), [sad]"=&r"(sad) : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -724,8 +744,9 @@ static inline unsigned int vpx_sad4x(const uint8_t *src, int src_stride, double ftmp1, ftmp2, ftmp3; mips_reg l_counter = counter; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. SAD_SRC_REF_ABS_SUB_4 @@ -743,6 +764,7 @@ static inline unsigned int vpx_sad4x(const uint8_t *src, int src_stride, : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } @@ -767,9 +789,11 @@ static inline unsigned int vpx_sad_avg4x(const uint8_t *src, int src_stride, unsigned int sad; double ftmp1, ftmp2, ftmp3; mips_reg l_counter = counter; + mips_reg l_second_pred = (mips_reg)second_pred; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" + "pxor %[ftmp3], %[ftmp3], %[ftmp3] \n\t" "1: \n\t" // Include two loop body, to reduce loop time. 
SAD_SRC_AVGREF_ABS_SUB_4 @@ -785,11 +809,12 @@ static inline unsigned int vpx_sad_avg4x(const uint8_t *src, int src_stride, "mfc1 %[sad], %[ftmp3] \n\t" : [ftmp1]"=&f"(ftmp1), [ftmp2]"=&f"(ftmp2), [ftmp3]"=&f"(ftmp3), [counter]"+&r"(l_counter), [src]"+&r"(src), [ref]"+&r"(ref), - [second_pred]"+&r"((mips_reg)second_pred), + [second_pred]"+&r"(l_second_pred), [sad]"=&r"(sad) : [src_stride]"r"((mips_reg)src_stride), [ref_stride]"r"((mips_reg)ref_stride) ); + /* clang-format on */ return sad; } diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/subtract_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/subtract_mmi.c index 9f361704a..8bd7e6977 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/subtract_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/subtract_mmi.c @@ -24,7 +24,7 @@ void vpx_subtract_block_mmi(int rows, int cols, int16_t *diff, switch (rows) { case 4: __asm__ volatile( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" #if _MIPS_SIM == _ABIO32 "ulw %[tmp0], 0x00(%[src]) \n\t" "mtc1 %[tmp0], %[ftmp1] \n\t" @@ -118,7 +118,7 @@ void vpx_subtract_block_mmi(int rows, int cols, int16_t *diff, break; case 8: __asm__ volatile( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "li %[tmp0], 0x02 \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src]) \n\t" @@ -206,7 +206,7 @@ void vpx_subtract_block_mmi(int rows, int cols, int16_t *diff, break; case 16: __asm__ volatile( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "li %[tmp0], 0x08 \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src]) \n\t" diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/variance_mmi.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/variance_mmi.c index c1780c33a..c2adcfa01 100644 --- 
a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/variance_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/variance_mmi.c @@ -150,7 +150,7 @@ static const uint8_t bilinear_filters[8][2] = { "psrlh %[ftmp2], %[ftmp2], %[ftmp6] \n\t" \ \ /* store: temp2[0] ~ temp2[3] */ \ - "and %[ftmp2], %[ftmp2], %[mask] \n\t" \ + "pand %[ftmp2], %[ftmp2], %[mask] \n\t" \ "packushb %[ftmp2], %[ftmp2], %[ftmp0] \n\t" \ "gssdrc1 %[ftmp2], 0x00(%[temp2_ptr]) \n\t" @@ -163,7 +163,7 @@ static const uint8_t bilinear_filters[8][2] = { "psrlh %[ftmp4], %[ftmp4], %[ftmp6] \n\t" \ \ /* store: temp2[0] ~ temp2[3] */ \ - "and %[ftmp4], %[ftmp4], %[mask] \n\t" \ + "pand %[ftmp4], %[ftmp4], %[mask] \n\t" \ "packushb %[ftmp4], %[ftmp4], %[ftmp0] \n\t" \ "gssdrc1 %[ftmp4], 0x00(%[temp2_ptr]) \n\t" @@ -225,8 +225,8 @@ static const uint8_t bilinear_filters[8][2] = { "psrlh %[ftmp3], %[ftmp3], %[ftmp14] \n\t" \ \ /* store: temp2[0] ~ temp2[7] */ \ - "and %[ftmp2], %[ftmp2], %[mask] \n\t" \ - "and %[ftmp3], %[ftmp3], %[mask] \n\t" \ + "pand %[ftmp2], %[ftmp2], %[mask] \n\t" \ + "pand %[ftmp3], %[ftmp3], %[mask] \n\t" \ "packushb %[ftmp2], %[ftmp2], %[ftmp3] \n\t" \ "gssdlc1 %[ftmp2], 0x07(%[temp2_ptr]) \n\t" \ "gssdrc1 %[ftmp2], 0x00(%[temp2_ptr]) \n\t" @@ -247,8 +247,8 @@ static const uint8_t bilinear_filters[8][2] = { "psrlh %[ftmp9], %[ftmp9], %[ftmp14] \n\t" \ \ /* store: temp2[0] ~ temp2[7] */ \ - "and %[ftmp8], %[ftmp8], %[mask] \n\t" \ - "and %[ftmp9], %[ftmp9], %[mask] \n\t" \ + "pand %[ftmp8], %[ftmp8], %[mask] \n\t" \ + "pand %[ftmp9], %[ftmp9], %[mask] \n\t" \ "packushb %[ftmp8], %[ftmp8], %[ftmp9] \n\t" \ "gssdlc1 %[ftmp8], 0x07(%[temp2_ptr]) \n\t" \ "gssdrc1 %[ftmp8], 0x00(%[temp2_ptr]) \n\t" @@ -319,8 +319,8 @@ static const uint8_t bilinear_filters[8][2] = { "psrlh %[ftmp5], %[ftmp5], %[ftmp14] \n\t" \ \ /* store: temp2[8] ~ temp2[15] */ \ - "and %[ftmp4], %[ftmp4], %[mask] \n\t" \ - "and %[ftmp5], %[ftmp5], %[mask] \n\t" \ + "pand %[ftmp4], 
%[ftmp4], %[mask] \n\t" \ + "pand %[ftmp5], %[ftmp5], %[mask] \n\t" \ "packushb %[ftmp4], %[ftmp4], %[ftmp5] \n\t" \ "gssdlc1 %[ftmp4], 0x0f(%[temp2_ptr]) \n\t" \ "gssdrc1 %[ftmp4], 0x08(%[temp2_ptr]) \n\t" @@ -343,8 +343,8 @@ static const uint8_t bilinear_filters[8][2] = { "psrlh %[ftmp11], %[ftmp11], %[ftmp14] \n\t" \ \ /* store: temp2[8] ~ temp2[15] */ \ - "and %[ftmp10], %[ftmp10], %[mask] \n\t" \ - "and %[ftmp11], %[ftmp11], %[mask] \n\t" \ + "pand %[ftmp10], %[ftmp10], %[mask] \n\t" \ + "pand %[ftmp11], %[ftmp11], %[mask] \n\t" \ "packushb %[ftmp10], %[ftmp10], %[ftmp11] \n\t" \ "gssdlc1 %[ftmp10], 0x0f(%[temp2_ptr]) \n\t" \ "gssdrc1 %[ftmp10], 0x08(%[temp2_ptr]) \n\t" @@ -414,13 +414,14 @@ static inline uint32_t vpx_variance64x(const uint8_t *src_ptr, int src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp11] \n\t" MMI_L(%[tmp0], %[high], 0x00) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp9], %[ftmp9], %[ftmp9] \n\t" - "xor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp9], %[ftmp9], %[ftmp9] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src_ptr]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[src_ptr]) \n\t" @@ -478,7 +479,7 @@ static inline uint32_t vpx_variance64x(const uint8_t *src_ptr, int src_stride, "mfc1 %[tmp1], %[ftmp9] \n\t" "mfhc1 %[tmp2], %[ftmp9] \n\t" "addu %[sum], %[tmp1], %[tmp2] \n\t" - "dsrl %[ftmp1], %[ftmp10], %[ftmp11] \n\t" + "ssrld %[ftmp1], %[ftmp10], %[ftmp11] \n\t" "paddw %[ftmp1], %[ftmp1], %[ftmp10] \n\t" "swc1 %[ftmp1], 0x00(%[sse]) \n\t" : [ftmp0]"=&f"(ftmp[0]), [ftmp1]"=&f"(ftmp[1]), @@ -496,6 +497,7 @@ static inline uint32_t vpx_variance64x(const uint8_t *src_ptr, int src_stride, [high]"r"(&high), [sse]"r"(sse) : "memory" ); + /* clang-format on */ return *sse - (((int64_t)sum * sum) / (64 * high)); } @@ -519,13 +521,14 @@ uint32_t vpx_variance32x64_mmi(const uint8_t *src_ptr, int 
src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp11] \n\t" "li %[tmp0], 0x40 \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp9], %[ftmp9], %[ftmp9] \n\t" - "xor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp9], %[ftmp9], %[ftmp9] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src_ptr]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[src_ptr]) \n\t" @@ -559,7 +562,7 @@ uint32_t vpx_variance32x64_mmi(const uint8_t *src_ptr, int src_stride, "mfc1 %[tmp1], %[ftmp9] \n\t" "mfhc1 %[tmp2], %[ftmp9] \n\t" "addu %[sum], %[tmp1], %[tmp2] \n\t" - "dsrl %[ftmp1], %[ftmp10], %[ftmp11] \n\t" + "ssrld %[ftmp1], %[ftmp10], %[ftmp11] \n\t" "paddw %[ftmp1], %[ftmp1], %[ftmp10] \n\t" "swc1 %[ftmp1], 0x00(%[sse]) \n\t" : [ftmp0]"=&f"(ftmp[0]), [ftmp1]"=&f"(ftmp[1]), @@ -577,6 +580,7 @@ uint32_t vpx_variance32x64_mmi(const uint8_t *src_ptr, int src_stride, [sse]"r"(sse) : "memory" ); + /* clang-format on */ return *sse - (((int64_t)sum * sum) / 2048); } @@ -590,14 +594,15 @@ static inline uint32_t vpx_variance32x(const uint8_t *src_ptr, int src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp11] \n\t" MMI_L(%[tmp0], %[high], 0x00) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" - "xor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" - "xor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" + "pxor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src_ptr]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[src_ptr]) \n\t" @@ -625,7 +630,7 @@ static inline uint32_t vpx_variance32x(const uint8_t *src_ptr, int src_stride, MMI_ADDU(%[ref_ptr], %[ref_ptr], %[ref_stride]) "bnez %[tmp0], 1b \n\t" - "dsrl %[ftmp9], %[ftmp8], %[ftmp11] \n\t" + 
"ssrld %[ftmp9], %[ftmp8], %[ftmp11] \n\t" "paddw %[ftmp9], %[ftmp9], %[ftmp8] \n\t" "swc1 %[ftmp9], 0x00(%[sse]) \n\t" @@ -636,7 +641,7 @@ static inline uint32_t vpx_variance32x(const uint8_t *src_ptr, int src_stride, "paddw %[ftmp3], %[ftmp3], %[ftmp4] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp5] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp6] \n\t" - "dsrl %[ftmp0], %[ftmp3], %[ftmp11] \n\t" + "ssrld %[ftmp0], %[ftmp3], %[ftmp11] \n\t" "paddw %[ftmp0], %[ftmp0], %[ftmp3] \n\t" "swc1 %[ftmp0], 0x00(%[sum]) \n\t" @@ -653,6 +658,7 @@ static inline uint32_t vpx_variance32x(const uint8_t *src_ptr, int src_stride, [high]"r"(&high), [sse]"r"(sse), [sum]"r"(&sum) : "memory" ); + /* clang-format on */ return *sse - (((int64_t)sum * sum) / (32 * high)); } @@ -676,14 +682,15 @@ static inline uint32_t vpx_variance16x(const uint8_t *src_ptr, int src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp11] \n\t" MMI_L(%[tmp0], %[high], 0x00) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" - "xor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" - "xor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" + "pxor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src_ptr]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[src_ptr]) \n\t" @@ -701,7 +708,7 @@ static inline uint32_t vpx_variance16x(const uint8_t *src_ptr, int src_stride, MMI_ADDU(%[ref_ptr], %[ref_ptr], %[ref_stride]) "bnez %[tmp0], 1b \n\t" - "dsrl %[ftmp9], %[ftmp8], %[ftmp11] \n\t" + "ssrld %[ftmp9], %[ftmp8], %[ftmp11] \n\t" "paddw %[ftmp9], %[ftmp9], %[ftmp8] \n\t" "swc1 %[ftmp9], 0x00(%[sse]) \n\t" @@ -712,7 +719,7 @@ static inline uint32_t vpx_variance16x(const uint8_t *src_ptr, int src_stride, "paddw %[ftmp3], %[ftmp3], %[ftmp4] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp5] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp6] \n\t" - 
"dsrl %[ftmp0], %[ftmp3], %[ftmp11] \n\t" + "ssrld %[ftmp0], %[ftmp3], %[ftmp11] \n\t" "paddw %[ftmp0], %[ftmp0], %[ftmp3] \n\t" "swc1 %[ftmp0], 0x00(%[sum]) \n\t" @@ -729,6 +736,7 @@ static inline uint32_t vpx_variance16x(const uint8_t *src_ptr, int src_stride, [high]"r"(&high), [sse]"r"(sse), [sum]"r"(&sum) : "memory" ); + /* clang-format on */ return *sse - (((int64_t)sum * sum) / (16 * high)); } @@ -753,14 +761,15 @@ static inline uint32_t vpx_variance8x(const uint8_t *src_ptr, int src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp11] \n\t" MMI_L(%[tmp0], %[high], 0x00) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" - "xor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" - "xor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "pxor %[ftmp10], %[ftmp10], %[ftmp10] \n\t" + "pxor %[ftmp12], %[ftmp12], %[ftmp12] \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src_ptr]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[src_ptr]) \n\t" @@ -773,7 +782,7 @@ static inline uint32_t vpx_variance8x(const uint8_t *src_ptr, int src_stride, MMI_ADDU(%[ref_ptr], %[ref_ptr], %[ref_stride]) "bnez %[tmp0], 1b \n\t" - "dsrl %[ftmp9], %[ftmp8], %[ftmp11] \n\t" + "ssrld %[ftmp9], %[ftmp8], %[ftmp11] \n\t" "paddw %[ftmp9], %[ftmp9], %[ftmp8] \n\t" "swc1 %[ftmp9], 0x00(%[sse]) \n\t" @@ -784,7 +793,7 @@ static inline uint32_t vpx_variance8x(const uint8_t *src_ptr, int src_stride, "paddw %[ftmp3], %[ftmp3], %[ftmp4] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp5] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp6] \n\t" - "dsrl %[ftmp0], %[ftmp3], %[ftmp11] \n\t" + "ssrld %[ftmp0], %[ftmp3], %[ftmp11] \n\t" "paddw %[ftmp0], %[ftmp0], %[ftmp3] \n\t" "swc1 %[ftmp0], 0x00(%[sum]) \n\t" @@ -801,6 +810,7 @@ static inline uint32_t vpx_variance8x(const uint8_t *src_ptr, int src_stride, [high]"r"(&high), [sse]"r"(sse), [sum]"r"(&sum) : "memory" ); + /* clang-format on */ 
return *sse - (((int64_t)sum * sum) / (8 * high)); } @@ -825,14 +835,15 @@ static inline uint32_t vpx_variance4x(const uint8_t *src_ptr, int src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp10] \n\t" MMI_L(%[tmp0], %[high], 0x00) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp6], %[ftmp6], %[ftmp6] \n\t" - "xor %[ftmp7], %[ftmp7], %[ftmp7] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp6], %[ftmp6], %[ftmp6] \n\t" + "pxor %[ftmp7], %[ftmp7], %[ftmp7] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" "1: \n\t" "gsldlc1 %[ftmp1], 0x07(%[src_ptr]) \n\t" "gsldrc1 %[ftmp1], 0x00(%[src_ptr]) \n\t" @@ -845,7 +856,7 @@ static inline uint32_t vpx_variance4x(const uint8_t *src_ptr, int src_stride, MMI_ADDU(%[ref_ptr], %[ref_ptr], %[ref_stride]) "bnez %[tmp0], 1b \n\t" - "dsrl %[ftmp9], %[ftmp6], %[ftmp10] \n\t" + "ssrld %[ftmp9], %[ftmp6], %[ftmp10] \n\t" "paddw %[ftmp9], %[ftmp9], %[ftmp6] \n\t" "swc1 %[ftmp9], 0x00(%[sse]) \n\t" @@ -856,7 +867,7 @@ static inline uint32_t vpx_variance4x(const uint8_t *src_ptr, int src_stride, "paddw %[ftmp3], %[ftmp3], %[ftmp4] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp5] \n\t" "psubw %[ftmp3], %[ftmp3], %[ftmp6] \n\t" - "dsrl %[ftmp0], %[ftmp3], %[ftmp10] \n\t" + "ssrld %[ftmp0], %[ftmp3], %[ftmp10] \n\t" "paddw %[ftmp0], %[ftmp0], %[ftmp3] \n\t" "swc1 %[ftmp0], 0x00(%[sum]) \n\t" : [ftmp0]"=&f"(ftmp[0]), [ftmp1]"=&f"(ftmp[1]), @@ -872,6 +883,7 @@ static inline uint32_t vpx_variance4x(const uint8_t *src_ptr, int src_stride, [high]"r"(&high), [sse]"r"(sse), [sum]"r"(&sum) : "memory" ); + /* clang-format on */ return *sse - (((int64_t)sum * sum) / (4 * high)); } @@ -894,12 +906,13 @@ static inline uint32_t vpx_mse16x(const uint8_t *src_ptr, int src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp11] \n\t" MMI_L(%[tmp0], %[high], 0x00) - "xor %[ftmp0], %[ftmp0], 
%[ftmp0] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" "1: \n\t" VARIANCE_SSE_16 @@ -909,7 +922,7 @@ static inline uint32_t vpx_mse16x(const uint8_t *src_ptr, int src_stride, MMI_ADDU(%[ref_ptr], %[ref_ptr], %[ref_stride]) "bnez %[tmp0], 1b \n\t" - "dsrl %[ftmp9], %[ftmp8], %[ftmp11] \n\t" + "ssrld %[ftmp9], %[ftmp8], %[ftmp11] \n\t" "paddw %[ftmp9], %[ftmp9], %[ftmp8] \n\t" "swc1 %[ftmp9], 0x00(%[sse]) \n\t" : [ftmp0]"=&f"(ftmp[0]), [ftmp1]"=&f"(ftmp[1]), @@ -925,6 +938,7 @@ static inline uint32_t vpx_mse16x(const uint8_t *src_ptr, int src_stride, [high]"r"(&high), [sse]"r"(sse) : "memory" ); + /* clang-format on */ return *sse; } @@ -947,12 +961,13 @@ static inline uint32_t vpx_mse8x(const uint8_t *src_ptr, int src_stride, *sse = 0; + /* clang-format off */ __asm__ volatile ( "li %[tmp0], 0x20 \n\t" "mtc1 %[tmp0], %[ftmp11] \n\t" MMI_L(%[tmp0], %[high], 0x00) - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" - "xor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp8], %[ftmp8], %[ftmp8] \n\t" "1: \n\t" VARIANCE_SSE_8 @@ -962,7 +977,7 @@ static inline uint32_t vpx_mse8x(const uint8_t *src_ptr, int src_stride, MMI_ADDU(%[ref_ptr], %[ref_ptr], %[ref_stride]) "bnez %[tmp0], 1b \n\t" - "dsrl %[ftmp9], %[ftmp8], %[ftmp11] \n\t" + "ssrld %[ftmp9], %[ftmp8], %[ftmp11] \n\t" "paddw %[ftmp9], %[ftmp9], %[ftmp8] \n\t" "swc1 %[ftmp9], 0x00(%[sse]) \n\t" : [ftmp0]"=&f"(ftmp[0]), [ftmp1]"=&f"(ftmp[1]), @@ -978,6 +993,7 @@ static inline uint32_t vpx_mse8x(const uint8_t *src_ptr, int src_stride, [high]"r"(&high), [sse]"r"(sse) : "memory" ); + /* clang-format on */ return *sse; } @@ -1021,22 +1037,39 @@ static inline void var_filter_block2d_bil_16x(const uint8_t *src_ptr, uint8_t *temp2_ptr = temp2; mips_reg l_counter = counter; double ftmp[15]; + double ff_ph_40, mask; + double filter_x0, filter_x1, filter_y0, filter_y1; mips_reg tmp[2]; - DECLARE_ALIGNED(8, 
const uint64_t, ff_ph_40) = { 0x0040004000400040ULL }; - DECLARE_ALIGNED(8, const uint64_t, mask) = { 0x00ff00ff00ff00ffULL }; + uint64_t x0, x1, y0, y1, all; const uint8_t *filter_x = bilinear_filters[x_offset]; const uint8_t *filter_y = bilinear_filters[y_offset]; + x0 = (uint64_t)filter_x[0]; + x1 = (uint64_t)filter_x[1]; + y0 = (uint64_t)filter_y[0]; + y1 = (uint64_t)filter_y[1]; + all = x0 | x1 << 8 | y0 << 16 | y1 << 24; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + MMI_MTC1(%[all], %[ftmp14]) + "punpcklbh %[ftmp14], %[ftmp14], %[ftmp0] \n\t" + "pshufh %[filter_x0], %[ftmp14], %[ftmp0] \n\t" + MMI_LI(%[tmp0], 0x10) + MMI_MTC1(%[tmp0], %[mask]) + "ssrld %[ftmp14], %[ftmp14], %[mask] \n\t" + "pshufh %[filter_x1], %[ftmp14], %[ftmp0] \n\t" + "ssrld %[ftmp14], %[ftmp14], %[mask] \n\t" + "pshufh %[filter_y0], %[ftmp14], %[ftmp0] \n\t" + "ssrld %[ftmp14], %[ftmp14], %[mask] \n\t" + "pshufh %[filter_y1], %[ftmp14], %[ftmp0] \n\t" MMI_LI(%[tmp0], 0x07) MMI_MTC1(%[tmp0], %[ftmp14]) - "pshufh %[filter_x0], %[filter_x0], %[ftmp0] \n\t" - "pshufh %[filter_x1], %[filter_x1], %[ftmp0] \n\t" - "pshufh %[filter_y0], %[filter_y0], %[ftmp0] \n\t" - "pshufh %[filter_y1], %[filter_y1], %[ftmp0] \n\t" - + MMI_LI(%[tmp0], 0x0040004000400040) + MMI_MTC1(%[tmp0], %[ff_ph_40]) + MMI_LI(%[tmp0], 0x00ff00ff00ff00ff) + MMI_MTC1(%[tmp0], %[mask]) // fdata3: fdata3[0] ~ fdata3[15] VAR_FILTER_BLOCK2D_BIL_FIRST_PASS_16_A @@ -1072,15 +1105,13 @@ static inline void var_filter_block2d_bil_16x(const uint8_t *src_ptr, [ftmp11] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [ftmp13] "=&f"(ftmp[13]), [ftmp14] "=&f"(ftmp[14]), [tmp0] "=&r"(tmp[0]), [src_ptr] "+&r"(src_ptr), [temp2_ptr] "+&r"(temp2_ptr), - [counter]"+&r"(l_counter) - : [filter_x0] "f"((uint64_t)filter_x[0]), - [filter_x1] "f"((uint64_t)filter_x[1]), - [filter_y0] "f"((uint64_t)filter_y[0]), - [filter_y1] "f"((uint64_t)filter_y[1]), - [src_stride] 
"r"((mips_reg)src_stride), [ff_ph_40] "f"(ff_ph_40), - [mask] "f"(mask) + [counter]"+&r"(l_counter), [ff_ph_40] "=&f"(ff_ph_40), [mask] "=&f"(mask), + [filter_x0] "=&f"(filter_x0), [filter_x1] "=&f"(filter_x1), + [filter_y0] "=&f"(filter_y0), [filter_y1] "=&f"(filter_y1) + : [src_stride] "r"((mips_reg)src_stride), [all] "r"(all) : "memory" ); + /* clang-format on */ } #define SUBPIX_VAR16XN(H) \ @@ -1105,19 +1136,38 @@ static inline void var_filter_block2d_bil_8x(const uint8_t *src_ptr, mips_reg l_counter = counter; double ftmp[15]; mips_reg tmp[2]; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_40) = { 0x0040004000400040ULL }; - DECLARE_ALIGNED(8, const uint64_t, mask) = { 0x00ff00ff00ff00ffULL }; + double ff_ph_40, mask; + uint64_t x0, x1, y0, y1, all; + double filter_x0, filter_x1, filter_y0, filter_y1; const uint8_t *filter_x = bilinear_filters[x_offset]; const uint8_t *filter_y = bilinear_filters[y_offset]; + x0 = (uint64_t)filter_x[0]; + x1 = (uint64_t)filter_x[1]; + y0 = (uint64_t)filter_y[0]; + y1 = (uint64_t)filter_y[1]; + all = x0 | x1 << 8 | y0 << 16 | y1 << 24; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + MMI_MTC1(%[all], %[ftmp14]) + "punpcklbh %[ftmp14], %[ftmp14], %[ftmp0] \n\t" + "pshufh %[filter_x0], %[ftmp14], %[ftmp0] \n\t" + MMI_LI(%[tmp0], 0x10) + MMI_MTC1(%[tmp0], %[mask]) + "ssrld %[ftmp14], %[ftmp14], %[mask] \n\t" + "pshufh %[filter_x1], %[ftmp14], %[ftmp0] \n\t" + "ssrld %[ftmp14], %[ftmp14], %[mask] \n\t" + "pshufh %[filter_y0], %[ftmp14], %[ftmp0] \n\t" + "ssrld %[ftmp14], %[ftmp14], %[mask] \n\t" + "pshufh %[filter_y1], %[ftmp14], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" MMI_LI(%[tmp0], 0x07) MMI_MTC1(%[tmp0], %[ftmp14]) - "pshufh %[filter_x0], %[filter_x0], %[ftmp0] \n\t" - "pshufh %[filter_x1], %[filter_x1], %[ftmp0] \n\t" - "pshufh %[filter_y0], %[filter_y0], %[ftmp0] \n\t" - "pshufh %[filter_y1], %[filter_y1], %[ftmp0] \n\t" + 
MMI_LI(%[tmp0], 0x0040004000400040) + MMI_MTC1(%[tmp0], %[ff_ph_40]) + MMI_LI(%[tmp0], 0x00ff00ff00ff00ff) + MMI_MTC1(%[tmp0], %[mask]) // fdata3: fdata3[0] ~ fdata3[7] VAR_FILTER_BLOCK2D_BIL_FIRST_PASS_8_A @@ -1154,15 +1204,13 @@ static inline void var_filter_block2d_bil_8x(const uint8_t *src_ptr, [ftmp11] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [ftmp13] "=&f"(ftmp[13]), [ftmp14] "=&f"(ftmp[14]), [tmp0] "=&r"(tmp[0]), [src_ptr] "+&r"(src_ptr), [temp2_ptr] "+&r"(temp2_ptr), - [counter]"+&r"(l_counter) - : [filter_x0] "f"((uint64_t)filter_x[0]), - [filter_x1] "f"((uint64_t)filter_x[1]), - [filter_y0] "f"((uint64_t)filter_y[0]), - [filter_y1] "f"((uint64_t)filter_y[1]), - [src_stride] "r"((mips_reg)src_stride), [ff_ph_40] "f"(ff_ph_40), - [mask] "f"(mask) + [counter]"+&r"(l_counter), [ff_ph_40] "=&f"(ff_ph_40), [mask] "=&f"(mask), + [filter_x0] "=&f"(filter_x0), [filter_x1] "=&f"(filter_x1), + [filter_y0] "=&f"(filter_y0), [filter_y1] "=&f"(filter_y1) + : [src_stride] "r"((mips_reg)src_stride), [all] "r"(all) : "memory" ); + /* clang-format on */ } #define SUBPIX_VAR8XN(H) \ @@ -1188,19 +1236,38 @@ static inline void var_filter_block2d_bil_4x(const uint8_t *src_ptr, mips_reg l_counter = counter; double ftmp[7]; mips_reg tmp[2]; - DECLARE_ALIGNED(8, const uint64_t, ff_ph_40) = { 0x0040004000400040ULL }; - DECLARE_ALIGNED(8, const uint64_t, mask) = { 0x00ff00ff00ff00ffULL }; + double ff_ph_40, mask; + uint64_t x0, x1, y0, y1, all; + double filter_x0, filter_x1, filter_y0, filter_y1; const uint8_t *filter_x = bilinear_filters[x_offset]; const uint8_t *filter_y = bilinear_filters[y_offset]; + x0 = (uint64_t)filter_x[0]; + x1 = (uint64_t)filter_x[1]; + y0 = (uint64_t)filter_y[0]; + y1 = (uint64_t)filter_y[1]; + all = x0 | x1 << 8 | y0 << 16 | y1 << 24; + /* clang-format off */ __asm__ volatile ( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + MMI_MTC1(%[all], %[ftmp6]) + "punpcklbh %[ftmp6], %[ftmp6], %[ftmp0] \n\t" + "pshufh 
%[filter_x0], %[ftmp6], %[ftmp0] \n\t" + MMI_LI(%[tmp0], 0x10) + MMI_MTC1(%[tmp0], %[mask]) + "ssrld %[ftmp6], %[ftmp6], %[mask] \n\t" + "pshufh %[filter_x1], %[ftmp6], %[ftmp0] \n\t" + "ssrld %[ftmp6], %[ftmp6], %[mask] \n\t" + "pshufh %[filter_y0], %[ftmp6], %[ftmp0] \n\t" + "ssrld %[ftmp6], %[ftmp6], %[mask] \n\t" + "pshufh %[filter_y1], %[ftmp6], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" MMI_LI(%[tmp0], 0x07) MMI_MTC1(%[tmp0], %[ftmp6]) - "pshufh %[filter_x0], %[filter_x0], %[ftmp0] \n\t" - "pshufh %[filter_x1], %[filter_x1], %[ftmp0] \n\t" - "pshufh %[filter_y0], %[filter_y0], %[ftmp0] \n\t" - "pshufh %[filter_y1], %[filter_y1], %[ftmp0] \n\t" + MMI_LI(%[tmp0], 0x0040004000400040) + MMI_MTC1(%[tmp0], %[ff_ph_40]) + MMI_LI(%[tmp0], 0x00ff00ff00ff00ff) + MMI_MTC1(%[tmp0], %[mask]) // fdata3: fdata3[0] ~ fdata3[3] VAR_FILTER_BLOCK2D_BIL_FIRST_PASS_4_A @@ -1232,15 +1299,14 @@ static inline void var_filter_block2d_bil_4x(const uint8_t *src_ptr, : [ftmp0] "=&f"(ftmp[0]), [ftmp1] "=&f"(ftmp[1]), [ftmp2] "=&f"(ftmp[2]), [ftmp3] "=&f"(ftmp[3]), [ftmp4] "=&f"(ftmp[4]), [ftmp5] "=&f"(ftmp[5]), [ftmp6] "=&f"(ftmp[6]), [tmp0] "=&r"(tmp[0]), [src_ptr] "+&r"(src_ptr), - [temp2_ptr] "+&r"(temp2_ptr), [counter]"+&r"(l_counter) - : [filter_x0] "f"((uint64_t)filter_x[0]), - [filter_x1] "f"((uint64_t)filter_x[1]), - [filter_y0] "f"((uint64_t)filter_y[0]), - [filter_y1] "f"((uint64_t)filter_y[1]), - [src_stride] "r"((mips_reg)src_stride), [ff_ph_40] "f"(ff_ph_40), - [mask] "f"(mask) + [temp2_ptr] "+&r"(temp2_ptr), [counter]"+&r"(l_counter), + [ff_ph_40] "=&f"(ff_ph_40), [mask] "=&f"(mask), + [filter_x0] "=&f"(filter_x0), [filter_x1] "=&f"(filter_x1), + [filter_y0] "=&f"(filter_y0), [filter_y1] "=&f"(filter_y1) + : [src_stride] "r"((mips_reg)src_stride), [all] "r"(all) : "memory" ); + /* clang-format on */ } #define SUBPIX_VAR4XN(H) \ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/vpx_convolve8_mmi.c 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/vpx_convolve8_mmi.c index ba9ceb866..cb7bca558 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/vpx_convolve8_mmi.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/mips/vpx_convolve8_mmi.c @@ -105,7 +105,7 @@ static void convolve_horiz_mmi(const uint8_t *src, ptrdiff_t src_stride, /* clang-format off */ __asm__ volatile( "move %[tmp1], %[width] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gsldlc1 %[filter1], 0x03(%[filter]) \n\t" "gsldrc1 %[filter1], 0x00(%[filter]) \n\t" "gsldlc1 %[filter2], 0x0b(%[filter]) \n\t" @@ -178,7 +178,7 @@ static void convolve_vert_mmi(const uint8_t *src, ptrdiff_t src_stride, (void)y_step_q4; __asm__ volatile( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gsldlc1 %[ftmp4], 0x03(%[filter]) \n\t" "gsldrc1 %[ftmp4], 0x00(%[filter]) \n\t" "gsldlc1 %[ftmp5], 0x0b(%[filter]) \n\t" @@ -271,7 +271,7 @@ static void convolve_avg_horiz_mmi(const uint8_t *src, ptrdiff_t src_stride, __asm__ volatile( "move %[tmp1], %[width] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gsldlc1 %[filter1], 0x03(%[filter]) \n\t" "gsldrc1 %[filter1], 0x00(%[filter]) \n\t" "gsldlc1 %[filter2], 0x0b(%[filter]) \n\t" @@ -354,7 +354,7 @@ static void convolve_avg_vert_mmi(const uint8_t *src, ptrdiff_t src_stride, (void)y_step_q4; __asm__ volatile( - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "gsldlc1 %[ftmp4], 0x03(%[filter]) \n\t" "gsldrc1 %[ftmp4], 0x00(%[filter]) \n\t" "gsldlc1 %[ftmp5], 0x0b(%[filter]) \n\t" @@ -467,7 +467,7 @@ void vpx_convolve_avg_mmi(const uint8_t *src, ptrdiff_t src_stride, __asm__ volatile( "move %[tmp1], %[width] \n\t" - "xor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" + "pxor %[ftmp0], %[ftmp0], %[ftmp0] \n\t" "li %[tmp0], 0x10001 \n\t" MMI_MTC1(%[tmp0], %[ftmp3]) "punpcklhw 
%[ftmp3], %[ftmp3], %[ftmp3] \n\t" diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/add_noise_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/add_noise_sse2.asm index 80cced4ce..f51718cf9 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/add_noise_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/add_noise_sse2.asm @@ -16,7 +16,7 @@ SECTION .text ;void vpx_plane_add_noise_sse2(uint8_t *start, const int8_t *noise, ; int blackclamp, int whiteclamp, ; int width, int height, int pitch) -global sym(vpx_plane_add_noise_sse2) PRIVATE +globalsym(vpx_plane_add_noise_sse2) sym(vpx_plane_add_noise_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/deblock_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/deblock_sse2.asm index 9d8e5e3e0..b3af677d2 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/deblock_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/deblock_sse2.asm @@ -95,7 +95,7 @@ SECTION .text ; int *flimits, ; int size ;) -global sym(vpx_post_proc_down_and_across_mb_row_sse2) PRIVATE +globalsym(vpx_post_proc_down_and_across_mb_row_sse2) sym(vpx_post_proc_down_and_across_mb_row_sse2): push rbp mov rbp, rsp @@ -235,7 +235,7 @@ sym(vpx_post_proc_down_and_across_mb_row_sse2): ;void vpx_mbpost_proc_across_ip_sse2(unsigned char *src, ; int pitch, int rows, int cols,int flimit) -global sym(vpx_mbpost_proc_across_ip_sse2) PRIVATE +globalsym(vpx_mbpost_proc_across_ip_sse2) sym(vpx_mbpost_proc_across_ip_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/highbd_variance_impl_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/highbd_variance_impl_sse2.asm index a256a59ec..5bee51fa0 100644 --- 
a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/highbd_variance_impl_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/highbd_variance_impl_sse2.asm @@ -22,7 +22,7 @@ SECTION .text ; unsigned int * SSE, ; int * Sum ;) -global sym(vpx_highbd_calc16x16var_sse2) PRIVATE +globalsym(vpx_highbd_calc16x16var_sse2) sym(vpx_highbd_calc16x16var_sse2): push rbp mov rbp, rsp @@ -175,7 +175,7 @@ sym(vpx_highbd_calc16x16var_sse2): ; unsigned int * SSE, ; int * Sum ;) -global sym(vpx_highbd_calc8x8var_sse2) PRIVATE +globalsym(vpx_highbd_calc8x8var_sse2) sym(vpx_highbd_calc8x8var_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse3.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse3.asm index 175dcc089..acbd2e4fa 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse3.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse3.asm @@ -173,7 +173,7 @@ SECTION .text ; unsigned char *ref_ptr, ; int ref_stride, ; int *results) -global sym(vpx_sad16x16x3_sse3) PRIVATE +globalsym(vpx_sad16x16x3_sse3) sym(vpx_sad16x16x3_sse3): STACK_FRAME_CREATE_X3 @@ -215,7 +215,7 @@ sym(vpx_sad16x16x3_sse3): ; unsigned char *ref_ptr, ; int ref_stride, ; int *results) -global sym(vpx_sad16x8x3_sse3) PRIVATE +globalsym(vpx_sad16x8x3_sse3) sym(vpx_sad16x8x3_sse3): STACK_FRAME_CREATE_X3 @@ -253,7 +253,7 @@ sym(vpx_sad16x8x3_sse3): ; unsigned char *ref_ptr, ; int ref_stride, ; int *results) -global sym(vpx_sad8x16x3_sse3) PRIVATE +globalsym(vpx_sad8x16x3_sse3) sym(vpx_sad8x16x3_sse3): STACK_FRAME_CREATE_X3 @@ -282,7 +282,7 @@ sym(vpx_sad8x16x3_sse3): ; unsigned char *ref_ptr, ; int ref_stride, ; int *results) -global sym(vpx_sad8x8x3_sse3) PRIVATE +globalsym(vpx_sad8x8x3_sse3) sym(vpx_sad8x8x3_sse3): STACK_FRAME_CREATE_X3 @@ -307,7 +307,7 @@ sym(vpx_sad8x8x3_sse3): ; unsigned char *ref_ptr, ; int ref_stride, ; int *results) 
-global sym(vpx_sad4x4x3_sse3) PRIVATE +globalsym(vpx_sad4x4x3_sse3) sym(vpx_sad4x4x3_sse3): STACK_FRAME_CREATE_X3 diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse4.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse4.asm index 03999dfca..0818ed5f0 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse4.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_sse4.asm @@ -173,7 +173,7 @@ SECTION .text ; const unsigned char *ref_ptr, ; int ref_stride, ; unsigned short *sad_array); -global sym(vpx_sad16x16x8_sse4_1) PRIVATE +globalsym(vpx_sad16x16x8_sse4_1) sym(vpx_sad16x16x8_sse4_1): push rbp mov rbp, rsp @@ -214,7 +214,7 @@ sym(vpx_sad16x16x8_sse4_1): ; int ref_stride, ; unsigned short *sad_array ;); -global sym(vpx_sad16x8x8_sse4_1) PRIVATE +globalsym(vpx_sad16x8x8_sse4_1) sym(vpx_sad16x8x8_sse4_1): push rbp mov rbp, rsp @@ -251,7 +251,7 @@ sym(vpx_sad16x8x8_sse4_1): ; int ref_stride, ; unsigned short *sad_array ;); -global sym(vpx_sad8x8x8_sse4_1) PRIVATE +globalsym(vpx_sad8x8x8_sse4_1) sym(vpx_sad8x8x8_sse4_1): push rbp mov rbp, rsp @@ -288,7 +288,7 @@ sym(vpx_sad8x8x8_sse4_1): ; int ref_stride, ; unsigned short *sad_array ;); -global sym(vpx_sad8x16x8_sse4_1) PRIVATE +globalsym(vpx_sad8x16x8_sse4_1) sym(vpx_sad8x16x8_sse4_1): push rbp mov rbp, rsp @@ -329,7 +329,7 @@ sym(vpx_sad8x16x8_sse4_1): ; int ref_stride, ; unsigned short *sad_array ;); -global sym(vpx_sad4x4x8_sse4_1) PRIVATE +globalsym(vpx_sad4x4x8_sse4_1) sym(vpx_sad4x4x8_sse4_1): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_ssse3.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_ssse3.asm index 7cf93cf51..a5bc6d730 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_ssse3.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/sad_ssse3.asm @@ -154,7 +154,7 @@ SECTION 
.text ; unsigned char *ref_ptr, ; int ref_stride, ; int *results) -global sym(vpx_sad16x16x3_ssse3) PRIVATE +globalsym(vpx_sad16x16x3_ssse3) sym(vpx_sad16x16x3_ssse3): push rbp mov rbp, rsp @@ -267,7 +267,7 @@ sym(vpx_sad16x16x3_ssse3): ; unsigned char *ref_ptr, ; int ref_stride, ; int *results) -global sym(vpx_sad16x8x3_ssse3) PRIVATE +globalsym(vpx_sad16x8x3_ssse3) sym(vpx_sad16x8x3_ssse3): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/ssim_opt_x86_64.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/ssim_opt_x86_64.asm index 300fa8aab..41ffbb07e 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/ssim_opt_x86_64.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/ssim_opt_x86_64.asm @@ -64,7 +64,7 @@ SECTION .text ; or pavgb At this point this is just meant to be first pass for calculating ; all the parms needed for 16x16 ssim so we can play with dssim as distortion ; in mode selection code. -global sym(vpx_ssim_parms_16x16_sse2) PRIVATE +globalsym(vpx_ssim_parms_16x16_sse2) sym(vpx_ssim_parms_16x16_sse2): push rbp mov rbp, rsp @@ -154,7 +154,7 @@ sym(vpx_ssim_parms_16x16_sse2): ; or pavgb At this point this is just meant to be first pass for calculating ; all the parms needed for 16x16 ssim so we can play with dssim as distortion ; in mode selection code. 
-global sym(vpx_ssim_parms_8x8_sse2) PRIVATE +globalsym(vpx_ssim_parms_8x8_sse2) sym(vpx_ssim_parms_8x8_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/transpose_sse2.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/transpose_sse2.h index 6e07871b1..b4f1190d7 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/transpose_sse2.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/transpose_sse2.h @@ -16,7 +16,7 @@ #include "./vpx_config.h" static INLINE __m128i transpose_8bit_4x4(const __m128i *const in) { - // Unpack 16 bit elements. Goes from: + // Unpack 8 bit elements. Goes from: // in[0]: 00 01 02 03 // in[1]: 10 11 12 13 // in[2]: 20 21 22 23 @@ -27,7 +27,7 @@ static INLINE __m128i transpose_8bit_4x4(const __m128i *const in) { const __m128i a0 = _mm_unpacklo_epi8(in[0], in[1]); const __m128i a1 = _mm_unpacklo_epi8(in[2], in[3]); - // Unpack 32 bit elements resulting in: + // Unpack 16 bit elements resulting in: // 00 10 20 30 01 11 21 31 02 12 22 32 03 13 23 33 return _mm_unpacklo_epi16(a0, a1); } diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_8t_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_8t_sse2.asm index c57149657..fc301fb39 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_8t_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_8t_sse2.asm @@ -208,7 +208,7 @@ SECTION .text ; unsigned int output_height, ; short *filter ;) -global sym(vpx_highbd_filter_block1d4_v8_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_v8_sse2) sym(vpx_highbd_filter_block1d4_v8_sse2): push rbp mov rbp, rsp @@ -278,7 +278,7 @@ sym(vpx_highbd_filter_block1d4_v8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_highbd_filter_block1d8_v8_sse2) PRIVATE 
+globalsym(vpx_highbd_filter_block1d8_v8_sse2) sym(vpx_highbd_filter_block1d8_v8_sse2): push rbp mov rbp, rsp @@ -337,7 +337,7 @@ sym(vpx_highbd_filter_block1d8_v8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_highbd_filter_block1d16_v8_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_v8_sse2) sym(vpx_highbd_filter_block1d16_v8_sse2): push rbp mov rbp, rsp @@ -391,7 +391,7 @@ sym(vpx_highbd_filter_block1d16_v8_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d4_v8_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_v8_avg_sse2) sym(vpx_highbd_filter_block1d4_v8_avg_sse2): push rbp mov rbp, rsp @@ -452,7 +452,7 @@ sym(vpx_highbd_filter_block1d4_v8_avg_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d8_v8_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d8_v8_avg_sse2) sym(vpx_highbd_filter_block1d8_v8_avg_sse2): push rbp mov rbp, rsp @@ -501,7 +501,7 @@ sym(vpx_highbd_filter_block1d8_v8_avg_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d16_v8_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_v8_avg_sse2) sym(vpx_highbd_filter_block1d16_v8_avg_sse2): push rbp mov rbp, rsp @@ -563,7 +563,7 @@ sym(vpx_highbd_filter_block1d16_v8_avg_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_highbd_filter_block1d4_h8_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_h8_sse2) sym(vpx_highbd_filter_block1d4_h8_sse2): push rbp mov rbp, rsp @@ -638,7 +638,7 @@ sym(vpx_highbd_filter_block1d4_h8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_highbd_filter_block1d8_h8_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d8_h8_sse2) sym(vpx_highbd_filter_block1d8_h8_sse2): push rbp mov rbp, rsp @@ -704,7 +704,7 @@ sym(vpx_highbd_filter_block1d8_h8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_highbd_filter_block1d16_h8_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_h8_sse2) sym(vpx_highbd_filter_block1d16_h8_sse2): push rbp mov rbp, rsp @@ -772,7 
+772,7 @@ sym(vpx_highbd_filter_block1d16_h8_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d4_h8_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_h8_avg_sse2) sym(vpx_highbd_filter_block1d4_h8_avg_sse2): push rbp mov rbp, rsp @@ -838,7 +838,7 @@ sym(vpx_highbd_filter_block1d4_h8_avg_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d8_h8_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d8_h8_avg_sse2) sym(vpx_highbd_filter_block1d8_h8_avg_sse2): push rbp mov rbp, rsp @@ -895,7 +895,7 @@ sym(vpx_highbd_filter_block1d8_h8_avg_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d16_h8_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_h8_avg_sse2) sym(vpx_highbd_filter_block1d16_h8_avg_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_bilinear_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_bilinear_sse2.asm index ec18d370e..bd51c75bc 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_bilinear_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_high_subpixel_bilinear_sse2.asm @@ -173,7 +173,7 @@ SECTION .text -global sym(vpx_highbd_filter_block1d4_v2_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_v2_sse2) sym(vpx_highbd_filter_block1d4_v2_sse2): push rbp mov rbp, rsp @@ -198,7 +198,7 @@ sym(vpx_highbd_filter_block1d4_v2_sse2): ret %if VPX_ARCH_X86_64 -global sym(vpx_highbd_filter_block1d8_v2_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d8_v2_sse2) sym(vpx_highbd_filter_block1d8_v2_sse2): push rbp mov rbp, rsp @@ -224,7 +224,7 @@ sym(vpx_highbd_filter_block1d8_v2_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d16_v2_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_v2_sse2) sym(vpx_highbd_filter_block1d16_v2_sse2): push rbp mov rbp, rsp @@ -253,7 +253,7 @@ sym(vpx_highbd_filter_block1d16_v2_sse2): ret %endif -global 
sym(vpx_highbd_filter_block1d4_v2_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_v2_avg_sse2) sym(vpx_highbd_filter_block1d4_v2_avg_sse2): push rbp mov rbp, rsp @@ -278,7 +278,7 @@ sym(vpx_highbd_filter_block1d4_v2_avg_sse2): ret %if VPX_ARCH_X86_64 -global sym(vpx_highbd_filter_block1d8_v2_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d8_v2_avg_sse2) sym(vpx_highbd_filter_block1d8_v2_avg_sse2): push rbp mov rbp, rsp @@ -304,7 +304,7 @@ sym(vpx_highbd_filter_block1d8_v2_avg_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d16_v2_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_v2_avg_sse2) sym(vpx_highbd_filter_block1d16_v2_avg_sse2): push rbp mov rbp, rsp @@ -333,7 +333,7 @@ sym(vpx_highbd_filter_block1d16_v2_avg_sse2): ret %endif -global sym(vpx_highbd_filter_block1d4_h2_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_h2_sse2) sym(vpx_highbd_filter_block1d4_h2_sse2): push rbp mov rbp, rsp @@ -359,7 +359,7 @@ sym(vpx_highbd_filter_block1d4_h2_sse2): ret %if VPX_ARCH_X86_64 -global sym(vpx_highbd_filter_block1d8_h2_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d8_h2_sse2) sym(vpx_highbd_filter_block1d8_h2_sse2): push rbp mov rbp, rsp @@ -385,7 +385,7 @@ sym(vpx_highbd_filter_block1d8_h2_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d16_h2_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_h2_sse2) sym(vpx_highbd_filter_block1d16_h2_sse2): push rbp mov rbp, rsp @@ -414,7 +414,7 @@ sym(vpx_highbd_filter_block1d16_h2_sse2): ret %endif -global sym(vpx_highbd_filter_block1d4_h2_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d4_h2_avg_sse2) sym(vpx_highbd_filter_block1d4_h2_avg_sse2): push rbp mov rbp, rsp @@ -440,7 +440,7 @@ sym(vpx_highbd_filter_block1d4_h2_avg_sse2): ret %if VPX_ARCH_X86_64 -global sym(vpx_highbd_filter_block1d8_h2_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d8_h2_avg_sse2) sym(vpx_highbd_filter_block1d8_h2_avg_sse2): push rbp mov rbp, rsp @@ -466,7 +466,7 @@ 
sym(vpx_highbd_filter_block1d8_h2_avg_sse2): pop rbp ret -global sym(vpx_highbd_filter_block1d16_h2_avg_sse2) PRIVATE +globalsym(vpx_highbd_filter_block1d16_h2_avg_sse2) sym(vpx_highbd_filter_block1d16_h2_avg_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_8t_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_8t_sse2.asm index 8497e1721..c8455e13a 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_8t_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_8t_sse2.asm @@ -187,7 +187,7 @@ SECTION .text ; unsigned int output_height, ; short *filter ;) -global sym(vpx_filter_block1d4_v8_sse2) PRIVATE +globalsym(vpx_filter_block1d4_v8_sse2) sym(vpx_filter_block1d4_v8_sse2): push rbp mov rbp, rsp @@ -254,7 +254,7 @@ sym(vpx_filter_block1d4_v8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_filter_block1d8_v8_sse2) PRIVATE +globalsym(vpx_filter_block1d8_v8_sse2) sym(vpx_filter_block1d8_v8_sse2): push rbp mov rbp, rsp @@ -313,7 +313,7 @@ sym(vpx_filter_block1d8_v8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_filter_block1d16_v8_sse2) PRIVATE +globalsym(vpx_filter_block1d16_v8_sse2) sym(vpx_filter_block1d16_v8_sse2): push rbp mov rbp, rsp @@ -367,7 +367,7 @@ sym(vpx_filter_block1d16_v8_sse2): pop rbp ret -global sym(vpx_filter_block1d4_v8_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d4_v8_avg_sse2) sym(vpx_filter_block1d4_v8_avg_sse2): push rbp mov rbp, rsp @@ -425,7 +425,7 @@ sym(vpx_filter_block1d4_v8_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d8_v8_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d8_v8_avg_sse2) sym(vpx_filter_block1d8_v8_avg_sse2): push rbp mov rbp, rsp @@ -474,7 +474,7 @@ sym(vpx_filter_block1d8_v8_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d16_v8_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d16_v8_avg_sse2) 
sym(vpx_filter_block1d16_v8_avg_sse2): push rbp mov rbp, rsp @@ -536,7 +536,7 @@ sym(vpx_filter_block1d16_v8_avg_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_filter_block1d4_h8_sse2) PRIVATE +globalsym(vpx_filter_block1d4_h8_sse2) sym(vpx_filter_block1d4_h8_sse2): push rbp mov rbp, rsp @@ -610,7 +610,7 @@ sym(vpx_filter_block1d4_h8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_filter_block1d8_h8_sse2) PRIVATE +globalsym(vpx_filter_block1d8_h8_sse2) sym(vpx_filter_block1d8_h8_sse2): push rbp mov rbp, rsp @@ -685,7 +685,7 @@ sym(vpx_filter_block1d8_h8_sse2): ; unsigned int output_height, ; short *filter ;) -global sym(vpx_filter_block1d16_h8_sse2) PRIVATE +globalsym(vpx_filter_block1d16_h8_sse2) sym(vpx_filter_block1d16_h8_sse2): push rbp mov rbp, rsp @@ -771,7 +771,7 @@ sym(vpx_filter_block1d16_h8_sse2): pop rbp ret -global sym(vpx_filter_block1d4_h8_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d4_h8_avg_sse2) sym(vpx_filter_block1d4_h8_avg_sse2): push rbp mov rbp, rsp @@ -836,7 +836,7 @@ sym(vpx_filter_block1d4_h8_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d8_h8_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d8_h8_avg_sse2) sym(vpx_filter_block1d8_h8_avg_sse2): push rbp mov rbp, rsp @@ -902,7 +902,7 @@ sym(vpx_filter_block1d8_h8_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d16_h8_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d16_h8_avg_sse2) sym(vpx_filter_block1d16_h8_avg_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_sse2.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_sse2.asm index 6d79492e4..65790b1c2 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_sse2.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_sse2.asm @@ -133,7 +133,7 @@ SECTION .text -global 
sym(vpx_filter_block1d4_v2_sse2) PRIVATE +globalsym(vpx_filter_block1d4_v2_sse2) sym(vpx_filter_block1d4_v2_sse2): push rbp mov rbp, rsp @@ -157,7 +157,7 @@ sym(vpx_filter_block1d4_v2_sse2): pop rbp ret -global sym(vpx_filter_block1d8_v2_sse2) PRIVATE +globalsym(vpx_filter_block1d8_v2_sse2) sym(vpx_filter_block1d8_v2_sse2): push rbp mov rbp, rsp @@ -183,7 +183,7 @@ sym(vpx_filter_block1d8_v2_sse2): pop rbp ret -global sym(vpx_filter_block1d16_v2_sse2) PRIVATE +globalsym(vpx_filter_block1d16_v2_sse2) sym(vpx_filter_block1d16_v2_sse2): push rbp mov rbp, rsp @@ -211,7 +211,7 @@ sym(vpx_filter_block1d16_v2_sse2): pop rbp ret -global sym(vpx_filter_block1d4_v2_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d4_v2_avg_sse2) sym(vpx_filter_block1d4_v2_avg_sse2): push rbp mov rbp, rsp @@ -235,7 +235,7 @@ sym(vpx_filter_block1d4_v2_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d8_v2_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d8_v2_avg_sse2) sym(vpx_filter_block1d8_v2_avg_sse2): push rbp mov rbp, rsp @@ -261,7 +261,7 @@ sym(vpx_filter_block1d8_v2_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d16_v2_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d16_v2_avg_sse2) sym(vpx_filter_block1d16_v2_avg_sse2): push rbp mov rbp, rsp @@ -289,7 +289,7 @@ sym(vpx_filter_block1d16_v2_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d4_h2_sse2) PRIVATE +globalsym(vpx_filter_block1d4_h2_sse2) sym(vpx_filter_block1d4_h2_sse2): push rbp mov rbp, rsp @@ -314,7 +314,7 @@ sym(vpx_filter_block1d4_h2_sse2): pop rbp ret -global sym(vpx_filter_block1d8_h2_sse2) PRIVATE +globalsym(vpx_filter_block1d8_h2_sse2) sym(vpx_filter_block1d8_h2_sse2): push rbp mov rbp, rsp @@ -341,7 +341,7 @@ sym(vpx_filter_block1d8_h2_sse2): pop rbp ret -global sym(vpx_filter_block1d16_h2_sse2) PRIVATE +globalsym(vpx_filter_block1d16_h2_sse2) sym(vpx_filter_block1d16_h2_sse2): push rbp mov rbp, rsp @@ -369,7 +369,7 @@ sym(vpx_filter_block1d16_h2_sse2): pop rbp ret -global sym(vpx_filter_block1d4_h2_avg_sse2) 
PRIVATE +globalsym(vpx_filter_block1d4_h2_avg_sse2) sym(vpx_filter_block1d4_h2_avg_sse2): push rbp mov rbp, rsp @@ -394,7 +394,7 @@ sym(vpx_filter_block1d4_h2_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d8_h2_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d8_h2_avg_sse2) sym(vpx_filter_block1d8_h2_avg_sse2): push rbp mov rbp, rsp @@ -421,7 +421,7 @@ sym(vpx_filter_block1d8_h2_avg_sse2): pop rbp ret -global sym(vpx_filter_block1d16_h2_avg_sse2) PRIVATE +globalsym(vpx_filter_block1d16_h2_avg_sse2) sym(vpx_filter_block1d16_h2_avg_sse2): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_ssse3.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_ssse3.asm index 8c9c817be..32e3cd3d9 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_ssse3.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_dsp/x86/vpx_subpixel_bilinear_ssse3.asm @@ -107,7 +107,7 @@ SECTION .text -global sym(vpx_filter_block1d4_v2_ssse3) PRIVATE +globalsym(vpx_filter_block1d4_v2_ssse3) sym(vpx_filter_block1d4_v2_ssse3): push rbp mov rbp, rsp @@ -131,7 +131,7 @@ sym(vpx_filter_block1d4_v2_ssse3): pop rbp ret -global sym(vpx_filter_block1d8_v2_ssse3) PRIVATE +globalsym(vpx_filter_block1d8_v2_ssse3) sym(vpx_filter_block1d8_v2_ssse3): push rbp mov rbp, rsp @@ -157,7 +157,7 @@ sym(vpx_filter_block1d8_v2_ssse3): pop rbp ret -global sym(vpx_filter_block1d16_v2_ssse3) PRIVATE +globalsym(vpx_filter_block1d16_v2_ssse3) sym(vpx_filter_block1d16_v2_ssse3): push rbp mov rbp, rsp @@ -184,7 +184,7 @@ sym(vpx_filter_block1d16_v2_ssse3): pop rbp ret -global sym(vpx_filter_block1d4_v2_avg_ssse3) PRIVATE +globalsym(vpx_filter_block1d4_v2_avg_ssse3) sym(vpx_filter_block1d4_v2_avg_ssse3): push rbp mov rbp, rsp @@ -208,7 +208,7 @@ sym(vpx_filter_block1d4_v2_avg_ssse3): pop rbp ret -global sym(vpx_filter_block1d8_v2_avg_ssse3) PRIVATE 
+globalsym(vpx_filter_block1d8_v2_avg_ssse3) sym(vpx_filter_block1d8_v2_avg_ssse3): push rbp mov rbp, rsp @@ -234,7 +234,7 @@ sym(vpx_filter_block1d8_v2_avg_ssse3): pop rbp ret -global sym(vpx_filter_block1d16_v2_avg_ssse3) PRIVATE +globalsym(vpx_filter_block1d16_v2_avg_ssse3) sym(vpx_filter_block1d16_v2_avg_ssse3): push rbp mov rbp, rsp @@ -261,7 +261,7 @@ sym(vpx_filter_block1d16_v2_avg_ssse3): pop rbp ret -global sym(vpx_filter_block1d4_h2_ssse3) PRIVATE +globalsym(vpx_filter_block1d4_h2_ssse3) sym(vpx_filter_block1d4_h2_ssse3): push rbp mov rbp, rsp @@ -286,7 +286,7 @@ sym(vpx_filter_block1d4_h2_ssse3): pop rbp ret -global sym(vpx_filter_block1d8_h2_ssse3) PRIVATE +globalsym(vpx_filter_block1d8_h2_ssse3) sym(vpx_filter_block1d8_h2_ssse3): push rbp mov rbp, rsp @@ -313,7 +313,7 @@ sym(vpx_filter_block1d8_h2_ssse3): pop rbp ret -global sym(vpx_filter_block1d16_h2_ssse3) PRIVATE +globalsym(vpx_filter_block1d16_h2_ssse3) sym(vpx_filter_block1d16_h2_ssse3): push rbp mov rbp, rsp @@ -340,7 +340,7 @@ sym(vpx_filter_block1d16_h2_ssse3): pop rbp ret -global sym(vpx_filter_block1d4_h2_avg_ssse3) PRIVATE +globalsym(vpx_filter_block1d4_h2_avg_ssse3) sym(vpx_filter_block1d4_h2_avg_ssse3): push rbp mov rbp, rsp @@ -365,7 +365,7 @@ sym(vpx_filter_block1d4_h2_avg_ssse3): pop rbp ret -global sym(vpx_filter_block1d8_h2_avg_ssse3) PRIVATE +globalsym(vpx_filter_block1d8_h2_avg_ssse3) sym(vpx_filter_block1d8_h2_avg_ssse3): push rbp mov rbp, rsp @@ -392,7 +392,7 @@ sym(vpx_filter_block1d8_h2_avg_ssse3): pop rbp ret -global sym(vpx_filter_block1d16_h2_avg_ssse3) PRIVATE +globalsym(vpx_filter_block1d16_h2_avg_ssse3) sym(vpx_filter_block1d16_h2_avg_ssse3): push rbp mov rbp, rsp diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/asmdefs_mmi.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/asmdefs_mmi.h index 28355bf9f..400a51cc3 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/asmdefs_mmi.h +++ 
b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/asmdefs_mmi.h @@ -34,7 +34,7 @@ "ld " #reg ", " #bias "(" #addr ") \n\t" #define MMI_SRL(reg1, reg2, shift) \ - "dsrl " #reg1 ", " #reg2 ", " #shift " \n\t" + "ssrld " #reg1 ", " #reg2 ", " #shift " \n\t" #define MMI_SLL(reg1, reg2, shift) \ "dsll " #reg1 ", " #reg2 ", " #shift " \n\t" @@ -63,7 +63,7 @@ "lw " #reg ", " #bias "(" #addr ") \n\t" #define MMI_SRL(reg1, reg2, shift) \ - "srl " #reg1 ", " #reg2 ", " #shift " \n\t" + "ssrlw " #reg1 ", " #reg2 ", " #shift " \n\t" #define MMI_SLL(reg1, reg2, shift) \ "sll " #reg1 ", " #reg2 ", " #shift " \n\t" diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/emms_mmx.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/emms_mmx.asm index 9f33590a2..b31b25ebd 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/emms_mmx.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/emms_mmx.asm @@ -12,7 +12,7 @@ %include "vpx_ports/x86_abi_support.asm" section .text -global sym(vpx_clear_system_state) PRIVATE +globalsym(vpx_clear_system_state) sym(vpx_clear_system_state): emms ret diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/float_control_word.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/float_control_word.asm index 256dae084..bb75b7a31 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/float_control_word.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/float_control_word.asm @@ -14,7 +14,7 @@ section .text %if LIBVPX_YASM_WIN64 -global sym(vpx_winx64_fldcw) PRIVATE +globalsym(vpx_winx64_fldcw) sym(vpx_winx64_fldcw): sub rsp, 8 mov [rsp], rcx ; win x64 specific @@ -23,7 +23,7 @@ sym(vpx_winx64_fldcw): ret -global sym(vpx_winx64_fstcw) PRIVATE +globalsym(vpx_winx64_fstcw) sym(vpx_winx64_fstcw): sub rsp, 8 fstcw [rsp] diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_null.cc b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/mips.h similarity index 52% rename from TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_null.cc rename to TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/mips.h index 6b1ccb322..bdc7525f7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_null.cc +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/mips.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebM project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,14 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/desktop_capture/desktop_capturer.h" +#ifndef VPX_PORTS_MIPS_H_ +#define VPX_PORTS_MIPS_H_ -namespace webrtc { +#ifdef __cplusplus +extern "C" { +#endif -// static -std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( - const DesktopCaptureOptions& options) { - return nullptr; -} +#define HAS_MMI 0x01 +#define HAS_MSA 0x02 -} // namespace webrtc +int mips_cpu_caps(void); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // VPX_PORTS_MIPS_H_ diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/mips_cpudetect.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/mips_cpudetect.c new file mode 100644 index 000000000..e0eca2d48 --- /dev/null +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/mips_cpudetect.c @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2020 The WebM project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include +#include +#include "./vpx_config.h" +#include "vpx_ports/mips.h" + +#if CONFIG_RUNTIME_CPU_DETECT +#if defined(__mips__) && defined(__linux__) +int mips_cpu_caps(void) { + char cpuinfo_line[512]; + int flag = 0x0; + FILE *f = fopen("/proc/cpuinfo", "r"); + if (!f) { + // Assume nothing if /proc/cpuinfo is unavailable. + // This will occur for Chrome sandbox for Pepper or Render process. + return 0; + } + while (fgets(cpuinfo_line, sizeof(cpuinfo_line) - 1, f)) { + if (memcmp(cpuinfo_line, "cpu model", 9) == 0) { + // Workaround early kernel without mmi in ASEs line. + if (strstr(cpuinfo_line, "Loongson-3")) { + flag |= HAS_MMI; + } else if (strstr(cpuinfo_line, "Loongson-2K")) { + flag |= HAS_MMI | HAS_MSA; + } + } + if (memcmp(cpuinfo_line, "ASEs implemented", 16) == 0) { + if (strstr(cpuinfo_line, "loongson-mmi") && + strstr(cpuinfo_line, "loongson-ext")) { + flag |= HAS_MMI; + } + if (strstr(cpuinfo_line, "msa")) { + flag |= HAS_MSA; + } + // ASEs is the last line, so we can break here. + break; + } + } + fclose(f); + return flag; +} +#else /* end __mips__ && __linux__ */ +#error \ + "--enable-runtime-cpu-detect selected, but no CPU detection method " \ +"available for your platform. Reconfigure with --disable-runtime-cpu-detect." 
+#endif +#else /* end CONFIG_RUNTIME_CPU_DETECT */ +int mips_cpu_caps(void) { return 0; } +#endif diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/vpx_ports.mk b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/vpx_ports.mk index 233177369..e5001be49 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/vpx_ports.mk +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/vpx_ports.mk @@ -42,6 +42,9 @@ PORTS_SRCS-$(VPX_ARCH_ARM) += arm.h PORTS_SRCS-$(VPX_ARCH_PPC) += ppc_cpudetect.c PORTS_SRCS-$(VPX_ARCH_PPC) += ppc.h +PORTS_SRCS-$(VPX_ARCH_MIPS) += mips_cpudetect.c +PORTS_SRCS-$(VPX_ARCH_MIPS) += mips.h + ifeq ($(VPX_ARCH_MIPS), yes) PORTS_SRCS-yes += asmdefs_mmi.h endif diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/x86_abi_support.asm b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/x86_abi_support.asm index 7e1230ba3..6b2d6b968 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/x86_abi_support.asm +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpx_ports/x86_abi_support.asm @@ -16,6 +16,17 @@ ; In general, we make the source use 64 bit syntax, then twiddle with it using ; the preprocessor to get the 32 bit syntax on 32 bit platforms. 
; +%ifidn __OUTPUT_FORMAT__,elf32 +%define ABI_IS_32BIT 1 +%elifidn __OUTPUT_FORMAT__,macho32 +%define ABI_IS_32BIT 1 +%elifidn __OUTPUT_FORMAT__,win32 +%define ABI_IS_32BIT 1 +%elifidn __OUTPUT_FORMAT__,aout +%define ABI_IS_32BIT 1 +%else +%define ABI_IS_32BIT 0 +%endif %if ABI_IS_32BIT %define rax eax @@ -78,34 +89,51 @@ %define LIBVPX_YASM_WIN64 0 %endif +; Declare groups of platforms +%ifidn __OUTPUT_FORMAT__,elf32 + %define LIBVPX_ELF 1 +%elifidn __OUTPUT_FORMAT__,elfx32 + %define LIBVPX_ELF 1 +%elifidn __OUTPUT_FORMAT__,elf64 + %define LIBVPX_ELF 1 +%else + %define LIBVPX_ELF 0 +%endif + +%ifidn __OUTPUT_FORMAT__,macho32 + %define LIBVPX_MACHO 1 +%elifidn __OUTPUT_FORMAT__,macho64 + %define LIBVPX_MACHO 1 +%else + %define LIBVPX_MACHO 0 +%endif + ; sym() ; Return the proper symbol name for the target ABI. ; ; Certain ABIs, notably MS COFF and Darwin MACH-O, require that symbols ; with C linkage be prefixed with an underscore. ; -%ifidn __OUTPUT_FORMAT__,elf32 -%define sym(x) x -%elifidn __OUTPUT_FORMAT__,elf64 -%define sym(x) x -%elifidn __OUTPUT_FORMAT__,elfx32 -%define sym(x) x -%elif LIBVPX_YASM_WIN64 -%define sym(x) x +%if LIBVPX_ELF || LIBVPX_YASM_WIN64 + %define sym(x) x %else -%define sym(x) _ %+ x + ; Mach-O / COFF + %define sym(x) _ %+ x %endif -; PRIVATE -; Macro for the attribute to hide a global symbol for the target ABI. -; This is only active if CHROMIUM is defined. +; globalsym() +; Return a global declaration with the proper decoration for the target ABI. ; -; Chromium doesn't like exported global symbols due to symbol clashing with -; plugins among other things. +; When CHROMIUM is defined, include attributes to hide the symbol from the +; global namespace. ; -; Requires Chromium's patched copy of yasm: -; http://src.chromium.org/viewvc/chrome?view=rev&revision=73761 -; http://www.tortall.net/projects/yasm/ticket/236 +; Chromium doesn't like exported global symbols due to symbol clashing with +; plugins among other things. 
+; +; Requires Chromium's patched copy of yasm: +; http://src.chromium.org/viewvc/chrome?view=rev&revision=73761 +; http://www.tortall.net/projects/yasm/ticket/236 +; or nasm > 2.14. ; %ifdef CHROMIUM %ifdef __NASM_VER__ @@ -115,19 +143,16 @@ %endif %endif - %ifidn __OUTPUT_FORMAT__,elf32 - %define PRIVATE :hidden - %elifidn __OUTPUT_FORMAT__,elf64 - %define PRIVATE :hidden - %elifidn __OUTPUT_FORMAT__,elfx32 - %define PRIVATE :hidden - %elif LIBVPX_YASM_WIN64 - %define PRIVATE + %if LIBVPX_ELF + %define globalsym(x) global sym(x) %+ :function hidden + %elif LIBVPX_MACHO + %define globalsym(x) global sym(x) %+ :private_extern %else - %define PRIVATE :private_extern + ; COFF / PE32+ + %define globalsym(x) global sym(x) %endif %else - %define PRIVATE + %define globalsym(x) global sym(x) %endif ; arg() diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.c b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.c index 50c36bedd..5d7546eb2 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.c +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.c @@ -95,6 +95,8 @@ static const arg_def_t debugmode = ARG_DEF("D", "debug", 0, "Debug mode (makes output deterministic)"); static const arg_def_t outputfile = ARG_DEF("o", "output", 1, "Output filename"); +static const arg_def_t use_nv12 = + ARG_DEF(NULL, "nv12", 0, "Input file is NV12 "); static const arg_def_t use_yv12 = ARG_DEF(NULL, "yv12", 0, "Input file is YV12 "); static const arg_def_t use_i420 = @@ -220,7 +222,8 @@ static const arg_def_t error_resilient = static const arg_def_t lag_in_frames = ARG_DEF(NULL, "lag-in-frames", 1, "Max number of frames to lag"); -static const arg_def_t *global_args[] = { &use_yv12, +static const arg_def_t *global_args[] = { &use_nv12, + &use_yv12, &use_i420, &use_i422, &use_i444, @@ -462,6 +465,13 @@ static const arg_def_t target_level = ARG_DEF( static const arg_def_t row_mt = ARG_DEF(NULL, "row-mt", 1, "Enable row based 
non-deterministic multi-threading in VP9"); + +static const arg_def_t disable_loopfilter = + ARG_DEF(NULL, "disable-loopfilter", 1, + "Control Loopfilter in VP9\n" + "0: Loopfilter on for all frames (default)\n" + "1: Loopfilter off for non reference frames\n" + "2: Loopfilter off for all frames"); #endif #if CONFIG_VP9_ENCODER @@ -492,6 +502,10 @@ static const arg_def_t *vp9_args[] = { &cpu_used_vp9, &max_gf_interval, &target_level, &row_mt, + &disable_loopfilter, +// NOTE: The entries above have a corresponding entry in vp9_arg_ctrl_map. The +// entries below do not have a corresponding entry in vp9_arg_ctrl_map. They +// must be listed at the end of vp9_args. #if CONFIG_VP9_HIGHBITDEPTH &bitdeptharg, &inbitdeptharg, @@ -524,6 +538,7 @@ static const int vp9_arg_ctrl_map[] = { VP8E_SET_CPUUSED, VP9E_SET_MAX_GF_INTERVAL, VP9E_SET_TARGET_LEVEL, VP9E_SET_ROW_MT, + VP9E_SET_DISABLE_LOOPFILTER, 0 }; #endif @@ -696,6 +711,8 @@ static void parse_global_config(struct VpxEncoderConfig *global, char **argv) { global->deadline = VPX_DL_REALTIME; else if (arg_match(&arg, &use_yv12, argi)) global->color_type = YV12; + else if (arg_match(&arg, &use_nv12, argi)) + global->color_type = NV12; else if (arg_match(&arg, &use_i420, argi)) global->color_type = I420; else if (arg_match(&arg, &use_i422, argi)) @@ -1642,6 +1659,7 @@ int main(int argc, const char **argv_) { case I444: input.fmt = VPX_IMG_FMT_I444; break; case I440: input.fmt = VPX_IMG_FMT_I440; break; case YV12: input.fmt = VPX_IMG_FMT_YV12; break; + case NV12: input.fmt = VPX_IMG_FMT_NV12; break; } { diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.h b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.h index b780aedca..be54840f7 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.h +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/vpxenc.h @@ -28,6 +28,7 @@ typedef enum { I444, // 4:4:4 8+ bit-depth I440, // 4:4:0 8+ bit-depth YV12, // 4:2:0 with uv flipped, only 
8-bit depth + NV12, // 4:2:0 with uv interleaved } ColorInputType; struct VpxInterface; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmdec.cc b/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmdec.cc index d609075a9..68c6f4782 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmdec.cc +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmdec.cc @@ -19,25 +19,25 @@ namespace { void reset(struct WebmInputContext *const webm_ctx) { - if (webm_ctx->reader != NULL) { + if (webm_ctx->reader != nullptr) { mkvparser::MkvReader *const reader = reinterpret_cast(webm_ctx->reader); delete reader; } - if (webm_ctx->segment != NULL) { + if (webm_ctx->segment != nullptr) { mkvparser::Segment *const segment = reinterpret_cast(webm_ctx->segment); delete segment; } - if (webm_ctx->buffer != NULL) { + if (webm_ctx->buffer != nullptr) { delete[] webm_ctx->buffer; } - webm_ctx->reader = NULL; - webm_ctx->segment = NULL; - webm_ctx->buffer = NULL; - webm_ctx->cluster = NULL; - webm_ctx->block_entry = NULL; - webm_ctx->block = NULL; + webm_ctx->reader = nullptr; + webm_ctx->segment = nullptr; + webm_ctx->buffer = nullptr; + webm_ctx->cluster = nullptr; + webm_ctx->block_entry = nullptr; + webm_ctx->block = nullptr; webm_ctx->block_frame_index = 0; webm_ctx->video_track_index = 0; webm_ctx->timestamp_ns = 0; @@ -84,7 +84,7 @@ int file_is_webm(struct WebmInputContext *webm_ctx, } const mkvparser::Tracks *const tracks = segment->GetTracks(); - const mkvparser::VideoTrack *video_track = NULL; + const mkvparser::VideoTrack *video_track = nullptr; for (unsigned long i = 0; i < tracks->GetTracksCount(); ++i) { const mkvparser::Track *const track = tracks->GetTrackByIndex(i); if (track->GetType() == mkvparser::Track::kVideo) { @@ -94,7 +94,7 @@ int file_is_webm(struct WebmInputContext *webm_ctx, } } - if (video_track == NULL || video_track->GetCodecId() == NULL) { + if (video_track == nullptr || video_track->GetCodecId() == nullptr) { 
rewind_and_reset(webm_ctx, vpx_ctx); return 0; } @@ -137,12 +137,12 @@ int webm_read_frame(struct WebmInputContext *webm_ctx, uint8_t **buffer, do { long status = 0; bool get_new_block = false; - if (block_entry == NULL && !block_entry_eos) { + if (block_entry == nullptr && !block_entry_eos) { status = cluster->GetFirst(block_entry); get_new_block = true; } else if (block_entry_eos || block_entry->EOS()) { cluster = segment->GetNext(cluster); - if (cluster == NULL || cluster->EOS()) { + if (cluster == nullptr || cluster->EOS()) { *buffer_size = 0; webm_ctx->reached_eos = 1; return 1; @@ -150,22 +150,22 @@ int webm_read_frame(struct WebmInputContext *webm_ctx, uint8_t **buffer, status = cluster->GetFirst(block_entry); block_entry_eos = false; get_new_block = true; - } else if (block == NULL || + } else if (block == nullptr || webm_ctx->block_frame_index == block->GetFrameCount() || block->GetTrackNumber() != webm_ctx->video_track_index) { status = cluster->GetNext(block_entry, block_entry); - if (block_entry == NULL || block_entry->EOS()) { + if (block_entry == nullptr || block_entry->EOS()) { block_entry_eos = true; continue; } get_new_block = true; } - if (status || block_entry == NULL) { + if (status || block_entry == nullptr) { return -1; } if (get_new_block) { block = block_entry->GetBlock(); - if (block == NULL) return -1; + if (block == nullptr) return -1; webm_ctx->block_frame_index = 0; } } while (block_entry_eos || @@ -181,7 +181,7 @@ int webm_read_frame(struct WebmInputContext *webm_ctx, uint8_t **buffer, if (frame.len > static_cast(*buffer_size)) { delete[] * buffer; *buffer = new uint8_t[frame.len]; - if (*buffer == NULL) { + if (*buffer == nullptr) { return -1; } webm_ctx->buffer = *buffer; @@ -198,7 +198,7 @@ int webm_read_frame(struct WebmInputContext *webm_ctx, uint8_t **buffer, int webm_guess_framerate(struct WebmInputContext *webm_ctx, struct VpxInputContext *vpx_ctx) { uint32_t i = 0; - uint8_t *buffer = NULL; + uint8_t *buffer = nullptr; size_t 
buffer_size = 0; while (webm_ctx->timestamp_ns < 1000000000 && i < 50) { if (webm_read_frame(webm_ctx, &buffer, &buffer_size)) { @@ -212,8 +212,8 @@ int webm_guess_framerate(struct WebmInputContext *webm_ctx, delete[] buffer; get_first_cluster(webm_ctx); - webm_ctx->block = NULL; - webm_ctx->block_entry = NULL; + webm_ctx->block = nullptr; + webm_ctx->block_entry = nullptr; webm_ctx->block_frame_index = 0; webm_ctx->timestamp_ns = 0; webm_ctx->reached_eos = 0; diff --git a/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmenc.cc b/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmenc.cc index 66606674b..c718ab5a9 100644 --- a/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmenc.cc +++ b/TMessagesProj/jni/third_party/libvpx/source/libvpx/webmenc.cc @@ -90,6 +90,6 @@ void write_webm_file_footer(struct WebmOutputContext *webm_ctx) { segment->Finalize(); delete segment; delete writer; - webm_ctx->writer = NULL; - webm_ctx->segment = NULL; + webm_ctx->writer = nullptr; + webm_ctx->segment = nullptr; } diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv.h index de652836e..aeffd5ef7 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv.h @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef INCLUDE_LIBYUV_H_ // NOLINT +#ifndef INCLUDE_LIBYUV_H_ #define INCLUDE_LIBYUV_H_ #include "libyuv/basic_types.h" @@ -29,4 +29,4 @@ #include "libyuv/version.h" #include "libyuv/video_common.h" -#endif // INCLUDE_LIBYUV_H_ NOLINT +#endif // INCLUDE_LIBYUV_H_ diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h index f571142fa..026b153ce 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h @@ -42,6 +42,21 @@ int I444ToI420(const uint8_t* src_y, int width, int height); +// Convert I444 to NV12. +LIBYUV_API +int I444ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + // Convert I444 to NV21. LIBYUV_API int I444ToNV21(const uint8_t* src_y, @@ -248,19 +263,6 @@ int AYUVToNV21(const uint8_t* src_ayuv, int width, int height); -// Convert M420 to I420. -LIBYUV_API -int M420ToI420(const uint8_t* src_m420, - int src_stride_m420, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - // Convert Android420 to I420. LIBYUV_API int Android420ToI420(const uint8_t* src_y, @@ -418,7 +420,15 @@ int RGB24ToJ400(const uint8_t* src_rgb24, int width, int height); -#ifdef HAVE_JPEG +// RGB big endian (rgb in memory) to J400. +LIBYUV_API +int RAWToJ400(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_yj, + int dst_stride_yj, + int width, + int height); + // src_width/height provided by capture. // dst_width/height for clipping determine final size. 
LIBYUV_API @@ -448,13 +458,25 @@ int MJPGToNV21(const uint8_t* sample, int dst_width, int dst_height); +// JPEG to NV12 +LIBYUV_API +int MJPGToNV12(const uint8_t* sample, + size_t sample_size, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int src_width, + int src_height, + int dst_width, + int dst_height); + // Query size of MJPG in pixels. LIBYUV_API int MJPGSize(const uint8_t* sample, size_t sample_size, int* width, int* height); -#endif // Convert camera sample to I420 with cropping, rotation and vertical flip. // "src_size" is needed to parse MJPG. diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h index bf776348f..715a3dad9 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h @@ -15,16 +15,41 @@ #include "libyuv/rotate.h" // For enum RotationMode. -// TODO(fbarchard): This set of functions should exactly match convert.h -// TODO(fbarchard): Add tests. Create random content of right size and convert -// with C vs Opt and or to I420 and compare. -// TODO(fbarchard): Some of these functions lack parameter setting. 
- #ifdef __cplusplus namespace libyuv { extern "C" { #endif +// Conversion matrix for YUV to RGB +LIBYUV_API extern const struct YuvConstants kYuvI601Constants; // BT.601 +LIBYUV_API extern const struct YuvConstants kYuvJPEGConstants; // JPeg +LIBYUV_API extern const struct YuvConstants kYuvH709Constants; // BT.709 +LIBYUV_API extern const struct YuvConstants kYuv2020Constants; // BT.2020 + +// Conversion matrix for YVU to BGR +LIBYUV_API extern const struct YuvConstants kYvuI601Constants; // BT.601 +LIBYUV_API extern const struct YuvConstants kYvuJPEGConstants; // JPeg +LIBYUV_API extern const struct YuvConstants kYvuH709Constants; // BT.709 +LIBYUV_API extern const struct YuvConstants kYvu2020Constants; // BT.2020 + +// Macros for end swapped destination Matrix conversions. +// Swap UV and pass mirrored kYvuJPEGConstants matrix. +// TODO(fbarchard): Add macro for each Matrix function. +#define kYuvI601ConstantsVU kYvuI601Constants +#define kYuvJPEGConstantsVU kYvuJPEGConstants +#define kYuvH709ConstantsVU kYvuH709Constants +#define kYuv2020ConstantsVU kYvu2020Constants +#define NV12ToABGRMatrix(a, b, c, d, e, f, g, h, i) \ + NV21ToARGBMatrix(a, b, c, d, e, f, g##VU, h, i) +#define NV21ToABGRMatrix(a, b, c, d, e, f, g, h, i) \ + NV12ToARGBMatrix(a, b, c, d, e, f, g##VU, h, i) +#define NV12ToRAWMatrix(a, b, c, d, e, f, g, h, i) \ + NV21ToRGB24Matrix(a, b, c, d, e, f, g##VU, h, i) +#define NV21ToRAWMatrix(a, b, c, d, e, f, g, h, i) \ + NV12ToRGB24Matrix(a, b, c, d, e, f, g##VU, h, i) +#define I420AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \ + I420AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n) + // Alias. #define ARGBToARGB ARGBCopy @@ -657,15 +682,6 @@ int NV21ToRAW(const uint8_t* src_y, int width, int height); -// Convert M420 to ARGB. -LIBYUV_API -int M420ToARGB(const uint8_t* src_m420, - int src_stride_m420, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height); - // Convert YUY2 to ARGB. 
LIBYUV_API int YUY2ToARGB(const uint8_t* src_yuy2, @@ -956,7 +972,6 @@ int AR30ToAB30(const uint8_t* src_ar30, int width, int height); -#ifdef HAVE_JPEG // src_width/height provided by capture // dst_width/height for clipping determine final size. LIBYUV_API @@ -968,7 +983,6 @@ int MJPGToARGB(const uint8_t* sample, int src_height, int dst_width, int dst_height); -#endif // Convert Android420 to ARGB. LIBYUV_API @@ -998,6 +1012,561 @@ int Android420ToABGR(const uint8_t* src_y, int width, int height); +// Convert NV12 to RGB565. +LIBYUV_API +int NV12ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height); + +// Convert I422 to BGRA. +LIBYUV_API +int I422ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height); + +// Convert I422 to ABGR. +LIBYUV_API +int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +// Convert I422 to RGBA. 
+LIBYUV_API +int I422ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height); + +LIBYUV_API +int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +LIBYUV_API +int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height); + +LIBYUV_API +int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +LIBYUV_API +int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height); + +LIBYUV_API +int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +LIBYUV_API +int I420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height); + +LIBYUV_API +int H420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +LIBYUV_API +int H420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + 
int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height); + +LIBYUV_API +int J420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +LIBYUV_API +int J420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height); + +LIBYUV_API +int I420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height); + +LIBYUV_API +int J420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height); + +LIBYUV_API +int H420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height); + +LIBYUV_API +int I422ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height); + +// Convert I420 To RGB565 with 4x4 dither matrix (16 bytes). +// Values in dither matrix from 0 to 7 recommended. +// The order of the dither matrix is first byte is upper left. 
+ +LIBYUV_API +int I420ToRGB565Dither(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const uint8_t* dither4x4, + int width, + int height); + +LIBYUV_API +int I420ToARGB1555(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb1555, + int dst_stride_argb1555, + int width, + int height); + +LIBYUV_API +int I420ToARGB4444(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb4444, + int dst_stride_argb4444, + int width, + int height); + +// Convert I420 to AR30. +LIBYUV_API +int I420ToAR30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height); + +// Convert H420 to AR30. +LIBYUV_API +int H420ToAR30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height); + +// Convert I420 to ARGB with matrix. +LIBYUV_API +int I420ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I422 to ARGB with matrix. +LIBYUV_API +int I422ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I444 to ARGB with matrix. 
+LIBYUV_API +int I444ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// multiply 10 bit yuv into high bits to allow any number of bits. +LIBYUV_API +int I010ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// multiply 10 bit yuv into high bits to allow any number of bits. +LIBYUV_API +int I210ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert 10 bit YUV to ARGB with matrix. +LIBYUV_API +int I010ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert 10 bit 422 YUV to ARGB with matrix. +LIBYUV_API +int I210ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I420 with Alpha to preattenuated ARGB with matrix. 
+LIBYUV_API +int I420AlphaToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate); + +// Convert NV12 to ARGB with matrix. +LIBYUV_API +int NV12ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert NV21 to ARGB with matrix. +LIBYUV_API +int NV21ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert NV12 to RGB565 with matrix. +LIBYUV_API +int NV12ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert NV12 to RGB24 with matrix. +LIBYUV_API +int NV12ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert NV21 to RGB24 with matrix. +LIBYUV_API +int NV21ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert Android420 to ARGB with matrix. 
+LIBYUV_API +int Android420ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_pixel_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I422 to RGBA with matrix. +LIBYUV_API +int I422ToRGBAMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I422 to RGBA with matrix. +LIBYUV_API +int I420ToRGBAMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I420 to RGB24 with matrix. +LIBYUV_API +int I420ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I420 to RGB565 with specified color matrix. +LIBYUV_API +int I420ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I420 to AR30 with matrix. +LIBYUV_API +int I420ToAR30Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert I400 (grey) to ARGB. Reverse of ARGBToI400. 
+LIBYUV_API +int I400ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + // Convert camera sample to ARGB with cropping, rotation and vertical flip. // "sample_size" is needed to parse MJPG. // "dst_stride_argb" number of bytes in a row of the dst_argb plane. diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h index afc43939a..5140ed4f3 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h @@ -132,6 +132,10 @@ int I420ToUYVY(const uint8_t* src_y, int width, int height); +// The following are from convert_argb.h +// DEPRECATED: The prototypes will be removed in future. Use convert_argb.h + +// Convert I420 to ARGB. LIBYUV_API int I420ToARGB(const uint8_t* src_y, int src_stride_y, @@ -144,18 +148,7 @@ int I420ToARGB(const uint8_t* src_y, int width, int height); -LIBYUV_API -int I420ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height); - +// Convert I420 to ABGR. 
LIBYUV_API int I420ToABGR(const uint8_t* src_y, int src_stride_y, @@ -168,205 +161,6 @@ int I420ToABGR(const uint8_t* src_y, int width, int height); -LIBYUV_API -int I420ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height); - -LIBYUV_API -int I420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -LIBYUV_API -int I420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height); - -LIBYUV_API -int H420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -LIBYUV_API -int H420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height); - -LIBYUV_API -int J420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height); - -LIBYUV_API -int J420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height); - -LIBYUV_API -int I420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int 
height); - -LIBYUV_API -int J420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height); - -LIBYUV_API -int H420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height); - -LIBYUV_API -int I422ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height); - -// Convert I420 To RGB565 with 4x4 dither matrix (16 bytes). -// Values in dither matrix from 0 to 7 recommended. -// The order of the dither matrix is first byte is upper left. - -LIBYUV_API -int I420ToRGB565Dither(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const uint8_t* dither4x4, - int width, - int height); - -LIBYUV_API -int I420ToARGB1555(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb1555, - int dst_stride_argb1555, - int width, - int height); - -LIBYUV_API -int I420ToARGB4444(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb4444, - int dst_stride_argb4444, - int width, - int height); - -// Convert I420 to AR30. -LIBYUV_API -int I420ToAR30(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - -// Convert H420 to AR30. 
-LIBYUV_API -int H420ToAR30(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height); - // Convert I420 to specified format. // "dst_sample_stride" is bytes in a row for the destination. Pass 0 if the // buffer has contiguous rows. Can be negative. A multiple of 16 is optimal. diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h index 057182448..d992363ce 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h @@ -77,6 +77,10 @@ int ARGBToAR30(const uint8_t* src_argb, int width, int height); +// Aliases +#define ABGRToRGB24 ARGBToRAW +#define ABGRToRAW ARGBToRGB24 + // Convert ARGB To RGB24. LIBYUV_API int ARGBToRGB24(const uint8_t* src_argb, @@ -281,17 +285,6 @@ int ABGRToNV21(const uint8_t* src_abgr, int width, int height); -// Convert ARGB To NV21. -LIBYUV_API -int ARGBToNV21(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_vu, - int dst_stride_vu, - int width, - int height); - // Convert ARGB To YUY2. LIBYUV_API int ARGBToYUY2(const uint8_t* src_argb, diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h index b01cd25c5..3e27cc107 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h @@ -71,6 +71,8 @@ static __inline int TestCpuFlag(int test_flag) { // Internal function for parsing /proc/cpuinfo. LIBYUV_API int ArmCpuCaps(const char* cpuinfo_name); +LIBYUV_API +int MipsCpuCaps(const char* cpuinfo_name); // For testing, allow CPU flags to be disabled. 
// ie MaskCpuFlags(~kCpuHasSSSE3) to disable SSSE3. diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h index 29997ce11..4e232b66b 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h @@ -140,6 +140,9 @@ #define LD_B(RTYPE, psrc) *((RTYPE*)(psrc)) /* NOLINT */ #define LD_UB(...) LD_B(const v16u8, __VA_ARGS__) +#define LD_H(RTYPE, psrc) *((RTYPE*)(psrc)) /* NOLINT */ +#define LD_UH(...) LD_H(const v8u16, __VA_ARGS__) + #define ST_B(RTYPE, in, pdst) *((RTYPE*)(pdst)) = (in) /* NOLINT */ #define ST_UB(...) ST_B(v16u8, __VA_ARGS__) diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h index 5299fe2c0..9e0038f47 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h @@ -105,6 +105,19 @@ void MergeUVPlane(const uint8_t* src_u, int width, int height); +// Scale U and V to half width and height and merge into interleaved UV plane. +// width and height are source size, allowing odd sizes. +// Use for converting I444 or I422 to NV12. +LIBYUV_API +void HalfMergeUVPlane(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + // Swap U and V channels in interleaved UV plane. LIBYUV_API void SwapUVPlane(const uint8_t* src_uv, @@ -301,6 +314,22 @@ int I400Mirror(const uint8_t* src_y, int width, int height); +// Alias +#define NV12ToNV12Mirror NV12Mirror + +// NV12 mirror. 
+LIBYUV_API +int NV12Mirror(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + // Alias #define ARGBToARGBMirror ARGBMirror @@ -313,56 +342,35 @@ int ARGBMirror(const uint8_t* src_argb, int width, int height); -// Convert NV12 to RGB565. +// Alias +#define RGB24ToRGB24Mirror RGB24Mirror + +// RGB24 mirror. LIBYUV_API -int NV12ToRGB565(const uint8_t* src_y, +int RGB24Mirror(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +// Mirror a plane of data. +LIBYUV_API +void MirrorPlane(const uint8_t* src_y, int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb565, - int dst_stride_rgb565, + uint8_t* dst_y, + int dst_stride_y, int width, int height); -// I422ToARGB is in convert_argb.h -// Convert I422 to BGRA. +// Mirror a plane of UV data. LIBYUV_API -int I422ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height); - -// Convert I422 to ABGR. -LIBYUV_API -int I422ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height); - -// Convert I422 to RGBA. 
-LIBYUV_API -int I422ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height); +void MirrorUVPlane(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); // Alias #define RGB24ToRAW RAWToRGB24 @@ -743,6 +751,19 @@ int ARGBBlur(const uint8_t* src_argb, int height, int radius); +// Gaussian 5x5 blur a float plane. +// Coefficients of 1, 4, 6, 4, 1. +// Each destination pixel is a blur of the 5x5 +// pixels from the source. +// Source edges are clamped. +LIBYUV_API +int GaussPlane_F32(const float* src, + int src_stride, + float* dst, + int dst_stride, + int width, + int height); + // Multiply ARGB image by ARGB value. LIBYUV_API int ARGBShade(const uint8_t* src_argb, diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h index c64e0216d..308882242 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h @@ -118,6 +118,10 @@ void RotatePlane270(const uint8_t* src, int width, int height); +// Rotations for when U and V are interleaved. +// These functions take one input pointer and +// split the data into two buffers while +// rotating them. Deprecated. LIBYUV_API void RotateUV90(const uint8_t* src, int src_stride, @@ -128,10 +132,6 @@ void RotateUV90(const uint8_t* src, int width, int height); -// Rotations for when U and V are interleaved. -// These functions take one input pointer and -// split the data into two buffers while -// rotating them. Deprecated. 
LIBYUV_API void RotateUV180(const uint8_t* src, int src_stride, diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h index b721858f1..a27788c1f 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h @@ -98,7 +98,6 @@ extern "C" { #define HAS_COPYROW_SSE2 #define HAS_H422TOARGBROW_SSSE3 #define HAS_HALFFLOATROW_SSE2 -#define HAS_I400TOARGBROW_SSE2 #define HAS_I422TOARGB1555ROW_SSSE3 #define HAS_I422TOARGB4444ROW_SSSE3 #define HAS_I422TOARGBROW_SSSE3 @@ -112,7 +111,7 @@ extern "C" { #define HAS_J422TOARGBROW_SSSE3 #define HAS_MERGEUVROW_SSE2 #define HAS_MIRRORROW_SSSE3 -#define HAS_MIRRORUVROW_SSSE3 +#define HAS_MIRRORSPLITUVROW_SSSE3 #define HAS_NV12TOARGBROW_SSSE3 #define HAS_NV12TORGB24ROW_SSSE3 #define HAS_NV12TORGB565ROW_SSSE3 @@ -123,6 +122,8 @@ extern "C" { #define HAS_RAWTOYROW_SSSE3 #define HAS_RGB24TOARGBROW_SSSE3 #define HAS_RGB24TOYROW_SSSE3 +#define HAS_RGB24TOYJROW_SSSE3 +#define HAS_RAWTOYJROW_SSSE3 #define HAS_RGB565TOARGBROW_SSE2 #define HAS_RGBATOUVROW_SSSE3 #define HAS_RGBATOYROW_SSSE3 @@ -194,11 +195,12 @@ extern "C" { #define HAS_ARGBTOUVROW_AVX2 #define HAS_ARGBTOYJROW_AVX2 #define HAS_ARGBTOYROW_AVX2 +#define HAS_RGB24TOYJROW_AVX2 +#define HAS_RAWTOYJROW_AVX2 #define HAS_COPYROW_AVX #define HAS_H422TOARGBROW_AVX2 #define HAS_HALFFLOATROW_AVX2 // #define HAS_HALFFLOATROW_F16C // Enable to test halffloat cast -#define HAS_I400TOARGBROW_AVX2 #define HAS_I422TOARGB1555ROW_AVX2 #define HAS_I422TOARGB4444ROW_AVX2 #define HAS_I422TOARGBROW_AVX2 @@ -269,12 +271,16 @@ extern "C" { #define HAS_ARGBTOAR30ROW_SSSE3 #define HAS_CONVERT16TO8ROW_SSSE3 #define HAS_CONVERT8TO16ROW_SSE2 -// I210 is for H010. 2 = 422. I for 601 vs H for 709. 
+#define HAS_HALFMERGEUVROW_SSSE3 #define HAS_I210TOAR30ROW_SSSE3 #define HAS_I210TOARGBROW_SSSE3 +#define HAS_I400TOARGBROW_SSE2 #define HAS_I422TOAR30ROW_SSSE3 #define HAS_MERGERGBROW_SSSE3 +#define HAS_MIRRORUVROW_AVX2 +#define HAS_MIRRORUVROW_SSSE3 #define HAS_RAWTORGBAROW_SSSE3 +#define HAS_RGB24MIRRORROW_SSSE3 #define HAS_RGBATOYJROW_SSSE3 #define HAS_SPLITRGBROW_SSSE3 #define HAS_SWAPUVROW_SSSE3 @@ -293,8 +299,10 @@ extern "C" { #define HAS_ARGBTORGB24ROW_AVX2 #define HAS_CONVERT16TO8ROW_AVX2 #define HAS_CONVERT8TO16ROW_AVX2 +#define HAS_HALFMERGEUVROW_AVX2 #define HAS_I210TOAR30ROW_AVX2 #define HAS_I210TOARGBROW_AVX2 +#define HAS_I400TOARGBROW_AVX2 #define HAS_I422TOAR30ROW_AVX2 #define HAS_I422TOUYVYROW_AVX2 #define HAS_I422TOYUY2ROW_AVX2 @@ -338,7 +346,6 @@ extern "C" { #define HAS_ARGBTOUVJROW_NEON #define HAS_ARGBTOUVROW_NEON #define HAS_ARGBTOYJROW_NEON -#define HAS_RGBATOYJROW_NEON #define HAS_ARGBTOYROW_NEON #define HAS_AYUVTOUVROW_NEON #define HAS_AYUVTOVUROW_NEON @@ -348,6 +355,7 @@ extern "C" { #define HAS_BYTETOFLOATROW_NEON #define HAS_COPYROW_NEON #define HAS_HALFFLOATROW_NEON +#define HAS_HALFMERGEUVROW_NEON #define HAS_I400TOARGBROW_NEON #define HAS_I422ALPHATOARGBROW_NEON #define HAS_I422TOARGB1555ROW_NEON @@ -363,6 +371,7 @@ extern "C" { #define HAS_MERGEUVROW_NEON #define HAS_MIRRORROW_NEON #define HAS_MIRRORUVROW_NEON +#define HAS_MIRRORSPLITUVROW_NEON #define HAS_NV12TOARGBROW_NEON #define HAS_NV12TORGB24ROW_NEON #define HAS_NV12TORGB565ROW_NEON @@ -370,17 +379,20 @@ extern "C" { #define HAS_NV21TORGB24ROW_NEON #define HAS_NV21TOYUV24ROW_NEON #define HAS_RAWTOARGBROW_NEON -#define HAS_RAWTORGBAROW_NEON #define HAS_RAWTORGB24ROW_NEON +#define HAS_RAWTORGBAROW_NEON #define HAS_RAWTOUVROW_NEON +#define HAS_RAWTOYJROW_NEON #define HAS_RAWTOYROW_NEON #define HAS_RGB24TOARGBROW_NEON #define HAS_RGB24TOUVROW_NEON +#define HAS_RGB24TOYJROW_NEON #define HAS_RGB24TOYROW_NEON #define HAS_RGB565TOARGBROW_NEON #define HAS_RGB565TOUVROW_NEON #define 
HAS_RGB565TOYROW_NEON #define HAS_RGBATOUVROW_NEON +#define HAS_RGBATOYJROW_NEON #define HAS_RGBATOYROW_NEON #define HAS_SETROW_NEON #define HAS_SPLITRGBROW_NEON @@ -402,6 +414,7 @@ extern "C" { #define HAS_ARGBCOLORMATRIXROW_NEON #define HAS_ARGBGRAYROW_NEON #define HAS_ARGBMIRRORROW_NEON +#define HAS_RGB24MIRRORROW_NEON #define HAS_ARGBMULTIPLYROW_NEON #define HAS_ARGBQUANTIZEROW_NEON #define HAS_ARGBSEPIAROW_NEON @@ -419,6 +432,9 @@ extern "C" { // The following are available on AArch64 platforms: #if !defined(LIBYUV_DISABLE_NEON) && defined(__aarch64__) #define HAS_SCALESUMSAMPLES_NEON +#define HAS_GAUSSROW_F32_NEON +#define HAS_GAUSSCOL_F32_NEON + #endif #if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) #define HAS_ABGRTOUVROW_MSA @@ -470,6 +486,7 @@ extern "C" { #define HAS_MERGEUVROW_MSA #define HAS_MIRRORROW_MSA #define HAS_MIRRORUVROW_MSA +#define HAS_MIRRORSPLITUVROW_MSA #define HAS_NV12TOARGBROW_MSA #define HAS_NV12TORGB565ROW_MSA #define HAS_NV21TOARGBROW_MSA @@ -552,7 +569,7 @@ extern "C" { #define HAS_MERGERGBROW_MMI #define HAS_MERGEUVROW_MMI #define HAS_MIRRORROW_MMI -#define HAS_MIRRORUVROW_MMI +#define HAS_MIRRORSPLITUVROW_MMI #define HAS_RAWTOARGBROW_MMI #define HAS_RAWTORGB24ROW_MMI #define HAS_RAWTOUVROW_MMI @@ -601,6 +618,7 @@ extern "C" { #endif typedef __declspec(align(16)) int16_t vec16[8]; typedef __declspec(align(16)) int32_t vec32[4]; +typedef __declspec(align(16)) float vecf32[4]; typedef __declspec(align(16)) int8_t vec8[16]; typedef __declspec(align(16)) uint16_t uvec16[8]; typedef __declspec(align(16)) uint32_t uvec32[4]; @@ -620,6 +638,7 @@ typedef __declspec(align(32)) uint8_t ulvec8[32]; #endif typedef int16_t __attribute__((vector_size(16))) vec16; typedef int32_t __attribute__((vector_size(16))) vec32; +typedef float __attribute__((vector_size(16))) vecf32; typedef int8_t __attribute__((vector_size(16))) vec8; typedef uint16_t __attribute__((vector_size(16))) uvec16; typedef uint32_t __attribute__((vector_size(16))) 
uvec32; @@ -634,6 +653,7 @@ typedef uint8_t __attribute__((vector_size(32))) ulvec8; #define SIMD_ALIGNED(var) var typedef int16_t vec16[8]; typedef int32_t vec32[4]; +typedef float vecf32[4]; typedef int8_t vec8[16]; typedef uint16_t uvec16[8]; typedef uint32_t uvec32[4]; @@ -674,6 +694,7 @@ struct YuvConstants { int16_t kUVBiasG[16]; int16_t kUVBiasR[16]; int16_t kYToRgb[16]; + int16_t kYBiasToRgb[16]; }; // Offsets into YuvConstants structure @@ -684,20 +705,10 @@ struct YuvConstants { #define KUVBIASG 128 #define KUVBIASR 160 #define KYTORGB 192 +#define KYBIASTORGB 224 + #endif -// Conversion matrix for YUV to RGB -extern const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants); // BT.601 -extern const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants); // JPeg -extern const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants); // BT.709 -extern const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants); // BT.2020 - -// Conversion matrix for YVU to BGR -extern const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants); // BT.601 -extern const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants); // JPeg -extern const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants); // BT.709 -extern const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants); // BT.2020 - #define IS_ALIGNED(p, a) (!((uintptr_t)(p) & ((a)-1))) #define align_buffer_64(var, size) \ @@ -965,7 +976,11 @@ void BGRAToYRow_SSSE3(const uint8_t* src_bgra, uint8_t* dst_y, int width); void ABGRToYRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width); void RGBAToYRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width); void RGB24ToYRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width); +void RGB24ToYJRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width); void RAWToYRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width); +void RAWToYJRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width); +void RGB24ToYJRow_AVX2(const uint8_t* src_rgb24, uint8_t* dst_y, int 
width); +void RAWToYJRow_AVX2(const uint8_t* src_raw, uint8_t* dst_y, int width); void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width); void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width); void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width); @@ -1134,7 +1149,9 @@ void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width); void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width); void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width); void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width); +void RGB24ToYJRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_yj, int width); void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width); +void RAWToYJRow_NEON(const uint8_t* src_raw, uint8_t* dst_yj, int width); void RGB565ToYRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_y, int width); void ARGB1555ToYRow_NEON(const uint8_t* src_argb1555, uint8_t* dst_y, @@ -1165,7 +1182,9 @@ void BGRAToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); void ABGRToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); void RGBAToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); void RGB24ToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); +void RGB24ToYJRow_C(const uint8_t* src_argb, uint8_t* dst_yj, int width); void RAWToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); +void RAWToYJRow_C(const uint8_t* src_argb, uint8_t* dst_yj, int width); void RGB565ToYRow_C(const uint8_t* src_rgb565, uint8_t* dst_y, int width); void ARGB1555ToYRow_C(const uint8_t* src_argb1555, uint8_t* dst_y, int width); void ARGB4444ToYRow_C(const uint8_t* src_argb4444, uint8_t* dst_y, int width); @@ -1175,8 +1194,14 @@ void RGBAToYJRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void BGRAToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ABGRToYRow_Any_SSSE3(const uint8_t* src_ptr, 
uint8_t* dst_ptr, int width); void RGBAToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB24ToYRow_Any_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width); -void RAWToYRow_Any_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width); +void RGB24ToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToYJRow_Any_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); +void RAWToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToYJRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGBAToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); @@ -1184,7 +1209,9 @@ void BGRAToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ABGRToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGBAToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGB24ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RAWToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGB565ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGB1555ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, @@ -1542,27 +1569,36 @@ void MirrorRow_Any_SSE2(const uint8_t* src, uint8_t* dst, int width); void MirrorRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void 
MirrorRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void MirrorUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_uv, int width); +void MirrorUVRow_SSSE3(const uint8_t* src_uv, uint8_t* dst_uv, int width); +void MirrorUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_uv, int width); +void MirrorUVRow_MSA(const uint8_t* src_uv, uint8_t* dst_uv, int width); +void MirrorUVRow_C(const uint8_t* src_uv, uint8_t* dst_uv, int width); +void MirrorUVRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void MirrorUVRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void MirrorUVRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void MirrorUVRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void MirrorUVRow_SSSE3(const uint8_t* src, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_NEON(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_MSA(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_MMI(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void MirrorUVRow_C(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void MirrorSplitUVRow_SSSE3(const uint8_t* src, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void MirrorSplitUVRow_NEON(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void MirrorSplitUVRow_MSA(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void MirrorSplitUVRow_MMI(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void MirrorSplitUVRow_C(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void ARGBMirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width); void ARGBMirrorRow_SSE2(const uint8_t* src, uint8_t* dst, int width); @@ -1582,6 +1618,16 @@ void ARGBMirrorRow_Any_NEON(const uint8_t* src_ptr, void ARGBMirrorRow_Any_MSA(const uint8_t* 
src_ptr, uint8_t* dst_ptr, int width); void ARGBMirrorRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24MirrorRow_SSSE3(const uint8_t* src, uint8_t* dst, int width); +void RGB24MirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width); +void RGB24MirrorRow_C(const uint8_t* src, uint8_t* dst, int width); +void RGB24MirrorRow_Any_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); +void RGB24MirrorRow_Any_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); + void SplitUVRow_C(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, @@ -1672,6 +1718,34 @@ void MergeUVRow_Any_MMI(const uint8_t* y_buf, uint8_t* dst_ptr, int width); +void HalfMergeUVRow_C(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width); + +void HalfMergeUVRow_NEON(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width); + +void HalfMergeUVRow_SSSE3(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width); + +void HalfMergeUVRow_AVX2(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width); + void SplitRGBRow_C(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g, @@ -2728,23 +2802,50 @@ void I422ToRGB24Row_Any_AVX2(const uint8_t* y_buf, const struct YuvConstants* yuvconstants, int width); -void I400ToARGBRow_C(const uint8_t* src_y, uint8_t* rgb_buf, int width); -void I400ToARGBRow_SSE2(const uint8_t* y_buf, uint8_t* dst_argb, int width); -void I400ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* dst_argb, int width); -void I400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width); -void I400ToARGBRow_MSA(const uint8_t* src_y, uint8_t* dst_argb, int width); -void I400ToARGBRow_MMI(const uint8_t* src_y, uint8_t* dst_argb, int width); +void I400ToARGBRow_C(const uint8_t* src_y, + uint8_t* rgb_buf, 
+ const struct YuvConstants* yuvconstants, + int width); +void I400ToARGBRow_SSE2(const uint8_t* y_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I400ToARGBRow_AVX2(const uint8_t* y_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I400ToARGBRow_NEON(const uint8_t* src_y, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I400ToARGBRow_MSA(const uint8_t* src_y, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I400ToARGBRow_MMI(const uint8_t* src_y, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I400ToARGBRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, int width); void I400ToARGBRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, int width); void I400ToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, int width); -void I400ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void I400ToARGBRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void I400ToARGBRow_Any_MSA(const uint8_t* src_ptr, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I400ToARGBRow_Any_MMI(const uint8_t* src_ptr, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); // ARGB preattenuated alpha blend. 
void ARGBBlendRow_SSSE3(const uint8_t* src_argb0, @@ -4256,6 +4357,25 @@ void UYVYToARGBRow_Any_MMI(const uint8_t* src_ptr, const struct YuvConstants* yuvconstants, int width); +void GaussRow_F32_NEON(const float* src, float* dst, int width); +void GaussRow_F32_C(const float* src, float* dst, int width); + +void GaussCol_F32_NEON(const float* src0, + const float* src1, + const float* src2, + const float* src3, + const float* src4, + float* dst, + int width); + +void GaussCol_F32_C(const float* src0, + const float* src1, + const float* src2, + const float* src3, + const float* src4, + float* dst, + int width); + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h index 23ba1634f..add5a9eb6 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h @@ -145,6 +145,31 @@ int I444Scale_16(const uint16_t* src_y, int dst_height, enum FilterMode filtering); +// Scales an NV12 image from the src width and height to the +// dst width and height. +// If filtering is kFilterNone, a simple nearest-neighbor algorithm is +// used. This produces basic (blocky) quality at the fastest speed. +// If filtering is kFilterBilinear, interpolation is used to produce a better +// quality image, at the expense of speed. +// kFilterBox is not supported for the UV channel and will be treated as +// bilinear. +// Returns 0 if successful. + +LIBYUV_API +int NV12Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height, + enum FilterMode filtering); + #ifdef __cplusplus // Legacy API. Deprecated. 
LIBYUV_API diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h index dd20718a8..a386d4998 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h @@ -72,6 +72,22 @@ extern "C" { #define HAS_SCALEROWDOWN4_SSSE3 #endif +// The following are available for gcc/clang x86 platforms: +// TODO(fbarchard): Port to Visual C +#if !defined(LIBYUV_DISABLE_X86) && \ + (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +#define HAS_SCALEUVROWDOWN2BOX_SSSE3 +#endif + +// The following are available for gcc/clang x86 platforms, but +// require clang 3.4 or gcc 4.7. +// TODO(fbarchard): Port to Visual C +#if !defined(LIBYUV_DISABLE_X86) && \ + (defined(__x86_64__) || defined(__i386__)) && !defined(_MSC_VER) && \ + (defined(CLANG_HAS_AVX2) || defined(GCC_HAS_AVX2)) +#define HAS_SCALEUVROWDOWN2BOX_AVX2 +#endif + // The following are available on all x86 platforms, but // require VS2012, clang 3.4 or gcc 4.7. // The code supports NaCL but requires a new compiler and validator. 
@@ -96,6 +112,8 @@ extern "C" { #define HAS_SCALEROWDOWN34_NEON #define HAS_SCALEROWDOWN38_NEON #define HAS_SCALEROWDOWN4_NEON +#define HAS_SCALEUVROWDOWN2BOX_NEON +#define HAS_SCALEUVROWDOWNEVEN_NEON #endif #if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) @@ -376,6 +394,53 @@ void ScaleARGBFilterCols64_C(uint8_t* dst_argb, int dst_width, int x32, int dx); +void ScaleUVRowDown2_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Linear_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Box_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEven_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEvenBox_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVCols_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x, + int dx); +void ScaleUVCols64_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x32, + int dx); +void ScaleUVColsUp2_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int, + int); +void ScaleUVFilterCols_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x, + int dx); +void ScaleUVFilterCols64_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x32, + int dx); // Specialized scalers for x86. 
void ScaleRowDown2_SSSE3(const uint8_t* src_ptr, @@ -782,6 +847,192 @@ void ScaleARGBRowDownEvenBox_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width); +// UV Row functions +void ScaleUVRowDown2_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Linear_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Box_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Box_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width); +void ScaleUVRowDown2Linear_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Box_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width); +void ScaleUVRowDown2_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Linear_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Box_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Linear_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2Box_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDown2_Any_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Linear_Any_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Box_Any_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + 
uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Box_Any_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2_Any_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Linear_Any_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Box_Any_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2_Any_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Linear_Any_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Box_Any_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2_Any_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Linear_Any_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDown2Box_Any_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEven_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEvenBox_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEven_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEvenBox_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEven_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int32_t src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEvenBox_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + 
uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEven_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int32_t src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEvenBox_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width); +void ScaleUVRowDownEven_Any_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEvenBox_Any_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEven_Any_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEvenBox_Any_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEven_Any_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int32_t src_stepx, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEvenBox_Any_MSA(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEven_Any_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int32_t src_stepx, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowDownEvenBox_Any_MMI(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_ptr, + int dst_width); + // ScaleRowDown2Box also used by planar functions // NEON downscalers with interpolation. diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_uv.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_uv.h new file mode 100644 index 000000000..1b6327aae --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_uv.h @@ -0,0 +1,38 @@ +/* + * Copyright 2020 The LibYuv Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef INCLUDE_LIBYUV_SCALE_UV_H_ +#define INCLUDE_LIBYUV_SCALE_UV_H_ + +#include "libyuv/basic_types.h" +#include "libyuv/scale.h" // For FilterMode + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +LIBYUV_API +int UVScale(const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height, + enum FilterMode filtering); + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif + +#endif // INCLUDE_LIBYUV_SCALE_UV_H_ diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h index 4c446ba3d..1d085960e 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h @@ -11,6 +11,6 @@ #ifndef INCLUDE_LIBYUV_VERSION_H_ #define INCLUDE_LIBYUV_VERSION_H_ -#define LIBYUV_VERSION 1741 +#define LIBYUV_VERSION 1767 #endif // INCLUDE_LIBYUV_VERSION_H_ diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h index 666eb3439..b9823d71d 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h @@ -62,7 +62,7 @@ enum FourCC { FOURCC_I010 = FOURCC('I', '0', '1', '0'), // bt.601 10 bit 420 FOURCC_I210 = FOURCC('I', '0', '1', '0'), // bt.601 10 bit 422 - // 1 Secondary YUV format: row biplanar. + // 1 Secondary YUV format: row biplanar. deprecated. 
FOURCC_M420 = FOURCC('M', '4', '2', '0'), // 11 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp, 1 10 bpc @@ -86,10 +86,14 @@ enum FourCC { FOURCC_YV16 = FOURCC('Y', 'V', '1', '6'), FOURCC_YV24 = FOURCC('Y', 'V', '2', '4'), FOURCC_YU12 = FOURCC('Y', 'U', '1', '2'), // Linux version of I420. - FOURCC_J420 = FOURCC('J', '4', '2', '0'), // jpeg (bt.601 full), unofficial fourcc - FOURCC_J422 = FOURCC('J', '4', '2', '2'), // jpeg (bt.601 full), unofficial fourcc - FOURCC_J444 = FOURCC('J', '4', '4', '4'), // jpeg (bt.601 full), unofficial fourcc - FOURCC_J400 = FOURCC('J', '4', '0', '0'), // jpeg (bt.601 full), unofficial fourcc + FOURCC_J420 = + FOURCC('J', '4', '2', '0'), // jpeg (bt.601 full), unofficial fourcc + FOURCC_J422 = + FOURCC('J', '4', '2', '2'), // jpeg (bt.601 full), unofficial fourcc + FOURCC_J444 = + FOURCC('J', '4', '4', '4'), // jpeg (bt.601 full), unofficial fourcc + FOURCC_J400 = + FOURCC('J', '4', '0', '0'), // jpeg (bt.601 full), unofficial fourcc FOURCC_H420 = FOURCC('H', '4', '2', '0'), // bt.709, unofficial fourcc FOURCC_H422 = FOURCC('H', '4', '2', '2'), // bt.709, unofficial fourcc FOURCC_H444 = FOURCC('H', '4', '4', '4'), // bt.709, unofficial fourcc @@ -144,7 +148,7 @@ enum FourCCBpp { FOURCC_BPP_NV12 = 12, FOURCC_BPP_YUY2 = 16, FOURCC_BPP_UYVY = 16, - FOURCC_BPP_M420 = 12, + FOURCC_BPP_M420 = 12, // deprecated FOURCC_BPP_Q420 = 12, FOURCC_BPP_ARGB = 32, FOURCC_BPP_BGRA = 32, diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare.cc b/TMessagesProj/jni/third_party/libyuv/source/compare.cc index 7f4828104..e93aba1b5 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare.cc @@ -149,16 +149,16 @@ uint64_t ComputeHammingDistance(const uint8_t* src_a, HammingDistance = HammingDistance_AVX2; } #endif -#if defined(HAS_HAMMINGDISTANCE_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - HammingDistance = HammingDistance_MSA; - } -#endif #if defined(HAS_HAMMINGDISTANCE_MMI) if 
(TestCpuFlag(kCpuHasMMI)) { HammingDistance = HammingDistance_MMI; } #endif +#if defined(HAS_HAMMINGDISTANCE_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + HammingDistance = HammingDistance_MSA; + } +#endif #ifdef _OPENMP #pragma omp parallel for reduction(+ : diff) @@ -211,16 +211,16 @@ uint64_t ComputeSumSquareError(const uint8_t* src_a, SumSquareError = SumSquareError_AVX2; } #endif -#if defined(HAS_SUMSQUAREERROR_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SumSquareError = SumSquareError_MSA; - } -#endif #if defined(HAS_SUMSQUAREERROR_MMI) if (TestCpuFlag(kCpuHasMMI)) { SumSquareError = SumSquareError_MMI; } #endif +#if defined(HAS_SUMSQUAREERROR_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SumSquareError = SumSquareError_MSA; + } +#endif #ifdef _OPENMP #pragma omp parallel for reduction(+ : sse) #endif diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc index 676527c1b..6700f9697 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc @@ -29,38 +29,38 @@ uint32_t HammingDistance_SSE42(const uint8_t* src_a, uint64_t diff = 0u; asm volatile( - "xor %3,%3 \n" - "xor %%r8,%%r8 \n" - "xor %%r9,%%r9 \n" - "xor %%r10,%%r10 \n" + "xor %3,%3 \n" + "xor %%r8,%%r8 \n" + "xor %%r9,%%r9 \n" + "xor %%r10,%%r10 \n" // Process 32 bytes per loop. 
LABELALIGN "1: \n" - "mov (%0),%%rcx \n" - "mov 0x8(%0),%%rdx \n" - "xor (%1),%%rcx \n" - "xor 0x8(%1),%%rdx \n" - "popcnt %%rcx,%%rcx \n" - "popcnt %%rdx,%%rdx \n" - "mov 0x10(%0),%%rsi \n" - "mov 0x18(%0),%%rdi \n" - "xor 0x10(%1),%%rsi \n" - "xor 0x18(%1),%%rdi \n" - "popcnt %%rsi,%%rsi \n" - "popcnt %%rdi,%%rdi \n" - "add $0x20,%0 \n" - "add $0x20,%1 \n" - "add %%rcx,%3 \n" - "add %%rdx,%%r8 \n" - "add %%rsi,%%r9 \n" - "add %%rdi,%%r10 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "mov (%0),%%rcx \n" + "mov 0x8(%0),%%rdx \n" + "xor (%1),%%rcx \n" + "xor 0x8(%1),%%rdx \n" + "popcnt %%rcx,%%rcx \n" + "popcnt %%rdx,%%rdx \n" + "mov 0x10(%0),%%rsi \n" + "mov 0x18(%0),%%rdi \n" + "xor 0x10(%1),%%rsi \n" + "xor 0x18(%1),%%rdi \n" + "popcnt %%rsi,%%rsi \n" + "popcnt %%rdi,%%rdi \n" + "add $0x20,%0 \n" + "add $0x20,%1 \n" + "add %%rcx,%3 \n" + "add %%rdx,%%r8 \n" + "add %%rsi,%%r9 \n" + "add %%rdi,%%r10 \n" + "sub $0x20,%2 \n" + "jg 1b \n" - "add %%r8, %3 \n" - "add %%r9, %3 \n" - "add %%r10, %3 \n" + "add %%r8, %3 \n" + "add %%r9, %3 \n" + "add %%r10, %3 \n" : "+r"(src_a), // %0 "+r"(src_b), // %1 "+r"(count), // %2 @@ -80,26 +80,26 @@ uint32_t HammingDistance_SSE42(const uint8_t* src_a, // Process 16 bytes per loop. 
LABELALIGN "1: \n" - "mov (%0),%%ecx \n" - "mov 0x4(%0),%%edx \n" - "xor (%1),%%ecx \n" - "xor 0x4(%1),%%edx \n" - "popcnt %%ecx,%%ecx \n" - "add %%ecx,%3 \n" - "popcnt %%edx,%%edx \n" - "add %%edx,%3 \n" - "mov 0x8(%0),%%ecx \n" - "mov 0xc(%0),%%edx \n" - "xor 0x8(%1),%%ecx \n" - "xor 0xc(%1),%%edx \n" - "popcnt %%ecx,%%ecx \n" - "add %%ecx,%3 \n" - "popcnt %%edx,%%edx \n" - "add %%edx,%3 \n" - "add $0x10,%0 \n" - "add $0x10,%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "mov (%0),%%ecx \n" + "mov 0x4(%0),%%edx \n" + "xor (%1),%%ecx \n" + "xor 0x4(%1),%%edx \n" + "popcnt %%ecx,%%ecx \n" + "add %%ecx,%3 \n" + "popcnt %%edx,%%edx \n" + "add %%edx,%3 \n" + "mov 0x8(%0),%%ecx \n" + "mov 0xc(%0),%%edx \n" + "xor 0x8(%1),%%ecx \n" + "xor 0xc(%1),%%edx \n" + "popcnt %%ecx,%%ecx \n" + "add %%ecx,%3 \n" + "popcnt %%edx,%%edx \n" + "add %%edx,%3 \n" + "add $0x10,%0 \n" + "add $0x10,%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_a), // %0 "+r"(src_b), // %1 "+r"(count), // %2 @@ -121,46 +121,46 @@ uint32_t HammingDistance_SSSE3(const uint8_t* src_a, uint32_t diff = 0u; asm volatile( - "movdqa %4,%%xmm2 \n" - "movdqa %5,%%xmm3 \n" - "pxor %%xmm0,%%xmm0 \n" - "pxor %%xmm1,%%xmm1 \n" - "sub %0,%1 \n" + "movdqa %4,%%xmm2 \n" + "movdqa %5,%%xmm3 \n" + "pxor %%xmm0,%%xmm0 \n" + "pxor %%xmm1,%%xmm1 \n" + "sub %0,%1 \n" LABELALIGN "1: \n" - "movdqa (%0),%%xmm4 \n" - "movdqa 0x10(%0), %%xmm5 \n" - "pxor (%0,%1), %%xmm4 \n" - "movdqa %%xmm4,%%xmm6 \n" - "pand %%xmm2,%%xmm6 \n" - "psrlw $0x4,%%xmm4 \n" - "movdqa %%xmm3,%%xmm7 \n" - "pshufb %%xmm6,%%xmm7 \n" - "pand %%xmm2,%%xmm4 \n" - "movdqa %%xmm3,%%xmm6 \n" - "pshufb %%xmm4,%%xmm6 \n" - "paddb %%xmm7,%%xmm6 \n" - "pxor 0x10(%0,%1),%%xmm5 \n" - "add $0x20,%0 \n" - "movdqa %%xmm5,%%xmm4 \n" - "pand %%xmm2,%%xmm5 \n" - "psrlw $0x4,%%xmm4 \n" - "movdqa %%xmm3,%%xmm7 \n" - "pshufb %%xmm5,%%xmm7 \n" - "pand %%xmm2,%%xmm4 \n" - "movdqa %%xmm3,%%xmm5 \n" - "pshufb %%xmm4,%%xmm5 \n" - "paddb %%xmm7,%%xmm5 \n" - "paddb %%xmm5,%%xmm6 \n" - 
"psadbw %%xmm1,%%xmm6 \n" - "paddd %%xmm6,%%xmm0 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "movdqa (%0),%%xmm4 \n" + "movdqa 0x10(%0), %%xmm5 \n" + "pxor (%0,%1), %%xmm4 \n" + "movdqa %%xmm4,%%xmm6 \n" + "pand %%xmm2,%%xmm6 \n" + "psrlw $0x4,%%xmm4 \n" + "movdqa %%xmm3,%%xmm7 \n" + "pshufb %%xmm6,%%xmm7 \n" + "pand %%xmm2,%%xmm4 \n" + "movdqa %%xmm3,%%xmm6 \n" + "pshufb %%xmm4,%%xmm6 \n" + "paddb %%xmm7,%%xmm6 \n" + "pxor 0x10(%0,%1),%%xmm5 \n" + "add $0x20,%0 \n" + "movdqa %%xmm5,%%xmm4 \n" + "pand %%xmm2,%%xmm5 \n" + "psrlw $0x4,%%xmm4 \n" + "movdqa %%xmm3,%%xmm7 \n" + "pshufb %%xmm5,%%xmm7 \n" + "pand %%xmm2,%%xmm4 \n" + "movdqa %%xmm3,%%xmm5 \n" + "pshufb %%xmm4,%%xmm5 \n" + "paddb %%xmm7,%%xmm5 \n" + "paddb %%xmm5,%%xmm6 \n" + "psadbw %%xmm1,%%xmm6 \n" + "paddd %%xmm6,%%xmm0 \n" + "sub $0x20,%2 \n" + "jg 1b \n" - "pshufd $0xaa,%%xmm0,%%xmm1 \n" - "paddd %%xmm1,%%xmm0 \n" - "movd %%xmm0, %3 \n" + "pshufd $0xaa,%%xmm0,%%xmm1 \n" + "paddd %%xmm1,%%xmm0 \n" + "movd %%xmm0, %3 \n" : "+r"(src_a), // %0 "+r"(src_b), // %1 "+r"(count), // %2 @@ -182,40 +182,40 @@ uint32_t HammingDistance_AVX2(const uint8_t* src_a, asm volatile( "vbroadcastf128 %4,%%ymm2 \n" "vbroadcastf128 %5,%%ymm3 \n" - "vpxor %%ymm0,%%ymm0,%%ymm0 \n" - "vpxor %%ymm1,%%ymm1,%%ymm1 \n" - "sub %0,%1 \n" + "vpxor %%ymm0,%%ymm0,%%ymm0 \n" + "vpxor %%ymm1,%%ymm1,%%ymm1 \n" + "sub %0,%1 \n" LABELALIGN "1: \n" - "vmovdqa (%0),%%ymm4 \n" - "vmovdqa 0x20(%0), %%ymm5 \n" - "vpxor (%0,%1), %%ymm4, %%ymm4 \n" - "vpand %%ymm2,%%ymm4,%%ymm6 \n" - "vpsrlw $0x4,%%ymm4,%%ymm4 \n" - "vpshufb %%ymm6,%%ymm3,%%ymm6 \n" - "vpand %%ymm2,%%ymm4,%%ymm4 \n" - "vpshufb %%ymm4,%%ymm3,%%ymm4 \n" - "vpaddb %%ymm4,%%ymm6,%%ymm6 \n" - "vpxor 0x20(%0,%1),%%ymm5,%%ymm4 \n" - "add $0x40,%0 \n" - "vpand %%ymm2,%%ymm4,%%ymm5 \n" - "vpsrlw $0x4,%%ymm4,%%ymm4 \n" - "vpshufb %%ymm5,%%ymm3,%%ymm5 \n" - "vpand %%ymm2,%%ymm4,%%ymm4 \n" - "vpshufb %%ymm4,%%ymm3,%%ymm4 \n" - "vpaddb %%ymm5,%%ymm4,%%ymm4 \n" - "vpaddb %%ymm6,%%ymm4,%%ymm4 \n" - 
"vpsadbw %%ymm1,%%ymm4,%%ymm4 \n" - "vpaddd %%ymm0,%%ymm4,%%ymm0 \n" - "sub $0x40,%2 \n" - "jg 1b \n" + "vmovdqa (%0),%%ymm4 \n" + "vmovdqa 0x20(%0), %%ymm5 \n" + "vpxor (%0,%1), %%ymm4, %%ymm4 \n" + "vpand %%ymm2,%%ymm4,%%ymm6 \n" + "vpsrlw $0x4,%%ymm4,%%ymm4 \n" + "vpshufb %%ymm6,%%ymm3,%%ymm6 \n" + "vpand %%ymm2,%%ymm4,%%ymm4 \n" + "vpshufb %%ymm4,%%ymm3,%%ymm4 \n" + "vpaddb %%ymm4,%%ymm6,%%ymm6 \n" + "vpxor 0x20(%0,%1),%%ymm5,%%ymm4 \n" + "add $0x40,%0 \n" + "vpand %%ymm2,%%ymm4,%%ymm5 \n" + "vpsrlw $0x4,%%ymm4,%%ymm4 \n" + "vpshufb %%ymm5,%%ymm3,%%ymm5 \n" + "vpand %%ymm2,%%ymm4,%%ymm4 \n" + "vpshufb %%ymm4,%%ymm3,%%ymm4 \n" + "vpaddb %%ymm5,%%ymm4,%%ymm4 \n" + "vpaddb %%ymm6,%%ymm4,%%ymm4 \n" + "vpsadbw %%ymm1,%%ymm4,%%ymm4 \n" + "vpaddd %%ymm0,%%ymm4,%%ymm0 \n" + "sub $0x40,%2 \n" + "jg 1b \n" - "vpermq $0xb1,%%ymm0,%%ymm1 \n" - "vpaddd %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xaa,%%ymm0,%%ymm1 \n" - "vpaddd %%ymm1,%%ymm0,%%ymm0 \n" - "vmovd %%xmm0, %3 \n" + "vpermq $0xb1,%%ymm0,%%ymm1 \n" + "vpaddd %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xaa,%%ymm0,%%ymm1 \n" + "vpaddd %%ymm1,%%ymm0,%%ymm0 \n" + "vmovd %%xmm0, %3 \n" "vzeroupper \n" : "+r"(src_a), // %0 "+r"(src_b), // %1 @@ -234,34 +234,34 @@ uint32_t SumSquareError_SSE2(const uint8_t* src_a, int count) { uint32_t sse; asm volatile( - "pxor %%xmm0,%%xmm0 \n" - "pxor %%xmm5,%%xmm5 \n" + "pxor %%xmm0,%%xmm0 \n" + "pxor %%xmm5,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "movdqu (%1),%%xmm2 \n" - "lea 0x10(%1),%1 \n" - "movdqa %%xmm1,%%xmm3 \n" - "psubusb %%xmm2,%%xmm1 \n" - "psubusb %%xmm3,%%xmm2 \n" - "por %%xmm2,%%xmm1 \n" - "movdqa %%xmm1,%%xmm2 \n" - "punpcklbw %%xmm5,%%xmm1 \n" - "punpckhbw %%xmm5,%%xmm2 \n" - "pmaddwd %%xmm1,%%xmm1 \n" - "pmaddwd %%xmm2,%%xmm2 \n" - "paddd %%xmm1,%%xmm0 \n" - "paddd %%xmm2,%%xmm0 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "movdqu (%1),%%xmm2 \n" + "lea 0x10(%1),%1 \n" + "movdqa %%xmm1,%%xmm3 \n" 
+ "psubusb %%xmm2,%%xmm1 \n" + "psubusb %%xmm3,%%xmm2 \n" + "por %%xmm2,%%xmm1 \n" + "movdqa %%xmm1,%%xmm2 \n" + "punpcklbw %%xmm5,%%xmm1 \n" + "punpckhbw %%xmm5,%%xmm2 \n" + "pmaddwd %%xmm1,%%xmm1 \n" + "pmaddwd %%xmm2,%%xmm2 \n" + "paddd %%xmm1,%%xmm0 \n" + "paddd %%xmm2,%%xmm0 \n" + "sub $0x10,%2 \n" + "jg 1b \n" - "pshufd $0xee,%%xmm0,%%xmm1 \n" - "paddd %%xmm1,%%xmm0 \n" - "pshufd $0x1,%%xmm0,%%xmm1 \n" - "paddd %%xmm1,%%xmm0 \n" - "movd %%xmm0,%3 \n" + "pshufd $0xee,%%xmm0,%%xmm1 \n" + "paddd %%xmm1,%%xmm0 \n" + "pshufd $0x1,%%xmm0,%%xmm1 \n" + "paddd %%xmm1,%%xmm0 \n" + "movd %%xmm0,%3 \n" : "+r"(src_a), // %0 "+r"(src_b), // %1 @@ -301,44 +301,44 @@ static const uvec32 kHashMul3 = { uint32_t HashDjb2_SSE41(const uint8_t* src, int count, uint32_t seed) { uint32_t hash; asm volatile( - "movd %2,%%xmm0 \n" - "pxor %%xmm7,%%xmm7 \n" - "movdqa %4,%%xmm6 \n" + "movd %2,%%xmm0 \n" + "pxor %%xmm7,%%xmm7 \n" + "movdqa %4,%%xmm6 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "pmulld %%xmm6,%%xmm0 \n" - "movdqa %5,%%xmm5 \n" - "movdqa %%xmm1,%%xmm2 \n" - "punpcklbw %%xmm7,%%xmm2 \n" - "movdqa %%xmm2,%%xmm3 \n" - "punpcklwd %%xmm7,%%xmm3 \n" - "pmulld %%xmm5,%%xmm3 \n" - "movdqa %6,%%xmm5 \n" - "movdqa %%xmm2,%%xmm4 \n" - "punpckhwd %%xmm7,%%xmm4 \n" - "pmulld %%xmm5,%%xmm4 \n" - "movdqa %7,%%xmm5 \n" - "punpckhbw %%xmm7,%%xmm1 \n" - "movdqa %%xmm1,%%xmm2 \n" - "punpcklwd %%xmm7,%%xmm2 \n" - "pmulld %%xmm5,%%xmm2 \n" - "movdqa %8,%%xmm5 \n" - "punpckhwd %%xmm7,%%xmm1 \n" - "pmulld %%xmm5,%%xmm1 \n" - "paddd %%xmm4,%%xmm3 \n" - "paddd %%xmm2,%%xmm1 \n" - "paddd %%xmm3,%%xmm1 \n" - "pshufd $0xe,%%xmm1,%%xmm2 \n" - "paddd %%xmm2,%%xmm1 \n" - "pshufd $0x1,%%xmm1,%%xmm2 \n" - "paddd %%xmm2,%%xmm1 \n" - "paddd %%xmm1,%%xmm0 \n" - "sub $0x10,%1 \n" - "jg 1b \n" - "movd %%xmm0,%3 \n" + "movdqu (%0),%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "pmulld %%xmm6,%%xmm0 \n" + "movdqa %5,%%xmm5 \n" + "movdqa %%xmm1,%%xmm2 \n" + "punpcklbw %%xmm7,%%xmm2 \n" + "movdqa 
%%xmm2,%%xmm3 \n" + "punpcklwd %%xmm7,%%xmm3 \n" + "pmulld %%xmm5,%%xmm3 \n" + "movdqa %6,%%xmm5 \n" + "movdqa %%xmm2,%%xmm4 \n" + "punpckhwd %%xmm7,%%xmm4 \n" + "pmulld %%xmm5,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "punpckhbw %%xmm7,%%xmm1 \n" + "movdqa %%xmm1,%%xmm2 \n" + "punpcklwd %%xmm7,%%xmm2 \n" + "pmulld %%xmm5,%%xmm2 \n" + "movdqa %8,%%xmm5 \n" + "punpckhwd %%xmm7,%%xmm1 \n" + "pmulld %%xmm5,%%xmm1 \n" + "paddd %%xmm4,%%xmm3 \n" + "paddd %%xmm2,%%xmm1 \n" + "paddd %%xmm3,%%xmm1 \n" + "pshufd $0xe,%%xmm1,%%xmm2 \n" + "paddd %%xmm2,%%xmm1 \n" + "pshufd $0x1,%%xmm1,%%xmm2 \n" + "paddd %%xmm2,%%xmm1 \n" + "paddd %%xmm1,%%xmm0 \n" + "sub $0x10,%1 \n" + "jg 1b \n" + "movd %%xmm0,%3 \n" : "+r"(src), // %0 "+r"(count), // %1 "+rm"(seed), // %2 diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare_neon.cc b/TMessagesProj/jni/third_party/libyuv/source/compare_neon.cc index 2a2181e0c..afdd60121 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare_neon.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare_neon.cc @@ -29,24 +29,24 @@ uint32_t HammingDistance_NEON(const uint8_t* src_a, uint32_t diff; asm volatile( - "vmov.u16 q4, #0 \n" // accumulator + "vmov.u16 q4, #0 \n" // accumulator "1: \n" - "vld1.8 {q0, q1}, [%0]! \n" - "vld1.8 {q2, q3}, [%1]! \n" - "veor.32 q0, q0, q2 \n" - "veor.32 q1, q1, q3 \n" - "vcnt.i8 q0, q0 \n" - "vcnt.i8 q1, q1 \n" - "subs %2, %2, #32 \n" - "vadd.u8 q0, q0, q1 \n" // 16 byte counts - "vpadal.u8 q4, q0 \n" // 8 shorts - "bgt 1b \n" + "vld1.8 {q0, q1}, [%0]! \n" + "vld1.8 {q2, q3}, [%1]! 
\n" + "veor.32 q0, q0, q2 \n" + "veor.32 q1, q1, q3 \n" + "vcnt.i8 q0, q0 \n" + "vcnt.i8 q1, q1 \n" + "subs %2, %2, #32 \n" + "vadd.u8 q0, q0, q1 \n" // 16 byte counts + "vpadal.u8 q4, q0 \n" // 8 shorts + "bgt 1b \n" - "vpaddl.u16 q0, q4 \n" // 4 ints - "vpadd.u32 d0, d0, d1 \n" - "vpadd.u32 d0, d0, d0 \n" - "vmov.32 %3, d0[0] \n" + "vpaddl.u16 q0, q4 \n" // 4 ints + "vpadd.u32 d0, d0, d1 \n" + "vpadd.u32 d0, d0, d0 \n" + "vmov.32 %3, d0[0] \n" : "+r"(src_a), "+r"(src_b), "+r"(count), "=r"(diff) : @@ -59,29 +59,29 @@ uint32_t SumSquareError_NEON(const uint8_t* src_a, int count) { uint32_t sse; asm volatile( - "vmov.u8 q8, #0 \n" - "vmov.u8 q10, #0 \n" - "vmov.u8 q9, #0 \n" - "vmov.u8 q11, #0 \n" + "vmov.u8 q8, #0 \n" + "vmov.u8 q10, #0 \n" + "vmov.u8 q9, #0 \n" + "vmov.u8 q11, #0 \n" "1: \n" - "vld1.8 {q0}, [%0]! \n" - "vld1.8 {q1}, [%1]! \n" - "subs %2, %2, #16 \n" - "vsubl.u8 q2, d0, d2 \n" - "vsubl.u8 q3, d1, d3 \n" - "vmlal.s16 q8, d4, d4 \n" - "vmlal.s16 q9, d6, d6 \n" - "vmlal.s16 q10, d5, d5 \n" - "vmlal.s16 q11, d7, d7 \n" - "bgt 1b \n" + "vld1.8 {q0}, [%0]! \n" + "vld1.8 {q1}, [%1]! 
\n" + "subs %2, %2, #16 \n" + "vsubl.u8 q2, d0, d2 \n" + "vsubl.u8 q3, d1, d3 \n" + "vmlal.s16 q8, d4, d4 \n" + "vmlal.s16 q9, d6, d6 \n" + "vmlal.s16 q10, d5, d5 \n" + "vmlal.s16 q11, d7, d7 \n" + "bgt 1b \n" - "vadd.u32 q8, q8, q9 \n" - "vadd.u32 q10, q10, q11 \n" - "vadd.u32 q11, q8, q10 \n" - "vpaddl.u32 q1, q11 \n" - "vadd.u64 d0, d2, d3 \n" - "vmov.32 %3, d0[0] \n" + "vadd.u32 q8, q8, q9 \n" + "vadd.u32 q10, q10, q11 \n" + "vadd.u32 q11, q8, q10 \n" + "vpaddl.u32 q1, q11 \n" + "vadd.u64 d0, d2, d3 \n" + "vmov.32 %3, d0[0] \n" : "+r"(src_a), "+r"(src_b), "+r"(count), "=r"(sse) : : "memory", "cc", "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11"); diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare_neon64.cc b/TMessagesProj/jni/third_party/libyuv/source/compare_neon64.cc index 6e8f672ab..70fb9b914 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare_neon64.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare_neon64.cc @@ -27,22 +27,24 @@ uint32_t HammingDistance_NEON(const uint8_t* src_a, int count) { uint32_t diff; asm volatile( - "movi v4.8h, #0 \n" + "movi v4.8h, #0 \n" "1: \n" - "ld1 {v0.16b, v1.16b}, [%0], #32 \n" - "ld1 {v2.16b, v3.16b}, [%1], #32 \n" - "eor v0.16b, v0.16b, v2.16b \n" - "eor v1.16b, v1.16b, v3.16b \n" - "cnt v0.16b, v0.16b \n" - "cnt v1.16b, v1.16b \n" - "subs %w2, %w2, #32 \n" - "add v0.16b, v0.16b, v1.16b \n" - "uadalp v4.8h, v0.16b \n" - "b.gt 1b \n" + "ld1 {v0.16b, v1.16b}, [%0], #32 \n" + "ld1 {v2.16b, v3.16b}, [%1], #32 \n" + "eor v0.16b, v0.16b, v2.16b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "eor v1.16b, v1.16b, v3.16b \n" + "cnt v0.16b, v0.16b \n" + "prfm pldl1keep, [%1, 448] \n" + "cnt v1.16b, v1.16b \n" + "subs %w2, %w2, #32 \n" + "add v0.16b, v0.16b, v1.16b \n" + "uadalp v4.8h, v0.16b \n" + "b.gt 1b \n" - "uaddlv s4, v4.8h \n" - "fmov %w3, s4 \n" + "uaddlv s4, v4.8h \n" + "fmov %w3, s4 \n" : "+r"(src_a), "+r"(src_b), "+r"(count), "=r"(diff) : : "cc", "v0", "v1", "v2", 
"v3", "v4"); @@ -54,28 +56,30 @@ uint32_t SumSquareError_NEON(const uint8_t* src_a, int count) { uint32_t sse; asm volatile( - "eor v16.16b, v16.16b, v16.16b \n" - "eor v18.16b, v18.16b, v18.16b \n" - "eor v17.16b, v17.16b, v17.16b \n" - "eor v19.16b, v19.16b, v19.16b \n" + "eor v16.16b, v16.16b, v16.16b \n" + "eor v18.16b, v18.16b, v18.16b \n" + "eor v17.16b, v17.16b, v17.16b \n" + "eor v19.16b, v19.16b, v19.16b \n" "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" - "ld1 {v1.16b}, [%1], #16 \n" - "subs %w2, %w2, #16 \n" - "usubl v2.8h, v0.8b, v1.8b \n" - "usubl2 v3.8h, v0.16b, v1.16b \n" - "smlal v16.4s, v2.4h, v2.4h \n" - "smlal v17.4s, v3.4h, v3.4h \n" - "smlal2 v18.4s, v2.8h, v2.8h \n" - "smlal2 v19.4s, v3.8h, v3.8h \n" - "b.gt 1b \n" + "ld1 {v0.16b}, [%0], #16 \n" + "ld1 {v1.16b}, [%1], #16 \n" + "subs %w2, %w2, #16 \n" + "usubl v2.8h, v0.8b, v1.8b \n" + "usubl2 v3.8h, v0.16b, v1.16b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "smlal v16.4s, v2.4h, v2.4h \n" + "smlal v17.4s, v3.4h, v3.4h \n" + "prfm pldl1keep, [%1, 448] \n" + "smlal2 v18.4s, v2.8h, v2.8h \n" + "smlal2 v19.4s, v3.8h, v3.8h \n" + "b.gt 1b \n" - "add v16.4s, v16.4s, v17.4s \n" - "add v18.4s, v18.4s, v19.4s \n" - "add v19.4s, v16.4s, v18.4s \n" - "addv s0, v19.4s \n" - "fmov %w3, s0 \n" + "add v16.4s, v16.4s, v17.4s \n" + "add v18.4s, v18.4s, v19.4s \n" + "add v19.4s, v16.4s, v18.4s \n" + "addv s0, v19.4s \n" + "fmov %w3, s0 \n" : "+r"(src_a), "+r"(src_b), "+r"(count), "=r"(sse) : : "cc", "v0", "v1", "v2", "v3", "v16", "v17", "v18", "v19"); diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert.cc b/TMessagesProj/jni/third_party/libyuv/source/convert.cc index 614fa4824..98258b9bc 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert.cc @@ -320,14 +320,6 @@ int I422ToNV21(const uint8_t* src_y, } } #endif -#if defined(HAS_MERGEUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MergeUVRow = MergeUVRow_Any_MSA; - 
if (IS_ALIGNED(halfwidth, 16)) { - MergeUVRow = MergeUVRow_MSA; - } - } -#endif #if defined(HAS_MERGEUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { MergeUVRow = MergeUVRow_Any_MMI; @@ -336,6 +328,14 @@ int I422ToNV21(const uint8_t* src_y, } } #endif +#if defined(HAS_MERGEUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MergeUVRow = MergeUVRow_Any_MSA; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow = MergeUVRow_MSA; + } + } +#endif #if defined(HAS_INTERPOLATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { InterpolateRow = InterpolateRow_Any_SSSE3; @@ -360,14 +360,6 @@ int I422ToNV21(const uint8_t* src_y, } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - InterpolateRow = InterpolateRow_MSA; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { InterpolateRow = InterpolateRow_Any_MMI; @@ -376,6 +368,14 @@ int I422ToNV21(const uint8_t* src_y, } } #endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_MSA; + } + } +#endif if (dst_y) { CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, halfwidth, height); @@ -426,7 +426,41 @@ int I444ToI420(const uint8_t* src_y, dst_v, dst_stride_v, width, height, width, height); } -// TODO(fbarchard): Implement row conversion. +LIBYUV_API +int I444ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (!src_y || !src_u || !src_v || !dst_y || !dst_uv || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + if (dst_y) { + CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + } + HalfMergeUVPlane(src_u, src_stride_u, src_v, src_stride_v, dst_uv, + dst_stride_uv, width, height); + return 0; +} + LIBYUV_API int I444ToNV21(const uint8_t* src_y, int src_stride_y, @@ -440,30 +474,9 @@ int I444ToNV21(const uint8_t* src_y, int dst_stride_vu, int width, int height) { - int halfwidth = (width + 1) >> 1; - int halfheight = (height + 1) >> 1; - // Negative height means invert the image. - if (height < 0) { - height = -height; - halfheight = (height + 1) >> 1; - src_y = src_y + (height - 1) * src_stride_y; - src_u = src_u + (height - 1) * src_stride_u; - src_v = src_v + (height - 1) * src_stride_v; - src_stride_y = -src_stride_y; - src_stride_u = -src_stride_u; - src_stride_v = -src_stride_v; - } - // Allocate u and v buffers - align_buffer_64(plane_u, halfwidth * halfheight * 2); - uint8_t* plane_v = plane_u + halfwidth * halfheight; - - I444ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, - dst_y, dst_stride_y, plane_u, halfwidth, plane_v, halfwidth, width, - height); - MergeUVPlane(plane_v, halfwidth, plane_u, halfwidth, dst_vu, dst_stride_vu, - halfwidth, halfheight); - free_aligned_buffer_64(plane_u); - return 0; + return I444ToNV12(src_y, src_stride_y, src_v, src_stride_v, src_u, + src_stride_u, dst_y, dst_stride_y, dst_vu, dst_stride_vu, + width, height); } // I400 is greyscale typically used in MJPG @@ -527,120 +540,8 @@ int I400ToNV21(const uint8_t* src_y, return 0; } -static void CopyPlane2(const uint8_t* src, - int src_stride_0, - int src_stride_1, - uint8_t* dst, - int dst_stride, - int width, - int height) { - int y; - void (*CopyRow)(const uint8_t* src, 
uint8_t* dst, int width) = CopyRow_C; -#if defined(HAS_COPYROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - CopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2; - } -#endif -#if defined(HAS_COPYROW_AVX) - if (TestCpuFlag(kCpuHasAVX)) { - CopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX; - } -#endif -#if defined(HAS_COPYROW_ERMS) - if (TestCpuFlag(kCpuHasERMS)) { - CopyRow = CopyRow_ERMS; - } -#endif -#if defined(HAS_COPYROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - CopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON; - } -#endif - - // Copy plane - for (y = 0; y < height - 1; y += 2) { - CopyRow(src, dst, width); - CopyRow(src + src_stride_0, dst + dst_stride, width); - src += src_stride_0 + src_stride_1; - dst += dst_stride * 2; - } - if (height & 1) { - CopyRow(src, dst, width); - } -} - -// Support converting from FOURCC_M420 -// Useful for bandwidth constrained transports like USB 1.0 and 2.0 and for -// easy conversion to I420. -// M420 format description: -// M420 is row biplanar 420: 2 rows of Y and 1 row of UV. -// Chroma is half width / half height. (420) -// src_stride_m420 is row planar. Normally this will be the width in pixels. -// The UV plane is half width, but 2 values, so src_stride_m420 applies to -// this as well as the two Y planes. -static int X420ToI420(const uint8_t* src_y, - int src_stride_y0, - int src_stride_y1, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height) { - int halfwidth = (width + 1) >> 1; - int halfheight = (height + 1) >> 1; - if (!src_uv || !dst_u || !dst_v || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. 
- if (height < 0) { - height = -height; - halfheight = (height + 1) >> 1; - if (dst_y) { - dst_y = dst_y + (height - 1) * dst_stride_y; - } - dst_u = dst_u + (halfheight - 1) * dst_stride_u; - dst_v = dst_v + (halfheight - 1) * dst_stride_v; - dst_stride_y = -dst_stride_y; - dst_stride_u = -dst_stride_u; - dst_stride_v = -dst_stride_v; - } - // Coalesce rows. - if (src_stride_y0 == width && src_stride_y1 == width && - dst_stride_y == width) { - width *= height; - height = 1; - src_stride_y0 = src_stride_y1 = dst_stride_y = 0; - } - // Coalesce rows. - if (src_stride_uv == halfwidth * 2 && dst_stride_u == halfwidth && - dst_stride_v == halfwidth) { - halfwidth *= halfheight; - halfheight = 1; - src_stride_uv = dst_stride_u = dst_stride_v = 0; - } - - if (dst_y) { - if (src_stride_y0 == src_stride_y1) { - CopyPlane(src_y, src_stride_y0, dst_y, dst_stride_y, width, height); - } else { - CopyPlane2(src_y, src_stride_y0, src_stride_y1, dst_y, dst_stride_y, - width, height); - } - } - - // Split UV plane - NV12 / NV21 - SplitUVPlane(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, dst_stride_v, - halfwidth, halfheight); - - return 0; -} - // Convert NV12 to I420. +// TODO(fbarchard): Consider inverting destination. Faster on ARM with prfm. LIBYUV_API int NV12ToI420(const uint8_t* src_y, int src_stride_y, @@ -654,9 +555,43 @@ int NV12ToI420(const uint8_t* src_y, int dst_stride_v, int width, int height) { - return X420ToI420(src_y, src_stride_y, src_stride_y, src_uv, src_stride_uv, - dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v, - dst_stride_v, width, height); + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if (!src_uv || !dst_u || !dst_v || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_y = src_y + (height - 1) * src_stride_y; + src_uv = src_uv + (halfheight - 1) * src_stride_uv; + src_stride_y = -src_stride_y; + src_stride_uv = -src_stride_uv; + } + // Coalesce rows. + if (src_stride_y == width && dst_stride_y == width) { + width *= height; + height = 1; + src_stride_y = dst_stride_y = 0; + } + // Coalesce rows. + if (src_stride_uv == halfwidth * 2 && dst_stride_u == halfwidth && + dst_stride_v == halfwidth) { + halfwidth *= halfheight; + halfheight = 1; + src_stride_uv = dst_stride_u = dst_stride_v = 0; + } + + if (dst_y) { + CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + } + + // Split UV plane - NV12 / NV21 + SplitUVPlane(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, dst_stride_v, + halfwidth, halfheight); + + return 0; } // Convert NV21 to I420. Same as NV12 but u and v pointers swapped. @@ -673,26 +608,8 @@ int NV21ToI420(const uint8_t* src_y, int dst_stride_v, int width, int height) { - return X420ToI420(src_y, src_stride_y, src_stride_y, src_vu, src_stride_vu, - dst_y, dst_stride_y, dst_v, dst_stride_v, dst_u, - dst_stride_u, width, height); -} - -// Convert M420 to I420. 
-LIBYUV_API -int M420ToI420(const uint8_t* src_m420, - int src_stride_m420, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height) { - return X420ToI420(src_m420, src_stride_m420, src_stride_m420 * 2, - src_m420 + src_stride_m420 * 2, src_stride_m420 * 3, dst_y, - dst_stride_y, dst_u, dst_stride_u, dst_v, dst_stride_v, + return NV12ToI420(src_y, src_stride_y, src_vu, src_stride_vu, dst_y, + dst_stride_y, dst_v, dst_stride_v, dst_u, dst_stride_u, width, height); } @@ -750,17 +667,7 @@ int YUY2ToI420(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_YUY2TOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - YUY2ToYRow = YUY2ToYRow_Any_MSA; - YUY2ToUVRow = YUY2ToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - YUY2ToYRow = YUY2ToYRow_MSA; - YUY2ToUVRow = YUY2ToUVRow_MSA; - } - } -#endif -#if defined(HAS_YUY2TOYROW_MMI) +#if defined(HAS_YUY2TOYROW_MMI) && defined(HAS_YUY2TOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { YUY2ToYRow = YUY2ToYRow_Any_MMI; YUY2ToUVRow = YUY2ToUVRow_Any_MMI; @@ -772,6 +679,16 @@ int YUY2ToI420(const uint8_t* src_yuy2, } } #endif +#if defined(HAS_YUY2TOYROW_MSA) && defined(HAS_YUY2TOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + YUY2ToYRow = YUY2ToYRow_Any_MSA; + YUY2ToUVRow = YUY2ToUVRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + YUY2ToYRow = YUY2ToYRow_MSA; + YUY2ToUVRow = YUY2ToUVRow_MSA; + } + } +#endif for (y = 0; y < height - 1; y += 2) { YUY2ToUVRow(src_yuy2, src_stride_yuy2, dst_u, dst_v, width); @@ -843,6 +760,16 @@ int UYVYToI420(const uint8_t* src_uyvy, } } #endif +#if defined(HAS_UYVYTOYROW_MMI) && defined(HAS_UYVYTOUVROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + UYVYToYRow = UYVYToYRow_Any_MMI; + UYVYToUVRow = UYVYToUVRow_Any_MMI; + if (IS_ALIGNED(width, 16)) { + UYVYToYRow = UYVYToYRow_MMI; + UYVYToUVRow = UYVYToUVRow_MMI; + } + } +#endif #if defined(HAS_UYVYTOYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { UYVYToYRow = UYVYToYRow_Any_MSA; @@ -853,16 +780,6 @@ int 
UYVYToI420(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_UYVYTOYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - UYVYToYRow = UYVYToYRow_Any_MMI; - UYVYToUVRow = UYVYToUVRow_Any_MMI; - if (IS_ALIGNED(width, 16)) { - UYVYToYRow = UYVYToYRow_MMI; - UYVYToUVRow = UYVYToUVRow_MMI; - } - } -#endif for (y = 0; y < height - 1; y += 2) { UYVYToUVRow(src_uyvy, src_stride_uyvy, dst_u, dst_v, width); @@ -1081,38 +998,30 @@ int ARGBToI420(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVRow = ARGBToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOYROW_MMI) +#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYRow = ARGBToYRow_Any_MMI; + ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ARGBToYRow = ARGBToYRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { ARGBToUVRow = ARGBToUVRow_MMI; } } #endif +#if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + ARGBToUVRow = ARGBToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_MSA; + } + } +#endif for (y = 0; y < height - 1; y += 2) { ARGBToUVRow(src_argb, src_stride_argb, dst_u, dst_v, width); @@ -1183,38 +1092,28 @@ int BGRAToI420(const uint8_t* src_bgra, } } #endif -#if defined(HAS_BGRATOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - BGRAToYRow = BGRAToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - BGRAToYRow = BGRAToYRow_MSA; - } - } -#endif -#if defined(HAS_BGRATOUVROW_MSA) - if 
(TestCpuFlag(kCpuHasMSA)) { - BGRAToUVRow = BGRAToUVRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - BGRAToUVRow = BGRAToUVRow_MSA; - } - } -#endif -#if defined(HAS_BGRATOYROW_MMI) +#if defined(HAS_BGRATOYROW_MMI) && defined(HAS_BGRATOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { BGRAToYRow = BGRAToYRow_Any_MMI; + BGRAToUVRow = BGRAToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { BGRAToYRow = BGRAToYRow_MMI; } - } -#endif -#if defined(HAS_BGRATOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - BGRAToUVRow = BGRAToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { BGRAToUVRow = BGRAToUVRow_MMI; } } #endif +#if defined(HAS_BGRATOYROW_MSA) && defined(HAS_BGRATOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + BGRAToYRow = BGRAToYRow_Any_MSA; + BGRAToUVRow = BGRAToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + BGRAToYRow = BGRAToYRow_MSA; + BGRAToUVRow = BGRAToUVRow_MSA; + } + } +#endif for (y = 0; y < height - 1; y += 2) { BGRAToUVRow(src_bgra, src_stride_bgra, dst_u, dst_v, width); @@ -1269,6 +1168,16 @@ int ABGRToI420(const uint8_t* src_abgr, } } #endif +#if defined(HAS_ABGRTOYROW_AVX2) && defined(HAS_ABGRTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToUVRow = ABGRToUVRow_Any_AVX2; + ABGRToYRow = ABGRToYRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToUVRow = ABGRToUVRow_AVX2; + ABGRToYRow = ABGRToYRow_AVX2; + } + } +#endif #if defined(HAS_ABGRTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ABGRToYRow = ABGRToYRow_Any_NEON; @@ -1285,38 +1194,28 @@ int ABGRToI420(const uint8_t* src_abgr, } } #endif -#if defined(HAS_ABGRTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ABGRToYRow = ABGRToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ABGRToYRow = ABGRToYRow_MSA; - } - } -#endif -#if defined(HAS_ABGRTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ABGRToUVRow = ABGRToUVRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ABGRToUVRow = ABGRToUVRow_MSA; - } - } -#endif -#if defined(HAS_ABGRTOYROW_MMI) +#if defined(HAS_ABGRTOYROW_MMI) && defined(HAS_ABGRTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { 
ABGRToYRow = ABGRToYRow_Any_MMI; + ABGRToUVRow = ABGRToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ABGRToYRow = ABGRToYRow_MMI; } - } -#endif -#if defined(HAS_ABGRTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ABGRToUVRow = ABGRToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { ABGRToUVRow = ABGRToUVRow_MMI; } } #endif +#if defined(HAS_ABGRTOYROW_MSA) && defined(HAS_ABGRTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ABGRToYRow = ABGRToYRow_Any_MSA; + ABGRToUVRow = ABGRToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ABGRToYRow = ABGRToYRow_MSA; + ABGRToUVRow = ABGRToUVRow_MSA; + } + } +#endif for (y = 0; y < height - 1; y += 2) { ABGRToUVRow(src_abgr, src_stride_abgr, dst_u, dst_v, width); @@ -1387,38 +1286,28 @@ int RGBAToI420(const uint8_t* src_rgba, } } #endif -#if defined(HAS_RGBATOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGBAToYRow = RGBAToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGBAToYRow = RGBAToYRow_MSA; - } - } -#endif -#if defined(HAS_RGBATOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGBAToUVRow = RGBAToUVRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGBAToUVRow = RGBAToUVRow_MSA; - } - } -#endif -#if defined(HAS_RGBATOYROW_MMI) +#if defined(HAS_RGBATOYROW_MMI) && defined(HAS_RGBATOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGBAToYRow = RGBAToYRow_Any_MMI; + RGBAToUVRow = RGBAToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { RGBAToYRow = RGBAToYRow_MMI; } - } -#endif -#if defined(HAS_RGBATOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGBAToUVRow = RGBAToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { RGBAToUVRow = RGBAToUVRow_MMI; } } #endif +#if defined(HAS_RGBATOYROW_MSA) && defined(HAS_RGBATOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGBAToYRow = RGBAToYRow_Any_MSA; + RGBAToUVRow = RGBAToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGBAToYRow = RGBAToYRow_MSA; + RGBAToUVRow = RGBAToUVRow_MSA; + } + } +#endif for (y = 0; y < height - 1; y += 2) { RGBAToUVRow(src_rgba, src_stride_rgba, dst_u, dst_v, width); @@ -1487,16 +1376,9 @@ int 
RGB24ToI420(const uint8_t* src_rgb24, } } } -#elif defined(HAS_RGB24TOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGB24ToUVRow = RGB24ToUVRow_Any_MSA; - RGB24ToYRow = RGB24ToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGB24ToYRow = RGB24ToYRow_MSA; - RGB24ToUVRow = RGB24ToUVRow_MSA; - } - } -#elif defined(HAS_RGB24TOYROW_MMI) +// MMI and MSA version does direct RGB24 to YUV. +#elif (defined(HAS_RGB24TOYROW_MMI) || defined(HAS_RGB24TOYROW_MSA)) +#if defined(HAS_RGB24TOYROW_MMI) && defined(HAS_RGB24TOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGB24ToUVRow = RGB24ToUVRow_Any_MMI; RGB24ToYRow = RGB24ToYRow_Any_MMI; @@ -1507,6 +1389,17 @@ int RGB24ToI420(const uint8_t* src_rgb24, } } } +#endif +#if defined(HAS_RGB24TOYROW_MSA) && defined(HAS_RGB24TOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB24ToUVRow = RGB24ToUVRow_Any_MSA; + RGB24ToYRow = RGB24ToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGB24ToYRow = RGB24ToYRow_MSA; + RGB24ToUVRow = RGB24ToUVRow_MSA; + } + } +#endif // Other platforms do intermediate conversion from RGB24 to ARGB. #else #if defined(HAS_RGB24TOARGBROW_SSSE3) @@ -1598,8 +1491,8 @@ int RGB24ToJ420(const uint8_t* src_rgb24, int width, int height) { int y; -#if (defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) +#if (defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ + defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI) void (*RGB24ToUVJRow)(const uint8_t* src_rgb24, int src_stride_rgb24, uint8_t* dst_u, uint8_t* dst_v, int width) = RGB24ToUVJRow_C; @@ -1625,7 +1518,7 @@ int RGB24ToJ420(const uint8_t* src_rgb24, } // Neon version does direct RGB24 to YUV. 
-#if defined(HAS_RGB24TOYJROW_NEON) +#if defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RGB24ToUVJRow = RGB24ToUVJRow_Any_NEON; RGB24ToYJRow = RGB24ToYJRow_Any_NEON; @@ -1636,16 +1529,9 @@ int RGB24ToJ420(const uint8_t* src_rgb24, } } } -#elif defined(HAS_RGB24TOYJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGB24ToUVJRow = RGB24ToUVJRow_Any_MSA; - RGB24ToYJRow = RGB24ToYJRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGB24ToYJRow = RGB24ToYJRow_MSA; - RGB24ToUVJRow = RGB24ToUVJRow_MSA; - } - } -#elif defined(HAS_RGB24TOYJROW_MMI) +// MMI and MSA version does direct RGB24 to YUV. +#elif (defined(HAS_RGB24TOYJROW_MMI) || defined(HAS_RGB24TOYJROW_MSA)) +#if defined(HAS_RGB24TOYJROW_MMI) && defined(HAS_RGB24TOUVJROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGB24ToUVJRow = RGB24ToUVJRow_Any_MMI; RGB24ToYJRow = RGB24ToYJRow_Any_MMI; @@ -1656,7 +1542,17 @@ int RGB24ToJ420(const uint8_t* src_rgb24, } } } -// Other platforms do intermediate conversion from RGB24 to ARGB. +#endif +#if defined(HAS_RGB24TOYJROW_MSA) && defined(HAS_RGB24TOUVJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB24ToUVJRow = RGB24ToUVJRow_Any_MSA; + RGB24ToYJRow = RGB24ToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGB24ToYJRow = RGB24ToYJRow_MSA; + RGB24ToUVJRow = RGB24ToUVJRow_MSA; + } + } +#endif #else #if defined(HAS_RGB24TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { @@ -1689,16 +1585,16 @@ int RGB24ToJ420(const uint8_t* src_rgb24, #endif { -#if !(defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) +#if !((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ + defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) // Allocate 2 rows of ARGB. 
const int kRowSize = (width * 4 + 31) & ~31; align_buffer_64(row, kRowSize * 2); #endif for (y = 0; y < height - 1; y += 2) { -#if (defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) +#if ((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ + defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) RGB24ToUVJRow(src_rgb24, src_stride_rgb24, dst_u, dst_v, width); RGB24ToYJRow(src_rgb24, dst_y, width); RGB24ToYJRow(src_rgb24 + src_stride_rgb24, dst_y + dst_stride_y, width); @@ -1715,8 +1611,8 @@ int RGB24ToJ420(const uint8_t* src_rgb24, dst_v += dst_stride_v; } if (height & 1) { -#if (defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) +#if ((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ + defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) RGB24ToUVJRow(src_rgb24, 0, dst_u, dst_v, width); RGB24ToYJRow(src_rgb24, dst_y, width); #else @@ -1725,8 +1621,8 @@ int RGB24ToJ420(const uint8_t* src_rgb24, ARGBToYJRow(row, dst_y, width); #endif } -#if !(defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) +#if !((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ + defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) free_aligned_buffer_64(row); #endif } @@ -1746,8 +1642,8 @@ int RAWToI420(const uint8_t* src_raw, int width, int height) { int y; -#if (defined(HAS_RAWTOYROW_NEON) || defined(HAS_RAWTOYROW_MSA) || \ - defined(HAS_RAWTOYROW_MMI)) +#if (defined(HAS_RAWTOYROW_NEON) && defined(HAS_RAWTOUVROW_NEON)) || \ + defined(HAS_RAWTOYROW_MSA) || defined(HAS_RAWTOYROW_MMI) void (*RAWToUVRow)(const uint8_t* src_raw, int src_stride_raw, uint8_t* dst_u, uint8_t* dst_v, int width) = RAWToUVRow_C; void (*RAWToYRow)(const uint8_t* src_raw, uint8_t* dst_y, int width) = @@ -1772,7 +1668,7 @@ int RAWToI420(const uint8_t* src_raw, } // Neon 
version does direct RAW to YUV. -#if defined(HAS_RAWTOYROW_NEON) +#if defined(HAS_RAWTOYROW_NEON) && defined(HAS_RAWTOUVROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RAWToUVRow = RAWToUVRow_Any_NEON; RAWToYRow = RAWToYRow_Any_NEON; @@ -1783,16 +1679,9 @@ int RAWToI420(const uint8_t* src_raw, } } } -#elif defined(HAS_RAWTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RAWToUVRow = RAWToUVRow_Any_MSA; - RAWToYRow = RAWToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RAWToYRow = RAWToYRow_MSA; - RAWToUVRow = RAWToUVRow_MSA; - } - } -#elif defined(HAS_RAWTOYROW_MMI) +// MMI and MSA version does direct RAW to YUV. +#elif (defined(HAS_RAWTOYROW_MMI) || defined(HAS_RAWTOYROW_MSA)) +#if defined(HAS_RAWTOYROW_MMI) && defined(HAS_RAWTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RAWToUVRow = RAWToUVRow_Any_MMI; RAWToYRow = RAWToYRow_Any_MMI; @@ -1803,6 +1692,17 @@ int RAWToI420(const uint8_t* src_raw, } } } +#endif +#if defined(HAS_RAWTOYROW_MSA) && defined(HAS_RAWTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToUVRow = RAWToUVRow_Any_MSA; + RAWToYRow = RAWToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RAWToYRow = RAWToYRow_MSA; + RAWToUVRow = RAWToUVRow_MSA; + } + } +#endif // Other platforms do intermediate conversion from RAW to ARGB. #else #if defined(HAS_RAWTOARGBROW_SSSE3) @@ -1931,16 +1831,9 @@ int RGB565ToI420(const uint8_t* src_rgb565, } } } -#elif defined(HAS_RGB565TOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGB565ToUVRow = RGB565ToUVRow_Any_MSA; - RGB565ToYRow = RGB565ToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGB565ToYRow = RGB565ToYRow_MSA; - RGB565ToUVRow = RGB565ToUVRow_MSA; - } - } -#elif defined(HAS_RGB565TOYROW_MMI) +// MMI and MSA version does direct RGB565 to YUV. 
+#elif (defined(HAS_RGB565TOYROW_MMI) || defined(HAS_RGB565TOYROW_MSA)) +#if defined(HAS_RGB565TOYROW_MMI) && defined(HAS_RGB565TOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGB565ToUVRow = RGB565ToUVRow_Any_MMI; RGB565ToYRow = RGB565ToYRow_Any_MMI; @@ -1951,6 +1844,17 @@ int RGB565ToI420(const uint8_t* src_rgb565, } } } +#endif +#if defined(HAS_RGB565TOYROW_MSA) && defined(HAS_RGB565TOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB565ToUVRow = RGB565ToUVRow_Any_MSA; + RGB565ToYRow = RGB565ToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGB565ToYRow = RGB565ToYRow_MSA; + RGB565ToUVRow = RGB565ToUVRow_MSA; + } + } +#endif // Other platforms do intermediate conversion from RGB565 to ARGB. #else #if defined(HAS_RGB565TOARGBROW_SSE2) @@ -2086,16 +1990,9 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, } } } -#elif defined(HAS_ARGB1555TOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGB1555ToUVRow = ARGB1555ToUVRow_Any_MSA; - ARGB1555ToYRow = ARGB1555ToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGB1555ToYRow = ARGB1555ToYRow_MSA; - ARGB1555ToUVRow = ARGB1555ToUVRow_MSA; - } - } -#elif defined(HAS_ARGB1555TOYROW_MMI) +// MMI and MSA version does direct ARGB1555 to YUV. +#elif (defined(HAS_ARGB1555TOYROW_MMI) || defined(HAS_ARGB1555TOYROW_MSA)) +#if defined(HAS_ARGB1555TOYROW_MMI) && defined(HAS_ARGB1555TOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGB1555ToUVRow = ARGB1555ToUVRow_Any_MMI; ARGB1555ToYRow = ARGB1555ToYRow_Any_MMI; @@ -2106,6 +2003,17 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, } } } +#endif +#if defined(HAS_ARGB1555TOYROW_MSA) && defined(HAS_ARGB1555TOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGB1555ToUVRow = ARGB1555ToUVRow_Any_MSA; + ARGB1555ToYRow = ARGB1555ToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGB1555ToYRow = ARGB1555ToYRow_MSA; + ARGB1555ToUVRow = ARGB1555ToUVRow_MSA; + } + } +#endif // Other platforms do intermediate conversion from ARGB1555 to ARGB. 
#else #if defined(HAS_ARGB1555TOARGBROW_SSE2) @@ -2243,7 +2151,7 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, } } } -#elif defined(HAS_ARGB4444TOYROW_MMI) +#elif defined(HAS_ARGB4444TOYROW_MMI) && defined(HAS_ARGB4444TOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGB4444ToUVRow = ARGB4444ToUVRow_Any_MMI; ARGB4444ToYRow = ARGB4444ToYRow_Any_MMI; @@ -2300,19 +2208,7 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVRow = ARGBToUVRow_Any_MSA; - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_MSA; - } - } - } -#endif -#if defined(HAS_ARGBTOYROW_MMI) +#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToUVRow = ARGBToUVRow_Any_MMI; ARGBToYRow = ARGBToYRow_Any_MMI; @@ -2324,6 +2220,18 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, } } #endif +#if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToUVRow = ARGBToUVRow_Any_MSA; + ARGBToYRow = ARGBToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_MSA; + } + } + } +#endif #endif { @@ -2378,27 +2286,38 @@ int RGB24ToJ400(const uint8_t* src_rgb24, int width, int height) { int y; -#if (defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) void (*RGB24ToYJRow)(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) = RGB24ToYJRow_C; -#else - void (*RGB24ToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = - RGB24ToARGBRow_C; - void (*ARGBToYJRow)(const uint8_t* src_argb, uint8_t* dst_yj, int width) = - ARGBToYJRow_C; -#endif if (!src_rgb24 || !dst_yj || width <= 0 || height == 0) { return -1; } - // Negative height means invert the image. 
if (height < 0) { height = -height; src_rgb24 = src_rgb24 + (height - 1) * src_stride_rgb24; src_stride_rgb24 = -src_stride_rgb24; } - -// Neon version does direct RGB24 to YUV. + // Coalesce rows. + if (src_stride_rgb24 == width * 3 && dst_stride_yj == width) { + width *= height; + height = 1; + src_stride_rgb24 = dst_stride_yj = 0; + } +#if defined(HAS_RGB24TOYJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RGB24ToYJRow = RGB24ToYJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + RGB24ToYJRow = RGB24ToYJRow_SSSE3; + } + } +#endif +#if defined(HAS_RGB24TOYJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + RGB24ToYJRow = RGB24ToYJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + RGB24ToYJRow = RGB24ToYJRow_AVX2; + } + } +#endif #if defined(HAS_RGB24TOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RGB24ToYJRow = RGB24ToYJRow_Any_NEON; @@ -2406,83 +2325,102 @@ int RGB24ToJ400(const uint8_t* src_rgb24, RGB24ToYJRow = RGB24ToYJRow_NEON; } } -#elif defined(HAS_RGB24TOYJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGB24ToYJRow = RGB24ToYJRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGB24ToYJRow = RGB24ToYJRow_MSA; - } - } -#elif defined(HAS_RGB24TOYJROW_MMI) +#endif +#if defined(HAS_RGB24TOYJROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGB24ToYJRow = RGB24ToYJRow_Any_MMI; if (IS_ALIGNED(width, 8)) { RGB24ToYJRow = RGB24ToYJRow_MMI; } } -// Other platforms do intermediate conversion from RGB24 to ARGB. -#else -#if defined(HAS_RGB24TOARGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - RGB24ToARGBRow = RGB24ToARGBRow_Any_SSSE3; +#endif +#if defined(HAS_RGB24TOYJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB24ToYJRow = RGB24ToYJRow_Any_MSA; if (IS_ALIGNED(width, 16)) { - RGB24ToARGBRow = RGB24ToARGBRow_SSSE3; + RGB24ToYJRow = RGB24ToYJRow_MSA; } } #endif -#if defined(HAS_ARGBTOYJROW_SSSE3) + + for (y = 0; y < height; ++y) { + RGB24ToYJRow(src_rgb24, dst_yj, width); + src_rgb24 += src_stride_rgb24; + dst_yj += dst_stride_yj; + } + return 0; +} + +// Convert RAW to J400. 
+LIBYUV_API +int RAWToJ400(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_yj, + int dst_stride_yj, + int width, + int height) { + int y; + void (*RAWToYJRow)(const uint8_t* src_raw, uint8_t* dst_yj, int width) = + RAWToYJRow_C; + if (!src_raw || !dst_yj || width <= 0 || height == 0) { + return -1; + } + if (height < 0) { + height = -height; + src_raw = src_raw + (height - 1) * src_stride_raw; + src_stride_raw = -src_stride_raw; + } + // Coalesce rows. + if (src_stride_raw == width * 3 && dst_stride_yj == width) { + width *= height; + height = 1; + src_stride_raw = dst_stride_yj = 0; + } +#if defined(HAS_RAWTOYJROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToYJRow = ARGBToYJRow_Any_SSSE3; + RAWToYJRow = RAWToYJRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToYJRow = ARGBToYJRow_SSSE3; + RAWToYJRow = RAWToYJRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYJROW_AVX2) +#if defined(HAS_RAWTOYJROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToYJRow = ARGBToYJRow_Any_AVX2; + RAWToYJRow = RAWToYJRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToYJRow = ARGBToYJRow_AVX2; + RAWToYJRow = RAWToYJRow_AVX2; } } #endif +#if defined(HAS_RAWTOYJROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + RAWToYJRow = RAWToYJRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + RAWToYJRow = RAWToYJRow_NEON; + } + } +#endif +#if defined(HAS_RAWTOYJROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + RAWToYJRow = RAWToYJRow_Any_MMI; + if (IS_ALIGNED(width, 8)) { + RAWToYJRow = RAWToYJRow_MMI; + } + } +#endif +#if defined(HAS_RAWTOYJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToYJRow = RAWToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RAWToYJRow = RAWToYJRow_MSA; + } + } #endif - { -#if !(defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) - // Allocate 2 rows of ARGB. 
- const int kRowSize = (width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); -#endif - - for (y = 0; y < height - 1; y += 2) { -#if (defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) - RGB24ToYJRow(src_rgb24, dst_yj, width); - RGB24ToYJRow(src_rgb24 + src_stride_rgb24, dst_yj + dst_stride_yj, width); -#else - RGB24ToARGBRow(src_rgb24, row, width); - RGB24ToARGBRow(src_rgb24 + src_stride_rgb24, row + kRowSize, width); - ARGBToYJRow(row, dst_yj, width); - ARGBToYJRow(row + kRowSize, dst_yj + dst_stride_yj, width); -#endif - src_rgb24 += src_stride_rgb24 * 2; - dst_yj += dst_stride_yj * 2; - } - if (height & 1) { -#if (defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) - RGB24ToYJRow(src_rgb24, dst_yj, width); -#else - RGB24ToARGBRow(src_rgb24, row, width); - ARGBToYJRow(row, dst_yj, width); -#endif - } -#if !(defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) || \ - defined(HAS_RGB24TOYJROW_MMI)) - free_aligned_buffer_64(row); -#endif + for (y = 0; y < height; ++y) { + RAWToYJRow(src_raw, dst_yj, width); + src_raw += src_stride_raw; + dst_yj += dst_stride_yj; } return 0; } diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc index 4217b1dc9..5e7225faf 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc @@ -47,18 +47,19 @@ int ARGBCopy(const uint8_t* src_argb, return 0; } -// Convert I420 to ARGB with matrix -static int I420ToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert I420 to ARGB with matrix. 
+LIBYUV_API +int I420ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*I422ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* rgb_buf, @@ -97,14 +98,6 @@ static int I420ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGBRow = I422ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_MSA; - } - } -#endif #if defined(HAS_I422TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToARGBRow = I422ToARGBRow_Any_MMI; @@ -113,6 +106,14 @@ static int I420ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGBRow = I422ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { I422ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); @@ -270,18 +271,19 @@ int U420ToABGR(const uint8_t* src_y, width, height); } -// Convert I422 to ARGB with matrix -static int I422ToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert I422 to ARGB with matrix. 
+LIBYUV_API +int I422ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*I422ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* rgb_buf, @@ -327,14 +329,6 @@ static int I422ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGBRow = I422ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_MSA; - } - } -#endif #if defined(HAS_I422TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToARGBRow = I422ToARGBRow_Any_MMI; @@ -343,6 +337,14 @@ static int I422ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGBRow = I422ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { I422ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); @@ -498,18 +500,19 @@ int U422ToABGR(const uint8_t* src_y, width, height); } -// Convert I444 to ARGB with matrix -static int I444ToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert I444 to ARGB with matrix. 
+LIBYUV_API +int I444ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*I444ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* rgb_buf, @@ -555,14 +558,6 @@ static int I444ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_I444TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I444ToARGBRow = I444ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I444ToARGBRow = I444ToARGBRow_MSA; - } - } -#endif #if defined(HAS_I444TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I444ToARGBRow = I444ToARGBRow_Any_MMI; @@ -571,6 +566,14 @@ static int I444ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I444TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444ToARGBRow = I444ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444ToARGBRow = I444ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { I444ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); @@ -726,20 +729,21 @@ int U444ToABGR(const uint8_t* src_y, width, height); } -// Convert 10 bit YUV to ARGB with matrix +// Convert 10 bit YUV to ARGB with matrix. // TODO(fbarchard): Consider passing scale multiplier to I210ToARGB to // multiply 10 bit yuv into high bits to allow any number of bits. 
-static int I010ToAR30Matrix(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - const struct YuvConstants* yuvconstants, - int width, - int height) { +LIBYUV_API +int I010ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*I210ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, uint8_t* rgb_buf, @@ -884,20 +888,21 @@ int U010ToAB30(const uint16_t* src_y, &kYuv2020Constants, width, height); } -// Convert 10 bit YUV to ARGB with matrix +// Convert 10 bit YUV to ARGB with matrix. // TODO(fbarchard): Consider passing scale multiplier to I210ToARGB to // multiply 10 bit yuv into high bits to allow any number of bits. -static int I210ToAR30Matrix(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - const struct YuvConstants* yuvconstants, - int width, - int height) { +LIBYUV_API +int I210ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*I210ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, uint8_t* rgb_buf, @@ -1040,18 +1045,19 @@ int U210ToAB30(const uint16_t* src_y, &kYuv2020Constants, width, height); } -// Convert 10 bit YUV to ARGB with matrix -static int I010ToARGBMatrix(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* 
dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert 10 bit YUV to ARGB with matrix. +LIBYUV_API +int I010ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*I210ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, uint8_t* rgb_buf, @@ -1210,18 +1216,19 @@ int U010ToABGR(const uint16_t* src_y, width, height); } -// Convert 10 bit 422 YUV to ARGB with matrix -static int I210ToARGBMatrix(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert 10 bit 422 YUV to ARGB with matrix. +LIBYUV_API +int I210ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*I210ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, uint8_t* rgb_buf, @@ -1270,9 +1277,6 @@ static int I210ToARGBMatrix(const uint16_t* src_y, return 0; } - - - // Convert I210 to ARGB. LIBYUV_API int I210ToARGB(const uint16_t* src_y, @@ -1381,21 +1385,22 @@ int U210ToABGR(const uint16_t* src_y, width, height); } -// Convert I420 with Alpha to preattenuated ARGB. 
-static int I420AlphaToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - const uint8_t* src_a, - int src_stride_a, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height, - int attenuate) { +// Convert I420 with Alpha to preattenuated ARGB with matrix. +LIBYUV_API +int I420AlphaToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { int y; void (*I422AlphaToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, const uint8_t* a_buf, @@ -1437,14 +1442,6 @@ static int I420AlphaToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_I422ALPHATOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_MSA; - } - } -#endif #if defined(HAS_I422ALPHATOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422AlphaToARGBRow = I422AlphaToARGBRow_Any_MMI; @@ -1453,6 +1450,14 @@ static int I420AlphaToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I422ALPHATOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_MSA; + } + } +#endif #if defined(HAS_ARGBATTENUATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; @@ -1477,14 +1482,6 @@ static int I420AlphaToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_ARGBATTENUATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBAttenuateRow = 
ARGBAttenuateRow_MSA; - } - } -#endif #if defined(HAS_ARGBATTENUATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBAttenuateRow = ARGBAttenuateRow_Any_MMI; @@ -1493,6 +1490,14 @@ static int I420AlphaToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_ARGBATTENUATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { I422AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, @@ -1554,16 +1559,18 @@ int I420AlphaToABGR(const uint8_t* src_y, width, height, attenuate); } -// Convert I400 to ARGB. +// Convert I400 to ARGB with matrix. LIBYUV_API -int I400ToARGB(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int I400ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; - void (*I400ToARGBRow)(const uint8_t* y_buf, uint8_t* rgb_buf, int width) = + void (*I400ToARGBRow)(const uint8_t* y_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = I400ToARGBRow_C; if (!src_y || !dst_argb || width <= 0 || height == 0) { return -1; @@ -1604,14 +1611,6 @@ int I400ToARGB(const uint8_t* src_y, } } #endif -#if defined(HAS_I400TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I400ToARGBRow = I400ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - I400ToARGBRow = I400ToARGBRow_MSA; - } - } -#endif #if defined(HAS_I400TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I400ToARGBRow = I400ToARGBRow_Any_MMI; @@ -1620,15 +1619,35 @@ int I400ToARGB(const uint8_t* src_y, } } #endif +#if defined(HAS_I400TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I400ToARGBRow = I400ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + I400ToARGBRow = I400ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { - 
I400ToARGBRow(src_y, dst_argb, width); + I400ToARGBRow(src_y, dst_argb, yuvconstants, width); dst_argb += dst_stride_argb; src_y += src_stride_y; } return 0; } +// Convert I400 to ARGB. +LIBYUV_API +int I400ToARGB(const uint8_t* src_y, + int src_stride_y, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return I400ToARGBMatrix(src_y, src_stride_y, dst_argb, dst_stride_argb, + &kYuvI601Constants, width, height); +} + // Convert J400 to ARGB. LIBYUV_API int J400ToARGB(const uint8_t* src_y, @@ -1679,14 +1698,6 @@ int J400ToARGB(const uint8_t* src_y, } } #endif -#if defined(HAS_J400TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - J400ToARGBRow = J400ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - J400ToARGBRow = J400ToARGBRow_MSA; - } - } -#endif #if defined(HAS_J400TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { J400ToARGBRow = J400ToARGBRow_Any_MMI; @@ -1694,6 +1705,14 @@ int J400ToARGB(const uint8_t* src_y, J400ToARGBRow = J400ToARGBRow_MMI; } } +#endif +#if defined(HAS_J400TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + J400ToARGBRow = J400ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + J400ToARGBRow = J400ToARGBRow_MSA; + } + } #endif for (y = 0; y < height; ++y) { J400ToARGBRow(src_y, dst_argb, width); @@ -1817,14 +1836,6 @@ int RGB24ToARGB(const uint8_t* src_rgb24, } } #endif -#if defined(HAS_RGB24TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGB24ToARGBRow = RGB24ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGB24ToARGBRow = RGB24ToARGBRow_MSA; - } - } -#endif #if defined(HAS_RGB24TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGB24ToARGBRow = RGB24ToARGBRow_Any_MMI; @@ -1833,6 +1844,14 @@ int RGB24ToARGB(const uint8_t* src_rgb24, } } #endif +#if defined(HAS_RGB24TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB24ToARGBRow = RGB24ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGB24ToARGBRow = RGB24ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { RGB24ToARGBRow(src_rgb24, dst_argb, width); @@ 
-1884,14 +1903,6 @@ int RAWToARGB(const uint8_t* src_raw, } } #endif -#if defined(HAS_RAWTOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RAWToARGBRow = RAWToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RAWToARGBRow = RAWToARGBRow_MSA; - } - } -#endif #if defined(HAS_RAWTOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RAWToARGBRow = RAWToARGBRow_Any_MMI; @@ -1900,6 +1911,14 @@ int RAWToARGB(const uint8_t* src_raw, } } #endif +#if defined(HAS_RAWTOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToARGBRow = RAWToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RAWToARGBRow = RAWToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { RAWToARGBRow(src_raw, dst_argb, width); @@ -2010,14 +2029,6 @@ int RGB565ToARGB(const uint8_t* src_rgb565, } } #endif -#if defined(HAS_RGB565TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGB565ToARGBRow = RGB565ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGB565ToARGBRow = RGB565ToARGBRow_MSA; - } - } -#endif #if defined(HAS_RGB565TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGB565ToARGBRow = RGB565ToARGBRow_Any_MMI; @@ -2026,6 +2037,14 @@ int RGB565ToARGB(const uint8_t* src_rgb565, } } #endif +#if defined(HAS_RGB565TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB565ToARGBRow = RGB565ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGB565ToARGBRow = RGB565ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { RGB565ToARGBRow(src_rgb565, dst_argb, width); @@ -2085,14 +2104,6 @@ int ARGB1555ToARGB(const uint8_t* src_argb1555, } } #endif -#if defined(HAS_ARGB1555TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_MSA; - } - } -#endif #if defined(HAS_ARGB1555TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_MMI; @@ -2101,6 +2112,14 @@ int ARGB1555ToARGB(const uint8_t* src_argb1555, } } #endif +#if defined(HAS_ARGB1555TOARGBROW_MSA) + if 
(TestCpuFlag(kCpuHasMSA)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGB1555ToARGBRow(src_argb1555, dst_argb, width); @@ -2160,14 +2179,6 @@ int ARGB4444ToARGB(const uint8_t* src_argb4444, } } #endif -#if defined(HAS_ARGB4444TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_MSA; - } - } -#endif #if defined(HAS_ARGB4444TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_MMI; @@ -2176,6 +2187,14 @@ int ARGB4444ToARGB(const uint8_t* src_argb4444, } } #endif +#if defined(HAS_ARGB4444TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGB4444ToARGBRow(src_argb4444, dst_argb, width); @@ -2281,16 +2300,17 @@ int AR30ToAB30(const uint8_t* src_ar30, return 0; } -// Convert NV12 to ARGB with matrix -static int NV12ToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert NV12 to ARGB with matrix. 
+LIBYUV_API +int NV12ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*NV12ToARGBRow)( const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, @@ -2328,14 +2348,6 @@ static int NV12ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_NV12TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - NV12ToARGBRow = NV12ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - NV12ToARGBRow = NV12ToARGBRow_MSA; - } - } -#endif #if defined(HAS_NV12TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { NV12ToARGBRow = NV12ToARGBRow_Any_MMI; @@ -2344,6 +2356,14 @@ static int NV12ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_NV12TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + NV12ToARGBRow = NV12ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + NV12ToARGBRow = NV12ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { NV12ToARGBRow(src_y, src_uv, dst_argb, yuvconstants, width); @@ -2356,16 +2376,17 @@ static int NV12ToARGBMatrix(const uint8_t* src_y, return 0; } -// Convert NV21 to ARGB with matrix -static int NV21ToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert NV21 to ARGB with matrix. 
+LIBYUV_API +int NV21ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*NV21ToARGBRow)( const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, @@ -2403,14 +2424,6 @@ static int NV21ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_NV21TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - NV21ToARGBRow = NV21ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - NV21ToARGBRow = NV21ToARGBRow_MSA; - } - } -#endif #if defined(HAS_NV21TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { NV21ToARGBRow = NV21ToARGBRow_Any_MMI; @@ -2419,6 +2432,14 @@ static int NV21ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_NV21TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + NV21ToARGBRow = NV21ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + NV21ToARGBRow = NV21ToARGBRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { NV21ToARGBRow(src_y, src_vu, dst_argb, yuvconstants, width); @@ -2490,16 +2511,17 @@ int NV21ToABGR(const uint8_t* src_y, } // TODO(fbarchard): Consider SSSE3 2 step conversion. -// Convert NV12 to RGB24 with matrix -static int NV12ToRGB24Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert NV12 to RGB24 with matrix. 
+LIBYUV_API +int NV12ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*NV12ToRGB24Row)( const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, @@ -2557,16 +2579,17 @@ static int NV12ToRGB24Matrix(const uint8_t* src_y, return 0; } -// Convert NV21 to RGB24 with matrix -static int NV21ToRGB24Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - const struct YuvConstants* yuvconstants, - int width, - int height) { +// Convert NV21 to RGB24 with matrix. +LIBYUV_API +int NV21ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; void (*NV21ToRGB24Row)( const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, @@ -2730,83 +2753,6 @@ int NV21ToYUV24(const uint8_t* src_y, return 0; } -// Convert M420 to ARGB. -LIBYUV_API -int M420ToARGB(const uint8_t* src_m420, - int src_stride_m420, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - int y; - void (*NV12ToARGBRow)( - const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = NV12ToARGBRow_C; - if (!src_m420 || !dst_argb || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. 
- if (height < 0) { - height = -height; - dst_argb = dst_argb + (height - 1) * dst_stride_argb; - dst_stride_argb = -dst_stride_argb; - } -#if defined(HAS_NV12TOARGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - NV12ToARGBRow = NV12ToARGBRow_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - NV12ToARGBRow = NV12ToARGBRow_SSSE3; - } - } -#endif -#if defined(HAS_NV12TOARGBROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - NV12ToARGBRow = NV12ToARGBRow_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - NV12ToARGBRow = NV12ToARGBRow_AVX2; - } - } -#endif -#if defined(HAS_NV12TOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - NV12ToARGBRow = NV12ToARGBRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - NV12ToARGBRow = NV12ToARGBRow_NEON; - } - } -#endif -#if defined(HAS_NV12TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - NV12ToARGBRow = NV12ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - NV12ToARGBRow = NV12ToARGBRow_MSA; - } - } -#endif -#if defined(HAS_NV12TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - NV12ToARGBRow = NV12ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - NV12ToARGBRow = NV12ToARGBRow_MMI; - } - } -#endif - - for (y = 0; y < height - 1; y += 2) { - NV12ToARGBRow(src_m420, src_m420 + src_stride_m420 * 2, dst_argb, - &kYuvI601Constants, width); - NV12ToARGBRow(src_m420 + src_stride_m420, src_m420 + src_stride_m420 * 2, - dst_argb + dst_stride_argb, &kYuvI601Constants, width); - dst_argb += dst_stride_argb * 2; - src_m420 += src_stride_m420 * 3; - } - if (height & 1) { - NV12ToARGBRow(src_m420, src_m420 + src_stride_m420 * 2, dst_argb, - &kYuvI601Constants, width); - } - return 0; -} - // Convert YUY2 to ARGB. 
LIBYUV_API int YUY2ToARGB(const uint8_t* src_yuy2, @@ -2858,14 +2804,6 @@ int YUY2ToARGB(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_YUY2TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - YUY2ToARGBRow = YUY2ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - YUY2ToARGBRow = YUY2ToARGBRow_MSA; - } - } -#endif #if defined(HAS_YUY2TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { YUY2ToARGBRow = YUY2ToARGBRow_Any_MMI; @@ -2873,6 +2811,14 @@ int YUY2ToARGB(const uint8_t* src_yuy2, YUY2ToARGBRow = YUY2ToARGBRow_MMI; } } +#endif +#if defined(HAS_YUY2TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + YUY2ToARGBRow = YUY2ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + YUY2ToARGBRow = YUY2ToARGBRow_MSA; + } + } #endif for (y = 0; y < height; ++y) { YUY2ToARGBRow(src_yuy2, dst_argb, &kYuvI601Constants, width); @@ -2933,14 +2879,6 @@ int UYVYToARGB(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_UYVYTOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - UYVYToARGBRow = UYVYToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - UYVYToARGBRow = UYVYToARGBRow_MSA; - } - } -#endif #if defined(HAS_UYVYTOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { UYVYToARGBRow = UYVYToARGBRow_Any_MMI; @@ -2948,6 +2886,14 @@ int UYVYToARGB(const uint8_t* src_uyvy, UYVYToARGBRow = UYVYToARGBRow_MMI; } } +#endif +#if defined(HAS_UYVYTOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + UYVYToARGBRow = UYVYToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + UYVYToARGBRow = UYVYToARGBRow_MSA; + } + } #endif for (y = 0; y < height; ++y) { UYVYToARGBRow(src_uyvy, dst_argb, &kYuvI601Constants, width); @@ -2971,7 +2917,7 @@ static void WeavePixels(const uint8_t* src_u, } } -// Convert Android420 to ARGB. +// Convert Android420 to ARGB with matrix. LIBYUV_API int Android420ToARGBMatrix(const uint8_t* src_y, int src_stride_y, @@ -3072,6 +3018,1107 @@ int Android420ToABGR(const uint8_t* src_y, height); } +// Convert I422 to RGBA with matrix. 
+LIBYUV_API +int I422ToRGBAMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGBARow_C; + if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; + dst_stride_rgba = -dst_stride_rgba; + } +#if defined(HAS_I422TORGBAROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGBARow = I422ToRGBARow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGBAROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGBARow = I422ToRGBARow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToRGBARow = I422ToRGBARow_AVX2; + } + } +#endif +#if defined(HAS_I422TORGBAROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGBARow = I422ToRGBARow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_NEON; + } + } +#endif +#if defined(HAS_I422TORGBAROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + I422ToRGBARow = I422ToRGBARow_Any_MMI; + if (IS_ALIGNED(width, 4)) { + I422ToRGBARow = I422ToRGBARow_MMI; + } + } +#endif +#if defined(HAS_I422TORGBAROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGBARow = I422ToRGBARow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_MSA; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); + dst_rgba += dst_stride_rgba; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + +// Convert I422 to RGBA. 
+LIBYUV_API +int I422ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height) { + return I422ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, + &kYuvI601Constants, width, height); +} + +// Convert I422 to BGRA. +LIBYUV_API +int I422ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height) { + return I422ToRGBAMatrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_bgra, dst_stride_bgra, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + +// Convert NV12 to RGB565 with matrix. +LIBYUV_API +int NV12ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*NV12ToRGB565Row)( + const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = NV12ToRGB565Row_C; + if (!src_y || !src_uv || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } +#if defined(HAS_NV12TORGB565ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + NV12ToRGB565Row = NV12ToRGB565Row_SSSE3; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + NV12ToRGB565Row = NV12ToRGB565Row_AVX2; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + NV12ToRGB565Row = NV12ToRGB565Row_NEON; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_MMI; + if (IS_ALIGNED(width, 4)) { + NV12ToRGB565Row = NV12ToRGB565Row_MMI; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + NV12ToRGB565Row = NV12ToRGB565Row_MSA; + } + } +#endif + + for (y = 0; y < height; ++y) { + NV12ToRGB565Row(src_y, src_uv, dst_rgb565, yuvconstants, width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + if (y & 1) { + src_uv += src_stride_uv; + } + } + return 0; +} + +// Convert NV12 to RGB565. +LIBYUV_API +int NV12ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return NV12ToRGB565Matrix(src_y, src_stride_y, src_uv, src_stride_uv, + dst_rgb565, dst_stride_rgb565, &kYuvI601Constants, + width, height); +} + +// Convert I422 to RGBA with matrix. 
+LIBYUV_API +int I420ToRGBAMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGBARow_C; + if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; + dst_stride_rgba = -dst_stride_rgba; + } +#if defined(HAS_I422TORGBAROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGBARow = I422ToRGBARow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGBAROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGBARow = I422ToRGBARow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToRGBARow = I422ToRGBARow_AVX2; + } + } +#endif +#if defined(HAS_I422TORGBAROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGBARow = I422ToRGBARow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_NEON; + } + } +#endif +#if defined(HAS_I422TORGBAROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + I422ToRGBARow = I422ToRGBARow_Any_MMI; + if (IS_ALIGNED(width, 4)) { + I422ToRGBARow = I422ToRGBARow_MMI; + } + } +#endif +#if defined(HAS_I422TORGBAROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGBARow = I422ToRGBARow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_MSA; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); + dst_rgba += dst_stride_rgba; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert 
I420 to RGBA. +LIBYUV_API +int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height) { + return I420ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, + &kYuvI601Constants, width, height); +} + +// Convert I420 to BGRA. +LIBYUV_API +int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height) { + return I420ToRGBAMatrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_bgra, dst_stride_bgra, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + +// Convert I420 to RGB24 with matrix. +LIBYUV_API +int I420ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGB24Row_C; + if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; + } +#if defined(HAS_I422TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGB24Row = I422ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + I422ToRGB24Row = I422ToRGB24Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGB24Row = I422ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + I422ToRGB24Row = I422ToRGB24Row_AVX2; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGB24Row = I422ToRGB24Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGB24Row = I422ToRGB24Row_NEON; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + I422ToRGB24Row = I422ToRGB24Row_Any_MMI; + if (IS_ALIGNED(width, 4)) { + I422ToRGB24Row = I422ToRGB24Row_MMI; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGB24Row = I422ToRGB24Row_Any_MSA; + if (IS_ALIGNED(width, 16)) { + I422ToRGB24Row = I422ToRGB24Row_MSA; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGB24Row(src_y, src_u, src_v, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to RGB24. +LIBYUV_API +int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvI601Constants, width, height); +} + +// Convert I420 to RAW. 
+LIBYUV_API +int I420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + +// Convert J420 to RGB24. +LIBYUV_API +int J420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvJPEGConstants, width, height); +} + +// Convert J420 to RAW. +LIBYUV_API +int J420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuJPEGConstants, // Use Yvu matrix + width, height); +} + +// Convert H420 to RGB24. +LIBYUV_API +int H420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvH709Constants, width, height); +} + +// Convert H420 to RAW. 
+LIBYUV_API +int H420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuH709Constants, // Use Yvu matrix + width, height); +} + +// Convert I420 to ARGB1555. +LIBYUV_API +int I420ToARGB1555(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb1555, + int dst_stride_argb1555, + int width, + int height) { + int y; + void (*I422ToARGB1555Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) = I422ToARGB1555Row_C; + if (!src_y || !src_u || !src_v || !dst_argb1555 || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb1555 = dst_argb1555 + (height - 1) * dst_stride_argb1555; + dst_stride_argb1555 = -dst_stride_argb1555; + } +#if defined(HAS_I422TOARGB1555ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToARGB1555Row = I422ToARGB1555Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToARGB1555Row = I422ToARGB1555Row_AVX2; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToARGB1555Row = I422ToARGB1555Row_NEON; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_MMI; + if (IS_ALIGNED(width, 4)) { + I422ToARGB1555Row = I422ToARGB1555Row_MMI; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToARGB1555Row = I422ToARGB1555Row_MSA; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToARGB1555Row(src_y, src_u, src_v, dst_argb1555, &kYuvI601Constants, + width); + dst_argb1555 += dst_stride_argb1555; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to ARGB4444. 
+LIBYUV_API +int I420ToARGB4444(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb4444, + int dst_stride_argb4444, + int width, + int height) { + int y; + void (*I422ToARGB4444Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) = I422ToARGB4444Row_C; + if (!src_y || !src_u || !src_v || !dst_argb4444 || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb4444 = dst_argb4444 + (height - 1) * dst_stride_argb4444; + dst_stride_argb4444 = -dst_stride_argb4444; + } +#if defined(HAS_I422TOARGB4444ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToARGB4444Row = I422ToARGB4444Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToARGB4444Row = I422ToARGB4444Row_AVX2; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToARGB4444Row = I422ToARGB4444Row_NEON; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_MMI; + if (IS_ALIGNED(width, 4)) { + I422ToARGB4444Row = I422ToARGB4444Row_MMI; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToARGB4444Row = I422ToARGB4444Row_MSA; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToARGB4444Row(src_y, src_u, src_v, dst_argb4444, &kYuvI601Constants, + width); + dst_argb4444 += dst_stride_argb4444; + src_y 
+= src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to RGB565 with specified color matrix. +LIBYUV_API +int I420ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGB565Row_C; + if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } +#if defined(HAS_I422TORGB565ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToRGB565Row = I422ToRGB565Row_AVX2; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGB565Row = I422ToRGB565Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_NEON; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + I422ToRGB565Row = I422ToRGB565Row_Any_MMI; + if (IS_ALIGNED(width, 4)) { + I422ToRGB565Row = I422ToRGB565Row_MMI; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGB565Row = I422ToRGB565Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_MSA; + } + } +#endif + + for (y = 0; y < height; 
++y) { + I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, yuvconstants, width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to RGB565. +LIBYUV_API +int I420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb565, dst_stride_rgb565, + &kYuvI601Constants, width, height); +} + +// Convert J420 to RGB565. +LIBYUV_API +int J420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb565, dst_stride_rgb565, + &kYuvJPEGConstants, width, height); +} + +// Convert H420 to RGB565. +LIBYUV_API +int H420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb565, dst_stride_rgb565, + &kYuvH709Constants, width, height); +} + +// Convert I422 to RGB565. 
+LIBYUV_API +int I422ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + int y; + void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGB565Row_C; + if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } +#if defined(HAS_I422TORGB565ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToRGB565Row = I422ToRGB565Row_AVX2; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGB565Row = I422ToRGB565Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_NEON; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGB565Row = I422ToRGB565Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_MSA; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, &kYuvI601Constants, width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + +// Ordered 8x8 dither for 888 to 565. Values from 0 to 7. 
+static const uint8_t kDither565_4x4[16] = { + 0, 4, 1, 5, 6, 2, 7, 3, 1, 5, 0, 4, 7, 3, 6, 2, +}; + +// Convert I420 to RGB565 with dithering. +LIBYUV_API +int I420ToRGB565Dither(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const uint8_t* dither4x4, + int width, + int height) { + int y; + void (*I422ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToARGBRow_C; + void (*ARGBToRGB565DitherRow)(const uint8_t* src_argb, uint8_t* dst_rgb, + const uint32_t dither4, int width) = + ARGBToRGB565DitherRow_C; + if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } + if (!dither4x4) { + dither4x4 = kDither565_4x4; + } +#if defined(HAS_I422TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToARGBRow = I422ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I422TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToARGBRow = I422ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToARGBRow = I422ToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_I422TOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToARGBRow = I422ToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_NEON; + } + } +#endif +#if defined(HAS_I422TOARGBROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + I422ToARGBRow = I422ToARGBRow_Any_MMI; + if (IS_ALIGNED(width, 4)) { + I422ToARGBRow = I422ToARGBRow_MMI; + } + } +#endif +#if defined(HAS_I422TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGBRow = I422ToARGBRow_Any_MSA; + if 
(IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_SSE2; + if (IS_ALIGNED(width, 4)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_SSE2; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_NEON; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MMI; + if (IS_ALIGNED(width, 4)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MMI; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MSA; + } + } +#endif + { + // Allocate a row of argb. + align_buffer_64(row_argb, width * 4); + for (y = 0; y < height; ++y) { + I422ToARGBRow(src_y, src_u, src_v, row_argb, &kYuvI601Constants, width); + ARGBToRGB565DitherRow(row_argb, dst_rgb565, + *(const uint32_t*)(dither4x4 + ((y & 3) << 2)), + width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + free_aligned_buffer_64(row_argb); + } + return 0; +} + +// Convert I420 to AR30 with matrix. 
+LIBYUV_API +int I420ToAR30Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToAR30Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToAR30Row_C; + + if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } + +#if defined(HAS_I422TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToAR30Row = I422ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToAR30Row = I422ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToAR30Row = I422ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToAR30Row = I422ToAR30Row_AVX2; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to AR30. +LIBYUV_API +int I420ToAR30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height) { + return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_ar30, dst_stride_ar30, + &kYuvI601Constants, width, height); +} + +// Convert H420 to AR30. 
+LIBYUV_API +int H420ToAR30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height) { + return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_ar30, dst_stride_ar30, + &kYvuH709Constants, width, height); +} + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc index 0c95f1f29..f2cfc1d8f 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc @@ -294,14 +294,6 @@ int I420ToYUY2(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOYUY2ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToYUY2Row = I422ToYUY2Row_Any_MSA; - if (IS_ALIGNED(width, 32)) { - I422ToYUY2Row = I422ToYUY2Row_MSA; - } - } -#endif #if defined(HAS_I422TOYUY2ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToYUY2Row = I422ToYUY2Row_Any_MMI; @@ -310,6 +302,14 @@ int I420ToYUY2(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOYUY2ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToYUY2Row = I422ToYUY2Row_Any_MSA; + if (IS_ALIGNED(width, 32)) { + I422ToYUY2Row = I422ToYUY2Row_MSA; + } + } +#endif for (y = 0; y < height - 1; y += 2) { I422ToYUY2Row(src_y, src_u, src_v, dst_yuy2, width); @@ -381,14 +381,6 @@ int I422ToUYVY(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOUYVYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToUYVYRow = I422ToUYVYRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - I422ToUYVYRow = I422ToUYVYRow_MSA; - } - } -#endif #if defined(HAS_I422TOUYVYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToUYVYRow = I422ToUYVYRow_Any_MMI; @@ -397,6 +389,14 @@ int I422ToUYVY(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOUYVYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToUYVYRow = 
I422ToUYVYRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + I422ToUYVYRow = I422ToUYVYRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { I422ToUYVYRow(src_y, src_u, src_v, dst_uyvy, width); @@ -456,14 +456,6 @@ int I420ToUYVY(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOUYVYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToUYVYRow = I422ToUYVYRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - I422ToUYVYRow = I422ToUYVYRow_MSA; - } - } -#endif #if defined(HAS_I422TOUYVYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToUYVYRow = I422ToUYVYRow_Any_MMI; @@ -472,6 +464,14 @@ int I420ToUYVY(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOUYVYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToUYVYRow = I422ToUYVYRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + I422ToUYVYRow = I422ToUYVYRow_MSA; + } + } +#endif for (y = 0; y < height - 1; y += 2) { I422ToUYVYRow(src_y, src_u, src_v, dst_uyvy, width); @@ -488,7 +488,6 @@ int I420ToUYVY(const uint8_t* src_y, return 0; } -// TODO(fbarchard): test negative height for invert. LIBYUV_API int I420ToNV12(const uint8_t* src_y, int src_stride_y, @@ -502,12 +501,23 @@ int I420ToNV12(const uint8_t* src_y, int dst_stride_uv, int width, int height) { + int halfwidth = (width + 1) / 2; + int halfheight = (height + 1) / 2; if (!src_y || !src_u || !src_v || !dst_y || !dst_uv || width <= 0 || height == 0) { return -1; } - int halfwidth = (width + 1) / 2; - int halfheight = height > 0 ? (height + 1) / 2 : (height - 1) / 2; + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (halfheight - 1) * src_stride_u; + src_v = src_v + (halfheight - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } if (dst_y) { CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); } @@ -534,899 +544,6 @@ int I420ToNV21(const uint8_t* src_y, width, height); } -// Convert I422 to RGBA with matrix -static int I420ToRGBAMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - const struct YuvConstants* yuvconstants, - int width, - int height) { - int y; - void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGBARow_C; - if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. 
- if (height < 0) { - height = -height; - dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; - dst_stride_rgba = -dst_stride_rgba; - } -#if defined(HAS_I422TORGBAROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGBARow = I422ToRGBARow_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_SSSE3; - } - } -#endif -#if defined(HAS_I422TORGBAROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGBARow = I422ToRGBARow_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToRGBARow = I422ToRGBARow_AVX2; - } - } -#endif -#if defined(HAS_I422TORGBAROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGBARow = I422ToRGBARow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_NEON; - } - } -#endif -#if defined(HAS_I422TORGBAROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGBARow = I422ToRGBARow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_MSA; - } - } -#endif -#if defined(HAS_I422TORGBAROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGBARow = I422ToRGBARow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGBARow = I422ToRGBARow_MMI; - } - } -#endif - - for (y = 0; y < height; ++y) { - I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); - dst_rgba += dst_stride_rgba; - src_y += src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - return 0; -} - -// Convert I420 to RGBA. -LIBYUV_API -int I420ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height) { - return I420ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgba, dst_stride_rgba, - &kYuvI601Constants, width, height); -} - -// Convert I420 to BGRA. 
-LIBYUV_API -int I420ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height) { - return I420ToRGBAMatrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_bgra, dst_stride_bgra, - &kYvuI601Constants, // Use Yvu matrix - width, height); -} - -// Convert I420 to RGB24 with matrix -static int I420ToRGB24Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - const struct YuvConstants* yuvconstants, - int width, - int height) { - int y; - void (*I422ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGB24Row_C; - if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. 
- if (height < 0) { - height = -height; - dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; - dst_stride_rgb24 = -dst_stride_rgb24; - } -#if defined(HAS_I422TORGB24ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGB24Row = I422ToRGB24Row_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - I422ToRGB24Row = I422ToRGB24Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TORGB24ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGB24Row = I422ToRGB24Row_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - I422ToRGB24Row = I422ToRGB24Row_AVX2; - } - } -#endif -#if defined(HAS_I422TORGB24ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGB24Row = I422ToRGB24Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToRGB24Row = I422ToRGB24Row_NEON; - } - } -#endif -#if defined(HAS_I422TORGB24ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGB24Row = I422ToRGB24Row_Any_MSA; - if (IS_ALIGNED(width, 16)) { - I422ToRGB24Row = I422ToRGB24Row_MSA; - } - } -#endif -#if defined(HAS_I422TORGB24ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGB24Row = I422ToRGB24Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGB24Row = I422ToRGB24Row_MMI; - } - } -#endif - - for (y = 0; y < height; ++y) { - I422ToRGB24Row(src_y, src_u, src_v, dst_rgb24, yuvconstants, width); - dst_rgb24 += dst_stride_rgb24; - src_y += src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - return 0; -} - -// Convert I420 to RGB24. -LIBYUV_API -int I420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb24, dst_stride_rgb24, - &kYuvI601Constants, width, height); -} - -// Convert I420 to RAW. 
-LIBYUV_API -int I420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_raw, dst_stride_raw, - &kYvuI601Constants, // Use Yvu matrix - width, height); -} - -// Convert J420 to RGB24. -LIBYUV_API -int J420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb24, dst_stride_rgb24, - &kYuvJPEGConstants, width, height); -} - -// Convert J420 to RAW. -LIBYUV_API -int J420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_raw, dst_stride_raw, - &kYvuJPEGConstants, // Use Yvu matrix - width, height); -} - -// Convert H420 to RGB24. -LIBYUV_API -int H420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb24, dst_stride_rgb24, - &kYuvH709Constants, width, height); -} - -// Convert H420 to RAW. 
-LIBYUV_API -int H420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_raw, dst_stride_raw, - &kYvuH709Constants, // Use Yvu matrix - width, height); -} - -// Convert I420 to ARGB1555. -LIBYUV_API -int I420ToARGB1555(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb1555, - int dst_stride_argb1555, - int width, - int height) { - int y; - void (*I422ToARGB1555Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width) = I422ToARGB1555Row_C; - if (!src_y || !src_u || !src_v || !dst_argb1555 || width <= 0 || - height == 0) { - return -1; - } - // Negative height means invert the image. 
- if (height < 0) { - height = -height; - dst_argb1555 = dst_argb1555 + (height - 1) * dst_stride_argb1555; - dst_stride_argb1555 = -dst_stride_argb1555; - } -#if defined(HAS_I422TOARGB1555ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToARGB1555Row = I422ToARGB1555Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TOARGB1555ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToARGB1555Row = I422ToARGB1555Row_AVX2; - } - } -#endif -#if defined(HAS_I422TOARGB1555ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToARGB1555Row = I422ToARGB1555Row_NEON; - } - } -#endif -#if defined(HAS_I422TOARGB1555ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToARGB1555Row = I422ToARGB1555Row_MSA; - } - } -#endif -#if defined(HAS_I422TOARGB1555ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToARGB1555Row = I422ToARGB1555Row_MMI; - } - } -#endif - - for (y = 0; y < height; ++y) { - I422ToARGB1555Row(src_y, src_u, src_v, dst_argb1555, &kYuvI601Constants, - width); - dst_argb1555 += dst_stride_argb1555; - src_y += src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - return 0; -} - -// Convert I420 to ARGB4444. 
-LIBYUV_API -int I420ToARGB4444(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb4444, - int dst_stride_argb4444, - int width, - int height) { - int y; - void (*I422ToARGB4444Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width) = I422ToARGB4444Row_C; - if (!src_y || !src_u || !src_v || !dst_argb4444 || width <= 0 || - height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_argb4444 = dst_argb4444 + (height - 1) * dst_stride_argb4444; - dst_stride_argb4444 = -dst_stride_argb4444; - } -#if defined(HAS_I422TOARGB4444ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToARGB4444Row = I422ToARGB4444Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TOARGB4444ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToARGB4444Row = I422ToARGB4444Row_AVX2; - } - } -#endif -#if defined(HAS_I422TOARGB4444ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToARGB4444Row = I422ToARGB4444Row_NEON; - } - } -#endif -#if defined(HAS_I422TOARGB4444ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToARGB4444Row = I422ToARGB4444Row_MSA; - } - } -#endif -#if defined(HAS_I422TOARGB4444ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToARGB4444Row = I422ToARGB4444Row_MMI; - } - } -#endif - - for (y = 0; y < height; ++y) { - I422ToARGB4444Row(src_y, src_u, src_v, dst_argb4444, &kYuvI601Constants, - width); - dst_argb4444 += dst_stride_argb4444; - src_y 
+= src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - return 0; -} - -// Convert I420 to RGB565 with specified color matrix. -LIBYUV_API -int I420ToRGB565Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const struct YuvConstants* yuvconstants, - int width, - int height) { - int y; - void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGB565Row_C; - if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; - } -#if defined(HAS_I422TORGB565ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToRGB565Row = I422ToRGB565Row_AVX2; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGB565Row = I422ToRGB565Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_NEON; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGB565Row = I422ToRGB565Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_MSA; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGB565Row = I422ToRGB565Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGB565Row = I422ToRGB565Row_MMI; - } - } -#endif - - for (y = 0; y < height; 
++y) { - I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, yuvconstants, width); - dst_rgb565 += dst_stride_rgb565; - src_y += src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - return 0; -} - -// Convert I420 to RGB565. -LIBYUV_API -int I420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb565, dst_stride_rgb565, - &kYuvI601Constants, width, height); -} - -// Convert J420 to RGB565. -LIBYUV_API -int J420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb565, dst_stride_rgb565, - &kYuvJPEGConstants, width, height); -} - -// Convert H420 to RGB565. -LIBYUV_API -int H420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb565, dst_stride_rgb565, - &kYuvH709Constants, width, height); -} - -// Convert I422 to RGB565. 
-LIBYUV_API -int I422ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - int y; - void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGB565Row_C; - if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; - } -#if defined(HAS_I422TORGB565ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToRGB565Row = I422ToRGB565Row_AVX2; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGB565Row = I422ToRGB565Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_NEON; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGB565Row = I422ToRGB565Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_MSA; - } - } -#endif - - for (y = 0; y < height; ++y) { - I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, &kYuvI601Constants, width); - dst_rgb565 += dst_stride_rgb565; - src_y += src_stride_y; - src_u += src_stride_u; - src_v += src_stride_v; - } - return 0; -} - -// Ordered 8x8 dither for 888 to 565. Values from 0 to 7. 
-static const uint8_t kDither565_4x4[16] = { - 0, 4, 1, 5, 6, 2, 7, 3, 1, 5, 0, 4, 7, 3, 6, 2, -}; - -// Convert I420 to RGB565 with dithering. -LIBYUV_API -int I420ToRGB565Dither(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const uint8_t* dither4x4, - int width, - int height) { - int y; - void (*I422ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToARGBRow_C; - void (*ARGBToRGB565DitherRow)(const uint8_t* src_argb, uint8_t* dst_rgb, - const uint32_t dither4, int width) = - ARGBToRGB565DitherRow_C; - if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; - } - if (!dither4x4) { - dither4x4 = kDither565_4x4; - } -#if defined(HAS_I422TOARGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToARGBRow = I422ToARGBRow_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_SSSE3; - } - } -#endif -#if defined(HAS_I422TOARGBROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToARGBRow = I422ToARGBRow_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToARGBRow = I422ToARGBRow_AVX2; - } - } -#endif -#if defined(HAS_I422TOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToARGBRow = I422ToARGBRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_NEON; - } - } -#endif -#if defined(HAS_I422TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGBRow = I422ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_MSA; - } - } -#endif -#if defined(HAS_I422TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGBRow = I422ToARGBRow_Any_MMI; - if 
(IS_ALIGNED(width, 4)) { - I422ToARGBRow = I422ToARGBRow_MMI; - } - } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_SSE2; - if (IS_ALIGNED(width, 4)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_SSE2; - } - } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_AVX2; - if (IS_ALIGNED(width, 8)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_AVX2; - } - } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_NEON; - } - } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MMI; - } - } -#endif - { - // Allocate a row of argb. 
- align_buffer_64(row_argb, width * 4); - for (y = 0; y < height; ++y) { - I422ToARGBRow(src_y, src_u, src_v, row_argb, &kYuvI601Constants, width); - ARGBToRGB565DitherRow(row_argb, dst_rgb565, - *(const uint32_t*)(dither4x4 + ((y & 3) << 2)), - width); - dst_rgb565 += dst_stride_rgb565; - src_y += src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - free_aligned_buffer_64(row_argb); - } - return 0; -} - -// Convert I420 to AR30 with matrix -static int I420ToAR30Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - const struct YuvConstants* yuvconstants, - int width, - int height) { - int y; - void (*I422ToAR30Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToAR30Row_C; - - if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; - dst_stride_ar30 = -dst_stride_ar30; - } - -#if defined(HAS_I422TOAR30ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToAR30Row = I422ToAR30Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToAR30Row = I422ToAR30Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TOAR30ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToAR30Row = I422ToAR30Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToAR30Row = I422ToAR30Row_AVX2; - } - } -#endif - - for (y = 0; y < height; ++y) { - I422ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width); - dst_ar30 += dst_stride_ar30; - src_y += src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - return 0; -} - -// Convert I420 to AR30. 
-LIBYUV_API -int I420ToAR30(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height) { - return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_ar30, dst_stride_ar30, - &kYuvI601Constants, width, height); -} - -// Convert H420 to AR30. -LIBYUV_API -int H420ToAR30(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, - int width, - int height) { - return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_ar30, dst_stride_ar30, - &kYvuH709Constants, width, height); -} - // Convert I420 to specified format LIBYUV_API int ConvertFromI420(const uint8_t* y, @@ -1528,7 +645,6 @@ int ConvertFromI420(const uint8_t* y, height); break; } - // TODO(fbarchard): Add M420. 
// Triplanar formats case FOURCC_I420: case FOURCC_YV12: { diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc index de301ebbc..4ba4bb5e0 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc @@ -68,14 +68,6 @@ int ARGBToI444(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOUV444ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUV444Row = ARGBToUV444Row_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToUV444Row = ARGBToUV444Row_MSA; - } - } -#endif #if defined(HAS_ARGBTOUV444ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToUV444Row = ARGBToUV444Row_Any_MMI; @@ -84,6 +76,14 @@ int ARGBToI444(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOUV444ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToUV444Row = ARGBToUV444Row_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToUV444Row = ARGBToUV444Row_MSA; + } + } +#endif #if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { ARGBToYRow = ARGBToYRow_Any_SSSE3; @@ -108,14 +108,6 @@ int ARGBToI444(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - } - } -#endif #if defined(HAS_ARGBTOYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYRow = ARGBToYRow_Any_MMI; @@ -124,6 +116,14 @@ int ARGBToI444(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToUV444Row(src_argb, dst_u, dst_v, width); @@ -207,36 +207,28 @@ int ARGBToI422(const uint8_t* src_argb, } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 
16)) { - ARGBToYRow = ARGBToYRow_MSA; +#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + ARGBToYRow = ARGBToYRow_Any_MMI; + ARGBToUVRow = ARGBToUVRow_Any_MMI; + if (IS_ALIGNED(width, 8)) { + ARGBToYRow = ARGBToYRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVRow = ARGBToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_MMI; } } #endif -#if defined(HAS_ARGBTOYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - } - } -#endif -#if defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVRow = ARGBToUVRow_Any_MMI; +#if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + ARGBToUVRow = ARGBToUVRow_Any_MSA; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; + ARGBToYRow = ARGBToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_MSA; } } #endif @@ -315,38 +307,30 @@ int ARGBToNV12(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVRow = ARGBToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOYROW_MMI) +#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYRow = ARGBToYRow_Any_MMI; + ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ARGBToYRow = ARGBToYRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { 
ARGBToUVRow = ARGBToUVRow_MMI; } } #endif +#if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + ARGBToUVRow = ARGBToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_MSA; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -371,14 +355,6 @@ int ARGBToNV12(const uint8_t* src_argb, } } #endif -#if defined(HAS_MERGEUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MergeUVRow_ = MergeUVRow_Any_MSA; - if (IS_ALIGNED(halfwidth, 16)) { - MergeUVRow_ = MergeUVRow_MSA; - } - } -#endif #if defined(HAS_MERGEUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { MergeUVRow_ = MergeUVRow_Any_MMI; @@ -386,6 +362,14 @@ int ARGBToNV12(const uint8_t* src_argb, MergeUVRow_ = MergeUVRow_MMI; } } +#endif +#if defined(HAS_MERGEUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MergeUVRow_ = MergeUVRow_Any_MSA; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_MSA; + } + } #endif { // Allocate a rows of uv. 
@@ -475,39 +459,30 @@ int ARGBToNV21(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVRow = ARGBToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOYROW_MMI) +#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYRow = ARGBToYRow_Any_MMI; + ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ARGBToYRow = ARGBToYRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { ARGBToUVRow = ARGBToUVRow_MMI; } } #endif - +#if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + ARGBToUVRow = ARGBToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_MSA; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -532,14 +507,6 @@ int ARGBToNV21(const uint8_t* src_argb, } } #endif -#if defined(HAS_MERGEUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MergeUVRow_ = MergeUVRow_Any_MSA; - if (IS_ALIGNED(halfwidth, 16)) { - MergeUVRow_ = MergeUVRow_MSA; - } - } -#endif #if defined(HAS_MERGEUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { MergeUVRow_ = MergeUVRow_Any_MMI; @@ -547,6 +514,14 @@ int ARGBToNV21(const uint8_t* src_argb, MergeUVRow_ = MergeUVRow_MMI; } } +#endif +#if defined(HAS_MERGEUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MergeUVRow_ = MergeUVRow_Any_MSA; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_MSA; + } + } #endif { // Allocate a rows of uv. 
@@ -635,38 +610,30 @@ int ABGRToNV12(const uint8_t* src_abgr, } } #endif -#if defined(HAS_ABGRTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ABGRToYRow = ABGRToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ABGRToYRow = ABGRToYRow_MSA; - } - } -#endif -#if defined(HAS_ABGRTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ABGRToUVRow = ABGRToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ABGRToUVRow = ABGRToUVRow_MSA; - } - } -#endif -#if defined(HAS_ABGRTOYROW_MMI) +#if defined(HAS_ABGRTOYROW_MMI) && defined(HAS_ABGRTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ABGRToYRow = ABGRToYRow_Any_MMI; + ABGRToUVRow = ABGRToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ABGRToYRow = ABGRToYRow_MMI; } - } -#endif -#if defined(HAS_ABGRTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ABGRToUVRow = ABGRToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { ABGRToUVRow = ABGRToUVRow_MMI; } } #endif +#if defined(HAS_ABGRTOYROW_MSA) && defined(HAS_ABGRTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ABGRToYRow = ABGRToYRow_Any_MSA; + ABGRToUVRow = ABGRToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ABGRToYRow = ABGRToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ABGRToUVRow = ABGRToUVRow_MSA; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -691,14 +658,6 @@ int ABGRToNV12(const uint8_t* src_abgr, } } #endif -#if defined(HAS_MERGEUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MergeUVRow_ = MergeUVRow_Any_MSA; - if (IS_ALIGNED(halfwidth, 16)) { - MergeUVRow_ = MergeUVRow_MSA; - } - } -#endif #if defined(HAS_MERGEUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { MergeUVRow_ = MergeUVRow_Any_MMI; @@ -706,6 +665,14 @@ int ABGRToNV12(const uint8_t* src_abgr, MergeUVRow_ = MergeUVRow_MMI; } } +#endif +#if defined(HAS_MERGEUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MergeUVRow_ = MergeUVRow_Any_MSA; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_MSA; + } + } #endif { // Allocate a rows of uv. 
@@ -795,39 +762,30 @@ int ABGRToNV21(const uint8_t* src_abgr, } } #endif -#if defined(HAS_ABGRTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ABGRToYRow = ABGRToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ABGRToYRow = ABGRToYRow_MSA; - } - } -#endif -#if defined(HAS_ABGRTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ABGRToUVRow = ABGRToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ABGRToUVRow = ABGRToUVRow_MSA; - } - } -#endif -#if defined(HAS_ABGRTOYROW_MMI) +#if defined(HAS_ABGRTOYROW_MMI) && defined(HAS_ABGRTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ABGRToYRow = ABGRToYRow_Any_MMI; + ABGRToUVRow = ABGRToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ABGRToYRow = ABGRToYRow_MMI; } - } -#endif -#if defined(HAS_ABGRTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ABGRToUVRow = ABGRToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { ABGRToUVRow = ABGRToUVRow_MMI; } } #endif - +#if defined(HAS_ABGRTOYROW_MSA) && defined(HAS_ABGRTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ABGRToYRow = ABGRToYRow_Any_MSA; + ABGRToUVRow = ABGRToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ABGRToYRow = ABGRToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ABGRToUVRow = ABGRToUVRow_MSA; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -852,14 +810,6 @@ int ABGRToNV21(const uint8_t* src_abgr, } } #endif -#if defined(HAS_MERGEUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MergeUVRow_ = MergeUVRow_Any_MSA; - if (IS_ALIGNED(halfwidth, 16)) { - MergeUVRow_ = MergeUVRow_MSA; - } - } -#endif #if defined(HAS_MERGEUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { MergeUVRow_ = MergeUVRow_Any_MMI; @@ -867,6 +817,14 @@ int ABGRToNV21(const uint8_t* src_abgr, MergeUVRow_ = MergeUVRow_MMI; } } +#endif +#if defined(HAS_MERGEUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MergeUVRow_ = MergeUVRow_Any_MSA; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_MSA; + } + } #endif { // Allocate a rows of uv. 
@@ -961,38 +919,30 @@ int ARGBToYUY2(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVRow = ARGBToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOYROW_MMI) +#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYRow = ARGBToYRow_Any_MMI; + ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ARGBToYRow = ARGBToYRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { ARGBToUVRow = ARGBToUVRow_MMI; } } #endif +#if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + ARGBToUVRow = ARGBToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_MSA; + } + } +#endif #if defined(HAS_I422TOYUY2ROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { I422ToYUY2Row = I422ToYUY2Row_Any_SSE2; @@ -1017,14 +967,6 @@ int ARGBToYUY2(const uint8_t* src_argb, } } #endif -#if defined(HAS_I422TOYUY2ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToYUY2Row = I422ToYUY2Row_Any_MSA; - if (IS_ALIGNED(width, 32)) { - I422ToYUY2Row = I422ToYUY2Row_MSA; - } - } -#endif #if defined(HAS_I422TOYUY2ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToYUY2Row = I422ToYUY2Row_Any_MMI; @@ -1033,6 +975,14 @@ int ARGBToYUY2(const uint8_t* src_argb, } } #endif +#if defined(HAS_I422TOYUY2ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToYUY2Row = I422ToYUY2Row_Any_MSA; + if (IS_ALIGNED(width, 32)) { + I422ToYUY2Row = I422ToYUY2Row_MSA; + } + } +#endif { // Allocate a rows of yuv. 
@@ -1122,38 +1072,30 @@ int ARGBToUYVY(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVRow = ARGBToUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOYROW_MMI) +#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYRow = ARGBToYRow_Any_MMI; + ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ARGBToYRow = ARGBToYRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVRow = ARGBToUVRow_Any_MMI; if (IS_ALIGNED(width, 16)) { ARGBToUVRow = ARGBToUVRow_MMI; } } #endif +#if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + ARGBToUVRow = ARGBToUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_MSA; + } + } +#endif #if defined(HAS_I422TOUYVYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { I422ToUYVYRow = I422ToUYVYRow_Any_SSE2; @@ -1178,14 +1120,6 @@ int ARGBToUYVY(const uint8_t* src_argb, } } #endif -#if defined(HAS_I422TOUYVYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToUYVYRow = I422ToUYVYRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - I422ToUYVYRow = I422ToUYVYRow_MSA; - } - } -#endif #if defined(HAS_I422TOUYVYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToUYVYRow = I422ToUYVYRow_Any_MMI; @@ -1194,6 +1128,14 @@ int ARGBToUYVY(const uint8_t* src_argb, } } #endif +#if defined(HAS_I422TOUYVYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToUYVYRow = I422ToUYVYRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + I422ToUYVYRow = I422ToUYVYRow_MSA; + } + } +#endif { // Allocate a rows of yuv. 
@@ -1263,14 +1205,6 @@ int ARGBToI400(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYRow = ARGBToYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYRow = ARGBToYRow_MSA; - } - } -#endif #if defined(HAS_ARGBTOYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYRow = ARGBToYRow_Any_MMI; @@ -1279,6 +1213,14 @@ int ARGBToI400(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYRow = ARGBToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToYRow(src_argb, dst_y, width); @@ -1361,14 +1303,6 @@ int ARGBToRGB24(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTORGB24ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToRGB24Row = ARGBToRGB24Row_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToRGB24Row = ARGBToRGB24Row_MSA; - } - } -#endif #if defined(HAS_ARGBTORGB24ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToRGB24Row = ARGBToRGB24Row_Any_MMI; @@ -1377,6 +1311,14 @@ int ARGBToRGB24(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORGB24ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToRGB24Row = ARGBToRGB24Row_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToRGB24Row = ARGBToRGB24Row_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRGB24Row(src_argb, dst_rgb24, width); @@ -1435,14 +1377,6 @@ int ARGBToRAW(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTORAWROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToRAWRow = ARGBToRAWRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToRAWRow = ARGBToRAWRow_MSA; - } - } -#endif #if defined(HAS_ARGBTORAWROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToRAWRow = ARGBToRAWRow_Any_MMI; @@ -1451,6 +1385,14 @@ int ARGBToRAW(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORAWROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToRAWRow = ARGBToRAWRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToRAWRow = 
ARGBToRAWRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRAWRow(src_argb, dst_raw, width); @@ -1513,14 +1455,6 @@ int ARGBToRGB565Dither(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTORGB565DITHERROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MSA; - } - } -#endif #if defined(HAS_ARGBTORGB565DITHERROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MMI; @@ -1529,6 +1463,14 @@ int ARGBToRGB565Dither(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORGB565DITHERROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRGB565DitherRow(src_argb, dst_rgb565, @@ -1590,14 +1532,6 @@ int ARGBToRGB565(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTORGB565ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToRGB565Row = ARGBToRGB565Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBToRGB565Row = ARGBToRGB565Row_MSA; - } - } -#endif #if defined(HAS_ARGBTORGB565ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToRGB565Row = ARGBToRGB565Row_Any_MMI; @@ -1606,6 +1540,14 @@ int ARGBToRGB565(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORGB565ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToRGB565Row = ARGBToRGB565Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565Row = ARGBToRGB565Row_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRGB565Row(src_argb, dst_rgb565, width); @@ -1664,14 +1606,6 @@ int ARGBToARGB1555(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOARGB1555ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToARGB1555Row = ARGBToARGB1555Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBToARGB1555Row = ARGBToARGB1555Row_MSA; - } - } -#endif #if 
defined(HAS_ARGBTOARGB1555ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToARGB1555Row = ARGBToARGB1555Row_Any_MMI; @@ -1680,6 +1614,14 @@ int ARGBToARGB1555(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOARGB1555ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToARGB1555Row = ARGBToARGB1555Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBToARGB1555Row = ARGBToARGB1555Row_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToARGB1555Row(src_argb, dst_argb1555, width); @@ -1738,14 +1680,6 @@ int ARGBToARGB4444(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOARGB4444ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToARGB4444Row = ARGBToARGB4444Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBToARGB4444Row = ARGBToARGB4444Row_MSA; - } - } -#endif #if defined(HAS_ARGBTOARGB4444ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToARGB4444Row = ARGBToARGB4444Row_Any_MMI; @@ -1754,6 +1688,14 @@ int ARGBToARGB4444(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOARGB4444ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToARGB4444Row = ARGBToARGB4444Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBToARGB4444Row = ARGBToARGB4444Row_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToARGB4444Row(src_argb, dst_argb4444, width); @@ -1922,35 +1864,27 @@ int ARGBToJ420(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYJRow = ARGBToYJRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYJRow = ARGBToYJRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOYJROW_MMI) +#if defined(HAS_ARGBTOYJROW_MMI) && defined(HAS_ARGBTOUVJROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYJRow = ARGBToYJRow_Any_MMI; + ARGBToUVJRow = ARGBToUVJRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ARGBToYJRow = ARGBToYJRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVJRow = ARGBToUVJRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVJRow = ARGBToUVJRow_MSA; + if (IS_ALIGNED(width, 
16)) { + ARGBToUVJRow = ARGBToUVJRow_MMI; } } #endif -#if defined(HAS_ARGBTOUVJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVJRow = ARGBToUVJRow_Any_MMI; +#if defined(HAS_ARGBTOYJROW_MSA) && defined(HAS_ARGBTOUVJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYJRow = ARGBToYJRow_Any_MSA; + ARGBToUVJRow = ARGBToUVJRow_Any_MSA; if (IS_ALIGNED(width, 16)) { - ARGBToUVJRow = ARGBToUVJRow_MMI; + ARGBToYJRow = ARGBToYJRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVJRow = ARGBToUVJRow_MSA; } } #endif @@ -2039,35 +1973,27 @@ int ARGBToJ422(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYJRow = ARGBToYJRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYJRow = ARGBToYJRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTOYJROW_MMI) +#if defined(HAS_ARGBTOYJROW_MMI) && defined(HAS_ARGBTOUVJROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYJRow = ARGBToYJRow_Any_MMI; + ARGBToUVJRow = ARGBToUVJRow_Any_MMI; if (IS_ALIGNED(width, 8)) { ARGBToYJRow = ARGBToYJRow_MMI; } - } -#endif -#if defined(HAS_ARGBTOUVJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToUVJRow = ARGBToUVJRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - ARGBToUVJRow = ARGBToUVJRow_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToUVJRow = ARGBToUVJRow_MMI; } } #endif -#if defined(HAS_ARGBTOUVJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVJRow = ARGBToUVJRow_Any_MMI; +#if defined(HAS_ARGBTOYJROW_MSA) && defined(HAS_ARGBTOUVJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYJRow = ARGBToYJRow_Any_MSA; + ARGBToUVJRow = ARGBToUVJRow_Any_MSA; if (IS_ALIGNED(width, 16)) { - ARGBToUVJRow = ARGBToUVJRow_MMI; + ARGBToYJRow = ARGBToYJRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ARGBToUVJRow = ARGBToUVJRow_MSA; } } #endif @@ -2132,14 +2058,6 @@ int ARGBToJ400(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYJRow = ARGBToYJRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYJRow = 
ARGBToYJRow_MSA; - } - } -#endif #if defined(HAS_ARGBTOYJROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYJRow = ARGBToYJRow_Any_MMI; @@ -2148,6 +2066,14 @@ int ARGBToJ400(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYJRow = ARGBToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYJRow = ARGBToYJRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToYJRow(src_argb, dst_yj, width); @@ -2206,14 +2132,6 @@ int RGBAToJ400(const uint8_t* src_rgba, } } #endif -#if defined(HAS_RGBATOYJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGBAToYJRow = RGBAToYJRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGBAToYJRow = RGBAToYJRow_MSA; - } - } -#endif #if defined(HAS_RGBATOYJROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RGBAToYJRow = RGBAToYJRow_Any_MMI; @@ -2222,6 +2140,14 @@ int RGBAToJ400(const uint8_t* src_rgba, } } #endif +#if defined(HAS_RGBATOYJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGBAToYJRow = RGBAToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGBAToYJRow = RGBAToYJRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { RGBAToYJRow(src_rgba, dst_yj, width); diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_jpeg.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_jpeg.cc index f440c7c2e..d7556ee91 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_jpeg.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_jpeg.cc @@ -328,6 +328,140 @@ int MJPGToNV21(const uint8_t* src_mjpg, return ret ? 0 : 1; } +static void JpegI420ToNV12(void* opaque, + const uint8_t* const* data, + const int* strides, + int rows) { + NV21Buffers* dest = (NV21Buffers*)(opaque); + // Use NV21 with VU swapped. 
+ I420ToNV21(data[0], strides[0], data[2], strides[2], data[1], strides[1], + dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows); + dest->y += rows * dest->y_stride; + dest->vu += ((rows + 1) >> 1) * dest->vu_stride; + dest->h -= rows; +} + +static void JpegI422ToNV12(void* opaque, + const uint8_t* const* data, + const int* strides, + int rows) { + NV21Buffers* dest = (NV21Buffers*)(opaque); + // Use NV21 with VU swapped. + I422ToNV21(data[0], strides[0], data[2], strides[2], data[1], strides[1], + dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows); + dest->y += rows * dest->y_stride; + dest->vu += ((rows + 1) >> 1) * dest->vu_stride; + dest->h -= rows; +} + +static void JpegI444ToNV12(void* opaque, + const uint8_t* const* data, + const int* strides, + int rows) { + NV21Buffers* dest = (NV21Buffers*)(opaque); + // Use NV21 with VU swapped. + I444ToNV21(data[0], strides[0], data[2], strides[2], data[1], strides[1], + dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows); + dest->y += rows * dest->y_stride; + dest->vu += ((rows + 1) >> 1) * dest->vu_stride; + dest->h -= rows; +} + +static void JpegI400ToNV12(void* opaque, + const uint8_t* const* data, + const int* strides, + int rows) { + NV21Buffers* dest = (NV21Buffers*)(opaque); + // Use NV21 since there is no UV plane. + I400ToNV21(data[0], strides[0], dest->y, dest->y_stride, dest->vu, + dest->vu_stride, dest->w, rows); + dest->y += rows * dest->y_stride; + dest->vu += ((rows + 1) >> 1) * dest->vu_stride; + dest->h -= rows; +} + +// MJPG (Motion JPEG) to NV12. +LIBYUV_API +int MJPGToNV12(const uint8_t* sample, + size_t sample_size, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int src_width, + int src_height, + int dst_width, + int dst_height) { + if (sample_size == kUnknownDataSize) { + // ERROR: MJPEG frame size unknown + return -1; + } + + // TODO(fbarchard): Port MJpeg to C. 
+ MJpegDecoder mjpeg_decoder; + LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size); + if (ret && (mjpeg_decoder.GetWidth() != src_width || + mjpeg_decoder.GetHeight() != src_height)) { + // ERROR: MJPEG frame has unexpected dimensions + mjpeg_decoder.UnloadFrame(); + return 1; // runtime failure + } + if (ret) { + // Use NV21Buffers but with UV instead of VU. + NV21Buffers bufs = {dst_y, dst_stride_y, dst_uv, + dst_stride_uv, dst_width, dst_height}; + // YUV420 + if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr && + mjpeg_decoder.GetNumComponents() == 3 && + mjpeg_decoder.GetVertSampFactor(0) == 2 && + mjpeg_decoder.GetHorizSampFactor(0) == 2 && + mjpeg_decoder.GetVertSampFactor(1) == 1 && + mjpeg_decoder.GetHorizSampFactor(1) == 1 && + mjpeg_decoder.GetVertSampFactor(2) == 1 && + mjpeg_decoder.GetHorizSampFactor(2) == 1) { + ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToNV12, &bufs, dst_width, + dst_height); + // YUV422 + } else if (mjpeg_decoder.GetColorSpace() == + MJpegDecoder::kColorSpaceYCbCr && + mjpeg_decoder.GetNumComponents() == 3 && + mjpeg_decoder.GetVertSampFactor(0) == 1 && + mjpeg_decoder.GetHorizSampFactor(0) == 2 && + mjpeg_decoder.GetVertSampFactor(1) == 1 && + mjpeg_decoder.GetHorizSampFactor(1) == 1 && + mjpeg_decoder.GetVertSampFactor(2) == 1 && + mjpeg_decoder.GetHorizSampFactor(2) == 1) { + ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToNV12, &bufs, dst_width, + dst_height); + // YUV444 + } else if (mjpeg_decoder.GetColorSpace() == + MJpegDecoder::kColorSpaceYCbCr && + mjpeg_decoder.GetNumComponents() == 3 && + mjpeg_decoder.GetVertSampFactor(0) == 1 && + mjpeg_decoder.GetHorizSampFactor(0) == 1 && + mjpeg_decoder.GetVertSampFactor(1) == 1 && + mjpeg_decoder.GetHorizSampFactor(1) == 1 && + mjpeg_decoder.GetVertSampFactor(2) == 1 && + mjpeg_decoder.GetHorizSampFactor(2) == 1) { + ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToNV12, &bufs, dst_width, + dst_height); + // YUV400 + } else if 
(mjpeg_decoder.GetColorSpace() == + MJpegDecoder::kColorSpaceGrayscale && + mjpeg_decoder.GetNumComponents() == 1 && + mjpeg_decoder.GetVertSampFactor(0) == 1 && + mjpeg_decoder.GetHorizSampFactor(0) == 1) { + ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToNV12, &bufs, dst_width, + dst_height); + } else { + // Unknown colorspace. + mjpeg_decoder.UnloadFrame(); + return 1; + } + } + return ret ? 0 : 1; +} + struct ARGBBuffers { uint8_t* argb; int argb_stride; diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_to_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_to_argb.cc index c08f61013..84df16c8c 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_to_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_to_argb.cc @@ -180,12 +180,6 @@ int ConvertToARGB(const uint8_t* sample, r = NV21ToARGB(src, src_width, src_uv, aligned_src_width, dst_argb, dst_stride_argb, crop_width, inv_crop_height); break; - case FOURCC_M420: - src = sample + (src_width * crop_y) * 12 / 8 + crop_x; - r = M420ToARGB(src, src_width, dst_argb, dst_stride_argb, crop_width, - inv_crop_height); - break; - // Triplanar formats case FOURCC_I420: case FOURCC_YV12: { diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc index 584be0ac3..ac6eeab24 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc @@ -179,11 +179,6 @@ int ConvertToI420(const uint8_t* sample, dst_stride_y, dst_v, dst_stride_v, dst_u, dst_stride_u, crop_width, inv_crop_height, rotation); break; - case FOURCC_M420: - src = sample + (src_width * crop_y) * 12 / 8 + crop_x; - r = M420ToI420(src, src_width, dst_y, dst_stride_y, dst_u, dst_stride_u, - dst_v, dst_stride_v, crop_width, inv_crop_height); - break; // Triplanar formats case FOURCC_I420: case FOURCC_YV12: { diff --git 
a/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc b/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc index 48e2b6152..fe89452b7 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc @@ -75,9 +75,9 @@ void CpuId(int info_eax, int info_ecx, int* cpu_info) { asm volatile( #if defined(__i386__) && defined(__PIC__) // Preserve ebx for fpic 32 bit. - "mov %%ebx, %%edi \n" + "mov %%ebx, %%edi \n" "cpuid \n" - "xchg %%edi, %%ebx \n" + "xchg %%edi, %%ebx \n" : "=D"(info_ebx), #else "cpuid \n" @@ -163,44 +163,38 @@ LIBYUV_API SAFEBUFFERS int ArmCpuCaps(const char* cpuinfo_name) { } // TODO(fbarchard): Consider read_msa_ir(). -// TODO(fbarchard): Add unittest. -LIBYUV_API SAFEBUFFERS int MipsCpuCaps(const char* cpuinfo_name, - const char ase[]) { +LIBYUV_API SAFEBUFFERS int MipsCpuCaps(const char* cpuinfo_name) { char cpuinfo_line[512]; + int flag = 0x0; FILE* f = fopen(cpuinfo_name, "r"); if (!f) { - // ase enabled if /proc/cpuinfo is unavailable. - if (strcmp(ase, " msa") == 0) { - return kCpuHasMSA; - } - if (strcmp(ase, " mmi") == 0) { - return kCpuHasMMI; - } + // Assume nothing if /proc/cpuinfo is unavailable. + // This will occur for Chrome sandbox for Pepper or Render process. return 0; } while (fgets(cpuinfo_line, sizeof(cpuinfo_line) - 1, f)) { + if (memcmp(cpuinfo_line, "cpu model", 9) == 0) { + // Workaround early kernel without mmi in ASEs line. 
+ if (strstr(cpuinfo_line, "Loongson-3")) { + flag |= kCpuHasMMI; + } else if (strstr(cpuinfo_line, "Loongson-2K")) { + flag |= kCpuHasMMI | kCpuHasMSA; + } + } if (memcmp(cpuinfo_line, "ASEs implemented", 16) == 0) { - char* p = strstr(cpuinfo_line, ase); - if (p) { - fclose(f); - if (strcmp(ase, " msa") == 0) { - return kCpuHasMSA; - } - return 0; + if (strstr(cpuinfo_line, "loongson-mmi") && + strstr(cpuinfo_line, "loongson-ext")) { + flag |= kCpuHasMMI; } - } else if (memcmp(cpuinfo_line, "cpu model", 9) == 0) { - char* p = strstr(cpuinfo_line, "Loongson-3"); - if (p) { - fclose(f); - if (strcmp(ase, " mmi") == 0) { - return kCpuHasMMI; - } - return 0; + if (strstr(cpuinfo_line, "msa")) { + flag |= kCpuHasMSA; } + // ASEs is the last line, so we can break here. + break; } } fclose(f); - return 0; + return flag; } static SAFEBUFFERS int GetCpuFlags(void) { @@ -242,11 +236,7 @@ static SAFEBUFFERS int GetCpuFlags(void) { } #endif #if defined(__mips__) && defined(__linux__) -#if defined(__mips_msa) - cpu_info = MipsCpuCaps("/proc/cpuinfo", " msa"); -#elif defined(_MIPS_ARCH_LOONGSON3A) - cpu_info = MipsCpuCaps("/proc/cpuinfo", " mmi"); -#endif + cpu_info = MipsCpuCaps("/proc/cpuinfo"); cpu_info |= kCpuHasMIPS; #endif #if defined(__arm__) || defined(__aarch64__) diff --git a/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc b/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc index 1aa151b62..d5cd7e680 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc @@ -402,14 +402,6 @@ void SplitUVPlane(const uint8_t* src_uv, } } #endif -#if defined(HAS_SPLITUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SplitUVRow = SplitUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - SplitUVRow = SplitUVRow_MSA; - } - } -#endif #if defined(HAS_SPLITUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SplitUVRow = SplitUVRow_Any_MMI; @@ -418,6 +410,14 @@ void SplitUVPlane(const 
uint8_t* src_uv, } } #endif +#if defined(HAS_SPLITUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SplitUVRow = SplitUVRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + SplitUVRow = SplitUVRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { // Copy a row of UV. @@ -477,14 +477,6 @@ void MergeUVPlane(const uint8_t* src_u, } } #endif -#if defined(HAS_MERGEUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MergeUVRow = MergeUVRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - MergeUVRow = MergeUVRow_MSA; - } - } -#endif #if defined(HAS_MERGEUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { MergeUVRow = MergeUVRow_Any_MMI; @@ -493,6 +485,14 @@ void MergeUVPlane(const uint8_t* src_u, } } #endif +#if defined(HAS_MERGEUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MergeUVRow = MergeUVRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + MergeUVRow = MergeUVRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { // Merge a row of U and V into a row of UV. @@ -579,6 +579,15 @@ int NV21ToNV12(const uint8_t* src_y, if (dst_y) { CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); } + + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_vu = src_vu + (halfheight - 1) * src_stride_vu; + src_stride_vu = -src_stride_vu; + } + SwapUVPlane(src_vu, src_stride_vu, dst_uv, dst_stride_uv, halfwidth, halfheight); return 0; @@ -625,14 +634,6 @@ void SplitRGBPlane(const uint8_t* src_rgb, } } #endif -#if defined(HAS_SPLITRGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - SplitRGBRow = SplitRGBRow_Any_NEON; - if (IS_ALIGNED(width, 16)) { - SplitRGBRow = SplitRGBRow_NEON; - } - } -#endif #if defined(HAS_SPLITRGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SplitRGBRow = SplitRGBRow_Any_MMI; @@ -641,6 +642,14 @@ void SplitRGBPlane(const uint8_t* src_rgb, } } #endif +#if defined(HAS_SPLITRGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + SplitRGBRow = SplitRGBRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + SplitRGBRow = SplitRGBRow_NEON; + } + } +#endif for (y = 0; y < height; ++y) { // Copy a row of RGB. @@ -716,70 +725,6 @@ void MergeRGBPlane(const uint8_t* src_r, } } -// Mirror a plane of data. -void MirrorPlane(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_y, - int dst_stride_y, - int width, - int height) { - int y; - void (*MirrorRow)(const uint8_t* src, uint8_t* dst, int width) = MirrorRow_C; - // Negative height means invert the image. 
- if (height < 0) { - height = -height; - src_y = src_y + (height - 1) * src_stride_y; - src_stride_y = -src_stride_y; - } -#if defined(HAS_MIRRORROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - MirrorRow = MirrorRow_Any_NEON; - if (IS_ALIGNED(width, 16)) { - MirrorRow = MirrorRow_NEON; - } - } -#endif -#if defined(HAS_MIRRORROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - MirrorRow = MirrorRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - MirrorRow = MirrorRow_SSSE3; - } - } -#endif -#if defined(HAS_MIRRORROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - MirrorRow = MirrorRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - MirrorRow = MirrorRow_AVX2; - } - } -#endif -#if defined(HAS_MIRRORROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MirrorRow = MirrorRow_Any_MSA; - if (IS_ALIGNED(width, 64)) { - MirrorRow = MirrorRow_MSA; - } - } -#endif -#if defined(HAS_MIRRORROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MirrorRow = MirrorRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - MirrorRow = MirrorRow_MMI; - } - } -#endif - - // Mirror plane - for (y = 0; y < height; ++y) { - MirrorRow(src_y, dst_y, width); - src_y += src_stride_y; - dst_y += dst_stride_y; - } -} - // Convert YUY2 to I422. 
LIBYUV_API int YUY2ToI422(const uint8_t* src_yuy2, @@ -844,17 +789,7 @@ int YUY2ToI422(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_YUY2TOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - YUY2ToYRow = YUY2ToYRow_Any_MSA; - YUY2ToUV422Row = YUY2ToUV422Row_Any_MSA; - if (IS_ALIGNED(width, 32)) { - YUY2ToYRow = YUY2ToYRow_MSA; - YUY2ToUV422Row = YUY2ToUV422Row_MSA; - } - } -#endif -#if defined(HAS_YUY2TOYROW_MMI) +#if defined(HAS_YUY2TOYROW_MMI) && defined(HAS_YUY2TOUV422ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { YUY2ToYRow = YUY2ToYRow_Any_MMI; YUY2ToUV422Row = YUY2ToUV422Row_Any_MMI; @@ -864,6 +799,16 @@ int YUY2ToI422(const uint8_t* src_yuy2, } } #endif +#if defined(HAS_YUY2TOYROW_MSA) && defined(HAS_YUY2TOUV422ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + YUY2ToYRow = YUY2ToYRow_Any_MSA; + YUY2ToUV422Row = YUY2ToUV422Row_Any_MSA; + if (IS_ALIGNED(width, 32)) { + YUY2ToYRow = YUY2ToYRow_MSA; + YUY2ToUV422Row = YUY2ToUV422Row_MSA; + } + } +#endif for (y = 0; y < height; ++y) { YUY2ToUV422Row(src_yuy2, dst_u, dst_v, width); @@ -940,17 +885,7 @@ int UYVYToI422(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_UYVYTOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - UYVYToYRow = UYVYToYRow_Any_MSA; - UYVYToUV422Row = UYVYToUV422Row_Any_MSA; - if (IS_ALIGNED(width, 32)) { - UYVYToYRow = UYVYToYRow_MSA; - UYVYToUV422Row = UYVYToUV422Row_MSA; - } - } -#endif -#if defined(HAS_UYVYTOYROW_MMI) +#if defined(HAS_UYVYTOYROW_MMI) && defined(HAS_UYVYTOUV422ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { UYVYToYRow = UYVYToYRow_Any_MMI; UYVYToUV422Row = UYVYToUV422Row_Any_MMI; @@ -960,6 +895,16 @@ int UYVYToI422(const uint8_t* src_uyvy, } } #endif +#if defined(HAS_UYVYTOYROW_MSA) && defined(HAS_UYVYTOUV422ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + UYVYToYRow = UYVYToYRow_Any_MSA; + UYVYToUV422Row = UYVYToUV422Row_Any_MSA; + if (IS_ALIGNED(width, 32)) { + UYVYToYRow = UYVYToYRow_MSA; + UYVYToUV422Row = UYVYToUV422Row_MSA; + } + } +#endif for (y = 0; y < height; ++y) { UYVYToUV422Row(src_uyvy, 
dst_u, dst_v, width); @@ -1022,14 +967,6 @@ int YUY2ToY(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_YUY2TOYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - YUY2ToYRow = YUY2ToYRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - YUY2ToYRow = YUY2ToYRow_MSA; - } - } -#endif #if defined(HAS_YUY2TOYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { YUY2ToYRow = YUY2ToYRow_Any_MMI; @@ -1038,6 +975,14 @@ int YUY2ToY(const uint8_t* src_yuy2, } } #endif +#if defined(HAS_YUY2TOYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + YUY2ToYRow = YUY2ToYRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + YUY2ToYRow = YUY2ToYRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { YUY2ToYRow(src_yuy2, dst_y, width); @@ -1047,6 +992,130 @@ int YUY2ToY(const uint8_t* src_yuy2, return 0; } +// Mirror a plane of data. +// See Also I400Mirror +LIBYUV_API +void MirrorPlane(const uint8_t* src_y, + int src_stride_y, + uint8_t* dst_y, + int dst_stride_y, + int width, + int height) { + int y; + void (*MirrorRow)(const uint8_t* src, uint8_t* dst, int width) = MirrorRow_C; + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_stride_y = -src_stride_y; + } +#if defined(HAS_MIRRORROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MirrorRow = MirrorRow_Any_NEON; + if (IS_ALIGNED(width, 32)) { + MirrorRow = MirrorRow_NEON; + } + } +#endif +#if defined(HAS_MIRRORROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + MirrorRow = MirrorRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + MirrorRow = MirrorRow_SSSE3; + } + } +#endif +#if defined(HAS_MIRRORROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MirrorRow = MirrorRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + MirrorRow = MirrorRow_AVX2; + } + } +#endif +#if defined(HAS_MIRRORROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + MirrorRow = MirrorRow_Any_MMI; + if (IS_ALIGNED(width, 8)) { + MirrorRow = MirrorRow_MMI; + } + } +#endif +#if defined(HAS_MIRRORROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MirrorRow = MirrorRow_Any_MSA; + if (IS_ALIGNED(width, 64)) { + MirrorRow = MirrorRow_MSA; + } + } +#endif + + // Mirror plane + for (y = 0; y < height; ++y) { + MirrorRow(src_y, dst_y, width); + src_y += src_stride_y; + dst_y += dst_stride_y; + } +} + +// Mirror a plane of UV data. +LIBYUV_API +void MirrorUVPlane(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + int y; + void (*MirrorUVRow)(const uint8_t* src, uint8_t* dst, int width) = + MirrorUVRow_C; + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_uv = src_uv + (height - 1) * src_stride_uv; + src_stride_uv = -src_stride_uv; + } +#if defined(HAS_MIRRORUVROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MirrorUVRow = MirrorUVRow_Any_NEON; + if (IS_ALIGNED(width, 32)) { + MirrorUVRow = MirrorUVRow_NEON; + } + } +#endif +#if defined(HAS_MIRRORUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + MirrorUVRow = MirrorUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + MirrorUVRow = MirrorUVRow_SSSE3; + } + } +#endif +#if defined(HAS_MIRRORUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MirrorUVRow = MirrorUVRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + MirrorUVRow = MirrorUVRow_AVX2; + } + } +#endif +#if defined(HAS_MIRRORUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MirrorUVRow = MirrorUVRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + MirrorUVRow = MirrorUVRow_MSA; + } + } +#endif + + // MirrorUV plane + for (y = 0; y < height; ++y) { + MirrorUVRow(src_uv, dst_uv, width); + src_uv += src_stride_uv; + dst_uv += dst_stride_uv; + } +} + // Mirror I400 with optional flipping LIBYUV_API int I400Mirror(const uint8_t* src_y, @@ -1087,7 +1156,7 @@ int I420Mirror(const uint8_t* src_y, int height) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; - if (!src_y || !src_u || !src_v || !dst_y || !dst_u || !dst_v || width <= 0 || + if (!src_y || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { return -1; } @@ -1111,6 +1180,41 @@ int I420Mirror(const uint8_t* src_y, return 0; } +// NV12 mirror. +LIBYUV_API +int NV12Mirror(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if (!src_y || !src_uv || !dst_uv || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_y = src_y + (height - 1) * src_stride_y; + src_uv = src_uv + (halfheight - 1) * src_stride_uv; + src_stride_y = -src_stride_y; + src_stride_uv = -src_stride_uv; + } + + if (dst_y) { + MirrorPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + } + MirrorUVPlane(src_uv, src_stride_uv, dst_uv, dst_stride_uv, halfwidth, + halfheight); + return 0; +} + // ARGB mirror. LIBYUV_API int ARGBMirror(const uint8_t* src_argb, @@ -1134,7 +1238,7 @@ int ARGBMirror(const uint8_t* src_argb, #if defined(HAS_ARGBMIRRORROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBMirrorRow = ARGBMirrorRow_Any_NEON; - if (IS_ALIGNED(width, 4)) { + if (IS_ALIGNED(width, 8)) { ARGBMirrorRow = ARGBMirrorRow_NEON; } } @@ -1155,14 +1259,6 @@ int ARGBMirror(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBMIRRORROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBMirrorRow = ARGBMirrorRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBMirrorRow = ARGBMirrorRow_MSA; - } - } -#endif #if defined(HAS_ARGBMIRRORROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBMirrorRow = ARGBMirrorRow_Any_MMI; @@ -1171,6 +1267,14 @@ int ARGBMirror(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBMIRRORROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBMirrorRow = ARGBMirrorRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBMirrorRow = ARGBMirrorRow_MSA; + } + } +#endif // Mirror plane for (y = 0; y < height; ++y) { @@ -1181,6 +1285,52 @@ int ARGBMirror(const uint8_t* src_argb, return 0; } +// RGB24 mirror. +LIBYUV_API +int RGB24Mirror(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + int y; + void (*RGB24MirrorRow)(const uint8_t* src, uint8_t* dst, int width) = + RGB24MirrorRow_C; + if (!src_rgb24 || !dst_rgb24 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_rgb24 = src_rgb24 + (height - 1) * src_stride_rgb24; + src_stride_rgb24 = -src_stride_rgb24; + } +#if defined(HAS_RGB24MIRRORROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + RGB24MirrorRow = RGB24MirrorRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + RGB24MirrorRow = RGB24MirrorRow_NEON; + } + } +#endif +#if defined(HAS_RGB24MIRRORROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RGB24MirrorRow = RGB24MirrorRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + RGB24MirrorRow = RGB24MirrorRow_SSSE3; + } + } +#endif + + // Mirror plane + for (y = 0; y < height; ++y) { + RGB24MirrorRow(src_rgb24, dst_rgb24, width); + src_rgb24 += src_stride_rgb24; + dst_rgb24 += dst_stride_rgb24; + } + return 0; +} + // Get a blender that optimized for the CPU and pixel count. // As there are 6 blenders to choose from, the caller should try to use // the same blend function for all pixels if possible. @@ -1199,15 +1349,15 @@ ARGBBlendRow GetARGBBlend() { ARGBBlendRow = ARGBBlendRow_NEON; } #endif -#if defined(HAS_ARGBBLENDROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBBlendRow = ARGBBlendRow_MSA; - } -#endif #if defined(HAS_ARGBBLENDROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBBlendRow = ARGBBlendRow_MMI; } +#endif +#if defined(HAS_ARGBBLENDROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBBlendRow = ARGBBlendRow_MSA; + } #endif return ARGBBlendRow; } @@ -1517,14 +1667,6 @@ int ARGBMultiply(const uint8_t* src_argb0, } } #endif -#if defined(HAS_ARGBMULTIPLYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBMultiplyRow = ARGBMultiplyRow_Any_MSA; - if (IS_ALIGNED(width, 4)) { - ARGBMultiplyRow = ARGBMultiplyRow_MSA; - } - } -#endif #if defined(HAS_ARGBMULTIPLYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBMultiplyRow = ARGBMultiplyRow_Any_MMI; @@ -1533,6 +1675,14 @@ int ARGBMultiply(const uint8_t* src_argb0, } } #endif +#if defined(HAS_ARGBMULTIPLYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBMultiplyRow = ARGBMultiplyRow_Any_MSA; + if (IS_ALIGNED(width, 4)) { 
+ ARGBMultiplyRow = ARGBMultiplyRow_MSA; + } + } +#endif // Multiply plane for (y = 0; y < height; ++y) { @@ -1602,14 +1752,6 @@ int ARGBAdd(const uint8_t* src_argb0, } } #endif -#if defined(HAS_ARGBADDROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBAddRow = ARGBAddRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBAddRow = ARGBAddRow_MSA; - } - } -#endif #if defined(HAS_ARGBADDROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBAddRow = ARGBAddRow_Any_MMI; @@ -1618,6 +1760,14 @@ int ARGBAdd(const uint8_t* src_argb0, } } #endif +#if defined(HAS_ARGBADDROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAddRow = ARGBAddRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAddRow = ARGBAddRow_MSA; + } + } +#endif // Add plane for (y = 0; y < height; ++y) { @@ -1682,14 +1832,6 @@ int ARGBSubtract(const uint8_t* src_argb0, } } #endif -#if defined(HAS_ARGBSUBTRACTROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBSubtractRow = ARGBSubtractRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBSubtractRow = ARGBSubtractRow_MSA; - } - } -#endif #if defined(HAS_ARGBSUBTRACTROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBSubtractRow = ARGBSubtractRow_Any_MMI; @@ -1698,6 +1840,14 @@ int ARGBSubtract(const uint8_t* src_argb0, } } #endif +#if defined(HAS_ARGBSUBTRACTROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBSubtractRow = ARGBSubtractRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBSubtractRow = ARGBSubtractRow_MSA; + } + } +#endif // Subtract plane for (y = 0; y < height; ++y) { @@ -1708,193 +1858,6 @@ int ARGBSubtract(const uint8_t* src_argb0, } return 0; } -// Convert I422 to RGBA with matrix -static int I422ToRGBAMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - const struct YuvConstants* yuvconstants, - int width, - int height) { - int y; - void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct 
YuvConstants* yuvconstants, int width) = - I422ToRGBARow_C; - if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; - dst_stride_rgba = -dst_stride_rgba; - } -#if defined(HAS_I422TORGBAROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGBARow = I422ToRGBARow_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_SSSE3; - } - } -#endif -#if defined(HAS_I422TORGBAROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGBARow = I422ToRGBARow_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToRGBARow = I422ToRGBARow_AVX2; - } - } -#endif -#if defined(HAS_I422TORGBAROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGBARow = I422ToRGBARow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_NEON; - } - } -#endif -#if defined(HAS_I422TORGBAROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGBARow = I422ToRGBARow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_MSA; - } - } -#endif -#if defined(HAS_I422TORGBAROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGBARow = I422ToRGBARow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGBARow = I422ToRGBARow_MMI; - } - } -#endif - - for (y = 0; y < height; ++y) { - I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); - dst_rgba += dst_stride_rgba; - src_y += src_stride_y; - src_u += src_stride_u; - src_v += src_stride_v; - } - return 0; -} - -// Convert I422 to RGBA. -LIBYUV_API -int I422ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height) { - return I422ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgba, dst_stride_rgba, - &kYuvI601Constants, width, height); -} - -// Convert I422 to BGRA. 
-LIBYUV_API -int I422ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height) { - return I422ToRGBAMatrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_bgra, dst_stride_bgra, - &kYvuI601Constants, // Use Yvu matrix - width, height); -} - -// Convert NV12 to RGB565. -LIBYUV_API -int NV12ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - int y; - void (*NV12ToRGB565Row)( - const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = NV12ToRGB565Row_C; - if (!src_y || !src_uv || !dst_rgb565 || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; - } -#if defined(HAS_NV12TORGB565ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - NV12ToRGB565Row = NV12ToRGB565Row_SSSE3; - } - } -#endif -#if defined(HAS_NV12TORGB565ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - NV12ToRGB565Row = NV12ToRGB565Row_AVX2; - } - } -#endif -#if defined(HAS_NV12TORGB565ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { - NV12ToRGB565Row = NV12ToRGB565Row_NEON; - } - } -#endif -#if defined(HAS_NV12TORGB565ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - NV12ToRGB565Row = NV12ToRGB565Row_MSA; - } - } -#endif -#if defined(HAS_NV12TORGB565ROW_MMI) - if 
(TestCpuFlag(kCpuHasMMI)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - NV12ToRGB565Row = NV12ToRGB565Row_MMI; - } - } -#endif - - for (y = 0; y < height; ++y) { - NV12ToRGB565Row(src_y, src_uv, dst_rgb565, &kYuvI601Constants, width); - dst_rgb565 += dst_stride_rgb565; - src_y += src_stride_y; - if (y & 1) { - src_uv += src_stride_uv; - } - } - return 0; -} // Convert RAW to RGB24. LIBYUV_API @@ -1938,14 +1901,6 @@ int RAWToRGB24(const uint8_t* src_raw, } } #endif -#if defined(HAS_RAWTORGB24ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RAWToRGB24Row = RAWToRGB24Row_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RAWToRGB24Row = RAWToRGB24Row_MSA; - } - } -#endif #if defined(HAS_RAWTORGB24ROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { RAWToRGB24Row = RAWToRGB24Row_Any_MMI; @@ -1954,6 +1909,14 @@ int RAWToRGB24(const uint8_t* src_raw, } } #endif +#if defined(HAS_RAWTORGB24ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToRGB24Row = RAWToRGB24Row_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RAWToRGB24Row = RAWToRGB24Row_MSA; + } + } +#endif for (y = 0; y < height; ++y) { RAWToRGB24Row(src_raw, dst_rgb24, width); @@ -2089,14 +2052,6 @@ int ARGBRect(uint8_t* dst_argb, ARGBSetRow = ARGBSetRow_X86; } #endif -#if defined(HAS_ARGBSETROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBSetRow = ARGBSetRow_Any_MSA; - if (IS_ALIGNED(width, 4)) { - ARGBSetRow = ARGBSetRow_MSA; - } - } -#endif #if defined(HAS_ARGBSETROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBSetRow = ARGBSetRow_Any_MMI; @@ -2105,6 +2060,14 @@ int ARGBRect(uint8_t* dst_argb, } } #endif +#if defined(HAS_ARGBSETROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBSetRow = ARGBSetRow_Any_MSA; + if (IS_ALIGNED(width, 4)) { + ARGBSetRow = ARGBSetRow_MSA; + } + } +#endif // Set plane for (y = 0; y < height; ++y) { @@ -2175,14 +2138,6 @@ int ARGBAttenuate(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBATTENUATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; - if 
(IS_ALIGNED(width, 8)) { - ARGBAttenuateRow = ARGBAttenuateRow_MSA; - } - } -#endif #if defined(HAS_ARGBATTENUATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBAttenuateRow = ARGBAttenuateRow_Any_MMI; @@ -2191,6 +2146,14 @@ int ARGBAttenuate(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBATTENUATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBAttenuateRow(src_argb, dst_argb, width); @@ -2286,16 +2249,16 @@ int ARGBGrayTo(const uint8_t* src_argb, ARGBGrayRow = ARGBGrayRow_NEON; } #endif -#if defined(HAS_ARGBGRAYROW_MSA) - if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { - ARGBGrayRow = ARGBGrayRow_MSA; - } -#endif #if defined(HAS_ARGBGRAYROW_MMI) if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { ARGBGrayRow = ARGBGrayRow_MMI; } #endif +#if defined(HAS_ARGBGRAYROW_MSA) + if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { + ARGBGrayRow = ARGBGrayRow_MSA; + } +#endif for (y = 0; y < height; ++y) { ARGBGrayRow(src_argb, dst_argb, width); @@ -2336,16 +2299,16 @@ int ARGBGray(uint8_t* dst_argb, ARGBGrayRow = ARGBGrayRow_NEON; } #endif -#if defined(HAS_ARGBGRAYROW_MSA) - if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { - ARGBGrayRow = ARGBGrayRow_MSA; - } -#endif #if defined(HAS_ARGBGRAYROW_MMI) if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { ARGBGrayRow = ARGBGrayRow_MMI; } #endif +#if defined(HAS_ARGBGRAYROW_MSA) + if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { + ARGBGrayRow = ARGBGrayRow_MSA; + } +#endif for (y = 0; y < height; ++y) { ARGBGrayRow(dst, dst, width); @@ -2384,16 +2347,16 @@ int ARGBSepia(uint8_t* dst_argb, ARGBSepiaRow = ARGBSepiaRow_NEON; } #endif -#if defined(HAS_ARGBSEPIAROW_MSA) - if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { - ARGBSepiaRow = ARGBSepiaRow_MSA; - } -#endif #if defined(HAS_ARGBSEPIAROW_MMI) if (TestCpuFlag(kCpuHasMMI) && 
IS_ALIGNED(width, 2)) { ARGBSepiaRow = ARGBSepiaRow_MMI; } #endif +#if defined(HAS_ARGBSEPIAROW_MSA) + if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { + ARGBSepiaRow = ARGBSepiaRow_MSA; + } +#endif for (y = 0; y < height; ++y) { ARGBSepiaRow(dst, width); @@ -2440,15 +2403,15 @@ int ARGBColorMatrix(const uint8_t* src_argb, ARGBColorMatrixRow = ARGBColorMatrixRow_NEON; } #endif -#if defined(HAS_ARGBCOLORMATRIXROW_MSA) - if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { - ARGBColorMatrixRow = ARGBColorMatrixRow_MSA; - } -#endif #if defined(HAS_ARGBCOLORMATRIXROW_MMI) if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { ARGBColorMatrixRow = ARGBColorMatrixRow_MMI; } +#endif +#if defined(HAS_ARGBCOLORMATRIXROW_MSA) + if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { + ARGBColorMatrixRow = ARGBColorMatrixRow_MSA; + } #endif for (y = 0; y < height; ++y) { ARGBColorMatrixRow(src_argb, dst_argb, matrix_argb, width); @@ -2814,16 +2777,16 @@ int ARGBShade(const uint8_t* src_argb, ARGBShadeRow = ARGBShadeRow_NEON; } #endif -#if defined(HAS_ARGBSHADEROW_MSA) - if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 4)) { - ARGBShadeRow = ARGBShadeRow_MSA; - } -#endif #if defined(HAS_ARGBSHADEROW_MMI) if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { ARGBShadeRow = ARGBShadeRow_MMI; } #endif +#if defined(HAS_ARGBSHADEROW_MSA) + if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 4)) { + ARGBShadeRow = ARGBShadeRow_MSA; + } +#endif for (y = 0; y < height; ++y) { ARGBShadeRow(src_argb, dst_argb, width, value); @@ -2887,14 +2850,6 @@ int InterpolatePlane(const uint8_t* src0, } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - InterpolateRow = InterpolateRow_MSA; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { InterpolateRow = InterpolateRow_Any_MMI; @@ -2903,6 +2858,14 @@ int InterpolatePlane(const uint8_t* src0, } } #endif +#if 
defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { InterpolateRow(dst, src0, src1 - src0, width, interpolation); @@ -3018,14 +2981,6 @@ int ARGBShuffle(const uint8_t* src_bgra, } } #endif -#if defined(HAS_ARGBSHUFFLEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBShuffleRow = ARGBShuffleRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBShuffleRow = ARGBShuffleRow_MSA; - } - } -#endif #if defined(HAS_ARGBSHUFFLEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBShuffleRow = ARGBShuffleRow_Any_MMI; @@ -3034,6 +2989,14 @@ int ARGBShuffle(const uint8_t* src_bgra, } } #endif +#if defined(HAS_ARGBSHUFFLEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBShuffleRow = ARGBShuffleRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBShuffleRow = ARGBShuffleRow_MSA; + } + } +#endif for (y = 0; y < height; ++y) { ARGBShuffleRow(src_bgra, dst_argb, shuffler, width); @@ -3043,6 +3006,80 @@ int ARGBShuffle(const uint8_t* src_bgra, return 0; } +// Gauss blur a float plane using Gaussian 5x5 filter with +// coefficients of 1, 4, 6, 4, 1. +// Each destination pixel is a blur of the 5x5 +// pixels from the source. +// Source edges are clamped. +// Edge is 2 pixels on each side, and interior is multiple of 4. +LIBYUV_API +int GaussPlane_F32(const float* src, + int src_stride, + float* dst, + int dst_stride, + int width, + int height) { + int y; + void (*GaussCol_F32)(const float* src0, const float* src1, const float* src2, + const float* src3, const float* src4, float* dst, + int width) = GaussCol_F32_C; + void (*GaussRow_F32)(const float* src, float* dst, int width) = + GaussRow_F32_C; + if (!src || !dst || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src = src + (height - 1) * src_stride; + src_stride = -src_stride; + } + +#if defined(HAS_GAUSSCOL_F32_NEON) + if (TestCpuFlag(kCpuHasNEON) && IS_ALIGNED(width, 8)) { + GaussCol_F32 = GaussCol_F32_NEON; + } +#endif +#if defined(HAS_GAUSSROW_F32_NEON) + if (TestCpuFlag(kCpuHasNEON) && IS_ALIGNED(width, 8)) { + GaussRow_F32 = GaussRow_F32_NEON; + } +#endif + { + // 2 pixels on each side, but aligned out to 16 bytes. + align_buffer_64(rowbuf, (4 + width + 4) * 4); + memset(rowbuf, 0, 16); + memset(rowbuf + (4 + width) * 4, 0, 16); + float* row = (float*)(rowbuf + 16); + const float* src0 = src; + const float* src1 = src; + const float* src2 = src; + const float* src3 = src2 + ((height > 1) ? src_stride : 0); + const float* src4 = src3 + ((height > 2) ? src_stride : 0); + + for (y = 0; y < height; ++y) { + GaussCol_F32(src0, src1, src2, src3, src4, row, width); + + // Extrude edge by 2 floats + row[-2] = row[-1] = row[0]; + row[width + 1] = row[width] = row[width - 1]; + + GaussRow_F32(row - 2, dst, width); + + src0 = src1; + src1 = src2; + src2 = src3; + src3 = src4; + if ((y + 2) < (height - 1)) { + src4 += src_stride; + } + dst += dst_stride; + } + free_aligned_buffer_64(rowbuf); + } + return 0; +} + // Sobel ARGB effect. 
static int ARGBSobelize(const uint8_t* src_argb, int src_stride_argb, @@ -3097,14 +3134,6 @@ static int ARGBSobelize(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYJROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToYJRow = ARGBToYJRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBToYJRow = ARGBToYJRow_MSA; - } - } -#endif #if defined(HAS_ARGBTOYJROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBToYJRow = ARGBToYJRow_Any_MMI; @@ -3113,6 +3142,14 @@ static int ARGBSobelize(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToYJRow = ARGBToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBToYJRow = ARGBToYJRow_MSA; + } + } +#endif #if defined(HAS_SOBELYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { @@ -3124,16 +3161,16 @@ static int ARGBSobelize(const uint8_t* src_argb, SobelYRow = SobelYRow_NEON; } #endif -#if defined(HAS_SOBELYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SobelYRow = SobelYRow_MSA; - } -#endif #if defined(HAS_SOBELYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SobelYRow = SobelYRow_MMI; } #endif +#if defined(HAS_SOBELYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SobelYRow = SobelYRow_MSA; + } +#endif #if defined(HAS_SOBELXROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { SobelXRow = SobelXRow_SSE2; @@ -3144,15 +3181,15 @@ static int ARGBSobelize(const uint8_t* src_argb, SobelXRow = SobelXRow_NEON; } #endif -#if defined(HAS_SOBELXROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SobelXRow = SobelXRow_MSA; - } -#endif #if defined(HAS_SOBELXROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SobelXRow = SobelXRow_MMI; } +#endif +#if defined(HAS_SOBELXROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SobelXRow = SobelXRow_MSA; + } #endif { // 3 rows with edges before/after. 
@@ -3228,14 +3265,6 @@ int ARGBSobel(const uint8_t* src_argb, } } #endif -#if defined(HAS_SOBELROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SobelRow = SobelRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - SobelRow = SobelRow_MSA; - } - } -#endif #if defined(HAS_SOBELROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SobelRow = SobelRow_Any_MMI; @@ -3243,6 +3272,14 @@ int ARGBSobel(const uint8_t* src_argb, SobelRow = SobelRow_MMI; } } +#endif +#if defined(HAS_SOBELROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SobelRow = SobelRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + SobelRow = SobelRow_MSA; + } + } #endif return ARGBSobelize(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, height, SobelRow); @@ -3274,14 +3311,6 @@ int ARGBSobelToPlane(const uint8_t* src_argb, } } #endif -#if defined(HAS_SOBELTOPLANEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SobelToPlaneRow = SobelToPlaneRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - SobelToPlaneRow = SobelToPlaneRow_MSA; - } - } -#endif #if defined(HAS_SOBELTOPLANEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SobelToPlaneRow = SobelToPlaneRow_Any_MMI; @@ -3289,6 +3318,14 @@ int ARGBSobelToPlane(const uint8_t* src_argb, SobelToPlaneRow = SobelToPlaneRow_MMI; } } +#endif +#if defined(HAS_SOBELTOPLANEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SobelToPlaneRow = SobelToPlaneRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + SobelToPlaneRow = SobelToPlaneRow_MSA; + } + } #endif return ARGBSobelize(src_argb, src_stride_argb, dst_y, dst_stride_y, width, height, SobelToPlaneRow); @@ -3321,14 +3358,6 @@ int ARGBSobelXY(const uint8_t* src_argb, } } #endif -#if defined(HAS_SOBELXYROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SobelXYRow = SobelXYRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - SobelXYRow = SobelXYRow_MSA; - } - } -#endif #if defined(HAS_SOBELXYROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SobelXYRow = SobelXYRow_Any_MMI; @@ -3336,6 +3365,14 @@ int ARGBSobelXY(const uint8_t* src_argb, SobelXYRow = SobelXYRow_MMI; } } +#endif +#if 
defined(HAS_SOBELXYROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SobelXYRow = SobelXYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + SobelXYRow = SobelXYRow_MSA; + } + } #endif return ARGBSobelize(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, height, SobelXYRow); @@ -3634,18 +3671,18 @@ int ARGBExtractAlpha(const uint8_t* src_argb, : ARGBExtractAlphaRow_Any_NEON; } #endif -#if defined(HAS_ARGBEXTRACTALPHAROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBExtractAlphaRow = IS_ALIGNED(width, 16) ? ARGBExtractAlphaRow_MSA - : ARGBExtractAlphaRow_Any_MSA; - } -#endif #if defined(HAS_ARGBEXTRACTALPHAROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBExtractAlphaRow = IS_ALIGNED(width, 8) ? ARGBExtractAlphaRow_MMI : ARGBExtractAlphaRow_Any_MMI; } #endif +#if defined(HAS_ARGBEXTRACTALPHAROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBExtractAlphaRow = IS_ALIGNED(width, 16) ? ARGBExtractAlphaRow_MSA + : ARGBExtractAlphaRow_Any_MSA; + } +#endif for (int y = 0; y < height; ++y) { ARGBExtractAlphaRow(src_argb, dst_a, width); @@ -3766,14 +3803,6 @@ int YUY2ToNV12(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_SPLITUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SplitUVRow = SplitUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - SplitUVRow = SplitUVRow_MSA; - } - } -#endif #if defined(HAS_SPLITUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SplitUVRow = SplitUVRow_Any_MMI; @@ -3782,6 +3811,14 @@ int YUY2ToNV12(const uint8_t* src_yuy2, } } #endif +#if defined(HAS_SPLITUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SplitUVRow = SplitUVRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + SplitUVRow = SplitUVRow_MSA; + } + } +#endif #if defined(HAS_INTERPOLATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { InterpolateRow = InterpolateRow_Any_SSSE3; @@ -3806,14 +3843,6 @@ int YUY2ToNV12(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - InterpolateRow = 
InterpolateRow_MSA; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { InterpolateRow = InterpolateRow_Any_MMI; @@ -3822,6 +3851,14 @@ int YUY2ToNV12(const uint8_t* src_yuy2, } } #endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_MSA; + } + } +#endif { int awidth = halfwidth * 2; @@ -3898,14 +3935,6 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_SPLITUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SplitUVRow = SplitUVRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - SplitUVRow = SplitUVRow_MSA; - } - } -#endif #if defined(HAS_SPLITUVROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { SplitUVRow = SplitUVRow_Any_MMI; @@ -3914,6 +3943,14 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif +#if defined(HAS_SPLITUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + SplitUVRow = SplitUVRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + SplitUVRow = SplitUVRow_MSA; + } + } +#endif #if defined(HAS_INTERPOLATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { InterpolateRow = InterpolateRow_Any_SSSE3; @@ -3938,14 +3975,6 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - InterpolateRow = InterpolateRow_MSA; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { InterpolateRow = InterpolateRow_Any_MMI; @@ -3954,6 +3983,14 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_MSA; + } + } +#endif { int awidth = halfwidth * 2; @@ -3981,6 +4018,56 @@ int UYVYToNV12(const uint8_t* src_uyvy, return 0; } +// width and height are src size allowing odd size handling. 
+LIBYUV_API +void HalfMergeUVPlane(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + int y; + void (*HalfMergeUVRow)(const uint8_t* src_u, int src_stride_u, + const uint8_t* src_v, int src_stride_v, + uint8_t* dst_uv, int width) = HalfMergeUVRow_C; + + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } +#if defined(HAS_HALFMERGEUVROW_NEON) + if (TestCpuFlag(kCpuHasNEON) && IS_ALIGNED(width, 16)) { + HalfMergeUVRow = HalfMergeUVRow_NEON; + } +#endif +#if defined(HAS_HALFMERGEUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3) && IS_ALIGNED(width, 16)) { + HalfMergeUVRow = HalfMergeUVRow_SSSE3; + } +#endif +#if defined(HAS_HALFMERGEUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2) && IS_ALIGNED(width, 32)) { + HalfMergeUVRow = HalfMergeUVRow_AVX2; + } +#endif + for (y = 0; y < height - 1; y += 2) { + // Merge a row of U and V into a row of UV. 
+ HalfMergeUVRow(src_u, src_stride_u, src_v, src_stride_v, dst_uv, width); + src_u += src_stride_u * 2; + src_v += src_stride_v * 2; + dst_uv += dst_stride_uv; + } + if (height & 1) { + HalfMergeUVRow(src_u, 0, src_v, 0, dst_uv, width); + } +} + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate.cc index d414186a5..32904e473 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate.cc @@ -36,6 +36,15 @@ void TransposePlane(const uint8_t* src, void (*TransposeWx8)(const uint8_t* src, int src_stride, uint8_t* dst, int dst_stride, int width) = TransposeWx8_C; #endif + +#if defined(HAS_TRANSPOSEWX16_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + TransposeWx16 = TransposeWx16_Any_MSA; + if (IS_ALIGNED(width, 16)) { + TransposeWx16 = TransposeWx16_MSA; + } + } +#else #if defined(HAS_TRANSPOSEWX8_NEON) if (TestCpuFlag(kCpuHasNEON)) { TransposeWx8 = TransposeWx8_NEON; @@ -62,14 +71,7 @@ void TransposePlane(const uint8_t* src, } } #endif -#if defined(HAS_TRANSPOSEWX16_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - TransposeWx16 = TransposeWx16_Any_MSA; - if (IS_ALIGNED(width, 16)) { - TransposeWx16 = TransposeWx16_MSA; - } - } -#endif +#endif /* defined(HAS_TRANSPOSEWX16_MSA) */ #if defined(HAS_TRANSPOSEWX16_MSA) // Work across the source in 16x16 tiles @@ -142,7 +144,7 @@ void RotatePlane180(const uint8_t* src, #if defined(HAS_MIRRORROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { MirrorRow = MirrorRow_Any_NEON; - if (IS_ALIGNED(width, 16)) { + if (IS_ALIGNED(width, 32)) { MirrorRow = MirrorRow_NEON; } } @@ -163,14 +165,6 @@ void RotatePlane180(const uint8_t* src, } } #endif -#if defined(HAS_MIRRORROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - MirrorRow = MirrorRow_Any_MSA; - if (IS_ALIGNED(width, 64)) { - MirrorRow = MirrorRow_MSA; - } - } -#endif #if defined(HAS_MIRRORROW_MMI) if (TestCpuFlag(kCpuHasMMI)) 
{ MirrorRow = MirrorRow_Any_MMI; @@ -179,6 +173,14 @@ void RotatePlane180(const uint8_t* src, } } #endif +#if defined(HAS_MIRRORROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MirrorRow = MirrorRow_Any_MSA; + if (IS_ALIGNED(width, 64)) { + MirrorRow = MirrorRow_MSA; + } + } +#endif #if defined(HAS_COPYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { CopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2; @@ -207,11 +209,11 @@ void RotatePlane180(const uint8_t* src, // Odd height will harmlessly mirror the middle row twice. for (y = 0; y < half_height; ++y) { - MirrorRow(src, row, width); // Mirror first row into a buffer - src += src_stride; + CopyRow(src, row, width); // Copy first row into buffer MirrorRow(src_bot, dst, width); // Mirror last row into first row + MirrorRow(row, dst_bot, width); // Mirror buffer into last row + src += src_stride; dst += dst_stride; - CopyRow(row, dst_bot, width); // Copy first mirrored row into last src_bot -= src_stride; dst_bot -= dst_stride; } @@ -237,6 +239,15 @@ void TransposeUV(const uint8_t* src, int dst_stride_a, uint8_t* dst_b, int dst_stride_b, int width) = TransposeUVWx8_C; #endif + +#if defined(HAS_TRANSPOSEUVWX16_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + TransposeUVWx16 = TransposeUVWx16_Any_MSA; + if (IS_ALIGNED(width, 8)) { + TransposeUVWx16 = TransposeUVWx16_MSA; + } + } +#else #if defined(HAS_TRANSPOSEUVWX8_NEON) if (TestCpuFlag(kCpuHasNEON)) { TransposeUVWx8 = TransposeUVWx8_NEON; @@ -258,14 +269,7 @@ void TransposeUV(const uint8_t* src, } } #endif -#if defined(HAS_TRANSPOSEUVWX16_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - TransposeUVWx16 = TransposeUVWx16_Any_MSA; - if (IS_ALIGNED(width, 8)) { - TransposeUVWx16 = TransposeUVWx16_MSA; - } - } -#endif +#endif /* defined(HAS_TRANSPOSEUVWX16_MSA) */ #if defined(HAS_TRANSPOSEUVWX16_MSA) // Work through the source in 8x8 tiles. 
@@ -340,26 +344,26 @@ void RotateUV180(const uint8_t* src, int width, int height) { int i; - void (*MirrorUVRow)(const uint8_t* src, uint8_t* dst_u, uint8_t* dst_v, - int width) = MirrorUVRow_C; -#if defined(HAS_MIRRORUVROW_NEON) - if (TestCpuFlag(kCpuHasNEON) && IS_ALIGNED(width, 8)) { - MirrorUVRow = MirrorUVRow_NEON; + void (*MirrorSplitUVRow)(const uint8_t* src, uint8_t* dst_u, uint8_t* dst_v, + int width) = MirrorSplitUVRow_C; +#if defined(HAS_MIRRORSPLITUVROW_NEON) + if (TestCpuFlag(kCpuHasNEON) && IS_ALIGNED(width, 16)) { + MirrorSplitUVRow = MirrorSplitUVRow_NEON; } #endif -#if defined(HAS_MIRRORUVROW_SSSE3) +#if defined(HAS_MIRRORSPLITUVROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3) && IS_ALIGNED(width, 16)) { - MirrorUVRow = MirrorUVRow_SSSE3; + MirrorSplitUVRow = MirrorSplitUVRow_SSSE3; } #endif -#if defined(HAS_MIRRORUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 32)) { - MirrorUVRow = MirrorUVRow_MSA; - } -#endif -#if defined(HAS_MIRRORUVROW_MMI) +#if defined(HAS_MIRRORSPLITUVROW_MMI) if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 8)) { - MirrorUVRow = MirrorUVRow_MMI; + MirrorSplitUVRow = MirrorSplitUVRow_MMI; + } +#endif +#if defined(HAS_MIRRORSPLITUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 32)) { + MirrorSplitUVRow = MirrorSplitUVRow_MSA; } #endif @@ -367,7 +371,7 @@ void RotateUV180(const uint8_t* src, dst_b += dst_stride_b * (height - 1); for (i = 0; i < height; ++i) { - MirrorUVRow(src, dst_a, dst_b, width); + MirrorSplitUVRow(src, dst_a, dst_b, width); src += src_stride; dst_a -= dst_stride_a; dst_b -= dst_stride_b; diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc index a93fd55f9..ae6538860 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc @@ -21,17 +21,21 @@ namespace libyuv { extern "C" { #endif -static void ARGBTranspose(const uint8_t* src_argb, 
- int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +static int ARGBTranspose(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int i; int src_pixel_step = src_stride_argb >> 2; void (*ScaleARGBRowDownEven)( const uint8_t* src_argb, ptrdiff_t src_stride_argb, int src_step, uint8_t* dst_argb, int dst_width) = ScaleARGBRowDownEven_C; + // Check stride is a multiple of 4. + if (src_stride_argb & 3) { + return -1; + } #if defined(HAS_SCALEARGBROWDOWNEVEN_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { ScaleARGBRowDownEven = ScaleARGBRowDownEven_Any_SSE2; @@ -48,14 +52,6 @@ static void ARGBTranspose(const uint8_t* src_argb, } } #endif -#if defined(HAS_SCALEARGBROWDOWNEVEN_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ScaleARGBRowDownEven = ScaleARGBRowDownEven_Any_MSA; - if (IS_ALIGNED(height, 4)) { // Width of dest. - ScaleARGBRowDownEven = ScaleARGBRowDownEven_MSA; - } - } -#endif #if defined(HAS_SCALEARGBROWDOWNEVEN_MMI) if (TestCpuFlag(kCpuHasMMI)) { ScaleARGBRowDownEven = ScaleARGBRowDownEven_Any_MMI; @@ -64,50 +60,59 @@ static void ARGBTranspose(const uint8_t* src_argb, } } #endif +#if defined(HAS_SCALEARGBROWDOWNEVEN_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleARGBRowDownEven = ScaleARGBRowDownEven_Any_MSA; + if (IS_ALIGNED(height, 4)) { // Width of dest. + ScaleARGBRowDownEven = ScaleARGBRowDownEven_MSA; + } + } +#endif for (i = 0; i < width; ++i) { // column of source to row of dest. 
ScaleARGBRowDownEven(src_argb, 0, src_pixel_step, dst_argb, height); dst_argb += dst_stride_argb; src_argb += 4; } + return 0; } -void ARGBRotate90(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +static int ARGBRotate90(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { // Rotate by 90 is a ARGBTranspose with the source read // from bottom to top. So set the source pointer to the end // of the buffer and flip the sign of the source stride. src_argb += src_stride_argb * (height - 1); src_stride_argb = -src_stride_argb; - ARGBTranspose(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, - height); + return ARGBTranspose(src_argb, src_stride_argb, dst_argb, dst_stride_argb, + width, height); } -void ARGBRotate270(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +static int ARGBRotate270(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { // Rotate by 270 is a ARGBTranspose with the destination written // from bottom to top. So set the destination pointer to the end // of the buffer and flip the sign of the destination stride. dst_argb += dst_stride_argb * (width - 1); dst_stride_argb = -dst_stride_argb; - ARGBTranspose(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, - height); + return ARGBTranspose(src_argb, src_stride_argb, dst_argb, dst_stride_argb, + width, height); } -void ARGBRotate180(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +static int ARGBRotate180(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { // Swap first and last row and mirror the content. Uses a temporary row. 
align_buffer_64(row, width * 4); const uint8_t* src_bot = src_argb + src_stride_argb * (height - 1); @@ -121,7 +126,7 @@ void ARGBRotate180(const uint8_t* src_argb, #if defined(HAS_ARGBMIRRORROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBMirrorRow = ARGBMirrorRow_Any_NEON; - if (IS_ALIGNED(width, 4)) { + if (IS_ALIGNED(width, 8)) { ARGBMirrorRow = ARGBMirrorRow_NEON; } } @@ -142,14 +147,6 @@ void ARGBRotate180(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBMIRRORROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBMirrorRow = ARGBMirrorRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGBMirrorRow = ARGBMirrorRow_MSA; - } - } -#endif #if defined(HAS_ARGBMIRRORROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ARGBMirrorRow = ARGBMirrorRow_Any_MMI; @@ -158,6 +155,14 @@ void ARGBRotate180(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBMIRRORROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBMirrorRow = ARGBMirrorRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGBMirrorRow = ARGBMirrorRow_MSA; + } + } +#endif #if defined(HAS_COPYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { CopyRow = IS_ALIGNED(width * 4, 32) ? 
CopyRow_SSE2 : CopyRow_Any_SSE2; @@ -190,6 +195,7 @@ void ARGBRotate180(const uint8_t* src_argb, dst_bot -= dst_stride_argb; } free_aligned_buffer_64(row); + return 0; } LIBYUV_API @@ -217,17 +223,14 @@ int ARGBRotate(const uint8_t* src_argb, return ARGBCopy(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, height); case kRotate90: - ARGBRotate90(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, - height); - return 0; + return ARGBRotate90(src_argb, src_stride_argb, dst_argb, dst_stride_argb, + width, height); case kRotate270: - ARGBRotate270(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, - height); - return 0; + return ARGBRotate270(src_argb, src_stride_argb, dst_argb, dst_stride_argb, + width, height); case kRotate180: - ARGBRotate180(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, - height); - return 0; + return ARGBRotate180(src_argb, src_stride_argb, dst_argb, dst_stride_argb, + width, height); default: break; } diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc index 04e19e29e..fd359d4ae 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc @@ -31,75 +31,75 @@ void TransposeWx8_SSSE3(const uint8_t* src, // Read in the data from the source pointer. // First round of bit swap. 
LABELALIGN - "1: \n" - "movq (%0),%%xmm0 \n" - "movq (%0,%3),%%xmm1 \n" - "lea (%0,%3,2),%0 \n" - "punpcklbw %%xmm1,%%xmm0 \n" - "movq (%0),%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "palignr $0x8,%%xmm1,%%xmm1 \n" - "movq (%0,%3),%%xmm3 \n" - "lea (%0,%3,2),%0 \n" - "punpcklbw %%xmm3,%%xmm2 \n" - "movdqa %%xmm2,%%xmm3 \n" - "movq (%0),%%xmm4 \n" - "palignr $0x8,%%xmm3,%%xmm3 \n" - "movq (%0,%3),%%xmm5 \n" - "lea (%0,%3,2),%0 \n" - "punpcklbw %%xmm5,%%xmm4 \n" - "movdqa %%xmm4,%%xmm5 \n" - "movq (%0),%%xmm6 \n" - "palignr $0x8,%%xmm5,%%xmm5 \n" - "movq (%0,%3),%%xmm7 \n" - "lea (%0,%3,2),%0 \n" - "punpcklbw %%xmm7,%%xmm6 \n" - "neg %3 \n" - "movdqa %%xmm6,%%xmm7 \n" - "lea 0x8(%0,%3,8),%0 \n" - "palignr $0x8,%%xmm7,%%xmm7 \n" - "neg %3 \n" + "1: \n" + "movq (%0),%%xmm0 \n" + "movq (%0,%3),%%xmm1 \n" + "lea (%0,%3,2),%0 \n" + "punpcklbw %%xmm1,%%xmm0 \n" + "movq (%0),%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "palignr $0x8,%%xmm1,%%xmm1 \n" + "movq (%0,%3),%%xmm3 \n" + "lea (%0,%3,2),%0 \n" + "punpcklbw %%xmm3,%%xmm2 \n" + "movdqa %%xmm2,%%xmm3 \n" + "movq (%0),%%xmm4 \n" + "palignr $0x8,%%xmm3,%%xmm3 \n" + "movq (%0,%3),%%xmm5 \n" + "lea (%0,%3,2),%0 \n" + "punpcklbw %%xmm5,%%xmm4 \n" + "movdqa %%xmm4,%%xmm5 \n" + "movq (%0),%%xmm6 \n" + "palignr $0x8,%%xmm5,%%xmm5 \n" + "movq (%0,%3),%%xmm7 \n" + "lea (%0,%3,2),%0 \n" + "punpcklbw %%xmm7,%%xmm6 \n" + "neg %3 \n" + "movdqa %%xmm6,%%xmm7 \n" + "lea 0x8(%0,%3,8),%0 \n" + "palignr $0x8,%%xmm7,%%xmm7 \n" + "neg %3 \n" // Second round of bit swap. 
- "punpcklwd %%xmm2,%%xmm0 \n" - "punpcklwd %%xmm3,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "movdqa %%xmm1,%%xmm3 \n" - "palignr $0x8,%%xmm2,%%xmm2 \n" - "palignr $0x8,%%xmm3,%%xmm3 \n" - "punpcklwd %%xmm6,%%xmm4 \n" - "punpcklwd %%xmm7,%%xmm5 \n" - "movdqa %%xmm4,%%xmm6 \n" - "movdqa %%xmm5,%%xmm7 \n" - "palignr $0x8,%%xmm6,%%xmm6 \n" - "palignr $0x8,%%xmm7,%%xmm7 \n" + "punpcklwd %%xmm2,%%xmm0 \n" + "punpcklwd %%xmm3,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + "palignr $0x8,%%xmm2,%%xmm2 \n" + "palignr $0x8,%%xmm3,%%xmm3 \n" + "punpcklwd %%xmm6,%%xmm4 \n" + "punpcklwd %%xmm7,%%xmm5 \n" + "movdqa %%xmm4,%%xmm6 \n" + "movdqa %%xmm5,%%xmm7 \n" + "palignr $0x8,%%xmm6,%%xmm6 \n" + "palignr $0x8,%%xmm7,%%xmm7 \n" // Third round of bit swap. // Write to the destination pointer. - "punpckldq %%xmm4,%%xmm0 \n" - "movq %%xmm0,(%1) \n" - "movdqa %%xmm0,%%xmm4 \n" - "palignr $0x8,%%xmm4,%%xmm4 \n" - "movq %%xmm4,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "punpckldq %%xmm6,%%xmm2 \n" - "movdqa %%xmm2,%%xmm6 \n" - "movq %%xmm2,(%1) \n" - "palignr $0x8,%%xmm6,%%xmm6 \n" - "punpckldq %%xmm5,%%xmm1 \n" - "movq %%xmm6,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "movdqa %%xmm1,%%xmm5 \n" - "movq %%xmm1,(%1) \n" - "palignr $0x8,%%xmm5,%%xmm5 \n" - "movq %%xmm5,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "punpckldq %%xmm7,%%xmm3 \n" - "movq %%xmm3,(%1) \n" - "movdqa %%xmm3,%%xmm7 \n" - "palignr $0x8,%%xmm7,%%xmm7 \n" - "sub $0x8,%2 \n" - "movq %%xmm7,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "jg 1b \n" + "punpckldq %%xmm4,%%xmm0 \n" + "movq %%xmm0,(%1) \n" + "movdqa %%xmm0,%%xmm4 \n" + "palignr $0x8,%%xmm4,%%xmm4 \n" + "movq %%xmm4,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "punpckldq %%xmm6,%%xmm2 \n" + "movdqa %%xmm2,%%xmm6 \n" + "movq %%xmm2,(%1) \n" + "palignr $0x8,%%xmm6,%%xmm6 \n" + "punpckldq %%xmm5,%%xmm1 \n" + "movq %%xmm6,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "movdqa %%xmm1,%%xmm5 \n" + "movq %%xmm1,(%1) \n" + "palignr $0x8,%%xmm5,%%xmm5 \n" + "movq %%xmm5,(%1,%4) \n" + "lea 
(%1,%4,2),%1 \n" + "punpckldq %%xmm7,%%xmm3 \n" + "movq %%xmm3,(%1) \n" + "movdqa %%xmm3,%%xmm7 \n" + "palignr $0x8,%%xmm7,%%xmm7 \n" + "sub $0x8,%2 \n" + "movq %%xmm7,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -121,127 +121,127 @@ void TransposeWx8_Fast_SSSE3(const uint8_t* src, // Read in the data from the source pointer. // First round of bit swap. LABELALIGN - "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu (%0,%3),%%xmm1 \n" - "lea (%0,%3,2),%0 \n" - "movdqa %%xmm0,%%xmm8 \n" - "punpcklbw %%xmm1,%%xmm0 \n" - "punpckhbw %%xmm1,%%xmm8 \n" - "movdqu (%0),%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm8,%%xmm9 \n" - "palignr $0x8,%%xmm1,%%xmm1 \n" - "palignr $0x8,%%xmm9,%%xmm9 \n" - "movdqu (%0,%3),%%xmm3 \n" - "lea (%0,%3,2),%0 \n" - "movdqa %%xmm2,%%xmm10 \n" - "punpcklbw %%xmm3,%%xmm2 \n" - "punpckhbw %%xmm3,%%xmm10 \n" - "movdqa %%xmm2,%%xmm3 \n" - "movdqa %%xmm10,%%xmm11 \n" - "movdqu (%0),%%xmm4 \n" - "palignr $0x8,%%xmm3,%%xmm3 \n" - "palignr $0x8,%%xmm11,%%xmm11 \n" - "movdqu (%0,%3),%%xmm5 \n" - "lea (%0,%3,2),%0 \n" - "movdqa %%xmm4,%%xmm12 \n" - "punpcklbw %%xmm5,%%xmm4 \n" - "punpckhbw %%xmm5,%%xmm12 \n" - "movdqa %%xmm4,%%xmm5 \n" - "movdqa %%xmm12,%%xmm13 \n" - "movdqu (%0),%%xmm6 \n" - "palignr $0x8,%%xmm5,%%xmm5 \n" - "palignr $0x8,%%xmm13,%%xmm13 \n" - "movdqu (%0,%3),%%xmm7 \n" - "lea (%0,%3,2),%0 \n" - "movdqa %%xmm6,%%xmm14 \n" - "punpcklbw %%xmm7,%%xmm6 \n" - "punpckhbw %%xmm7,%%xmm14 \n" - "neg %3 \n" - "movdqa %%xmm6,%%xmm7 \n" - "movdqa %%xmm14,%%xmm15 \n" - "lea 0x10(%0,%3,8),%0 \n" - "palignr $0x8,%%xmm7,%%xmm7 \n" - "palignr $0x8,%%xmm15,%%xmm15 \n" - "neg %3 \n" + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu (%0,%3),%%xmm1 \n" + "lea (%0,%3,2),%0 \n" + "movdqa %%xmm0,%%xmm8 \n" + "punpcklbw %%xmm1,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm8 \n" + "movdqu (%0),%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm8,%%xmm9 \n" + "palignr $0x8,%%xmm1,%%xmm1 \n" + "palignr 
$0x8,%%xmm9,%%xmm9 \n" + "movdqu (%0,%3),%%xmm3 \n" + "lea (%0,%3,2),%0 \n" + "movdqa %%xmm2,%%xmm10 \n" + "punpcklbw %%xmm3,%%xmm2 \n" + "punpckhbw %%xmm3,%%xmm10 \n" + "movdqa %%xmm2,%%xmm3 \n" + "movdqa %%xmm10,%%xmm11 \n" + "movdqu (%0),%%xmm4 \n" + "palignr $0x8,%%xmm3,%%xmm3 \n" + "palignr $0x8,%%xmm11,%%xmm11 \n" + "movdqu (%0,%3),%%xmm5 \n" + "lea (%0,%3,2),%0 \n" + "movdqa %%xmm4,%%xmm12 \n" + "punpcklbw %%xmm5,%%xmm4 \n" + "punpckhbw %%xmm5,%%xmm12 \n" + "movdqa %%xmm4,%%xmm5 \n" + "movdqa %%xmm12,%%xmm13 \n" + "movdqu (%0),%%xmm6 \n" + "palignr $0x8,%%xmm5,%%xmm5 \n" + "palignr $0x8,%%xmm13,%%xmm13 \n" + "movdqu (%0,%3),%%xmm7 \n" + "lea (%0,%3,2),%0 \n" + "movdqa %%xmm6,%%xmm14 \n" + "punpcklbw %%xmm7,%%xmm6 \n" + "punpckhbw %%xmm7,%%xmm14 \n" + "neg %3 \n" + "movdqa %%xmm6,%%xmm7 \n" + "movdqa %%xmm14,%%xmm15 \n" + "lea 0x10(%0,%3,8),%0 \n" + "palignr $0x8,%%xmm7,%%xmm7 \n" + "palignr $0x8,%%xmm15,%%xmm15 \n" + "neg %3 \n" // Second round of bit swap. - "punpcklwd %%xmm2,%%xmm0 \n" - "punpcklwd %%xmm3,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "movdqa %%xmm1,%%xmm3 \n" - "palignr $0x8,%%xmm2,%%xmm2 \n" - "palignr $0x8,%%xmm3,%%xmm3 \n" - "punpcklwd %%xmm6,%%xmm4 \n" - "punpcklwd %%xmm7,%%xmm5 \n" - "movdqa %%xmm4,%%xmm6 \n" - "movdqa %%xmm5,%%xmm7 \n" - "palignr $0x8,%%xmm6,%%xmm6 \n" - "palignr $0x8,%%xmm7,%%xmm7 \n" - "punpcklwd %%xmm10,%%xmm8 \n" - "punpcklwd %%xmm11,%%xmm9 \n" - "movdqa %%xmm8,%%xmm10 \n" - "movdqa %%xmm9,%%xmm11 \n" - "palignr $0x8,%%xmm10,%%xmm10 \n" - "palignr $0x8,%%xmm11,%%xmm11 \n" - "punpcklwd %%xmm14,%%xmm12 \n" - "punpcklwd %%xmm15,%%xmm13 \n" - "movdqa %%xmm12,%%xmm14 \n" - "movdqa %%xmm13,%%xmm15 \n" - "palignr $0x8,%%xmm14,%%xmm14 \n" - "palignr $0x8,%%xmm15,%%xmm15 \n" + "punpcklwd %%xmm2,%%xmm0 \n" + "punpcklwd %%xmm3,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + "palignr $0x8,%%xmm2,%%xmm2 \n" + "palignr $0x8,%%xmm3,%%xmm3 \n" + "punpcklwd %%xmm6,%%xmm4 \n" + "punpcklwd %%xmm7,%%xmm5 \n" + 
"movdqa %%xmm4,%%xmm6 \n" + "movdqa %%xmm5,%%xmm7 \n" + "palignr $0x8,%%xmm6,%%xmm6 \n" + "palignr $0x8,%%xmm7,%%xmm7 \n" + "punpcklwd %%xmm10,%%xmm8 \n" + "punpcklwd %%xmm11,%%xmm9 \n" + "movdqa %%xmm8,%%xmm10 \n" + "movdqa %%xmm9,%%xmm11 \n" + "palignr $0x8,%%xmm10,%%xmm10 \n" + "palignr $0x8,%%xmm11,%%xmm11 \n" + "punpcklwd %%xmm14,%%xmm12 \n" + "punpcklwd %%xmm15,%%xmm13 \n" + "movdqa %%xmm12,%%xmm14 \n" + "movdqa %%xmm13,%%xmm15 \n" + "palignr $0x8,%%xmm14,%%xmm14 \n" + "palignr $0x8,%%xmm15,%%xmm15 \n" // Third round of bit swap. // Write to the destination pointer. - "punpckldq %%xmm4,%%xmm0 \n" - "movq %%xmm0,(%1) \n" - "movdqa %%xmm0,%%xmm4 \n" - "palignr $0x8,%%xmm4,%%xmm4 \n" - "movq %%xmm4,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "punpckldq %%xmm6,%%xmm2 \n" - "movdqa %%xmm2,%%xmm6 \n" - "movq %%xmm2,(%1) \n" - "palignr $0x8,%%xmm6,%%xmm6 \n" - "punpckldq %%xmm5,%%xmm1 \n" - "movq %%xmm6,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "movdqa %%xmm1,%%xmm5 \n" - "movq %%xmm1,(%1) \n" - "palignr $0x8,%%xmm5,%%xmm5 \n" - "movq %%xmm5,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "punpckldq %%xmm7,%%xmm3 \n" - "movq %%xmm3,(%1) \n" - "movdqa %%xmm3,%%xmm7 \n" - "palignr $0x8,%%xmm7,%%xmm7 \n" - "movq %%xmm7,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "punpckldq %%xmm12,%%xmm8 \n" - "movq %%xmm8,(%1) \n" - "movdqa %%xmm8,%%xmm12 \n" - "palignr $0x8,%%xmm12,%%xmm12 \n" - "movq %%xmm12,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "punpckldq %%xmm14,%%xmm10 \n" - "movdqa %%xmm10,%%xmm14 \n" - "movq %%xmm10,(%1) \n" - "palignr $0x8,%%xmm14,%%xmm14 \n" - "punpckldq %%xmm13,%%xmm9 \n" - "movq %%xmm14,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "movdqa %%xmm9,%%xmm13 \n" - "movq %%xmm9,(%1) \n" - "palignr $0x8,%%xmm13,%%xmm13 \n" - "movq %%xmm13,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "punpckldq %%xmm15,%%xmm11 \n" - "movq %%xmm11,(%1) \n" - "movdqa %%xmm11,%%xmm15 \n" - "palignr $0x8,%%xmm15,%%xmm15 \n" - "sub $0x10,%2 \n" - "movq %%xmm15,(%1,%4) \n" - "lea (%1,%4,2),%1 \n" - "jg 1b \n" + "punpckldq 
%%xmm4,%%xmm0 \n" + "movq %%xmm0,(%1) \n" + "movdqa %%xmm0,%%xmm4 \n" + "palignr $0x8,%%xmm4,%%xmm4 \n" + "movq %%xmm4,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "punpckldq %%xmm6,%%xmm2 \n" + "movdqa %%xmm2,%%xmm6 \n" + "movq %%xmm2,(%1) \n" + "palignr $0x8,%%xmm6,%%xmm6 \n" + "punpckldq %%xmm5,%%xmm1 \n" + "movq %%xmm6,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "movdqa %%xmm1,%%xmm5 \n" + "movq %%xmm1,(%1) \n" + "palignr $0x8,%%xmm5,%%xmm5 \n" + "movq %%xmm5,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "punpckldq %%xmm7,%%xmm3 \n" + "movq %%xmm3,(%1) \n" + "movdqa %%xmm3,%%xmm7 \n" + "palignr $0x8,%%xmm7,%%xmm7 \n" + "movq %%xmm7,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "punpckldq %%xmm12,%%xmm8 \n" + "movq %%xmm8,(%1) \n" + "movdqa %%xmm8,%%xmm12 \n" + "palignr $0x8,%%xmm12,%%xmm12 \n" + "movq %%xmm12,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "punpckldq %%xmm14,%%xmm10 \n" + "movdqa %%xmm10,%%xmm14 \n" + "movq %%xmm10,(%1) \n" + "palignr $0x8,%%xmm14,%%xmm14 \n" + "punpckldq %%xmm13,%%xmm9 \n" + "movq %%xmm14,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "movdqa %%xmm9,%%xmm13 \n" + "movq %%xmm9,(%1) \n" + "palignr $0x8,%%xmm13,%%xmm13 \n" + "movq %%xmm13,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "punpckldq %%xmm15,%%xmm11 \n" + "movq %%xmm11,(%1) \n" + "movdqa %%xmm11,%%xmm15 \n" + "palignr $0x8,%%xmm15,%%xmm15 \n" + "sub $0x10,%2 \n" + "movq %%xmm15,(%1,%4) \n" + "lea (%1,%4,2),%1 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -266,95 +266,95 @@ void TransposeUVWx8_SSE2(const uint8_t* src, // Read in the data from the source pointer. // First round of bit swap. 
LABELALIGN - "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu (%0,%4),%%xmm1 \n" - "lea (%0,%4,2),%0 \n" - "movdqa %%xmm0,%%xmm8 \n" - "punpcklbw %%xmm1,%%xmm0 \n" - "punpckhbw %%xmm1,%%xmm8 \n" - "movdqa %%xmm8,%%xmm1 \n" - "movdqu (%0),%%xmm2 \n" - "movdqu (%0,%4),%%xmm3 \n" - "lea (%0,%4,2),%0 \n" - "movdqa %%xmm2,%%xmm8 \n" - "punpcklbw %%xmm3,%%xmm2 \n" - "punpckhbw %%xmm3,%%xmm8 \n" - "movdqa %%xmm8,%%xmm3 \n" - "movdqu (%0),%%xmm4 \n" - "movdqu (%0,%4),%%xmm5 \n" - "lea (%0,%4,2),%0 \n" - "movdqa %%xmm4,%%xmm8 \n" - "punpcklbw %%xmm5,%%xmm4 \n" - "punpckhbw %%xmm5,%%xmm8 \n" - "movdqa %%xmm8,%%xmm5 \n" - "movdqu (%0),%%xmm6 \n" - "movdqu (%0,%4),%%xmm7 \n" - "lea (%0,%4,2),%0 \n" - "movdqa %%xmm6,%%xmm8 \n" - "punpcklbw %%xmm7,%%xmm6 \n" - "neg %4 \n" - "lea 0x10(%0,%4,8),%0 \n" - "punpckhbw %%xmm7,%%xmm8 \n" - "movdqa %%xmm8,%%xmm7 \n" - "neg %4 \n" + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu (%0,%4),%%xmm1 \n" + "lea (%0,%4,2),%0 \n" + "movdqa %%xmm0,%%xmm8 \n" + "punpcklbw %%xmm1,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm8 \n" + "movdqa %%xmm8,%%xmm1 \n" + "movdqu (%0),%%xmm2 \n" + "movdqu (%0,%4),%%xmm3 \n" + "lea (%0,%4,2),%0 \n" + "movdqa %%xmm2,%%xmm8 \n" + "punpcklbw %%xmm3,%%xmm2 \n" + "punpckhbw %%xmm3,%%xmm8 \n" + "movdqa %%xmm8,%%xmm3 \n" + "movdqu (%0),%%xmm4 \n" + "movdqu (%0,%4),%%xmm5 \n" + "lea (%0,%4,2),%0 \n" + "movdqa %%xmm4,%%xmm8 \n" + "punpcklbw %%xmm5,%%xmm4 \n" + "punpckhbw %%xmm5,%%xmm8 \n" + "movdqa %%xmm8,%%xmm5 \n" + "movdqu (%0),%%xmm6 \n" + "movdqu (%0,%4),%%xmm7 \n" + "lea (%0,%4,2),%0 \n" + "movdqa %%xmm6,%%xmm8 \n" + "punpcklbw %%xmm7,%%xmm6 \n" + "neg %4 \n" + "lea 0x10(%0,%4,8),%0 \n" + "punpckhbw %%xmm7,%%xmm8 \n" + "movdqa %%xmm8,%%xmm7 \n" + "neg %4 \n" // Second round of bit swap. 
- "movdqa %%xmm0,%%xmm8 \n" - "movdqa %%xmm1,%%xmm9 \n" - "punpckhwd %%xmm2,%%xmm8 \n" - "punpckhwd %%xmm3,%%xmm9 \n" - "punpcklwd %%xmm2,%%xmm0 \n" - "punpcklwd %%xmm3,%%xmm1 \n" - "movdqa %%xmm8,%%xmm2 \n" - "movdqa %%xmm9,%%xmm3 \n" - "movdqa %%xmm4,%%xmm8 \n" - "movdqa %%xmm5,%%xmm9 \n" - "punpckhwd %%xmm6,%%xmm8 \n" - "punpckhwd %%xmm7,%%xmm9 \n" - "punpcklwd %%xmm6,%%xmm4 \n" - "punpcklwd %%xmm7,%%xmm5 \n" - "movdqa %%xmm8,%%xmm6 \n" - "movdqa %%xmm9,%%xmm7 \n" + "movdqa %%xmm0,%%xmm8 \n" + "movdqa %%xmm1,%%xmm9 \n" + "punpckhwd %%xmm2,%%xmm8 \n" + "punpckhwd %%xmm3,%%xmm9 \n" + "punpcklwd %%xmm2,%%xmm0 \n" + "punpcklwd %%xmm3,%%xmm1 \n" + "movdqa %%xmm8,%%xmm2 \n" + "movdqa %%xmm9,%%xmm3 \n" + "movdqa %%xmm4,%%xmm8 \n" + "movdqa %%xmm5,%%xmm9 \n" + "punpckhwd %%xmm6,%%xmm8 \n" + "punpckhwd %%xmm7,%%xmm9 \n" + "punpcklwd %%xmm6,%%xmm4 \n" + "punpcklwd %%xmm7,%%xmm5 \n" + "movdqa %%xmm8,%%xmm6 \n" + "movdqa %%xmm9,%%xmm7 \n" // Third round of bit swap. // Write to the destination pointer. 
- "movdqa %%xmm0,%%xmm8 \n" - "punpckldq %%xmm4,%%xmm0 \n" - "movlpd %%xmm0,(%1) \n" // Write back U channel - "movhpd %%xmm0,(%2) \n" // Write back V channel - "punpckhdq %%xmm4,%%xmm8 \n" - "movlpd %%xmm8,(%1,%5) \n" - "lea (%1,%5,2),%1 \n" - "movhpd %%xmm8,(%2,%6) \n" - "lea (%2,%6,2),%2 \n" - "movdqa %%xmm2,%%xmm8 \n" - "punpckldq %%xmm6,%%xmm2 \n" - "movlpd %%xmm2,(%1) \n" - "movhpd %%xmm2,(%2) \n" - "punpckhdq %%xmm6,%%xmm8 \n" - "movlpd %%xmm8,(%1,%5) \n" - "lea (%1,%5,2),%1 \n" - "movhpd %%xmm8,(%2,%6) \n" - "lea (%2,%6,2),%2 \n" - "movdqa %%xmm1,%%xmm8 \n" - "punpckldq %%xmm5,%%xmm1 \n" - "movlpd %%xmm1,(%1) \n" - "movhpd %%xmm1,(%2) \n" - "punpckhdq %%xmm5,%%xmm8 \n" - "movlpd %%xmm8,(%1,%5) \n" - "lea (%1,%5,2),%1 \n" - "movhpd %%xmm8,(%2,%6) \n" - "lea (%2,%6,2),%2 \n" - "movdqa %%xmm3,%%xmm8 \n" - "punpckldq %%xmm7,%%xmm3 \n" - "movlpd %%xmm3,(%1) \n" - "movhpd %%xmm3,(%2) \n" - "punpckhdq %%xmm7,%%xmm8 \n" - "sub $0x8,%3 \n" - "movlpd %%xmm8,(%1,%5) \n" - "lea (%1,%5,2),%1 \n" - "movhpd %%xmm8,(%2,%6) \n" - "lea (%2,%6,2),%2 \n" - "jg 1b \n" + "movdqa %%xmm0,%%xmm8 \n" + "punpckldq %%xmm4,%%xmm0 \n" + "movlpd %%xmm0,(%1) \n" // Write back U channel + "movhpd %%xmm0,(%2) \n" // Write back V channel + "punpckhdq %%xmm4,%%xmm8 \n" + "movlpd %%xmm8,(%1,%5) \n" + "lea (%1,%5,2),%1 \n" + "movhpd %%xmm8,(%2,%6) \n" + "lea (%2,%6,2),%2 \n" + "movdqa %%xmm2,%%xmm8 \n" + "punpckldq %%xmm6,%%xmm2 \n" + "movlpd %%xmm2,(%1) \n" + "movhpd %%xmm2,(%2) \n" + "punpckhdq %%xmm6,%%xmm8 \n" + "movlpd %%xmm8,(%1,%5) \n" + "lea (%1,%5,2),%1 \n" + "movhpd %%xmm8,(%2,%6) \n" + "lea (%2,%6,2),%2 \n" + "movdqa %%xmm1,%%xmm8 \n" + "punpckldq %%xmm5,%%xmm1 \n" + "movlpd %%xmm1,(%1) \n" + "movhpd %%xmm1,(%2) \n" + "punpckhdq %%xmm5,%%xmm8 \n" + "movlpd %%xmm8,(%1,%5) \n" + "lea (%1,%5,2),%1 \n" + "movhpd %%xmm8,(%2,%6) \n" + "lea (%2,%6,2),%2 \n" + "movdqa %%xmm3,%%xmm8 \n" + "punpckldq %%xmm7,%%xmm3 \n" + "movlpd %%xmm3,(%1) \n" + "movhpd %%xmm3,(%2) \n" + "punpckhdq 
%%xmm7,%%xmm8 \n" + "sub $0x8,%3 \n" + "movlpd %%xmm8,(%1,%5) \n" + "lea (%1,%5,2),%1 \n" + "movhpd %%xmm8,(%2,%6) \n" + "lea (%2,%6,2),%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst_a), // %1 "+r"(dst_b), // %2 diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_neon.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_neon.cc index fdc0dd476..844df2bf3 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_neon.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_neon.cc @@ -38,52 +38,52 @@ void TransposeWx8_NEON(const uint8_t* src, // handle 8x8 blocks. this should be the majority of the plane "1: \n" - "mov %0, %1 \n" + "mov %0, %1 \n" - "vld1.8 {d0}, [%0], %2 \n" - "vld1.8 {d1}, [%0], %2 \n" - "vld1.8 {d2}, [%0], %2 \n" - "vld1.8 {d3}, [%0], %2 \n" - "vld1.8 {d4}, [%0], %2 \n" - "vld1.8 {d5}, [%0], %2 \n" - "vld1.8 {d6}, [%0], %2 \n" - "vld1.8 {d7}, [%0] \n" + "vld1.8 {d0}, [%0], %2 \n" + "vld1.8 {d1}, [%0], %2 \n" + "vld1.8 {d2}, [%0], %2 \n" + "vld1.8 {d3}, [%0], %2 \n" + "vld1.8 {d4}, [%0], %2 \n" + "vld1.8 {d5}, [%0], %2 \n" + "vld1.8 {d6}, [%0], %2 \n" + "vld1.8 {d7}, [%0] \n" - "vtrn.8 d1, d0 \n" - "vtrn.8 d3, d2 \n" - "vtrn.8 d5, d4 \n" - "vtrn.8 d7, d6 \n" + "vtrn.8 d1, d0 \n" + "vtrn.8 d3, d2 \n" + "vtrn.8 d5, d4 \n" + "vtrn.8 d7, d6 \n" - "vtrn.16 d1, d3 \n" - "vtrn.16 d0, d2 \n" - "vtrn.16 d5, d7 \n" - "vtrn.16 d4, d6 \n" + "vtrn.16 d1, d3 \n" + "vtrn.16 d0, d2 \n" + "vtrn.16 d5, d7 \n" + "vtrn.16 d4, d6 \n" - "vtrn.32 d1, d5 \n" - "vtrn.32 d0, d4 \n" - "vtrn.32 d3, d7 \n" - "vtrn.32 d2, d6 \n" + "vtrn.32 d1, d5 \n" + "vtrn.32 d0, d4 \n" + "vtrn.32 d3, d7 \n" + "vtrn.32 d2, d6 \n" - "vrev16.8 q0, q0 \n" - "vrev16.8 q1, q1 \n" - "vrev16.8 q2, q2 \n" - "vrev16.8 q3, q3 \n" + "vrev16.8 q0, q0 \n" + "vrev16.8 q1, q1 \n" + "vrev16.8 q2, q2 \n" + "vrev16.8 q3, q3 \n" - "mov %0, %3 \n" + "mov %0, %3 \n" - "vst1.8 {d1}, [%0], %4 \n" - "vst1.8 {d0}, [%0], %4 \n" - "vst1.8 {d3}, [%0], %4 \n" - "vst1.8 {d2}, [%0], %4 \n" - 
"vst1.8 {d5}, [%0], %4 \n" - "vst1.8 {d4}, [%0], %4 \n" - "vst1.8 {d7}, [%0], %4 \n" - "vst1.8 {d6}, [%0] \n" + "vst1.8 {d1}, [%0], %4 \n" + "vst1.8 {d0}, [%0], %4 \n" + "vst1.8 {d3}, [%0], %4 \n" + "vst1.8 {d2}, [%0], %4 \n" + "vst1.8 {d5}, [%0], %4 \n" + "vst1.8 {d4}, [%0], %4 \n" + "vst1.8 {d7}, [%0], %4 \n" + "vst1.8 {d6}, [%0] \n" - "add %1, #8 \n" // src += 8 - "add %3, %3, %4, lsl #3 \n" // dst += 8 * dst_stride - "subs %5, #8 \n" // w -= 8 - "bge 1b \n" + "add %1, #8 \n" // src += 8 + "add %3, %3, %4, lsl #3 \n" // dst += 8 * dst_stride + "subs %5, #8 \n" // w -= 8 + "bge 1b \n" // add 8 back to counter. if the result is 0 there are // no residuals. @@ -208,68 +208,70 @@ void TransposeUVWx8_NEON(const uint8_t* src, // handle 8x8 blocks. this should be the majority of the plane "1: \n" - "mov %0, %1 \n" + "mov %0, %1 \n" - "vld2.8 {d0, d1}, [%0], %2 \n" - "vld2.8 {d2, d3}, [%0], %2 \n" - "vld2.8 {d4, d5}, [%0], %2 \n" - "vld2.8 {d6, d7}, [%0], %2 \n" - "vld2.8 {d16, d17}, [%0], %2 \n" - "vld2.8 {d18, d19}, [%0], %2 \n" - "vld2.8 {d20, d21}, [%0], %2 \n" - "vld2.8 {d22, d23}, [%0] \n" + "vld2.8 {d0, d1}, [%0], %2 \n" + "vld2.8 {d2, d3}, [%0], %2 \n" + "vld2.8 {d4, d5}, [%0], %2 \n" + "vld2.8 {d6, d7}, [%0], %2 \n" + "vld2.8 {d16, d17}, [%0], %2 \n" + "vld2.8 {d18, d19}, [%0], %2 \n" + "vld2.8 {d20, d21}, [%0], %2 \n" + "vld2.8 {d22, d23}, [%0] \n" - "vtrn.8 q1, q0 \n" - "vtrn.8 q3, q2 \n" - "vtrn.8 q9, q8 \n" - "vtrn.8 q11, q10 \n" + "vtrn.8 q1, q0 \n" + "vtrn.8 q3, q2 \n" + "vtrn.8 q9, q8 \n" + "vtrn.8 q11, q10 \n" - "vtrn.16 q1, q3 \n" - "vtrn.16 q0, q2 \n" - "vtrn.16 q9, q11 \n" - "vtrn.16 q8, q10 \n" + "vtrn.16 q1, q3 \n" + "vtrn.16 q0, q2 \n" + "vtrn.16 q9, q11 \n" + "vtrn.16 q8, q10 \n" - "vtrn.32 q1, q9 \n" - "vtrn.32 q0, q8 \n" - "vtrn.32 q3, q11 \n" - "vtrn.32 q2, q10 \n" + "vtrn.32 q1, q9 \n" + "vtrn.32 q0, q8 \n" + "vtrn.32 q3, q11 \n" + "vtrn.32 q2, q10 \n" - "vrev16.8 q0, q0 \n" - "vrev16.8 q1, q1 \n" - "vrev16.8 q2, q2 \n" - "vrev16.8 q3, q3 \n" 
- "vrev16.8 q8, q8 \n" - "vrev16.8 q9, q9 \n" - "vrev16.8 q10, q10 \n" - "vrev16.8 q11, q11 \n" + "vrev16.8 q0, q0 \n" + "vrev16.8 q1, q1 \n" + "vrev16.8 q2, q2 \n" + "vrev16.8 q3, q3 \n" + "vrev16.8 q8, q8 \n" + "vrev16.8 q9, q9 \n" + "vrev16.8 q10, q10 \n" + "vrev16.8 q11, q11 \n" - "mov %0, %3 \n" + "mov %0, %3 \n" - "vst1.8 {d2}, [%0], %4 \n" - "vst1.8 {d0}, [%0], %4 \n" - "vst1.8 {d6}, [%0], %4 \n" - "vst1.8 {d4}, [%0], %4 \n" - "vst1.8 {d18}, [%0], %4 \n" - "vst1.8 {d16}, [%0], %4 \n" - "vst1.8 {d22}, [%0], %4 \n" - "vst1.8 {d20}, [%0] \n" + "vst1.8 {d2}, [%0], %4 \n" + "vst1.8 {d0}, [%0], %4 \n" + "vst1.8 {d6}, [%0], %4 \n" + "vst1.8 {d4}, [%0], %4 \n" + "vst1.8 {d18}, [%0], %4 \n" + "vst1.8 {d16}, [%0], %4 \n" + "vst1.8 {d22}, [%0], %4 \n" + "vst1.8 {d20}, [%0] \n" - "mov %0, %5 \n" + "mov %0, %5 \n" - "vst1.8 {d3}, [%0], %6 \n" - "vst1.8 {d1}, [%0], %6 \n" - "vst1.8 {d7}, [%0], %6 \n" - "vst1.8 {d5}, [%0], %6 \n" - "vst1.8 {d19}, [%0], %6 \n" - "vst1.8 {d17}, [%0], %6 \n" - "vst1.8 {d23}, [%0], %6 \n" - "vst1.8 {d21}, [%0] \n" + "vst1.8 {d3}, [%0], %6 \n" + "vst1.8 {d1}, [%0], %6 \n" + "vst1.8 {d7}, [%0], %6 \n" + "vst1.8 {d5}, [%0], %6 \n" + "vst1.8 {d19}, [%0], %6 \n" + "vst1.8 {d17}, [%0], %6 \n" + "vst1.8 {d23}, [%0], %6 \n" + "vst1.8 {d21}, [%0] \n" - "add %1, #8*2 \n" // src += 8*2 - "add %3, %3, %4, lsl #3 \n" // dst_a += 8 * dst_stride_a - "add %5, %5, %6, lsl #3 \n" // dst_b += 8 * dst_stride_b - "subs %7, #8 \n" // w -= 8 - "bge 1b \n" + "add %1, #8*2 \n" // src += 8*2 + "add %3, %3, %4, lsl #3 \n" // dst_a += 8 * + // dst_stride_a + "add %5, %5, %6, lsl #3 \n" // dst_b += 8 * + // dst_stride_b + "subs %7, #8 \n" // w -= 8 + "bge 1b \n" // add 8 back to counter. if the result is 0 there are // no residuals. 
diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc index f469baacf..43c158173 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc @@ -34,58 +34,74 @@ void TransposeWx8_NEON(const uint8_t* src, // loops are on blocks of 8. loop will stop when // counter gets to or below 0. starting the counter // at w-8 allow for this - "sub %w3, %w3, #8 \n" + "sub %w3, %w3, #8 \n" // handle 8x8 blocks. this should be the majority of the plane - "1: \n" + "1: \n" "mov %0, %1 \n" - "ld1 {v0.8b}, [%0], %5 \n" - "ld1 {v1.8b}, [%0], %5 \n" - "ld1 {v2.8b}, [%0], %5 \n" - "ld1 {v3.8b}, [%0], %5 \n" - "ld1 {v4.8b}, [%0], %5 \n" - "ld1 {v5.8b}, [%0], %5 \n" - "ld1 {v6.8b}, [%0], %5 \n" - "ld1 {v7.8b}, [%0] \n" + "ld1 {v0.8b}, [%0], %5 \n" + "ld1 {v1.8b}, [%0], %5 \n" + "ld1 {v2.8b}, [%0], %5 \n" + "ld1 {v3.8b}, [%0], %5 \n" + "ld1 {v4.8b}, [%0], %5 \n" + "ld1 {v5.8b}, [%0], %5 \n" + "ld1 {v6.8b}, [%0], %5 \n" + "ld1 {v7.8b}, [%0] \n" + "mov %0, %1 \n" - "trn2 v16.8b, v0.8b, v1.8b \n" - "trn1 v17.8b, v0.8b, v1.8b \n" - "trn2 v18.8b, v2.8b, v3.8b \n" - "trn1 v19.8b, v2.8b, v3.8b \n" - "trn2 v20.8b, v4.8b, v5.8b \n" - "trn1 v21.8b, v4.8b, v5.8b \n" - "trn2 v22.8b, v6.8b, v7.8b \n" - "trn1 v23.8b, v6.8b, v7.8b \n" + "trn2 v16.8b, v0.8b, v1.8b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "trn1 v17.8b, v0.8b, v1.8b \n" + "add %0, %0, %5 \n" + "trn2 v18.8b, v2.8b, v3.8b \n" + "prfm pldl1keep, [%0, 448] \n" // row 1 + "trn1 v19.8b, v2.8b, v3.8b \n" + "add %0, %0, %5 \n" + "trn2 v20.8b, v4.8b, v5.8b \n" + "prfm pldl1keep, [%0, 448] \n" // row 2 + "trn1 v21.8b, v4.8b, v5.8b \n" + "add %0, %0, %5 \n" + "trn2 v22.8b, v6.8b, v7.8b \n" + "prfm pldl1keep, [%0, 448] \n" // row 3 + "trn1 v23.8b, v6.8b, v7.8b \n" + "add %0, %0, %5 \n" - "trn2 v3.4h, v17.4h, v19.4h \n" - "trn1 v1.4h, v17.4h, v19.4h \n" - "trn2 v2.4h, 
v16.4h, v18.4h \n" - "trn1 v0.4h, v16.4h, v18.4h \n" - "trn2 v7.4h, v21.4h, v23.4h \n" - "trn1 v5.4h, v21.4h, v23.4h \n" - "trn2 v6.4h, v20.4h, v22.4h \n" - "trn1 v4.4h, v20.4h, v22.4h \n" + "trn2 v3.4h, v17.4h, v19.4h \n" + "prfm pldl1keep, [%0, 448] \n" // row 4 + "trn1 v1.4h, v17.4h, v19.4h \n" + "add %0, %0, %5 \n" + "trn2 v2.4h, v16.4h, v18.4h \n" + "prfm pldl1keep, [%0, 448] \n" // row 5 + "trn1 v0.4h, v16.4h, v18.4h \n" + "add %0, %0, %5 \n" + "trn2 v7.4h, v21.4h, v23.4h \n" + "prfm pldl1keep, [%0, 448] \n" // row 6 + "trn1 v5.4h, v21.4h, v23.4h \n" + "add %0, %0, %5 \n" + "trn2 v6.4h, v20.4h, v22.4h \n" + "prfm pldl1keep, [%0, 448] \n" // row 7 + "trn1 v4.4h, v20.4h, v22.4h \n" - "trn2 v21.2s, v1.2s, v5.2s \n" - "trn1 v17.2s, v1.2s, v5.2s \n" - "trn2 v20.2s, v0.2s, v4.2s \n" - "trn1 v16.2s, v0.2s, v4.2s \n" - "trn2 v23.2s, v3.2s, v7.2s \n" - "trn1 v19.2s, v3.2s, v7.2s \n" - "trn2 v22.2s, v2.2s, v6.2s \n" - "trn1 v18.2s, v2.2s, v6.2s \n" + "trn2 v21.2s, v1.2s, v5.2s \n" + "trn1 v17.2s, v1.2s, v5.2s \n" + "trn2 v20.2s, v0.2s, v4.2s \n" + "trn1 v16.2s, v0.2s, v4.2s \n" + "trn2 v23.2s, v3.2s, v7.2s \n" + "trn1 v19.2s, v3.2s, v7.2s \n" + "trn2 v22.2s, v2.2s, v6.2s \n" + "trn1 v18.2s, v2.2s, v6.2s \n" "mov %0, %2 \n" - "st1 {v17.8b}, [%0], %6 \n" - "st1 {v16.8b}, [%0], %6 \n" - "st1 {v19.8b}, [%0], %6 \n" - "st1 {v18.8b}, [%0], %6 \n" - "st1 {v21.8b}, [%0], %6 \n" - "st1 {v20.8b}, [%0], %6 \n" - "st1 {v23.8b}, [%0], %6 \n" - "st1 {v22.8b}, [%0] \n" + "st1 {v17.8b}, [%0], %6 \n" + "st1 {v16.8b}, [%0], %6 \n" + "st1 {v19.8b}, [%0], %6 \n" + "st1 {v18.8b}, [%0], %6 \n" + "st1 {v21.8b}, [%0], %6 \n" + "st1 {v20.8b}, [%0], %6 \n" + "st1 {v23.8b}, [%0], %6 \n" + "st1 {v22.8b}, [%0] \n" "add %1, %1, #8 \n" // src += 8 "add %2, %2, %6, lsl #3 \n" // dst += 8 * dst_stride @@ -94,33 +110,33 @@ void TransposeWx8_NEON(const uint8_t* src, // add 8 back to counter. if the result is 0 there are // no residuals. 
- "adds %w3, %w3, #8 \n" - "b.eq 4f \n" + "adds %w3, %w3, #8 \n" + "b.eq 4f \n" // some residual, so between 1 and 7 lines left to transpose - "cmp %w3, #2 \n" - "b.lt 3f \n" + "cmp %w3, #2 \n" + "b.lt 3f \n" - "cmp %w3, #4 \n" - "b.lt 2f \n" + "cmp %w3, #4 \n" + "b.lt 2f \n" // 4x8 block - "mov %0, %1 \n" - "ld1 {v0.s}[0], [%0], %5 \n" - "ld1 {v0.s}[1], [%0], %5 \n" - "ld1 {v0.s}[2], [%0], %5 \n" - "ld1 {v0.s}[3], [%0], %5 \n" - "ld1 {v1.s}[0], [%0], %5 \n" - "ld1 {v1.s}[1], [%0], %5 \n" - "ld1 {v1.s}[2], [%0], %5 \n" - "ld1 {v1.s}[3], [%0] \n" + "mov %0, %1 \n" + "ld1 {v0.s}[0], [%0], %5 \n" + "ld1 {v0.s}[1], [%0], %5 \n" + "ld1 {v0.s}[2], [%0], %5 \n" + "ld1 {v0.s}[3], [%0], %5 \n" + "ld1 {v1.s}[0], [%0], %5 \n" + "ld1 {v1.s}[1], [%0], %5 \n" + "ld1 {v1.s}[2], [%0], %5 \n" + "ld1 {v1.s}[3], [%0] \n" - "mov %0, %2 \n" + "mov %0, %2 \n" - "ld1 {v2.16b}, [%4] \n" + "ld1 {v2.16b}, [%4] \n" - "tbl v3.16b, {v0.16b}, v2.16b \n" - "tbl v0.16b, {v1.16b}, v2.16b \n" + "tbl v3.16b, {v0.16b}, v2.16b \n" + "tbl v0.16b, {v1.16b}, v2.16b \n" // TODO(frkoenig): Rework shuffle above to // write out with 4 instead of 8 writes. @@ -212,89 +228,90 @@ void TransposeUVWx8_NEON(const uint8_t* src, // loops are on blocks of 8. loop will stop when // counter gets to or below 0. starting the counter // at w-8 allow for this - "sub %w4, %w4, #8 \n" + "sub %w4, %w4, #8 \n" // handle 8x8 blocks. 
this should be the majority of the plane "1: \n" - "mov %0, %1 \n" + "mov %0, %1 \n" - "ld1 {v0.16b}, [%0], %5 \n" - "ld1 {v1.16b}, [%0], %5 \n" - "ld1 {v2.16b}, [%0], %5 \n" - "ld1 {v3.16b}, [%0], %5 \n" - "ld1 {v4.16b}, [%0], %5 \n" - "ld1 {v5.16b}, [%0], %5 \n" - "ld1 {v6.16b}, [%0], %5 \n" - "ld1 {v7.16b}, [%0] \n" + "ld1 {v0.16b}, [%0], %5 \n" + "ld1 {v1.16b}, [%0], %5 \n" + "ld1 {v2.16b}, [%0], %5 \n" + "ld1 {v3.16b}, [%0], %5 \n" + "ld1 {v4.16b}, [%0], %5 \n" + "ld1 {v5.16b}, [%0], %5 \n" + "ld1 {v6.16b}, [%0], %5 \n" + "ld1 {v7.16b}, [%0] \n" + "mov %0, %1 \n" - "trn1 v16.16b, v0.16b, v1.16b \n" - "trn2 v17.16b, v0.16b, v1.16b \n" - "trn1 v18.16b, v2.16b, v3.16b \n" - "trn2 v19.16b, v2.16b, v3.16b \n" - "trn1 v20.16b, v4.16b, v5.16b \n" - "trn2 v21.16b, v4.16b, v5.16b \n" - "trn1 v22.16b, v6.16b, v7.16b \n" - "trn2 v23.16b, v6.16b, v7.16b \n" + "trn1 v16.16b, v0.16b, v1.16b \n" + "trn2 v17.16b, v0.16b, v1.16b \n" + "trn1 v18.16b, v2.16b, v3.16b \n" + "trn2 v19.16b, v2.16b, v3.16b \n" + "trn1 v20.16b, v4.16b, v5.16b \n" + "trn2 v21.16b, v4.16b, v5.16b \n" + "trn1 v22.16b, v6.16b, v7.16b \n" + "trn2 v23.16b, v6.16b, v7.16b \n" - "trn1 v0.8h, v16.8h, v18.8h \n" - "trn2 v1.8h, v16.8h, v18.8h \n" - "trn1 v2.8h, v20.8h, v22.8h \n" - "trn2 v3.8h, v20.8h, v22.8h \n" - "trn1 v4.8h, v17.8h, v19.8h \n" - "trn2 v5.8h, v17.8h, v19.8h \n" - "trn1 v6.8h, v21.8h, v23.8h \n" - "trn2 v7.8h, v21.8h, v23.8h \n" + "trn1 v0.8h, v16.8h, v18.8h \n" + "trn2 v1.8h, v16.8h, v18.8h \n" + "trn1 v2.8h, v20.8h, v22.8h \n" + "trn2 v3.8h, v20.8h, v22.8h \n" + "trn1 v4.8h, v17.8h, v19.8h \n" + "trn2 v5.8h, v17.8h, v19.8h \n" + "trn1 v6.8h, v21.8h, v23.8h \n" + "trn2 v7.8h, v21.8h, v23.8h \n" - "trn1 v16.4s, v0.4s, v2.4s \n" - "trn2 v17.4s, v0.4s, v2.4s \n" - "trn1 v18.4s, v1.4s, v3.4s \n" - "trn2 v19.4s, v1.4s, v3.4s \n" - "trn1 v20.4s, v4.4s, v6.4s \n" - "trn2 v21.4s, v4.4s, v6.4s \n" - "trn1 v22.4s, v5.4s, v7.4s \n" - "trn2 v23.4s, v5.4s, v7.4s \n" + "trn1 v16.4s, v0.4s, v2.4s \n" + "trn2 
v17.4s, v0.4s, v2.4s \n" + "trn1 v18.4s, v1.4s, v3.4s \n" + "trn2 v19.4s, v1.4s, v3.4s \n" + "trn1 v20.4s, v4.4s, v6.4s \n" + "trn2 v21.4s, v4.4s, v6.4s \n" + "trn1 v22.4s, v5.4s, v7.4s \n" + "trn2 v23.4s, v5.4s, v7.4s \n" - "mov %0, %2 \n" + "mov %0, %2 \n" - "st1 {v16.d}[0], [%0], %6 \n" - "st1 {v18.d}[0], [%0], %6 \n" - "st1 {v17.d}[0], [%0], %6 \n" - "st1 {v19.d}[0], [%0], %6 \n" - "st1 {v16.d}[1], [%0], %6 \n" - "st1 {v18.d}[1], [%0], %6 \n" - "st1 {v17.d}[1], [%0], %6 \n" - "st1 {v19.d}[1], [%0] \n" + "st1 {v16.d}[0], [%0], %6 \n" + "st1 {v18.d}[0], [%0], %6 \n" + "st1 {v17.d}[0], [%0], %6 \n" + "st1 {v19.d}[0], [%0], %6 \n" + "st1 {v16.d}[1], [%0], %6 \n" + "st1 {v18.d}[1], [%0], %6 \n" + "st1 {v17.d}[1], [%0], %6 \n" + "st1 {v19.d}[1], [%0] \n" - "mov %0, %3 \n" + "mov %0, %3 \n" - "st1 {v20.d}[0], [%0], %7 \n" - "st1 {v22.d}[0], [%0], %7 \n" - "st1 {v21.d}[0], [%0], %7 \n" - "st1 {v23.d}[0], [%0], %7 \n" - "st1 {v20.d}[1], [%0], %7 \n" - "st1 {v22.d}[1], [%0], %7 \n" - "st1 {v21.d}[1], [%0], %7 \n" - "st1 {v23.d}[1], [%0] \n" + "st1 {v20.d}[0], [%0], %7 \n" + "st1 {v22.d}[0], [%0], %7 \n" + "st1 {v21.d}[0], [%0], %7 \n" + "st1 {v23.d}[0], [%0], %7 \n" + "st1 {v20.d}[1], [%0], %7 \n" + "st1 {v22.d}[1], [%0], %7 \n" + "st1 {v21.d}[1], [%0], %7 \n" + "st1 {v23.d}[1], [%0] \n" - "add %1, %1, #16 \n" // src += 8*2 - "add %2, %2, %6, lsl #3 \n" // dst_a += 8 * + "add %1, %1, #16 \n" // src += 8*2 + "add %2, %2, %6, lsl #3 \n" // dst_a += 8 * // dst_stride_a - "add %3, %3, %7, lsl #3 \n" // dst_b += 8 * + "add %3, %3, %7, lsl #3 \n" // dst_b += 8 * // dst_stride_b - "subs %w4, %w4, #8 \n" // w -= 8 - "b.ge 1b \n" + "subs %w4, %w4, #8 \n" // w -= 8 + "b.ge 1b \n" // add 8 back to counter. if the result is 0 there are // no residuals. 
- "adds %w4, %w4, #8 \n" - "b.eq 4f \n" + "adds %w4, %w4, #8 \n" + "b.eq 4f \n" // some residual, so between 1 and 7 lines left to transpose - "cmp %w4, #2 \n" - "b.lt 3f \n" + "cmp %w4, #2 \n" + "b.lt 3f \n" - "cmp %w4, #4 \n" - "b.lt 2f \n" + "cmp %w4, #4 \n" + "b.lt 2f \n" // TODO(frkoenig): Clean this up // 4x8 block diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_any.cc b/TMessagesProj/jni/third_party/libyuv/source/row_any.cc index 9b29b2bfb..7216373bc 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_any.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_any.cc @@ -546,12 +546,6 @@ ANY11(J400ToARGBRow_Any_SSE2, J400ToARGBRow_SSE2, 0, 1, 4, 7) #if defined(HAS_J400TOARGBROW_AVX2) ANY11(J400ToARGBRow_Any_AVX2, J400ToARGBRow_AVX2, 0, 1, 4, 15) #endif -#if defined(HAS_I400TOARGBROW_SSE2) -ANY11(I400ToARGBRow_Any_SSE2, I400ToARGBRow_SSE2, 0, 1, 4, 7) -#endif -#if defined(HAS_I400TOARGBROW_AVX2) -ANY11(I400ToARGBRow_Any_AVX2, I400ToARGBRow_AVX2, 0, 1, 4, 15) -#endif #if defined(HAS_RGB24TOARGBROW_SSSE3) ANY11(RGB24ToARGBRow_Any_SSSE3, RGB24ToARGBRow_SSSE3, 0, 3, 4, 15) ANY11(RAWToARGBRow_Any_SSSE3, RAWToARGBRow_SSSE3, 0, 3, 4, 15) @@ -581,7 +575,6 @@ ANY11(ARGBToRGB565Row_Any_NEON, ARGBToRGB565Row_NEON, 0, 4, 2, 7) ANY11(ARGBToARGB1555Row_Any_NEON, ARGBToARGB1555Row_NEON, 0, 4, 2, 7) ANY11(ARGBToARGB4444Row_Any_NEON, ARGBToARGB4444Row_NEON, 0, 4, 2, 7) ANY11(J400ToARGBRow_Any_NEON, J400ToARGBRow_NEON, 0, 1, 4, 7) -ANY11(I400ToARGBRow_Any_NEON, I400ToARGBRow_NEON, 0, 1, 4, 7) #endif #if defined(HAS_ARGBTORGB24ROW_MSA) ANY11(ARGBToRGB24Row_Any_MSA, ARGBToRGB24Row_MSA, 0, 4, 3, 15) @@ -590,7 +583,6 @@ ANY11(ARGBToRGB565Row_Any_MSA, ARGBToRGB565Row_MSA, 0, 4, 2, 7) ANY11(ARGBToARGB1555Row_Any_MSA, ARGBToARGB1555Row_MSA, 0, 4, 2, 7) ANY11(ARGBToARGB4444Row_Any_MSA, ARGBToARGB4444Row_MSA, 0, 4, 2, 7) ANY11(J400ToARGBRow_Any_MSA, J400ToARGBRow_MSA, 0, 1, 4, 15) -ANY11(I400ToARGBRow_Any_MSA, I400ToARGBRow_MSA, 0, 1, 4, 15) #endif #if 
defined(HAS_ARGBTORGB24ROW_MMI) ANY11(ARGBToRGB24Row_Any_MMI, ARGBToRGB24Row_MMI, 0, 4, 3, 3) @@ -599,7 +591,6 @@ ANY11(ARGBToRGB565Row_Any_MMI, ARGBToRGB565Row_MMI, 0, 4, 2, 3) ANY11(ARGBToARGB1555Row_Any_MMI, ARGBToARGB1555Row_MMI, 0, 4, 2, 3) ANY11(ARGBToARGB4444Row_Any_MMI, ARGBToARGB4444Row_MMI, 0, 4, 2, 3) ANY11(J400ToARGBRow_Any_MMI, J400ToARGBRow_MMI, 0, 1, 4, 3) -ANY11(I400ToARGBRow_Any_MMI, I400ToARGBRow_MMI, 0, 1, 4, 7) #endif #if defined(HAS_RAWTORGB24ROW_NEON) ANY11(RAWToRGB24Row_Any_NEON, RAWToRGB24Row_NEON, 0, 3, 3, 7) @@ -695,6 +686,15 @@ ANY11(RGBAToYRow_Any_MMI, RGBAToYRow_MMI, 0, 4, 1, 7) #ifdef HAS_RGB24TOYROW_NEON ANY11(RGB24ToYRow_Any_NEON, RGB24ToYRow_NEON, 0, 3, 1, 7) #endif +#ifdef HAS_RGB24TOYJROW_AVX2 +ANY11(RGB24ToYJRow_Any_AVX2, RGB24ToYJRow_AVX2, 0, 3, 1, 31) +#endif +#ifdef HAS_RGB24TOYJROW_SSSE3 +ANY11(RGB24ToYJRow_Any_SSSE3, RGB24ToYJRow_SSSE3, 0, 3, 1, 15) +#endif +#ifdef HAS_RGB24TOYJROW_NEON +ANY11(RGB24ToYJRow_Any_NEON, RGB24ToYJRow_NEON, 0, 3, 1, 7) +#endif #ifdef HAS_RGB24TOYROW_MSA ANY11(RGB24ToYRow_Any_MSA, RGB24ToYRow_MSA, 0, 3, 1, 15) #endif @@ -704,6 +704,15 @@ ANY11(RGB24ToYRow_Any_MMI, RGB24ToYRow_MMI, 0, 3, 1, 7) #ifdef HAS_RAWTOYROW_NEON ANY11(RAWToYRow_Any_NEON, RAWToYRow_NEON, 0, 3, 1, 7) #endif +#ifdef HAS_RAWTOYJROW_AVX2 +ANY11(RAWToYJRow_Any_AVX2, RAWToYJRow_AVX2, 0, 3, 1, 31) +#endif +#ifdef HAS_RAWTOYJROW_SSSE3 +ANY11(RAWToYJRow_Any_SSSE3, RAWToYJRow_SSSE3, 0, 3, 1, 15) +#endif +#ifdef HAS_RAWTOYJROW_NEON +ANY11(RAWToYJRow_Any_NEON, RAWToYJRow_NEON, 0, 3, 1, 7) +#endif #ifdef HAS_RAWTOYROW_MSA ANY11(RAWToYRow_Any_MSA, RAWToYRow_MSA, 0, 3, 1, 15) #endif @@ -901,6 +910,47 @@ ANY11B(ARGBCopyYToAlphaRow_Any_MMI, ARGBCopyYToAlphaRow_MMI, 0, 1, 4, 7) memcpy(dst_ptr + n * BPP, temp + 64, r * BPP); \ } +#if defined(HAS_I400TOARGBROW_SSE2) +ANY11P(I400ToARGBRow_Any_SSE2, + I400ToARGBRow_SSE2, + const struct YuvConstants*, + 1, + 4, + 7) +#endif +#if defined(HAS_I400TOARGBROW_AVX2) +ANY11P(I400ToARGBRow_Any_AVX2, + 
I400ToARGBRow_AVX2, + const struct YuvConstants*, + 1, + 4, + 15) +#endif +#if defined(HAS_I400TOARGBROW_NEON) +ANY11P(I400ToARGBRow_Any_NEON, + I400ToARGBRow_NEON, + const struct YuvConstants*, + 1, + 4, + 7) +#endif +#if defined(HAS_I400TOARGBROW_MSA) +ANY11P(I400ToARGBRow_Any_MSA, + I400ToARGBRow_MSA, + const struct YuvConstants*, + 1, + 4, + 15) +#endif +#if defined(HAS_I400TOARGBROW_MMI) +ANY11P(I400ToARGBRow_Any_MMI, + I400ToARGBRow_MMI, + const struct YuvConstants*, + 1, + 4, + 7) +#endif + #if defined(HAS_ARGBTORGB565DITHERROW_SSE2) ANY11P(ARGBToRGB565DitherRow_Any_SSE2, ARGBToRGB565DitherRow_SSE2, @@ -1156,7 +1206,7 @@ ANY11M(MirrorRow_Any_AVX2, MirrorRow_AVX2, 1, 31) ANY11M(MirrorRow_Any_SSSE3, MirrorRow_SSSE3, 1, 15) #endif #ifdef HAS_MIRRORROW_NEON -ANY11M(MirrorRow_Any_NEON, MirrorRow_NEON, 1, 15) +ANY11M(MirrorRow_Any_NEON, MirrorRow_NEON, 1, 31) #endif #ifdef HAS_MIRRORROW_MSA ANY11M(MirrorRow_Any_MSA, MirrorRow_MSA, 1, 63) @@ -1164,6 +1214,18 @@ ANY11M(MirrorRow_Any_MSA, MirrorRow_MSA, 1, 63) #ifdef HAS_MIRRORROW_MMI ANY11M(MirrorRow_Any_MMI, MirrorRow_MMI, 1, 7) #endif +#ifdef HAS_MIRRORUVROW_AVX2 +ANY11M(MirrorUVRow_Any_AVX2, MirrorUVRow_AVX2, 2, 15) +#endif +#ifdef HAS_MIRRORUVROW_SSSE3 +ANY11M(MirrorUVRow_Any_SSSE3, MirrorUVRow_SSSE3, 2, 7) +#endif +#ifdef HAS_MIRRORUVROW_NEON +ANY11M(MirrorUVRow_Any_NEON, MirrorUVRow_NEON, 2, 31) +#endif +#ifdef HAS_MIRRORUVROW_MSA +ANY11M(MirrorUVRow_Any_MSA, MirrorUVRow_MSA, 2, 7) +#endif #ifdef HAS_ARGBMIRRORROW_AVX2 ANY11M(ARGBMirrorRow_Any_AVX2, ARGBMirrorRow_AVX2, 4, 7) #endif @@ -1171,7 +1233,7 @@ ANY11M(ARGBMirrorRow_Any_AVX2, ARGBMirrorRow_AVX2, 4, 7) ANY11M(ARGBMirrorRow_Any_SSE2, ARGBMirrorRow_SSE2, 4, 3) #endif #ifdef HAS_ARGBMIRRORROW_NEON -ANY11M(ARGBMirrorRow_Any_NEON, ARGBMirrorRow_NEON, 4, 3) +ANY11M(ARGBMirrorRow_Any_NEON, ARGBMirrorRow_NEON, 4, 7) #endif #ifdef HAS_ARGBMIRRORROW_MSA ANY11M(ARGBMirrorRow_Any_MSA, ARGBMirrorRow_MSA, 4, 15) @@ -1179,12 +1241,19 @@ ANY11M(ARGBMirrorRow_Any_MSA, 
ARGBMirrorRow_MSA, 4, 15) #ifdef HAS_ARGBMIRRORROW_MMI ANY11M(ARGBMirrorRow_Any_MMI, ARGBMirrorRow_MMI, 4, 1) #endif +#ifdef HAS_RGB24MIRRORROW_SSSE3 +ANY11M(RGB24MirrorRow_Any_SSSE3, RGB24MirrorRow_SSSE3, 3, 15) +#endif +#ifdef HAS_RGB24MIRRORROW_NEON +ANY11M(RGB24MirrorRow_Any_NEON, RGB24MirrorRow_NEON, 3, 15) +#endif #undef ANY11M // Any 1 plane. (memset) #define ANY1(NAMEANY, ANY_SIMD, T, BPP, MASK) \ void NAMEANY(uint8_t* dst_ptr, T v32, int width) { \ SIMD_ALIGNED(uint8_t temp[64]); \ + memset(temp, 0, 64); /* for msan */ \ int r = width & MASK; \ int n = width & ~MASK; \ if (n > 0) { \ @@ -1371,7 +1440,7 @@ ANY12S(ARGBToUVJRow_Any_MMI, ARGBToUVJRow_MMI, 0, 4, 15) ANY12S(BGRAToUVRow_Any_NEON, BGRAToUVRow_NEON, 0, 4, 15) #endif #ifdef HAS_BGRATOUVROW_MSA -ANY12S(BGRAToUVRow_Any_MSA, BGRAToUVRow_MSA, 0, 4, 31) +ANY12S(BGRAToUVRow_Any_MSA, BGRAToUVRow_MSA, 0, 4, 15) #endif #ifdef HAS_BGRATOUVROW_MMI ANY12S(BGRAToUVRow_Any_MMI, BGRAToUVRow_MMI, 0, 4, 15) @@ -1380,7 +1449,7 @@ ANY12S(BGRAToUVRow_Any_MMI, BGRAToUVRow_MMI, 0, 4, 15) ANY12S(ABGRToUVRow_Any_NEON, ABGRToUVRow_NEON, 0, 4, 15) #endif #ifdef HAS_ABGRTOUVROW_MSA -ANY12S(ABGRToUVRow_Any_MSA, ABGRToUVRow_MSA, 0, 4, 31) +ANY12S(ABGRToUVRow_Any_MSA, ABGRToUVRow_MSA, 0, 4, 15) #endif #ifdef HAS_ABGRTOUVROW_MMI ANY12S(ABGRToUVRow_Any_MMI, ABGRToUVRow_MMI, 0, 4, 15) @@ -1389,7 +1458,7 @@ ANY12S(ABGRToUVRow_Any_MMI, ABGRToUVRow_MMI, 0, 4, 15) ANY12S(RGBAToUVRow_Any_NEON, RGBAToUVRow_NEON, 0, 4, 15) #endif #ifdef HAS_RGBATOUVROW_MSA -ANY12S(RGBAToUVRow_Any_MSA, RGBAToUVRow_MSA, 0, 4, 31) +ANY12S(RGBAToUVRow_Any_MSA, RGBAToUVRow_MSA, 0, 4, 15) #endif #ifdef HAS_RGBATOUVROW_MMI ANY12S(RGBAToUVRow_Any_MMI, RGBAToUVRow_MMI, 0, 4, 15) diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_common.cc b/TMessagesProj/jni/third_party/libyuv/source/row_common.cc index 70aa2e13c..79aed5c78 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_common.cc +++ 
b/TMessagesProj/jni/third_party/libyuv/source/row_common.cc @@ -14,6 +14,7 @@ #include // For memcpy and memset. #include "libyuv/basic_types.h" +#include "libyuv/convert_argb.h" // For kYuvI601Constants #ifdef __cplusplus namespace libyuv { @@ -26,10 +27,11 @@ extern "C" { (defined(_M_IX86) || (defined(_M_X64) && !defined(__clang__))) #define LIBYUV_RGB7 1 #endif -// mips use 7 bit RGBToY -#if (!defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A)) || \ - (!defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa)) -#define LIBYUV_RGB7 1 + +#if defined(__x86_64__) || defined(_M_X64) || defined(__i386__) || \ + defined(_M_IX86) +#define LIBYUV_ARGBTOUV_PAVGB 1 +#define LIBYUV_RGBTOU_TRUNCATE 1 #endif // llvm x86 is poor at ternary operator, so use branchless min/max. @@ -37,19 +39,19 @@ extern "C" { #define USE_BRANCHLESS 1 #if USE_BRANCHLESS static __inline int32_t clamp0(int32_t v) { - return ((-(v) >> 31) & (v)); + return -(v >= 0) & v; } - +// TODO(fbarchard): make clamp255 preserve negative values. 
static __inline int32_t clamp255(int32_t v) { - return (((255 - (v)) >> 31) | (v)) & 255; + return (-(v >= 255) | v) & 255; } static __inline int32_t clamp1023(int32_t v) { - return (((1023 - (v)) >> 31) | (v)) & 1023; + return (-(v >= 1023) | v) & 1023; } static __inline uint32_t Abs(int32_t v) { - int m = v >> 31; + int m = -(v < 0); return (v + m) ^ m; } #else // USE_BRANCHLESS @@ -208,7 +210,8 @@ void ARGB4444ToARGBRow_C(const uint8_t* src_argb4444, void AR30ToARGBRow_C(const uint8_t* src_ar30, uint8_t* dst_argb, int width) { int x; for (x = 0; x < width; ++x) { - uint32_t ar30 = *(const uint32_t*)src_ar30; + uint32_t ar30; + memcpy(&ar30, src_ar30, sizeof ar30); uint32_t b = (ar30 >> 2) & 0xff; uint32_t g = (ar30 >> 12) & 0xff; uint32_t r = (ar30 >> 22) & 0xff; @@ -222,7 +225,8 @@ void AR30ToARGBRow_C(const uint8_t* src_ar30, uint8_t* dst_argb, int width) { void AR30ToABGRRow_C(const uint8_t* src_ar30, uint8_t* dst_abgr, int width) { int x; for (x = 0; x < width; ++x) { - uint32_t ar30 = *(const uint32_t*)src_ar30; + uint32_t ar30; + memcpy(&ar30, src_ar30, sizeof ar30); uint32_t b = (ar30 >> 2) & 0xff; uint32_t g = (ar30 >> 12) & 0xff; uint32_t r = (ar30 >> 22) & 0xff; @@ -236,7 +240,8 @@ void AR30ToABGRRow_C(const uint8_t* src_ar30, uint8_t* dst_abgr, int width) { void AR30ToAB30Row_C(const uint8_t* src_ar30, uint8_t* dst_ab30, int width) { int x; for (x = 0; x < width; ++x) { - uint32_t ar30 = *(const uint32_t*)src_ar30; + uint32_t ar30; + memcpy(&ar30, src_ar30, sizeof ar30); uint32_t b = ar30 & 0x3ff; uint32_t ga = ar30 & 0xc00ffc00; uint32_t r = (ar30 >> 20) & 0x3ff; @@ -425,14 +430,38 @@ static __inline int RGBToY(uint8_t r, uint8_t g, uint8_t b) { } #endif +#define AVGB(a, b) (((a) + (b) + 1) >> 1) + +#ifdef LIBYUV_RGBTOU_TRUNCATE +static __inline int RGBToU(uint8_t r, uint8_t g, uint8_t b) { + return (112 * b - 74 * g - 38 * r + 0x8000) >> 8; +} +static __inline int RGBToV(uint8_t r, uint8_t g, uint8_t b) { + return (112 * r - 94 * g - 18 * b + 
0x8000) >> 8; +} +#else +// TODO(fbarchard): Add rounding to SIMD and use this static __inline int RGBToU(uint8_t r, uint8_t g, uint8_t b) { return (112 * b - 74 * g - 38 * r + 0x8080) >> 8; } static __inline int RGBToV(uint8_t r, uint8_t g, uint8_t b) { return (112 * r - 94 * g - 18 * b + 0x8080) >> 8; } +#endif + +#if !defined(LIBYUV_ARGBTOUV_PAVGB) +static __inline int RGB2xToU(uint16_t r, uint16_t g, uint16_t b) { + return ((112 / 2) * b - (74 / 2) * g - (38 / 2) * r + 0x8080) >> 8; +} +static __inline int RGB2xToV(uint16_t r, uint16_t g, uint16_t b) { + return ((112 / 2) * r - (94 / 2) * g - (18 / 2) * b + 0x8080) >> 8; +} +#endif // ARGBToY_C and ARGBToUV_C +// Intel version mimic SSE/AVX which does 2 pavgb +#if LIBYUV_ARGBTOUV_PAVGB + #define MAKEROWY(NAME, R, G, B, BPP) \ void NAME##ToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ int x; \ @@ -447,15 +476,12 @@ static __inline int RGBToV(uint8_t r, uint8_t g, uint8_t b) { const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \ int x; \ for (x = 0; x < width - 1; x += 2) { \ - uint8_t ab = (src_rgb0[B] + src_rgb0[B + BPP] + src_rgb1[B] + \ - src_rgb1[B + BPP]) >> \ - 2; \ - uint8_t ag = (src_rgb0[G] + src_rgb0[G + BPP] + src_rgb1[G] + \ - src_rgb1[G + BPP]) >> \ - 2; \ - uint8_t ar = (src_rgb0[R] + src_rgb0[R + BPP] + src_rgb1[R] + \ - src_rgb1[R + BPP]) >> \ - 2; \ + uint8_t ab = AVGB(AVGB(src_rgb0[B], src_rgb1[B]), \ + AVGB(src_rgb0[B + BPP], src_rgb1[B + BPP])); \ + uint8_t ag = AVGB(AVGB(src_rgb0[G], src_rgb1[G]), \ + AVGB(src_rgb0[G + BPP], src_rgb1[G + BPP])); \ + uint8_t ar = AVGB(AVGB(src_rgb0[R], src_rgb1[R]), \ + AVGB(src_rgb0[R + BPP], src_rgb1[R + BPP])); \ dst_u[0] = RGBToU(ar, ag, ab); \ dst_v[0] = RGBToV(ar, ag, ab); \ src_rgb0 += BPP * 2; \ @@ -464,13 +490,54 @@ static __inline int RGBToV(uint8_t r, uint8_t g, uint8_t b) { dst_v += 1; \ } \ if (width & 1) { \ - uint8_t ab = (src_rgb0[B] + src_rgb1[B]) >> 1; \ - uint8_t ag = (src_rgb0[G] + src_rgb1[G]) >> 1; \ - uint8_t ar = 
(src_rgb0[R] + src_rgb1[R]) >> 1; \ + uint8_t ab = AVGB(src_rgb0[B], src_rgb1[B]); \ + uint8_t ag = AVGB(src_rgb0[G], src_rgb1[G]); \ + uint8_t ar = AVGB(src_rgb0[R], src_rgb1[R]); \ dst_u[0] = RGBToU(ar, ag, ab); \ dst_v[0] = RGBToV(ar, ag, ab); \ } \ } +#else +// ARM version does sum / 2 then multiply by 2x smaller coefficients +#define MAKEROWY(NAME, R, G, B, BPP) \ + void NAME##ToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ + int x; \ + for (x = 0; x < width; ++x) { \ + dst_y[0] = RGBToY(src_argb0[R], src_argb0[G], src_argb0[B]); \ + src_argb0 += BPP; \ + dst_y += 1; \ + } \ + } \ + void NAME##ToUVRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \ + uint8_t* dst_u, uint8_t* dst_v, int width) { \ + const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \ + int x; \ + for (x = 0; x < width - 1; x += 2) { \ + uint16_t ab = (src_rgb0[B] + src_rgb0[B + BPP] + src_rgb1[B] + \ + src_rgb1[B + BPP] + 1) >> \ + 1; \ + uint16_t ag = (src_rgb0[G] + src_rgb0[G + BPP] + src_rgb1[G] + \ + src_rgb1[G + BPP] + 1) >> \ + 1; \ + uint16_t ar = (src_rgb0[R] + src_rgb0[R + BPP] + src_rgb1[R] + \ + src_rgb1[R + BPP] + 1) >> \ + 1; \ + dst_u[0] = RGB2xToU(ar, ag, ab); \ + dst_v[0] = RGB2xToV(ar, ag, ab); \ + src_rgb0 += BPP * 2; \ + src_rgb1 += BPP * 2; \ + dst_u += 1; \ + dst_v += 1; \ + } \ + if (width & 1) { \ + uint16_t ab = src_rgb0[B] + src_rgb1[B]; \ + uint16_t ag = src_rgb0[G] + src_rgb1[G]; \ + uint16_t ar = src_rgb0[R] + src_rgb1[R]; \ + dst_u[0] = RGB2xToU(ar, ag, ab); \ + dst_v[0] = RGB2xToV(ar, ag, ab); \ + } \ + } +#endif MAKEROWY(ARGB, 2, 1, 0, 4) MAKEROWY(BGRA, 1, 2, 3, 4) @@ -517,16 +584,25 @@ static __inline int RGBToYJ(uint8_t r, uint8_t g, uint8_t b) { } #endif +#if defined(LIBYUV_ARGBTOUV_PAVGB) static __inline int RGBToUJ(uint8_t r, uint8_t g, uint8_t b) { return (127 * b - 84 * g - 43 * r + 0x8080) >> 8; } static __inline int RGBToVJ(uint8_t r, uint8_t g, uint8_t b) { return (127 * r - 107 * g - 20 * b + 0x8080) >> 8; } - -#define AVGB(a, b) 
(((a) + (b) + 1) >> 1) +#else +static __inline int RGB2xToUJ(uint16_t r, uint16_t g, uint16_t b) { + return ((127 / 2) * b - (84 / 2) * g - (43 / 2) * r + 0x8080) >> 8; +} +static __inline int RGB2xToVJ(uint16_t r, uint16_t g, uint16_t b) { + return ((127 / 2) * r - (107 / 2) * g - (20 / 2) * b + 0x8080) >> 8; +} +#endif // ARGBToYJ_C and ARGBToUVJ_C +// Intel version mimic SSE/AVX which does 2 pavgb +#if LIBYUV_ARGBTOUV_PAVGB #define MAKEROWYJ(NAME, R, G, B, BPP) \ void NAME##ToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ int x; \ @@ -562,9 +638,53 @@ static __inline int RGBToVJ(uint8_t r, uint8_t g, uint8_t b) { dst_v[0] = RGBToVJ(ar, ag, ab); \ } \ } +#else +// ARM version does sum / 2 then multiply by 2x smaller coefficients +#define MAKEROWYJ(NAME, R, G, B, BPP) \ + void NAME##ToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ + int x; \ + for (x = 0; x < width; ++x) { \ + dst_y[0] = RGBToYJ(src_argb0[R], src_argb0[G], src_argb0[B]); \ + src_argb0 += BPP; \ + dst_y += 1; \ + } \ + } \ + void NAME##ToUVJRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \ + uint8_t* dst_u, uint8_t* dst_v, int width) { \ + const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \ + int x; \ + for (x = 0; x < width - 1; x += 2) { \ + uint16_t ab = (src_rgb0[B] + src_rgb0[B + BPP] + src_rgb1[B] + \ + src_rgb1[B + BPP] + 1) >> \ + 1; \ + uint16_t ag = (src_rgb0[G] + src_rgb0[G + BPP] + src_rgb1[G] + \ + src_rgb1[G + BPP] + 1) >> \ + 1; \ + uint16_t ar = (src_rgb0[R] + src_rgb0[R + BPP] + src_rgb1[R] + \ + src_rgb1[R + BPP] + 1) >> \ + 1; \ + dst_u[0] = RGB2xToUJ(ar, ag, ab); \ + dst_v[0] = RGB2xToVJ(ar, ag, ab); \ + src_rgb0 += BPP * 2; \ + src_rgb1 += BPP * 2; \ + dst_u += 1; \ + dst_v += 1; \ + } \ + if (width & 1) { \ + uint16_t ab = (src_rgb0[B] + src_rgb1[B]); \ + uint16_t ag = (src_rgb0[G] + src_rgb1[G]); \ + uint16_t ar = (src_rgb0[R] + src_rgb1[R]); \ + dst_u[0] = RGB2xToUJ(ar, ag, ab); \ + dst_v[0] = RGB2xToVJ(ar, ag, ab); \ + } \ + } + 
+#endif MAKEROWYJ(ARGB, 2, 1, 0, 4) MAKEROWYJ(RGBA, 3, 2, 1, 4) +MAKEROWYJ(RGB24, 2, 1, 0, 3) +MAKEROWYJ(RAW, 0, 1, 2, 3) #undef MAKEROWYJ void RGB565ToYRow_C(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { @@ -632,13 +752,34 @@ void RGB565ToUVRow_C(const uint8_t* src_rgb565, uint8_t b3 = next_rgb565[2] & 0x1f; uint8_t g3 = (next_rgb565[2] >> 5) | ((next_rgb565[3] & 0x07) << 3); uint8_t r3 = next_rgb565[3] >> 3; - uint8_t b = (b0 + b1 + b2 + b3); // 565 * 4 = 787. - uint8_t g = (g0 + g1 + g2 + g3); - uint8_t r = (r0 + r1 + r2 + r3); - b = (b << 1) | (b >> 6); // 787 -> 888. - r = (r << 1) | (r >> 6); - dst_u[0] = RGBToU(r, g, b); - dst_v[0] = RGBToV(r, g, b); + + b0 = (b0 << 3) | (b0 >> 2); + g0 = (g0 << 2) | (g0 >> 4); + r0 = (r0 << 3) | (r0 >> 2); + b1 = (b1 << 3) | (b1 >> 2); + g1 = (g1 << 2) | (g1 >> 4); + r1 = (r1 << 3) | (r1 >> 2); + b2 = (b2 << 3) | (b2 >> 2); + g2 = (g2 << 2) | (g2 >> 4); + r2 = (r2 << 3) | (r2 >> 2); + b3 = (b3 << 3) | (b3 >> 2); + g3 = (g3 << 2) | (g3 >> 4); + r3 = (r3 << 3) | (r3 >> 2); + +#if LIBYUV_ARGBTOUV_PAVGB + uint8_t ab = AVGB(AVGB(b0, b2), AVGB(b1, b3)); + uint8_t ag = AVGB(AVGB(g0, g2), AVGB(g1, g3)); + uint8_t ar = AVGB(AVGB(r0, r2), AVGB(r1, r3)); + dst_u[0] = RGBToU(ar, ag, ab); + dst_v[0] = RGBToV(ar, ag, ab); +#else + uint16_t b = (b0 + b1 + b2 + b3 + 1) >> 1; + uint16_t g = (g0 + g1 + g2 + g3 + 1) >> 1; + uint16_t r = (r0 + r1 + r2 + r3 + 1) >> 1; + dst_u[0] = RGB2xToU(r, g, b); + dst_v[0] = RGB2xToV(r, g, b); +#endif + src_rgb565 += 4; next_rgb565 += 4; dst_u += 1; @@ -651,14 +792,27 @@ void RGB565ToUVRow_C(const uint8_t* src_rgb565, uint8_t b2 = next_rgb565[0] & 0x1f; uint8_t g2 = (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3); uint8_t r2 = next_rgb565[1] >> 3; - uint8_t b = (b0 + b2); // 565 * 2 = 676. 
- uint8_t g = (g0 + g2); - uint8_t r = (r0 + r2); - b = (b << 2) | (b >> 4); // 676 -> 888 - g = (g << 1) | (g >> 6); - r = (r << 2) | (r >> 4); - dst_u[0] = RGBToU(r, g, b); - dst_v[0] = RGBToV(r, g, b); + + b0 = (b0 << 3) | (b0 >> 2); + g0 = (g0 << 2) | (g0 >> 4); + r0 = (r0 << 3) | (r0 >> 2); + b2 = (b2 << 3) | (b2 >> 2); + g2 = (g2 << 2) | (g2 >> 4); + r2 = (r2 << 3) | (r2 >> 2); + +#if LIBYUV_ARGBTOUV_PAVGB + uint8_t ab = AVGB(b0, b2); + uint8_t ag = AVGB(g0, g2); + uint8_t ar = AVGB(r0, r2); + dst_u[0] = RGBToU(ar, ag, ab); + dst_v[0] = RGBToV(ar, ag, ab); +#else + uint16_t b = b0 + b2; + uint16_t g = g0 + g2; + uint16_t r = r0 + r2; + dst_u[0] = RGB2xToU(r, g, b); + dst_v[0] = RGB2xToV(r, g, b); +#endif } } @@ -682,14 +836,34 @@ void ARGB1555ToUVRow_C(const uint8_t* src_argb1555, uint8_t b3 = next_argb1555[2] & 0x1f; uint8_t g3 = (next_argb1555[2] >> 5) | ((next_argb1555[3] & 0x03) << 3); uint8_t r3 = (next_argb1555[3] & 0x7c) >> 2; - uint8_t b = (b0 + b1 + b2 + b3); // 555 * 4 = 777. - uint8_t g = (g0 + g1 + g2 + g3); - uint8_t r = (r0 + r1 + r2 + r3); - b = (b << 1) | (b >> 6); // 777 -> 888. 
- g = (g << 1) | (g >> 6); - r = (r << 1) | (r >> 6); - dst_u[0] = RGBToU(r, g, b); - dst_v[0] = RGBToV(r, g, b); + + b0 = (b0 << 3) | (b0 >> 2); + g0 = (g0 << 3) | (g0 >> 2); + r0 = (r0 << 3) | (r0 >> 2); + b1 = (b1 << 3) | (b1 >> 2); + g1 = (g1 << 3) | (g1 >> 2); + r1 = (r1 << 3) | (r1 >> 2); + b2 = (b2 << 3) | (b2 >> 2); + g2 = (g2 << 3) | (g2 >> 2); + r2 = (r2 << 3) | (r2 >> 2); + b3 = (b3 << 3) | (b3 >> 2); + g3 = (g3 << 3) | (g3 >> 2); + r3 = (r3 << 3) | (r3 >> 2); + +#if LIBYUV_ARGBTOUV_PAVGB + uint8_t ab = AVGB(AVGB(b0, b2), AVGB(b1, b3)); + uint8_t ag = AVGB(AVGB(g0, g2), AVGB(g1, g3)); + uint8_t ar = AVGB(AVGB(r0, r2), AVGB(r1, r3)); + dst_u[0] = RGBToU(ar, ag, ab); + dst_v[0] = RGBToV(ar, ag, ab); +#else + uint16_t b = (b0 + b1 + b2 + b3 + 1) >> 1; + uint16_t g = (g0 + g1 + g2 + g3 + 1) >> 1; + uint16_t r = (r0 + r1 + r2 + r3 + 1) >> 1; + dst_u[0] = RGB2xToU(r, g, b); + dst_v[0] = RGB2xToV(r, g, b); +#endif + src_argb1555 += 4; next_argb1555 += 4; dst_u += 1; @@ -702,14 +876,27 @@ void ARGB1555ToUVRow_C(const uint8_t* src_argb1555, uint8_t b2 = next_argb1555[0] & 0x1f; uint8_t g2 = (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3); uint8_t r2 = next_argb1555[1] >> 3; - uint8_t b = (b0 + b2); // 555 * 2 = 666. - uint8_t g = (g0 + g2); - uint8_t r = (r0 + r2); - b = (b << 2) | (b >> 4); // 666 -> 888. 
- g = (g << 2) | (g >> 4); - r = (r << 2) | (r >> 4); - dst_u[0] = RGBToU(r, g, b); - dst_v[0] = RGBToV(r, g, b); + + b0 = (b0 << 3) | (b0 >> 2); + g0 = (g0 << 3) | (g0 >> 2); + r0 = (r0 << 3) | (r0 >> 2); + b2 = (b2 << 3) | (b2 >> 2); + g2 = (g2 << 3) | (g2 >> 2); + r2 = (r2 << 3) | (r2 >> 2); + +#if LIBYUV_ARGBTOUV_PAVGB + uint8_t ab = AVGB(b0, b2); + uint8_t ag = AVGB(g0, g2); + uint8_t ar = AVGB(r0, r2); + dst_u[0] = RGBToU(ar, ag, ab); + dst_v[0] = RGBToV(ar, ag, ab); +#else + uint16_t b = b0 + b2; + uint16_t g = g0 + g2; + uint16_t r = r0 + r2; + dst_u[0] = RGB2xToU(r, g, b); + dst_v[0] = RGB2xToV(r, g, b); +#endif } } @@ -733,14 +920,34 @@ void ARGB4444ToUVRow_C(const uint8_t* src_argb4444, uint8_t b3 = next_argb4444[2] & 0x0f; uint8_t g3 = next_argb4444[2] >> 4; uint8_t r3 = next_argb4444[3] & 0x0f; - uint8_t b = (b0 + b1 + b2 + b3); // 444 * 4 = 666. - uint8_t g = (g0 + g1 + g2 + g3); - uint8_t r = (r0 + r1 + r2 + r3); - b = (b << 2) | (b >> 4); // 666 -> 888. - g = (g << 2) | (g >> 4); - r = (r << 2) | (r >> 4); - dst_u[0] = RGBToU(r, g, b); - dst_v[0] = RGBToV(r, g, b); + + b0 = (b0 << 4) | b0; + g0 = (g0 << 4) | g0; + r0 = (r0 << 4) | r0; + b1 = (b1 << 4) | b1; + g1 = (g1 << 4) | g1; + r1 = (r1 << 4) | r1; + b2 = (b2 << 4) | b2; + g2 = (g2 << 4) | g2; + r2 = (r2 << 4) | r2; + b3 = (b3 << 4) | b3; + g3 = (g3 << 4) | g3; + r3 = (r3 << 4) | r3; + +#if LIBYUV_ARGBTOUV_PAVGB + uint8_t ab = AVGB(AVGB(b0, b2), AVGB(b1, b3)); + uint8_t ag = AVGB(AVGB(g0, g2), AVGB(g1, g3)); + uint8_t ar = AVGB(AVGB(r0, r2), AVGB(r1, r3)); + dst_u[0] = RGBToU(ar, ag, ab); + dst_v[0] = RGBToV(ar, ag, ab); +#else + uint16_t b = (b0 + b1 + b2 + b3 + 1) >> 1; + uint16_t g = (g0 + g1 + g2 + g3 + 1) >> 1; + uint16_t r = (r0 + r1 + r2 + r3 + 1) >> 1; + dst_u[0] = RGB2xToU(r, g, b); + dst_v[0] = RGB2xToV(r, g, b); +#endif + src_argb4444 += 4; next_argb4444 += 4; dst_u += 1; @@ -753,14 +960,27 @@ void ARGB4444ToUVRow_C(const uint8_t* src_argb4444, uint8_t b2 = next_argb4444[0] & 0x0f; 
uint8_t g2 = next_argb4444[0] >> 4; uint8_t r2 = next_argb4444[1] & 0x0f; - uint8_t b = (b0 + b2); // 444 * 2 = 555. - uint8_t g = (g0 + g2); - uint8_t r = (r0 + r2); - b = (b << 3) | (b >> 2); // 555 -> 888. - g = (g << 3) | (g >> 2); - r = (r << 3) | (r >> 2); - dst_u[0] = RGBToU(r, g, b); - dst_v[0] = RGBToV(r, g, b); + + b0 = (b0 << 4) | b0; + g0 = (g0 << 4) | g0; + r0 = (r0 << 4) | r0; + b2 = (b2 << 4) | b2; + g2 = (g2 << 4) | g2; + r2 = (r2 << 4) | r2; + +#if LIBYUV_ARGBTOUV_PAVGB + uint8_t ab = AVGB(b0, b2); + uint8_t ag = AVGB(g0, g2); + uint8_t ar = AVGB(r0, r2); + dst_u[0] = RGBToU(ar, ag, ab); + dst_v[0] = RGBToV(ar, ag, ab); +#else + uint16_t b = b0 + b2; + uint16_t g = g0 + g2; + uint16_t r = r0 + r2; + dst_u[0] = RGB2xToU(r, g, b); + dst_v[0] = RGB2xToV(r, g, b); +#endif } } @@ -1136,26 +1356,26 @@ const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants) = { {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, {UG, VG, UG, VG, UG, VG, UG, VG}, {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants) = { {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {VG, UG, VG, UG, VG, UG, VG, UG}, {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #elif defined(__arm__) // 32 bit arm const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants) = { {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants) = { {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, 0, 0, 0, 
0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #else const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants) = { {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, @@ -1167,7 +1387,9 @@ const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants) = { {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, + YGB}}; const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants) = { {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, @@ -1178,7 +1400,9 @@ const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants) = { {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, + YGB}}; #endif #undef BB @@ -1217,26 +1441,26 @@ const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants) = { {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, {UG, VG, UG, VG, UG, VG, UG, VG}, {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants) = { {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {VG, UG, VG, UG, VG, UG, VG, UG}, {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, 0, 0, 0, 0, 0}, - {0x0101 * 
YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #elif defined(__arm__) const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants) = { {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants) = { {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #else const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants) = { {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, @@ -1248,7 +1472,9 @@ const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants) = { {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, + YGB}}; const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants) = { {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, @@ -1259,7 +1485,9 @@ const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants) = { {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, 
YGB, YGB, YGB, + YGB}}; #endif #undef BB @@ -1300,26 +1528,26 @@ const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants) = { {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, {UG, VG, UG, VG, UG, VG, UG, VG}, {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {VG, UG, VG, UG, VG, UG, VG, UG}, {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #elif defined(__arm__) const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants) = { {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #else const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants) = { {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, @@ -1331,7 +1559,9 @@ const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants) = { {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, + YGB}}; const struct YuvConstants 
SIMD_ALIGNED(kYvuH709Constants) = { {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, @@ -1342,7 +1572,9 @@ const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, + YGB}}; #endif #undef BB @@ -1357,7 +1589,7 @@ const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { // BT.2020 YUV to RGB reference // R = (Y - 16) * 1.164384 - V * -1.67867 -// G = (Y - 16) * 1.164384 - U * 0.187326 - V * -0.65042 +// G = (Y - 16) * 1.164384 - U * 0.187326 - V * 0.65042 // B = (Y - 16) * 1.164384 - U * -2.14177 // Y contribution to R,G,B. Scale and bias. @@ -1365,6 +1597,7 @@ const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { #define YGB -1160 /* 1.164384 * 64 * -16 + 64 / 2 */ // TODO(fbarchard): Improve accuracy; the B channel is off by 7%. +// U and V contributions to R,G,B. 
#define UB -128 /* max(-128, round(-2.142 * 64)) */ #define UG 12 /* round(0.187326 * 64) */ #define VG 42 /* round(0.65042 * 64) */ @@ -1381,26 +1614,26 @@ const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants) = { {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, {UG, VG, UG, VG, UG, VG, UG, VG}, {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, {VG, UG, VG, UG, VG, UG, VG, UG}, {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #elif defined(__arm__) const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants) = { {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BB, BG, BR, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, 0, 0, 0, 0, 0}, - {0x0101 * YG, 0, 0, 0}}; + {BR, BG, BB, YGB, 0, 0, 0, 0}, + {0x0101 * YG, YG, 0, 0}}; #else const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants) = { {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, @@ -1412,7 +1645,9 @@ const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants) = { {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, 
YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, + YGB}}; const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, @@ -1423,7 +1658,9 @@ const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}}; + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, + {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, + YGB}}; #endif #undef BB @@ -1438,7 +1675,6 @@ const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { // C reference code that mimics the YUV assembly. // Reads 8 bit YUV and leaves result as 16 bit. - static __inline void YuvPixel(uint8_t y, uint8_t u, uint8_t v, @@ -1454,7 +1690,7 @@ static __inline void YuvPixel(uint8_t y, int bb = yuvconstants->kUVBiasBGR[0]; int bg = yuvconstants->kUVBiasBGR[1]; int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[0] / 0x0101; + int yg = yuvconstants->kYToRgb[1]; #elif defined(__arm__) int ub = -yuvconstants->kUVToRB[0]; int ug = yuvconstants->kUVToG[0]; @@ -1463,7 +1699,7 @@ static __inline void YuvPixel(uint8_t y, int bb = yuvconstants->kUVBiasBGR[0]; int bg = yuvconstants->kUVBiasBGR[1]; int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[0] / 0x0101; + int yg = yuvconstants->kYToRgb[1]; #else int ub = yuvconstants->kUVToB[0]; int ug = yuvconstants->kUVToG[0]; @@ -1497,7 +1733,7 @@ static __inline void YuvPixel8_16(uint8_t y, int bb = yuvconstants->kUVBiasBGR[0]; int bg = yuvconstants->kUVBiasBGR[1]; int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[0] / 0x0101; + int yg = yuvconstants->kYToRgb[1]; #elif defined(__arm__) int ub = 
-yuvconstants->kUVToRB[0]; int ug = yuvconstants->kUVToG[0]; @@ -1506,7 +1742,7 @@ static __inline void YuvPixel8_16(uint8_t y, int bb = yuvconstants->kUVBiasBGR[0]; int bg = yuvconstants->kUVBiasBGR[1]; int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[0] / 0x0101; + int yg = yuvconstants->kYToRgb[1]; #else int ub = yuvconstants->kUVToB[0]; int ug = yuvconstants->kUVToG[0]; @@ -1541,7 +1777,7 @@ static __inline void YuvPixel16(int16_t y, int bb = yuvconstants->kUVBiasBGR[0]; int bg = yuvconstants->kUVBiasBGR[1]; int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[0] / 0x0101; + int yg = yuvconstants->kYToRgb[1]; #elif defined(__arm__) int ub = -yuvconstants->kUVToRB[0]; int ug = yuvconstants->kUVToG[0]; @@ -1550,7 +1786,7 @@ static __inline void YuvPixel16(int16_t y, int bb = yuvconstants->kUVBiasBGR[0]; int bg = yuvconstants->kUVBiasBGR[1]; int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[0] / 0x0101; + int yg = yuvconstants->kYToRgb[1]; #else int ub = yuvconstants->kUVToB[0]; int ug = yuvconstants->kUVToG[0]; @@ -1588,21 +1824,26 @@ static __inline void YuvPixel10(uint16_t y, *r = Clamp(r16 >> 6); } -// Y contribution to R,G,B. Scale and bias. -#define YG 18997 /* round(1.164 * 64 * 256 * 256 / 257) */ -#define YGB -1160 /* 1.164 * 64 * -16 + 64 / 2 */ - // C reference code that mimics the YUV assembly. -static __inline void YPixel(uint8_t y, uint8_t* b, uint8_t* g, uint8_t* r) { - uint32_t y1 = (uint32_t)(y * 0x0101 * YG) >> 16; - *b = Clamp((int32_t)(y1 + YGB) >> 6); - *g = Clamp((int32_t)(y1 + YGB) >> 6); - *r = Clamp((int32_t)(y1 + YGB) >> 6); +// Reads 8 bit YUV and leaves result as 16 bit. 
+static __inline void YPixel(uint8_t y, + uint8_t* b, + uint8_t* g, + uint8_t* r, + const struct YuvConstants* yuvconstants) { +#if defined(__aarch64__) || defined(__arm__) + int ygb = yuvconstants->kUVBiasBGR[3]; + int yg = yuvconstants->kYToRgb[1]; +#else + int ygb = yuvconstants->kYBiasToRgb[0]; + int yg = yuvconstants->kYToRgb[0]; +#endif + uint32_t y1 = (uint32_t)(y * 0x0101 * yg) >> 16; + *b = Clamp(((int32_t)(y1) + ygb) >> 6); + *g = Clamp(((int32_t)(y1) + ygb) >> 6); + *r = Clamp(((int32_t)(y1) + ygb) >> 6); } -#undef YG -#undef YGB - #if !defined(LIBYUV_DISABLE_NEON) && \ (defined(__ARM_NEON__) || defined(__aarch64__) || defined(LIBYUV_NEON)) // C mimic assembly. @@ -2136,18 +2377,21 @@ void I422ToRGBARow_C(const uint8_t* src_y, } } -void I400ToARGBRow_C(const uint8_t* src_y, uint8_t* rgb_buf, int width) { +void I400ToARGBRow_C(const uint8_t* src_y, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { int x; for (x = 0; x < width - 1; x += 2) { - YPixel(src_y[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); + YPixel(src_y[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2, yuvconstants); rgb_buf[3] = 255; - YPixel(src_y[1], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); + YPixel(src_y[1], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6, yuvconstants); rgb_buf[7] = 255; src_y += 2; rgb_buf += 8; // Advance 2 pixels. 
} if (width & 1) { - YPixel(src_y[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); + YPixel(src_y[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2, yuvconstants); rgb_buf[3] = 255; } } @@ -2165,10 +2409,21 @@ void MirrorRow_C(const uint8_t* src, uint8_t* dst, int width) { } } -void MirrorUVRow_C(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +void MirrorUVRow_C(const uint8_t* src_uv, uint8_t* dst_uv, int width) { + int x; + src_uv += (width - 1) << 1; + for (x = 0; x < width; ++x) { + dst_uv[0] = src_uv[0]; + dst_uv[1] = src_uv[1]; + src_uv -= 2; + dst_uv += 2; + } +} + +void MirrorSplitUVRow_C(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { int x; src_uv += (width - 1) << 1; for (x = 0; x < width - 1; x += 2) { @@ -2199,6 +2454,21 @@ void ARGBMirrorRow_C(const uint8_t* src, uint8_t* dst, int width) { } } +void RGB24MirrorRow_C(const uint8_t* src_rgb24, uint8_t* dst_rgb24, int width) { + int x; + src_rgb24 += width * 3 - 3; + for (x = 0; x < width; ++x) { + uint8_t b = src_rgb24[0]; + uint8_t g = src_rgb24[1]; + uint8_t r = src_rgb24[2]; + dst_rgb24[0] = b; + dst_rgb24[1] = g; + dst_rgb24[2] = r; + src_rgb24 -= 3; + dst_rgb24 += 3; + } +} + void SplitUVRow_C(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, @@ -2338,10 +2608,9 @@ void SetRow_C(uint8_t* dst, uint8_t v8, int width) { } void ARGBSetRow_C(uint8_t* dst_argb, uint32_t v32, int width) { - uint32_t* d = (uint32_t*)(dst_argb); int x; for (x = 0; x < width; ++x) { - d[x] = v32; + memcpy(dst_argb + x * sizeof v32, &v32, sizeof v32); } } @@ -2439,7 +2708,7 @@ void UYVYToYRow_C(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { } } -#define BLEND(f, b, a) (((256 - a) * b) >> 8) + f +#define BLEND(f, b, a) clamp255((((256 - a) * b) >> 8) + f) // Blend src_argb0 over src_argb1 and store to dst_argb. // dst_argb may be src_argb0 or src_argb1. 
@@ -2515,10 +2784,14 @@ void BlendPlaneRow_C(const uint8_t* src0, } #undef UBLEND +#if defined(__aarch64__) || defined(__arm__) +#define ATTENUATE(f, a) (f * a + 128) >> 8 +#else +// This code mimics the SSSE3 version for better testability. #define ATTENUATE(f, a) (a | (a << 8)) * (f | (f << 8)) >> 24 +#endif // Multiply source RGB by alpha and store to destination. -// This code mimics the SSSE3 version for better testability. void ARGBAttenuateRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width) { int i; for (i = 0; i < width - 1; i += 2) { @@ -3305,6 +3578,70 @@ void NV12ToRGB565Row_AVX2(const uint8_t* src_y, } #endif +#ifdef HAS_RGB24TOYJROW_AVX2 +// Convert 16 RGB24 pixels (64 bytes) to 16 YJ values. +void RGB24ToYJRow_AVX2(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { + // Row buffer for intermediate ARGB pixels. + SIMD_ALIGNED(uint8_t row[MAXTWIDTH * 4]); + while (width > 0) { + int twidth = width > MAXTWIDTH ? MAXTWIDTH : width; + RGB24ToARGBRow_SSSE3(src_rgb24, row, twidth); + ARGBToYJRow_AVX2(row, dst_yj, twidth); + src_rgb24 += twidth * 3; + dst_yj += twidth; + width -= twidth; + } +} +#endif // HAS_RGB24TOYJROW_AVX2 + +#ifdef HAS_RAWTOYJROW_AVX2 +// Convert 16 RAW pixels (64 bytes) to 16 YJ values. +void RAWToYJRow_AVX2(const uint8_t* src_raw, uint8_t* dst_yj, int width) { + // Row buffer for intermediate ARGB pixels. + SIMD_ALIGNED(uint8_t row[MAXTWIDTH * 4]); + while (width > 0) { + int twidth = width > MAXTWIDTH ? MAXTWIDTH : width; + RAWToARGBRow_SSSE3(src_raw, row, twidth); + ARGBToYJRow_AVX2(row, dst_yj, twidth); + src_raw += twidth * 3; + dst_yj += twidth; + width -= twidth; + } +} +#endif // HAS_RAWTOYJROW_AVX2 + +#ifdef HAS_RGB24TOYJROW_SSSE3 +// Convert 16 RGB24 pixels (64 bytes) to 16 YJ values. +void RGB24ToYJRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { + // Row buffer for intermediate ARGB pixels. + SIMD_ALIGNED(uint8_t row[MAXTWIDTH * 4]); + while (width > 0) { + int twidth = width > MAXTWIDTH ? 
MAXTWIDTH : width; + RGB24ToARGBRow_SSSE3(src_rgb24, row, twidth); + ARGBToYJRow_SSSE3(row, dst_yj, twidth); + src_rgb24 += twidth * 3; + dst_yj += twidth; + width -= twidth; + } +} +#endif // HAS_RGB24TOYJROW_SSSE3 + +#ifdef HAS_RAWTOYJROW_SSSE3 +// Convert 16 RAW pixels (64 bytes) to 16 YJ values. +void RAWToYJRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_yj, int width) { + // Row buffer for intermediate ARGB pixels. + SIMD_ALIGNED(uint8_t row[MAXTWIDTH * 4]); + while (width > 0) { + int twidth = width > MAXTWIDTH ? MAXTWIDTH : width; + RAWToARGBRow_SSSE3(src_raw, row, twidth); + ARGBToYJRow_SSSE3(row, dst_yj, twidth); + src_raw += twidth * 3; + dst_yj += twidth; + width -= twidth; + } +} +#endif // HAS_RAWTOYJROW_SSSE3 + float ScaleSumSamples_C(const float* src, float* dst, float scale, int width) { float fsum = 0.f; int i; @@ -3358,6 +3695,29 @@ void GaussCol_C(const uint16_t* src0, } } +void GaussRow_F32_C(const float* src, float* dst, int width) { + int i; + for (i = 0; i < width; ++i) { + *dst++ = (src[0] + src[1] * 4 + src[2] * 6 + src[3] * 4 + src[4]) * + (1.0f / 256.0f); + ++src; + } +} + +// filter 5 rows with 1, 4, 6, 4, 1 coefficients to produce 1 row. 
+void GaussCol_F32_C(const float* src0, + const float* src1, + const float* src2, + const float* src3, + const float* src4, + float* dst, + int width) { + int i; + for (i = 0; i < width; ++i) { + *dst++ = *src0++ + *src1++ * 4 + *src2++ * 6 + *src3++ * 4 + *src4++; + } +} + // Convert biplanar NV21 to packed YUV24 void NV21ToYUV24Row_C(const uint8_t* src_y, const uint8_t* src_vu, @@ -3459,6 +3819,30 @@ void SwapUVRow_C(const uint8_t* src_uv, uint8_t* dst_vu, int width) { } } +void HalfMergeUVRow_C(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width) { + int x; + for (x = 0; x < width - 1; x += 2) { + dst_uv[0] = (src_u[0] + src_u[1] + src_u[src_stride_u] + + src_u[src_stride_u + 1] + 2) >> + 2; + dst_uv[1] = (src_v[0] + src_v[1] + src_v[src_stride_v] + + src_v[src_stride_v + 1] + 2) >> + 2; + src_u += 2; + src_v += 2; + dst_uv += 2; + } + if (width & 1) { + dst_uv[0] = (src_u[0] + src_u[src_stride_u] + 1) >> 1; + dst_uv[1] = (src_v[0] + src_v[src_stride_v] + 1) >> 1; + } +} + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc index 3088bb755..a107c30e7 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc @@ -159,24 +159,24 @@ static const lvec8 kShuffleNV21 = { #ifdef HAS_J400TOARGBROW_SSE2 void J400ToARGBRow_SSE2(const uint8_t* src_y, uint8_t* dst_argb, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "pslld $0x18,%%xmm5 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "pslld $0x18,%%xmm5 \n" LABELALIGN "1: \n" - "movq (%0),%%xmm0 \n" - "lea 0x8(%0),%0 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklwd %%xmm0,%%xmm0 \n" - "punpckhwd %%xmm1,%%xmm1 \n" - "por %%xmm5,%%xmm0 \n" - "por %%xmm5,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm1,0x10(%1) \n" - "lea 0x20(%1),%1 \n" 
- "sub $0x8,%2 \n" - "jg 1b \n" + "movq (%0),%%xmm0 \n" + "lea 0x8(%0),%0 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklwd %%xmm0,%%xmm0 \n" + "punpckhwd %%xmm1,%%xmm1 \n" + "por %%xmm5,%%xmm0 \n" + "por %%xmm5,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src_y), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -190,35 +190,35 @@ void RGB24ToARGBRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_argb, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" // 0xff000000 - "pslld $0x18,%%xmm5 \n" - "movdqa %3,%%xmm4 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" // 0xff000000 + "pslld $0x18,%%xmm5 \n" + "movdqa %3,%%xmm4 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm3 \n" - "lea 0x30(%0),%0 \n" - "movdqa %%xmm3,%%xmm2 \n" - "palignr $0x8,%%xmm1,%%xmm2 \n" - "pshufb %%xmm4,%%xmm2 \n" - "por %%xmm5,%%xmm2 \n" - "palignr $0xc,%%xmm0,%%xmm1 \n" - "pshufb %%xmm4,%%xmm0 \n" - "movdqu %%xmm2,0x20(%1) \n" - "por %%xmm5,%%xmm0 \n" - "pshufb %%xmm4,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "por %%xmm5,%%xmm1 \n" - "palignr $0x4,%%xmm3,%%xmm3 \n" - "pshufb %%xmm4,%%xmm3 \n" - "movdqu %%xmm1,0x10(%1) \n" - "por %%xmm5,%%xmm3 \n" - "movdqu %%xmm3,0x30(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm3 \n" + "lea 0x30(%0),%0 \n" + "movdqa %%xmm3,%%xmm2 \n" + "palignr $0x8,%%xmm1,%%xmm2 \n" + "pshufb %%xmm4,%%xmm2 \n" + "por %%xmm5,%%xmm2 \n" + "palignr $0xc,%%xmm0,%%xmm1 \n" + "pshufb %%xmm4,%%xmm0 \n" + "movdqu %%xmm2,0x20(%1) \n" + "por %%xmm5,%%xmm0 \n" + "pshufb %%xmm4,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "por %%xmm5,%%xmm1 \n" + "palignr $0x4,%%xmm3,%%xmm3 \n" + "pshufb %%xmm4,%%xmm3 \n" + "movdqu %%xmm1,0x10(%1) \n" + "por %%xmm5,%%xmm3 \n" + "movdqu %%xmm3,0x30(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : 
"+r"(src_rgb24), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -228,35 +228,35 @@ void RGB24ToARGBRow_SSSE3(const uint8_t* src_rgb24, void RAWToARGBRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_argb, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" // 0xff000000 - "pslld $0x18,%%xmm5 \n" - "movdqa %3,%%xmm4 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" // 0xff000000 + "pslld $0x18,%%xmm5 \n" + "movdqa %3,%%xmm4 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm3 \n" - "lea 0x30(%0),%0 \n" - "movdqa %%xmm3,%%xmm2 \n" - "palignr $0x8,%%xmm1,%%xmm2 \n" - "pshufb %%xmm4,%%xmm2 \n" - "por %%xmm5,%%xmm2 \n" - "palignr $0xc,%%xmm0,%%xmm1 \n" - "pshufb %%xmm4,%%xmm0 \n" - "movdqu %%xmm2,0x20(%1) \n" - "por %%xmm5,%%xmm0 \n" - "pshufb %%xmm4,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "por %%xmm5,%%xmm1 \n" - "palignr $0x4,%%xmm3,%%xmm3 \n" - "pshufb %%xmm4,%%xmm3 \n" - "movdqu %%xmm1,0x10(%1) \n" - "por %%xmm5,%%xmm3 \n" - "movdqu %%xmm3,0x30(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm3 \n" + "lea 0x30(%0),%0 \n" + "movdqa %%xmm3,%%xmm2 \n" + "palignr $0x8,%%xmm1,%%xmm2 \n" + "pshufb %%xmm4,%%xmm2 \n" + "por %%xmm5,%%xmm2 \n" + "palignr $0xc,%%xmm0,%%xmm1 \n" + "pshufb %%xmm4,%%xmm0 \n" + "movdqu %%xmm2,0x20(%1) \n" + "por %%xmm5,%%xmm0 \n" + "pshufb %%xmm4,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "por %%xmm5,%%xmm1 \n" + "palignr $0x4,%%xmm3,%%xmm3 \n" + "pshufb %%xmm4,%%xmm3 \n" + "movdqu %%xmm1,0x10(%1) \n" + "por %%xmm5,%%xmm3 \n" + "movdqu %%xmm3,0x30(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_raw), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -267,35 +267,35 @@ void RAWToARGBRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_argb, int width) { // Same code as RAWToARGB with different shuffler and A in low bits void RAWToRGBARow_SSSE3(const uint8_t* src_raw, uint8_t* dst_rgba, int width) { asm 
volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" // 0x000000ff - "psrld $0x18,%%xmm5 \n" - "movdqa %3,%%xmm4 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" // 0x000000ff + "psrld $0x18,%%xmm5 \n" + "movdqa %3,%%xmm4 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm3 \n" - "lea 0x30(%0),%0 \n" - "movdqa %%xmm3,%%xmm2 \n" - "palignr $0x8,%%xmm1,%%xmm2 \n" - "pshufb %%xmm4,%%xmm2 \n" - "por %%xmm5,%%xmm2 \n" - "palignr $0xc,%%xmm0,%%xmm1 \n" - "pshufb %%xmm4,%%xmm0 \n" - "movdqu %%xmm2,0x20(%1) \n" - "por %%xmm5,%%xmm0 \n" - "pshufb %%xmm4,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "por %%xmm5,%%xmm1 \n" - "palignr $0x4,%%xmm3,%%xmm3 \n" - "pshufb %%xmm4,%%xmm3 \n" - "movdqu %%xmm1,0x10(%1) \n" - "por %%xmm5,%%xmm3 \n" - "movdqu %%xmm3,0x30(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm3 \n" + "lea 0x30(%0),%0 \n" + "movdqa %%xmm3,%%xmm2 \n" + "palignr $0x8,%%xmm1,%%xmm2 \n" + "pshufb %%xmm4,%%xmm2 \n" + "por %%xmm5,%%xmm2 \n" + "palignr $0xc,%%xmm0,%%xmm1 \n" + "pshufb %%xmm4,%%xmm0 \n" + "movdqu %%xmm2,0x20(%1) \n" + "por %%xmm5,%%xmm0 \n" + "pshufb %%xmm4,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "por %%xmm5,%%xmm1 \n" + "palignr $0x4,%%xmm3,%%xmm3 \n" + "pshufb %%xmm4,%%xmm3 \n" + "movdqu %%xmm1,0x10(%1) \n" + "por %%xmm5,%%xmm3 \n" + "movdqu %%xmm3,0x30(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_raw), // %0 "+r"(dst_rgba), // %1 "+r"(width) // %2 @@ -307,25 +307,25 @@ void RAWToRGB24Row_SSSE3(const uint8_t* src_raw, uint8_t* dst_rgb24, int width) { asm volatile( - "movdqa %3,%%xmm3 \n" - "movdqa %4,%%xmm4 \n" - "movdqa %5,%%xmm5 \n" + "movdqa %3,%%xmm3 \n" + "movdqa %4,%%xmm4 \n" + "movdqa %5,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x4(%0),%%xmm1 \n" - "movdqu 0x8(%0),%%xmm2 \n" - "lea 0x18(%0),%0 \n" - "pshufb %%xmm3,%%xmm0 \n" - "pshufb %%xmm4,%%xmm1 \n" - "pshufb %%xmm5,%%xmm2 \n" - 
"movq %%xmm0,(%1) \n" - "movq %%xmm1,0x8(%1) \n" - "movq %%xmm2,0x10(%1) \n" - "lea 0x18(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x4(%0),%%xmm1 \n" + "movdqu 0x8(%0),%%xmm2 \n" + "lea 0x18(%0),%0 \n" + "pshufb %%xmm3,%%xmm0 \n" + "pshufb %%xmm4,%%xmm1 \n" + "pshufb %%xmm5,%%xmm2 \n" + "movq %%xmm0,(%1) \n" + "movq %%xmm1,0x8(%1) \n" + "movq %%xmm2,0x10(%1) \n" + "lea 0x18(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src_raw), // %0 "+r"(dst_rgb24), // %1 "+r"(width) // %2 @@ -337,44 +337,44 @@ void RAWToRGB24Row_SSSE3(const uint8_t* src_raw, void RGB565ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "mov $0x1080108,%%eax \n" - "movd %%eax,%%xmm5 \n" - "pshufd $0x0,%%xmm5,%%xmm5 \n" - "mov $0x20802080,%%eax \n" - "movd %%eax,%%xmm6 \n" - "pshufd $0x0,%%xmm6,%%xmm6 \n" - "pcmpeqb %%xmm3,%%xmm3 \n" - "psllw $0xb,%%xmm3 \n" - "pcmpeqb %%xmm4,%%xmm4 \n" - "psllw $0xa,%%xmm4 \n" - "psrlw $0x5,%%xmm4 \n" - "pcmpeqb %%xmm7,%%xmm7 \n" - "psllw $0x8,%%xmm7 \n" - "sub %0,%1 \n" - "sub %0,%1 \n" + "mov $0x1080108,%%eax \n" + "movd %%eax,%%xmm5 \n" + "pshufd $0x0,%%xmm5,%%xmm5 \n" + "mov $0x20802080,%%eax \n" + "movd %%eax,%%xmm6 \n" + "pshufd $0x0,%%xmm6,%%xmm6 \n" + "pcmpeqb %%xmm3,%%xmm3 \n" + "psllw $0xb,%%xmm3 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psllw $0xa,%%xmm4 \n" + "psrlw $0x5,%%xmm4 \n" + "pcmpeqb %%xmm7,%%xmm7 \n" + "psllw $0x8,%%xmm7 \n" + "sub %0,%1 \n" + "sub %0,%1 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "pand %%xmm3,%%xmm1 \n" - "psllw $0xb,%%xmm2 \n" - "pmulhuw %%xmm5,%%xmm1 \n" - "pmulhuw %%xmm5,%%xmm2 \n" - "psllw $0x8,%%xmm1 \n" - "por %%xmm2,%%xmm1 \n" - "pand %%xmm4,%%xmm0 \n" - "pmulhuw %%xmm6,%%xmm0 \n" - "por %%xmm7,%%xmm0 \n" - "movdqa %%xmm1,%%xmm2 \n" - "punpcklbw %%xmm0,%%xmm1 \n" - "punpckhbw %%xmm0,%%xmm2 \n" - "movdqu %%xmm1,0x00(%1,%0,2) \n" - "movdqu %%xmm2,0x10(%1,%0,2) \n" - "lea 0x10(%0),%0 \n" - "sub 
$0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "pand %%xmm3,%%xmm1 \n" + "psllw $0xb,%%xmm2 \n" + "pmulhuw %%xmm5,%%xmm1 \n" + "pmulhuw %%xmm5,%%xmm2 \n" + "psllw $0x8,%%xmm1 \n" + "por %%xmm2,%%xmm1 \n" + "pand %%xmm4,%%xmm0 \n" + "pmulhuw %%xmm6,%%xmm0 \n" + "por %%xmm7,%%xmm0 \n" + "movdqa %%xmm1,%%xmm2 \n" + "punpcklbw %%xmm0,%%xmm1 \n" + "punpckhbw %%xmm0,%%xmm2 \n" + "movdqu %%xmm1,0x00(%1,%0,2) \n" + "movdqu %%xmm2,0x10(%1,%0,2) \n" + "lea 0x10(%0),%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -385,47 +385,47 @@ void RGB565ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { void ARGB1555ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "mov $0x1080108,%%eax \n" - "movd %%eax,%%xmm5 \n" - "pshufd $0x0,%%xmm5,%%xmm5 \n" - "mov $0x42004200,%%eax \n" - "movd %%eax,%%xmm6 \n" - "pshufd $0x0,%%xmm6,%%xmm6 \n" - "pcmpeqb %%xmm3,%%xmm3 \n" - "psllw $0xb,%%xmm3 \n" - "movdqa %%xmm3,%%xmm4 \n" - "psrlw $0x6,%%xmm4 \n" - "pcmpeqb %%xmm7,%%xmm7 \n" - "psllw $0x8,%%xmm7 \n" - "sub %0,%1 \n" - "sub %0,%1 \n" + "mov $0x1080108,%%eax \n" + "movd %%eax,%%xmm5 \n" + "pshufd $0x0,%%xmm5,%%xmm5 \n" + "mov $0x42004200,%%eax \n" + "movd %%eax,%%xmm6 \n" + "pshufd $0x0,%%xmm6,%%xmm6 \n" + "pcmpeqb %%xmm3,%%xmm3 \n" + "psllw $0xb,%%xmm3 \n" + "movdqa %%xmm3,%%xmm4 \n" + "psrlw $0x6,%%xmm4 \n" + "pcmpeqb %%xmm7,%%xmm7 \n" + "psllw $0x8,%%xmm7 \n" + "sub %0,%1 \n" + "sub %0,%1 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "psllw $0x1,%%xmm1 \n" - "psllw $0xb,%%xmm2 \n" - "pand %%xmm3,%%xmm1 \n" - "pmulhuw %%xmm5,%%xmm2 \n" - "pmulhuw %%xmm5,%%xmm1 \n" - "psllw $0x8,%%xmm1 \n" - "por %%xmm2,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "pand %%xmm4,%%xmm0 \n" - "psraw $0x8,%%xmm2 \n" - "pmulhuw %%xmm6,%%xmm0 \n" - "pand %%xmm7,%%xmm2 \n" - "por %%xmm2,%%xmm0 \n" - "movdqa 
%%xmm1,%%xmm2 \n" - "punpcklbw %%xmm0,%%xmm1 \n" - "punpckhbw %%xmm0,%%xmm2 \n" - "movdqu %%xmm1,0x00(%1,%0,2) \n" - "movdqu %%xmm2,0x10(%1,%0,2) \n" - "lea 0x10(%0),%0 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "psllw $0x1,%%xmm1 \n" + "psllw $0xb,%%xmm2 \n" + "pand %%xmm3,%%xmm1 \n" + "pmulhuw %%xmm5,%%xmm2 \n" + "pmulhuw %%xmm5,%%xmm1 \n" + "psllw $0x8,%%xmm1 \n" + "por %%xmm2,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "pand %%xmm4,%%xmm0 \n" + "psraw $0x8,%%xmm2 \n" + "pmulhuw %%xmm6,%%xmm0 \n" + "pand %%xmm7,%%xmm2 \n" + "por %%xmm2,%%xmm0 \n" + "movdqa %%xmm1,%%xmm2 \n" + "punpcklbw %%xmm0,%%xmm1 \n" + "punpckhbw %%xmm0,%%xmm2 \n" + "movdqu %%xmm1,0x00(%1,%0,2) \n" + "movdqu %%xmm2,0x10(%1,%0,2) \n" + "lea 0x10(%0),%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -436,34 +436,34 @@ void ARGB1555ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { void ARGB4444ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "mov $0xf0f0f0f,%%eax \n" - "movd %%eax,%%xmm4 \n" - "pshufd $0x0,%%xmm4,%%xmm4 \n" - "movdqa %%xmm4,%%xmm5 \n" - "pslld $0x4,%%xmm5 \n" - "sub %0,%1 \n" - "sub %0,%1 \n" + "mov $0xf0f0f0f,%%eax \n" + "movd %%eax,%%xmm4 \n" + "pshufd $0x0,%%xmm4,%%xmm4 \n" + "movdqa %%xmm4,%%xmm5 \n" + "pslld $0x4,%%xmm5 \n" + "sub %0,%1 \n" + "sub %0,%1 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm0,%%xmm2 \n" - "pand %%xmm4,%%xmm0 \n" - "pand %%xmm5,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm2,%%xmm3 \n" - "psllw $0x4,%%xmm1 \n" - "psrlw $0x4,%%xmm3 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm3,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklbw %%xmm2,%%xmm0 \n" - "punpckhbw %%xmm2,%%xmm1 \n" - "movdqu %%xmm0,0x00(%1,%0,2) \n" - "movdqu %%xmm1,0x10(%1,%0,2) \n" - "lea 0x10(%0),%0 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm2 \n" + "pand 
%%xmm4,%%xmm0 \n" + "pand %%xmm5,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm3 \n" + "psllw $0x4,%%xmm1 \n" + "psrlw $0x4,%%xmm3 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm3,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm2,%%xmm0 \n" + "punpckhbw %%xmm2,%%xmm1 \n" + "movdqu %%xmm0,0x00(%1,%0,2) \n" + "movdqu %%xmm1,0x10(%1,%0,2) \n" + "lea 0x10(%0),%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -474,35 +474,35 @@ void ARGB4444ToARGBRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { void ARGBToRGB24Row_SSSE3(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "movdqa %3,%%xmm6 \n" + "movdqa %3,%%xmm6 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x30(%0),%%xmm3 \n" - "lea 0x40(%0),%0 \n" - "pshufb %%xmm6,%%xmm0 \n" - "pshufb %%xmm6,%%xmm1 \n" - "pshufb %%xmm6,%%xmm2 \n" - "pshufb %%xmm6,%%xmm3 \n" - "movdqa %%xmm1,%%xmm4 \n" - "psrldq $0x4,%%xmm1 \n" - "pslldq $0xc,%%xmm4 \n" - "movdqa %%xmm2,%%xmm5 \n" - "por %%xmm4,%%xmm0 \n" - "pslldq $0x8,%%xmm5 \n" - "movdqu %%xmm0,(%1) \n" - "por %%xmm5,%%xmm1 \n" - "psrldq $0x8,%%xmm2 \n" - "pslldq $0x4,%%xmm3 \n" - "por %%xmm3,%%xmm2 \n" - "movdqu %%xmm1,0x10(%1) \n" - "movdqu %%xmm2,0x20(%1) \n" - "lea 0x30(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x30(%0),%%xmm3 \n" + "lea 0x40(%0),%0 \n" + "pshufb %%xmm6,%%xmm0 \n" + "pshufb %%xmm6,%%xmm1 \n" + "pshufb %%xmm6,%%xmm2 \n" + "pshufb %%xmm6,%%xmm3 \n" + "movdqa %%xmm1,%%xmm4 \n" + "psrldq $0x4,%%xmm1 \n" + "pslldq $0xc,%%xmm4 \n" + "movdqa %%xmm2,%%xmm5 \n" + "por %%xmm4,%%xmm0 \n" + "pslldq $0x8,%%xmm5 \n" + "movdqu %%xmm0,(%1) \n" + "por %%xmm5,%%xmm1 \n" + "psrldq $0x8,%%xmm2 \n" + "pslldq $0x4,%%xmm3 \n" + "por %%xmm3,%%xmm2 \n" + "movdqu %%xmm1,0x10(%1) \n" + "movdqu %%xmm2,0x20(%1) \n" + "lea 0x30(%1),%1 \n" + 
"sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -513,35 +513,35 @@ void ARGBToRGB24Row_SSSE3(const uint8_t* src, uint8_t* dst, int width) { void ARGBToRAWRow_SSSE3(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "movdqa %3,%%xmm6 \n" + "movdqa %3,%%xmm6 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x30(%0),%%xmm3 \n" - "lea 0x40(%0),%0 \n" - "pshufb %%xmm6,%%xmm0 \n" - "pshufb %%xmm6,%%xmm1 \n" - "pshufb %%xmm6,%%xmm2 \n" - "pshufb %%xmm6,%%xmm3 \n" - "movdqa %%xmm1,%%xmm4 \n" - "psrldq $0x4,%%xmm1 \n" - "pslldq $0xc,%%xmm4 \n" - "movdqa %%xmm2,%%xmm5 \n" - "por %%xmm4,%%xmm0 \n" - "pslldq $0x8,%%xmm5 \n" - "movdqu %%xmm0,(%1) \n" - "por %%xmm5,%%xmm1 \n" - "psrldq $0x8,%%xmm2 \n" - "pslldq $0x4,%%xmm3 \n" - "por %%xmm3,%%xmm2 \n" - "movdqu %%xmm1,0x10(%1) \n" - "movdqu %%xmm2,0x20(%1) \n" - "lea 0x30(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x30(%0),%%xmm3 \n" + "lea 0x40(%0),%0 \n" + "pshufb %%xmm6,%%xmm0 \n" + "pshufb %%xmm6,%%xmm1 \n" + "pshufb %%xmm6,%%xmm2 \n" + "pshufb %%xmm6,%%xmm3 \n" + "movdqa %%xmm1,%%xmm4 \n" + "psrldq $0x4,%%xmm1 \n" + "pslldq $0xc,%%xmm4 \n" + "movdqa %%xmm2,%%xmm5 \n" + "por %%xmm4,%%xmm0 \n" + "pslldq $0x8,%%xmm5 \n" + "movdqu %%xmm0,(%1) \n" + "por %%xmm5,%%xmm1 \n" + "psrldq $0x8,%%xmm2 \n" + "pslldq $0x4,%%xmm3 \n" + "por %%xmm3,%%xmm2 \n" + "movdqu %%xmm1,0x10(%1) \n" + "movdqu %%xmm2,0x20(%1) \n" + "lea 0x30(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -556,37 +556,37 @@ static const lvec32 kPermdRGB24_AVX = {0, 1, 2, 4, 5, 6, 3, 7}; void ARGBToRGB24Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( "vbroadcastf128 %3,%%ymm6 \n" - "vmovdqa %4,%%ymm7 \n" + "vmovdqa %4,%%ymm7 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - 
"vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x40(%0),%%ymm2 \n" - "vmovdqu 0x60(%0),%%ymm3 \n" - "lea 0x80(%0),%0 \n" - "vpshufb %%ymm6,%%ymm0,%%ymm0 \n" // xxx0yyy0 - "vpshufb %%ymm6,%%ymm1,%%ymm1 \n" - "vpshufb %%ymm6,%%ymm2,%%ymm2 \n" - "vpshufb %%ymm6,%%ymm3,%%ymm3 \n" - "vpermd %%ymm0,%%ymm7,%%ymm0 \n" // pack to 24 bytes - "vpermd %%ymm1,%%ymm7,%%ymm1 \n" - "vpermd %%ymm2,%%ymm7,%%ymm2 \n" - "vpermd %%ymm3,%%ymm7,%%ymm3 \n" - "vpermq $0x3f,%%ymm1,%%ymm4 \n" // combine 24 + 8 - "vpor %%ymm4,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "vpermq $0xf9,%%ymm1,%%ymm1 \n" // combine 16 + 16 - "vpermq $0x4f,%%ymm2,%%ymm4 \n" - "vpor %%ymm4,%%ymm1,%%ymm1 \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "vpermq $0xfe,%%ymm2,%%ymm2 \n" // combine 8 + 24 - "vpermq $0x93,%%ymm3,%%ymm3 \n" - "vpor %%ymm3,%%ymm2,%%ymm2 \n" - "vmovdqu %%ymm2,0x40(%1) \n" - "lea 0x60(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x40(%0),%%ymm2 \n" + "vmovdqu 0x60(%0),%%ymm3 \n" + "lea 0x80(%0),%0 \n" + "vpshufb %%ymm6,%%ymm0,%%ymm0 \n" // xxx0yyy0 + "vpshufb %%ymm6,%%ymm1,%%ymm1 \n" + "vpshufb %%ymm6,%%ymm2,%%ymm2 \n" + "vpshufb %%ymm6,%%ymm3,%%ymm3 \n" + "vpermd %%ymm0,%%ymm7,%%ymm0 \n" // pack to 24 bytes + "vpermd %%ymm1,%%ymm7,%%ymm1 \n" + "vpermd %%ymm2,%%ymm7,%%ymm2 \n" + "vpermd %%ymm3,%%ymm7,%%ymm3 \n" + "vpermq $0x3f,%%ymm1,%%ymm4 \n" // combine 24 + 8 + "vpor %%ymm4,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "vpermq $0xf9,%%ymm1,%%ymm1 \n" // combine 16 + 16 + "vpermq $0x4f,%%ymm2,%%ymm4 \n" + "vpor %%ymm4,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "vpermq $0xfe,%%ymm2,%%ymm2 \n" // combine 8 + 24 + "vpermq $0x93,%%ymm3,%%ymm3 \n" + "vpor %%ymm3,%%ymm2,%%ymm2 \n" + "vmovdqu %%ymm2,0x40(%1) \n" + "lea 0x60(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -615,26 +615,26 @@ static const ulvec8 kPermARGBToRGB24_2 = { void ARGBToRGB24Row_AVX512VBMI(const uint8_t* src, 
uint8_t* dst, int width) { asm volatile( - "vmovdqa %3,%%ymm5 \n" - "vmovdqa %4,%%ymm6 \n" - "vmovdqa %5,%%ymm7 \n" + "vmovdqa %3,%%ymm5 \n" + "vmovdqa %4,%%ymm6 \n" + "vmovdqa %5,%%ymm7 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x40(%0),%%ymm2 \n" - "vmovdqu 0x60(%0),%%ymm3 \n" - "lea 0x80(%0),%0 \n" - "vpermt2b %%ymm1,%%ymm5,%%ymm0 \n" - "vpermt2b %%ymm2,%%ymm6,%%ymm1 \n" - "vpermt2b %%ymm3,%%ymm7,%%ymm2 \n" - "vmovdqu %%ymm0,(%1) \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "vmovdqu %%ymm2,0x40(%1) \n" - "lea 0x60(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x40(%0),%%ymm2 \n" + "vmovdqu 0x60(%0),%%ymm3 \n" + "lea 0x80(%0),%0 \n" + "vpermt2b %%ymm1,%%ymm5,%%ymm0 \n" + "vpermt2b %%ymm2,%%ymm6,%%ymm1 \n" + "vpermt2b %%ymm3,%%ymm7,%%ymm2 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "vmovdqu %%ymm2,0x40(%1) \n" + "lea 0x60(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -650,37 +650,37 @@ void ARGBToRGB24Row_AVX512VBMI(const uint8_t* src, uint8_t* dst, int width) { void ARGBToRAWRow_AVX2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( "vbroadcastf128 %3,%%ymm6 \n" - "vmovdqa %4,%%ymm7 \n" + "vmovdqa %4,%%ymm7 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x40(%0),%%ymm2 \n" - "vmovdqu 0x60(%0),%%ymm3 \n" - "lea 0x80(%0),%0 \n" - "vpshufb %%ymm6,%%ymm0,%%ymm0 \n" // xxx0yyy0 - "vpshufb %%ymm6,%%ymm1,%%ymm1 \n" - "vpshufb %%ymm6,%%ymm2,%%ymm2 \n" - "vpshufb %%ymm6,%%ymm3,%%ymm3 \n" - "vpermd %%ymm0,%%ymm7,%%ymm0 \n" // pack to 24 bytes - "vpermd %%ymm1,%%ymm7,%%ymm1 \n" - "vpermd %%ymm2,%%ymm7,%%ymm2 \n" - "vpermd %%ymm3,%%ymm7,%%ymm3 \n" - "vpermq $0x3f,%%ymm1,%%ymm4 \n" // combine 24 + 8 - "vpor %%ymm4,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "vpermq $0xf9,%%ymm1,%%ymm1 \n" // combine 16 + 16 - "vpermq $0x4f,%%ymm2,%%ymm4 
\n" - "vpor %%ymm4,%%ymm1,%%ymm1 \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "vpermq $0xfe,%%ymm2,%%ymm2 \n" // combine 8 + 24 - "vpermq $0x93,%%ymm3,%%ymm3 \n" - "vpor %%ymm3,%%ymm2,%%ymm2 \n" - "vmovdqu %%ymm2,0x40(%1) \n" - "lea 0x60(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x40(%0),%%ymm2 \n" + "vmovdqu 0x60(%0),%%ymm3 \n" + "lea 0x80(%0),%0 \n" + "vpshufb %%ymm6,%%ymm0,%%ymm0 \n" // xxx0yyy0 + "vpshufb %%ymm6,%%ymm1,%%ymm1 \n" + "vpshufb %%ymm6,%%ymm2,%%ymm2 \n" + "vpshufb %%ymm6,%%ymm3,%%ymm3 \n" + "vpermd %%ymm0,%%ymm7,%%ymm0 \n" // pack to 24 bytes + "vpermd %%ymm1,%%ymm7,%%ymm1 \n" + "vpermd %%ymm2,%%ymm7,%%ymm2 \n" + "vpermd %%ymm3,%%ymm7,%%ymm3 \n" + "vpermq $0x3f,%%ymm1,%%ymm4 \n" // combine 24 + 8 + "vpor %%ymm4,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "vpermq $0xf9,%%ymm1,%%ymm1 \n" // combine 16 + 16 + "vpermq $0x4f,%%ymm2,%%ymm4 \n" + "vpor %%ymm4,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "vpermq $0xfe,%%ymm2,%%ymm2 \n" // combine 8 + 24 + "vpermq $0x93,%%ymm3,%%ymm3 \n" + "vpor %%ymm3,%%ymm2,%%ymm2 \n" + "vmovdqu %%ymm2,0x40(%1) \n" + "lea 0x60(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -694,34 +694,34 @@ void ARGBToRAWRow_AVX2(const uint8_t* src, uint8_t* dst, int width) { void ARGBToRGB565Row_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "pcmpeqb %%xmm3,%%xmm3 \n" - "psrld $0x1b,%%xmm3 \n" - "pcmpeqb %%xmm4,%%xmm4 \n" - "psrld $0x1a,%%xmm4 \n" - "pslld $0x5,%%xmm4 \n" - "pcmpeqb %%xmm5,%%xmm5 \n" - "pslld $0xb,%%xmm5 \n" + "pcmpeqb %%xmm3,%%xmm3 \n" + "psrld $0x1b,%%xmm3 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psrld $0x1a,%%xmm4 \n" + "pslld $0x5,%%xmm4 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "pslld $0xb,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "pslld $0x8,%%xmm0 \n" - "psrld $0x3,%%xmm1 \n" - "psrld $0x5,%%xmm2 \n" - "psrad 
$0x10,%%xmm0 \n" - "pand %%xmm3,%%xmm1 \n" - "pand %%xmm4,%%xmm2 \n" - "pand %%xmm5,%%xmm0 \n" - "por %%xmm2,%%xmm1 \n" - "por %%xmm1,%%xmm0 \n" - "packssdw %%xmm0,%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movq %%xmm0,(%1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "pslld $0x8,%%xmm0 \n" + "psrld $0x3,%%xmm1 \n" + "psrld $0x5,%%xmm2 \n" + "psrad $0x10,%%xmm0 \n" + "pand %%xmm3,%%xmm1 \n" + "pand %%xmm4,%%xmm2 \n" + "pand %%xmm5,%%xmm0 \n" + "por %%xmm2,%%xmm1 \n" + "por %%xmm1,%%xmm0 \n" + "packssdw %%xmm0,%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -734,40 +734,40 @@ void ARGBToRGB565DitherRow_SSE2(const uint8_t* src, const uint32_t dither4, int width) { asm volatile( - "movd %3,%%xmm6 \n" - "punpcklbw %%xmm6,%%xmm6 \n" - "movdqa %%xmm6,%%xmm7 \n" - "punpcklwd %%xmm6,%%xmm6 \n" - "punpckhwd %%xmm7,%%xmm7 \n" - "pcmpeqb %%xmm3,%%xmm3 \n" - "psrld $0x1b,%%xmm3 \n" - "pcmpeqb %%xmm4,%%xmm4 \n" - "psrld $0x1a,%%xmm4 \n" - "pslld $0x5,%%xmm4 \n" - "pcmpeqb %%xmm5,%%xmm5 \n" - "pslld $0xb,%%xmm5 \n" + "movd %3,%%xmm6 \n" + "punpcklbw %%xmm6,%%xmm6 \n" + "movdqa %%xmm6,%%xmm7 \n" + "punpcklwd %%xmm6,%%xmm6 \n" + "punpckhwd %%xmm7,%%xmm7 \n" + "pcmpeqb %%xmm3,%%xmm3 \n" + "psrld $0x1b,%%xmm3 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psrld $0x1a,%%xmm4 \n" + "pslld $0x5,%%xmm4 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "pslld $0xb,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "paddusb %%xmm6,%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "pslld $0x8,%%xmm0 \n" - "psrld $0x3,%%xmm1 \n" - "psrld $0x5,%%xmm2 \n" - "psrad $0x10,%%xmm0 \n" - "pand %%xmm3,%%xmm1 \n" - "pand %%xmm4,%%xmm2 \n" - "pand %%xmm5,%%xmm0 \n" - "por %%xmm2,%%xmm1 \n" - "por %%xmm1,%%xmm0 \n" - "packssdw %%xmm0,%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movq %%xmm0,(%1) \n" - 
"lea 0x8(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "paddusb %%xmm6,%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "pslld $0x8,%%xmm0 \n" + "psrld $0x3,%%xmm1 \n" + "psrld $0x5,%%xmm2 \n" + "psrad $0x10,%%xmm0 \n" + "pand %%xmm3,%%xmm1 \n" + "pand %%xmm4,%%xmm2 \n" + "pand %%xmm5,%%xmm0 \n" + "por %%xmm2,%%xmm1 \n" + "por %%xmm1,%%xmm0 \n" + "packssdw %%xmm0,%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -783,35 +783,35 @@ void ARGBToRGB565DitherRow_AVX2(const uint8_t* src, int width) { asm volatile( "vbroadcastss %3,%%xmm6 \n" - "vpunpcklbw %%xmm6,%%xmm6,%%xmm6 \n" - "vpermq $0xd8,%%ymm6,%%ymm6 \n" - "vpunpcklwd %%ymm6,%%ymm6,%%ymm6 \n" - "vpcmpeqb %%ymm3,%%ymm3,%%ymm3 \n" - "vpsrld $0x1b,%%ymm3,%%ymm3 \n" - "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" - "vpsrld $0x1a,%%ymm4,%%ymm4 \n" - "vpslld $0x5,%%ymm4,%%ymm4 \n" - "vpslld $0xb,%%ymm3,%%ymm5 \n" + "vpunpcklbw %%xmm6,%%xmm6,%%xmm6 \n" + "vpermq $0xd8,%%ymm6,%%ymm6 \n" + "vpunpcklwd %%ymm6,%%ymm6,%%ymm6 \n" + "vpcmpeqb %%ymm3,%%ymm3,%%ymm3 \n" + "vpsrld $0x1b,%%ymm3,%%ymm3 \n" + "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrld $0x1a,%%ymm4,%%ymm4 \n" + "vpslld $0x5,%%ymm4,%%ymm4 \n" + "vpslld $0xb,%%ymm3,%%ymm5 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vpaddusb %%ymm6,%%ymm0,%%ymm0 \n" - "vpsrld $0x5,%%ymm0,%%ymm2 \n" - "vpsrld $0x3,%%ymm0,%%ymm1 \n" - "vpsrld $0x8,%%ymm0,%%ymm0 \n" - "vpand %%ymm4,%%ymm2,%%ymm2 \n" - "vpand %%ymm3,%%ymm1,%%ymm1 \n" - "vpand %%ymm5,%%ymm0,%%ymm0 \n" - "vpor %%ymm2,%%ymm1,%%ymm1 \n" - "vpor %%ymm1,%%ymm0,%%ymm0 \n" - "vpackusdw %%ymm0,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "lea 0x20(%0),%0 \n" - "vmovdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vpaddusb %%ymm6,%%ymm0,%%ymm0 \n" + "vpsrld $0x5,%%ymm0,%%ymm2 \n" + "vpsrld $0x3,%%ymm0,%%ymm1 
\n" + "vpsrld $0x8,%%ymm0,%%ymm0 \n" + "vpand %%ymm4,%%ymm2,%%ymm2 \n" + "vpand %%ymm3,%%ymm1,%%ymm1 \n" + "vpand %%ymm5,%%ymm0,%%ymm0 \n" + "vpor %%ymm2,%%ymm1,%%ymm1 \n" + "vpor %%ymm1,%%ymm0,%%ymm0 \n" + "vpackusdw %%ymm0,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "lea 0x20(%0),%0 \n" + "vmovdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -824,38 +824,38 @@ void ARGBToRGB565DitherRow_AVX2(const uint8_t* src, void ARGBToARGB1555Row_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "pcmpeqb %%xmm4,%%xmm4 \n" - "psrld $0x1b,%%xmm4 \n" - "movdqa %%xmm4,%%xmm5 \n" - "pslld $0x5,%%xmm5 \n" - "movdqa %%xmm4,%%xmm6 \n" - "pslld $0xa,%%xmm6 \n" - "pcmpeqb %%xmm7,%%xmm7 \n" - "pslld $0xf,%%xmm7 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psrld $0x1b,%%xmm4 \n" + "movdqa %%xmm4,%%xmm5 \n" + "pslld $0x5,%%xmm5 \n" + "movdqa %%xmm4,%%xmm6 \n" + "pslld $0xa,%%xmm6 \n" + "pcmpeqb %%xmm7,%%xmm7 \n" + "pslld $0xf,%%xmm7 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "movdqa %%xmm0,%%xmm3 \n" - "psrad $0x10,%%xmm0 \n" - "psrld $0x3,%%xmm1 \n" - "psrld $0x6,%%xmm2 \n" - "psrld $0x9,%%xmm3 \n" - "pand %%xmm7,%%xmm0 \n" - "pand %%xmm4,%%xmm1 \n" - "pand %%xmm5,%%xmm2 \n" - "pand %%xmm6,%%xmm3 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm3,%%xmm2 \n" - "por %%xmm2,%%xmm0 \n" - "packssdw %%xmm0,%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movq %%xmm0,(%1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm0,%%xmm3 \n" + "psrad $0x10,%%xmm0 \n" + "psrld $0x3,%%xmm1 \n" + "psrld $0x6,%%xmm2 \n" + "psrld $0x9,%%xmm3 \n" + "pand %%xmm7,%%xmm0 \n" + "pand %%xmm4,%%xmm1 \n" + "pand %%xmm5,%%xmm2 \n" + "pand %%xmm6,%%xmm3 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm3,%%xmm2 \n" + "por %%xmm2,%%xmm0 \n" + "packssdw %%xmm0,%%xmm0 \n" + "lea 
0x10(%0),%0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -865,26 +865,26 @@ void ARGBToARGB1555Row_SSE2(const uint8_t* src, uint8_t* dst, int width) { void ARGBToARGB4444Row_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "pcmpeqb %%xmm4,%%xmm4 \n" - "psllw $0xc,%%xmm4 \n" - "movdqa %%xmm4,%%xmm3 \n" - "psrlw $0x8,%%xmm3 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psllw $0xc,%%xmm4 \n" + "movdqa %%xmm4,%%xmm3 \n" + "psrlw $0x8,%%xmm3 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "pand %%xmm3,%%xmm0 \n" - "pand %%xmm4,%%xmm1 \n" - "psrlq $0x4,%%xmm0 \n" - "psrlq $0x8,%%xmm1 \n" - "por %%xmm1,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movq %%xmm0,(%1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pand %%xmm3,%%xmm0 \n" + "pand %%xmm4,%%xmm1 \n" + "psrlq $0x4,%%xmm0 \n" + "psrlq $0x8,%%xmm1 \n" + "por %%xmm1,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -928,31 +928,31 @@ static const uint32_t kMulAG10 = 64 * 65536 + 1028; void ARGBToAR30Row_SSSE3(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "movdqa %3,%%xmm2 \n" // shuffler for RB - "movd %4,%%xmm3 \n" // multipler for RB - "movd %5,%%xmm4 \n" // mask for R10 B10 - "movd %6,%%xmm5 \n" // mask for AG - "movd %7,%%xmm6 \n" // multipler for AG - "pshufd $0x0,%%xmm3,%%xmm3 \n" - "pshufd $0x0,%%xmm4,%%xmm4 \n" - "pshufd $0x0,%%xmm5,%%xmm5 \n" - "pshufd $0x0,%%xmm6,%%xmm6 \n" - "sub %0,%1 \n" + "movdqa %3,%%xmm2 \n" // shuffler for RB + "movd %4,%%xmm3 \n" // multipler for RB + "movd %5,%%xmm4 \n" // mask for R10 B10 + "movd %6,%%xmm5 \n" // mask for AG + "movd %7,%%xmm6 \n" // multipler for AG + "pshufd $0x0,%%xmm3,%%xmm3 \n" 
+ "pshufd $0x0,%%xmm4,%%xmm4 \n" + "pshufd $0x0,%%xmm5,%%xmm5 \n" + "pshufd $0x0,%%xmm6,%%xmm6 \n" + "sub %0,%1 \n" - "1: \n" - "movdqu (%0),%%xmm0 \n" // fetch 4 ARGB pixels - "movdqa %%xmm0,%%xmm1 \n" - "pshufb %%xmm2,%%xmm1 \n" // R0B0 - "pand %%xmm5,%%xmm0 \n" // A0G0 - "pmulhuw %%xmm3,%%xmm1 \n" // X2 R16 X4 B10 - "pmulhuw %%xmm6,%%xmm0 \n" // X10 A2 X10 G10 - "pand %%xmm4,%%xmm1 \n" // X2 R10 X10 B10 - "pslld $10,%%xmm0 \n" // A2 x10 G10 x10 - "por %%xmm1,%%xmm0 \n" // A2 R10 G10 B10 - "movdqu %%xmm0,(%1,%0) \n" // store 4 AR30 pixels - "add $0x10,%0 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "1: \n" + "movdqu (%0),%%xmm0 \n" // fetch 4 ARGB pixels + "movdqa %%xmm0,%%xmm1 \n" + "pshufb %%xmm2,%%xmm1 \n" // R0B0 + "pand %%xmm5,%%xmm0 \n" // A0G0 + "pmulhuw %%xmm3,%%xmm1 \n" // X2 R16 X4 B10 + "pmulhuw %%xmm6,%%xmm0 \n" // X10 A2 X10 G10 + "pand %%xmm4,%%xmm1 \n" // X2 R10 X10 B10 + "pslld $10,%%xmm0 \n" // A2 x10 G10 x10 + "por %%xmm1,%%xmm0 \n" // A2 R10 G10 B10 + "movdqu %%xmm0,(%1,%0) \n" // store 4 AR30 pixels + "add $0x10,%0 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -967,31 +967,31 @@ void ARGBToAR30Row_SSSE3(const uint8_t* src, uint8_t* dst, int width) { void ABGRToAR30Row_SSSE3(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "movdqa %3,%%xmm2 \n" // shuffler for RB - "movd %4,%%xmm3 \n" // multipler for RB - "movd %5,%%xmm4 \n" // mask for R10 B10 - "movd %6,%%xmm5 \n" // mask for AG - "movd %7,%%xmm6 \n" // multipler for AG - "pshufd $0x0,%%xmm3,%%xmm3 \n" - "pshufd $0x0,%%xmm4,%%xmm4 \n" - "pshufd $0x0,%%xmm5,%%xmm5 \n" - "pshufd $0x0,%%xmm6,%%xmm6 \n" - "sub %0,%1 \n" + "movdqa %3,%%xmm2 \n" // shuffler for RB + "movd %4,%%xmm3 \n" // multipler for RB + "movd %5,%%xmm4 \n" // mask for R10 B10 + "movd %6,%%xmm5 \n" // mask for AG + "movd %7,%%xmm6 \n" // multipler for AG + "pshufd $0x0,%%xmm3,%%xmm3 \n" + "pshufd $0x0,%%xmm4,%%xmm4 \n" + "pshufd $0x0,%%xmm5,%%xmm5 \n" + "pshufd $0x0,%%xmm6,%%xmm6 \n" + "sub 
%0,%1 \n" - "1: \n" - "movdqu (%0),%%xmm0 \n" // fetch 4 ABGR pixels - "movdqa %%xmm0,%%xmm1 \n" - "pshufb %%xmm2,%%xmm1 \n" // R0B0 - "pand %%xmm5,%%xmm0 \n" // A0G0 - "pmulhuw %%xmm3,%%xmm1 \n" // X2 R16 X4 B10 - "pmulhuw %%xmm6,%%xmm0 \n" // X10 A2 X10 G10 - "pand %%xmm4,%%xmm1 \n" // X2 R10 X10 B10 - "pslld $10,%%xmm0 \n" // A2 x10 G10 x10 - "por %%xmm1,%%xmm0 \n" // A2 R10 G10 B10 - "movdqu %%xmm0,(%1,%0) \n" // store 4 AR30 pixels - "add $0x10,%0 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "1: \n" + "movdqu (%0),%%xmm0 \n" // fetch 4 ABGR pixels + "movdqa %%xmm0,%%xmm1 \n" + "pshufb %%xmm2,%%xmm1 \n" // R0B0 + "pand %%xmm5,%%xmm0 \n" // A0G0 + "pmulhuw %%xmm3,%%xmm1 \n" // X2 R16 X4 B10 + "pmulhuw %%xmm6,%%xmm0 \n" // X10 A2 X10 G10 + "pand %%xmm4,%%xmm1 \n" // X2 R10 X10 B10 + "pslld $10,%%xmm0 \n" // A2 x10 G10 x10 + "por %%xmm1,%%xmm0 \n" // A2 R10 G10 B10 + "movdqu %%xmm0,(%1,%0) \n" // store 4 AR30 pixels + "add $0x10,%0 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -1008,25 +1008,25 @@ void ABGRToAR30Row_SSSE3(const uint8_t* src, uint8_t* dst, int width) { void ARGBToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( "vbroadcastf128 %3,%%ymm2 \n" // shuffler for RB - "vbroadcastss %4,%%ymm3 \n" // multipler for RB - "vbroadcastss %5,%%ymm4 \n" // mask for R10 B10 - "vbroadcastss %6,%%ymm5 \n" // mask for AG - "vbroadcastss %7,%%ymm6 \n" // multipler for AG - "sub %0,%1 \n" + "vbroadcastss %4,%%ymm3 \n" // multipler for RB + "vbroadcastss %5,%%ymm4 \n" // mask for R10 B10 + "vbroadcastss %6,%%ymm5 \n" // mask for AG + "vbroadcastss %7,%%ymm6 \n" // multipler for AG + "sub %0,%1 \n" "1: \n" - "vmovdqu (%0),%%ymm0 \n" // fetch 8 ARGB pixels - "vpshufb %%ymm2,%%ymm0,%%ymm1 \n" // R0B0 - "vpand %%ymm5,%%ymm0,%%ymm0 \n" // A0G0 - "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" // X2 R16 X4 B10 - "vpmulhuw %%ymm6,%%ymm0,%%ymm0 \n" // X10 A2 X10 G10 - "vpand %%ymm4,%%ymm1,%%ymm1 \n" // X2 R10 X10 B10 - "vpslld $10,%%ymm0,%%ymm0 
\n" // A2 x10 G10 x10 - "vpor %%ymm1,%%ymm0,%%ymm0 \n" // A2 R10 G10 B10 - "vmovdqu %%ymm0,(%1,%0) \n" // store 8 AR30 pixels - "add $0x20,%0 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" // fetch 8 ARGB pixels + "vpshufb %%ymm2,%%ymm0,%%ymm1 \n" // R0B0 + "vpand %%ymm5,%%ymm0,%%ymm0 \n" // A0G0 + "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" // X2 R16 X4 B10 + "vpmulhuw %%ymm6,%%ymm0,%%ymm0 \n" // X10 A2 X10 G10 + "vpand %%ymm4,%%ymm1,%%ymm1 \n" // X2 R10 X10 B10 + "vpslld $10,%%ymm0,%%ymm0 \n" // A2 x10 G10 x10 + "vpor %%ymm1,%%ymm0,%%ymm0 \n" // A2 R10 G10 B10 + "vmovdqu %%ymm0,(%1,%0) \n" // store 8 AR30 pixels + "add $0x20,%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 @@ -1045,25 +1045,25 @@ void ARGBToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( "vbroadcastf128 %3,%%ymm2 \n" // shuffler for RB - "vbroadcastss %4,%%ymm3 \n" // multipler for RB - "vbroadcastss %5,%%ymm4 \n" // mask for R10 B10 - "vbroadcastss %6,%%ymm5 \n" // mask for AG - "vbroadcastss %7,%%ymm6 \n" // multipler for AG - "sub %0,%1 \n" + "vbroadcastss %4,%%ymm3 \n" // multipler for RB + "vbroadcastss %5,%%ymm4 \n" // mask for R10 B10 + "vbroadcastss %6,%%ymm5 \n" // mask for AG + "vbroadcastss %7,%%ymm6 \n" // multipler for AG + "sub %0,%1 \n" "1: \n" - "vmovdqu (%0),%%ymm0 \n" // fetch 8 ABGR pixels - "vpshufb %%ymm2,%%ymm0,%%ymm1 \n" // R0B0 - "vpand %%ymm5,%%ymm0,%%ymm0 \n" // A0G0 - "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" // X2 R16 X4 B10 - "vpmulhuw %%ymm6,%%ymm0,%%ymm0 \n" // X10 A2 X10 G10 - "vpand %%ymm4,%%ymm1,%%ymm1 \n" // X2 R10 X10 B10 - "vpslld $10,%%ymm0,%%ymm0 \n" // A2 x10 G10 x10 - "vpor %%ymm1,%%ymm0,%%ymm0 \n" // A2 R10 G10 B10 - "vmovdqu %%ymm0,(%1,%0) \n" // store 8 AR30 pixels - "add $0x20,%0 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" // fetch 8 ABGR pixels + "vpshufb %%ymm2,%%ymm0,%%ymm1 \n" // R0B0 + "vpand 
%%ymm5,%%ymm0,%%ymm0 \n" // A0G0 + "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" // X2 R16 X4 B10 + "vpmulhuw %%ymm6,%%ymm0,%%ymm0 \n" // X10 A2 X10 G10 + "vpand %%ymm4,%%ymm1,%%ymm1 \n" // X2 R10 X10 B10 + "vpslld $10,%%ymm0,%%ymm0 \n" // A2 x10 G10 x10 + "vpor %%ymm1,%%ymm0,%%ymm0 \n" // A2 R10 G10 B10 + "vmovdqu %%ymm0,(%1,%0) \n" // store 8 AR30 pixels + "add $0x20,%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 @@ -1078,6 +1078,8 @@ void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { } #endif +// clang-format off + // TODO(mraptis): Consider passing R, G, B multipliers as parameter. // round parameter is register containing value to add before shift. #define RGBTOY(round) \ @@ -1101,10 +1103,9 @@ void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { "lea 0x40(%0),%0 \n" \ "phaddw %%xmm0,%%xmm6 \n" \ "phaddw %%xmm2,%%xmm1 \n" \ - "paddw %%" #round \ - ",%%xmm6 \n" \ - "paddw %%" #round \ - ",%%xmm1 \n" \ + "prefetcht0 1280(%0) \n" \ + "paddw %%" #round ",%%xmm6 \n" \ + "paddw %%" #round ",%%xmm1 \n" \ "psrlw $0x8,%%xmm6 \n" \ "psrlw $0x8,%%xmm1 \n" \ "packuswb %%xmm1,%%xmm6 \n" \ @@ -1130,10 +1131,9 @@ void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { "lea 0x80(%0),%0 \n" \ "vphaddw %%ymm1,%%ymm0,%%ymm0 \n" /* mutates. */ \ "vphaddw %%ymm3,%%ymm2,%%ymm2 \n" \ - "vpaddw %%" #round \ - ",%%ymm0,%%ymm0 \n" /* Add .5 for rounding. */ \ - "vpaddw %%" #round \ - ",%%ymm2,%%ymm2 \n" \ + "prefetcht0 1280(%0) \n" \ + "vpaddw %%" #round ",%%ymm0,%%ymm0 \n" /* Add .5 for rounding. */ \ + "vpaddw %%" #round ",%%ymm2,%%ymm2 \n" \ "vpsrlw $0x8,%%ymm0,%%ymm0 \n" \ "vpsrlw $0x8,%%ymm2,%%ymm2 \n" \ "vpackuswb %%ymm2,%%ymm0,%%ymm0 \n" /* mutates. */ \ @@ -1144,13 +1144,15 @@ void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { "jg 1b \n" \ "vzeroupper \n" +// clang-format on + #ifdef HAS_ARGBTOYROW_SSSE3 // Convert 16 ARGB pixels (64 bytes) to 16 Y values. 
void ARGBToYRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" - "movdqa %5,%%xmm7 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" + "movdqa %5,%%xmm7 \n" LABELALIGN RGBTOY(xmm7) : "+r"(src_argb), // %0 @@ -1169,8 +1171,8 @@ void ARGBToYRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width) { // Same as ARGBToYRow but different coefficients, no add 16. void ARGBToYJRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" LABELALIGN RGBTOY(xmm5) : "+r"(src_argb), // %0 @@ -1187,8 +1189,8 @@ void ARGBToYJRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width) { // Same as ARGBToYRow but different coefficients, no add 16. void RGBAToYJRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width) { asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" LABELALIGN RGBTOY(xmm5) : "+r"(src_rgba), // %0 @@ -1210,7 +1212,7 @@ void ARGBToYRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width) { "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" "vbroadcastf128 %5,%%ymm7 \n" - "vmovdqu %6,%%ymm6 \n" + "vmovdqu %6,%%ymm6 \n" LABELALIGN RGBTOY_AVX2(ymm7) : "+r"(src_argb), // %0 @@ -1232,7 +1234,7 @@ void ABGRToYRow_AVX2(const uint8_t* src_abgr, uint8_t* dst_y, int width) { "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" "vbroadcastf128 %5,%%ymm7 \n" - "vmovdqu %6,%%ymm6 \n" + "vmovdqu %6,%%ymm6 \n" LABELALIGN RGBTOY_AVX2(ymm7) : "+r"(src_abgr), // %0 @@ -1253,7 +1255,7 @@ void ARGBToYJRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" - "vmovdqu %5,%%ymm6 \n" + "vmovdqu %5,%%ymm6 \n" LABELALIGN RGBTOY_AVX2(ymm5) : "+r"(src_argb), // %0 @@ -1273,7 +1275,7 @@ void RGBAToYJRow_AVX2(const uint8_t* src_rgba, 
uint8_t* dst_y, int width) { asm volatile( "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" - "vmovdqu %5,%%ymm6 \n" + "vmovdqu %5,%%ymm6 \n" LABELALIGN RGBTOY_AVX2( ymm5) "vzeroupper \n" @@ -1294,52 +1296,52 @@ void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, uint8_t* dst_v, int width) { asm volatile( - "movdqa %5,%%xmm3 \n" - "movdqa %6,%%xmm4 \n" - "movdqa %7,%%xmm5 \n" - "sub %1,%2 \n" + "movdqa %5,%%xmm3 \n" + "movdqa %6,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x10(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x20(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqu 0x30(%0),%%xmm6 \n" - "movdqu 0x30(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x10(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x20(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "movdqu 0x30(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" - "lea 0x40(%0),%0 \n" - "movdqa %%xmm0,%%xmm7 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqa %%xmm2,%%xmm7 \n" - "shufps $0x88,%%xmm6,%%xmm2 \n" - "shufps $0xdd,%%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm6 \n" - "phaddw %%xmm2,%%xmm0 \n" - "phaddw %%xmm6,%%xmm1 \n" - "psraw $0x8,%%xmm0 \n" - "psraw $0x8,%%xmm1 \n" - "packsswb %%xmm1,%%xmm0 \n" - "paddb %%xmm5,%%xmm0 \n" - "movlps %%xmm0,(%1) \n" - "movhps %%xmm0,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "lea 0x40(%0),%0 \n" + 
"movdqa %%xmm0,%%xmm7 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqa %%xmm2,%%xmm7 \n" + "shufps $0x88,%%xmm6,%%xmm2 \n" + "shufps $0xdd,%%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "phaddw %%xmm2,%%xmm0 \n" + "phaddw %%xmm6,%%xmm1 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm1 \n" + "packsswb %%xmm1,%%xmm0 \n" + "paddb %%xmm5,%%xmm0 \n" + "movlps %%xmm0,(%1) \n" + "movhps %%xmm0,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_argb0), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1366,44 +1368,44 @@ void ARGBToUVRow_AVX2(const uint8_t* src_argb0, "vbroadcastf128 %5,%%ymm5 \n" "vbroadcastf128 %6,%%ymm6 \n" "vbroadcastf128 %7,%%ymm7 \n" - "sub %1,%2 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x40(%0),%%ymm2 \n" - "vmovdqu 0x60(%0),%%ymm3 \n" - "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" - "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" - "vpavgb 0x40(%0,%4,1),%%ymm2,%%ymm2 \n" - "vpavgb 0x60(%0,%4,1),%%ymm3,%%ymm3 \n" - "lea 0x80(%0),%0 \n" - "vshufps $0x88,%%ymm1,%%ymm0,%%ymm4 \n" - "vshufps $0xdd,%%ymm1,%%ymm0,%%ymm0 \n" - "vpavgb %%ymm4,%%ymm0,%%ymm0 \n" - "vshufps $0x88,%%ymm3,%%ymm2,%%ymm4 \n" - "vshufps $0xdd,%%ymm3,%%ymm2,%%ymm2 \n" - "vpavgb %%ymm4,%%ymm2,%%ymm2 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x40(%0),%%ymm2 \n" + "vmovdqu 0x60(%0),%%ymm3 \n" + "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" + "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" + "vpavgb 0x40(%0,%4,1),%%ymm2,%%ymm2 \n" + "vpavgb 0x60(%0,%4,1),%%ymm3,%%ymm3 \n" + "lea 0x80(%0),%0 \n" + "vshufps $0x88,%%ymm1,%%ymm0,%%ymm4 \n" + "vshufps $0xdd,%%ymm1,%%ymm0,%%ymm0 \n" + "vpavgb %%ymm4,%%ymm0,%%ymm0 \n" + "vshufps $0x88,%%ymm3,%%ymm2,%%ymm4 
\n" + "vshufps $0xdd,%%ymm3,%%ymm2,%%ymm2 \n" + "vpavgb %%ymm4,%%ymm2,%%ymm2 \n" - "vpmaddubsw %%ymm7,%%ymm0,%%ymm1 \n" - "vpmaddubsw %%ymm7,%%ymm2,%%ymm3 \n" - "vpmaddubsw %%ymm6,%%ymm0,%%ymm0 \n" - "vpmaddubsw %%ymm6,%%ymm2,%%ymm2 \n" - "vphaddw %%ymm3,%%ymm1,%%ymm1 \n" - "vphaddw %%ymm2,%%ymm0,%%ymm0 \n" - "vpsraw $0x8,%%ymm1,%%ymm1 \n" - "vpsraw $0x8,%%ymm0,%%ymm0 \n" - "vpacksswb %%ymm0,%%ymm1,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpshufb %8,%%ymm0,%%ymm0 \n" - "vpaddb %%ymm5,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm7,%%ymm0,%%ymm1 \n" + "vpmaddubsw %%ymm7,%%ymm2,%%ymm3 \n" + "vpmaddubsw %%ymm6,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm6,%%ymm2,%%ymm2 \n" + "vphaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vphaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpsraw $0x8,%%ymm1,%%ymm1 \n" + "vpsraw $0x8,%%ymm0,%%ymm0 \n" + "vpacksswb %%ymm0,%%ymm1,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpshufb %8,%%ymm0,%%ymm0 \n" + "vpaddb %%ymm5,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm0,(%1) \n" "vextractf128 $0x1,%%ymm0,0x0(%1,%2,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb0), // %0 "+r"(dst_u), // %1 @@ -1429,44 +1431,44 @@ void ABGRToUVRow_AVX2(const uint8_t* src_abgr0, "vbroadcastf128 %5,%%ymm5 \n" "vbroadcastf128 %6,%%ymm6 \n" "vbroadcastf128 %7,%%ymm7 \n" - "sub %1,%2 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x40(%0),%%ymm2 \n" - "vmovdqu 0x60(%0),%%ymm3 \n" - "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" - "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" - "vpavgb 0x40(%0,%4,1),%%ymm2,%%ymm2 \n" - "vpavgb 0x60(%0,%4,1),%%ymm3,%%ymm3 \n" - "lea 0x80(%0),%0 \n" - "vshufps $0x88,%%ymm1,%%ymm0,%%ymm4 \n" - "vshufps $0xdd,%%ymm1,%%ymm0,%%ymm0 \n" - "vpavgb %%ymm4,%%ymm0,%%ymm0 \n" - "vshufps $0x88,%%ymm3,%%ymm2,%%ymm4 \n" - "vshufps $0xdd,%%ymm3,%%ymm2,%%ymm2 \n" - "vpavgb %%ymm4,%%ymm2,%%ymm2 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 
0x20(%0),%%ymm1 \n" + "vmovdqu 0x40(%0),%%ymm2 \n" + "vmovdqu 0x60(%0),%%ymm3 \n" + "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" + "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" + "vpavgb 0x40(%0,%4,1),%%ymm2,%%ymm2 \n" + "vpavgb 0x60(%0,%4,1),%%ymm3,%%ymm3 \n" + "lea 0x80(%0),%0 \n" + "vshufps $0x88,%%ymm1,%%ymm0,%%ymm4 \n" + "vshufps $0xdd,%%ymm1,%%ymm0,%%ymm0 \n" + "vpavgb %%ymm4,%%ymm0,%%ymm0 \n" + "vshufps $0x88,%%ymm3,%%ymm2,%%ymm4 \n" + "vshufps $0xdd,%%ymm3,%%ymm2,%%ymm2 \n" + "vpavgb %%ymm4,%%ymm2,%%ymm2 \n" - "vpmaddubsw %%ymm7,%%ymm0,%%ymm1 \n" - "vpmaddubsw %%ymm7,%%ymm2,%%ymm3 \n" - "vpmaddubsw %%ymm6,%%ymm0,%%ymm0 \n" - "vpmaddubsw %%ymm6,%%ymm2,%%ymm2 \n" - "vphaddw %%ymm3,%%ymm1,%%ymm1 \n" - "vphaddw %%ymm2,%%ymm0,%%ymm0 \n" - "vpsraw $0x8,%%ymm1,%%ymm1 \n" - "vpsraw $0x8,%%ymm0,%%ymm0 \n" - "vpacksswb %%ymm0,%%ymm1,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpshufb %8,%%ymm0,%%ymm0 \n" - "vpaddb %%ymm5,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm7,%%ymm0,%%ymm1 \n" + "vpmaddubsw %%ymm7,%%ymm2,%%ymm3 \n" + "vpmaddubsw %%ymm6,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm6,%%ymm2,%%ymm2 \n" + "vphaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vphaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpsraw $0x8,%%ymm1,%%ymm1 \n" + "vpsraw $0x8,%%ymm0,%%ymm0 \n" + "vpacksswb %%ymm0,%%ymm1,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpshufb %8,%%ymm0,%%ymm0 \n" + "vpaddb %%ymm5,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm0,(%1) \n" "vextractf128 $0x1,%%ymm0,0x0(%1,%2,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_abgr0), // %0 "+r"(dst_u), // %1 @@ -1492,45 +1494,45 @@ void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, "vbroadcastf128 %5,%%ymm5 \n" "vbroadcastf128 %6,%%ymm6 \n" "vbroadcastf128 %7,%%ymm7 \n" - "sub %1,%2 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x40(%0),%%ymm2 \n" - "vmovdqu 0x60(%0),%%ymm3 \n" - "vpavgb 
0x00(%0,%4,1),%%ymm0,%%ymm0 \n" - "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" - "vpavgb 0x40(%0,%4,1),%%ymm2,%%ymm2 \n" - "vpavgb 0x60(%0,%4,1),%%ymm3,%%ymm3 \n" - "lea 0x80(%0),%0 \n" - "vshufps $0x88,%%ymm1,%%ymm0,%%ymm4 \n" - "vshufps $0xdd,%%ymm1,%%ymm0,%%ymm0 \n" - "vpavgb %%ymm4,%%ymm0,%%ymm0 \n" - "vshufps $0x88,%%ymm3,%%ymm2,%%ymm4 \n" - "vshufps $0xdd,%%ymm3,%%ymm2,%%ymm2 \n" - "vpavgb %%ymm4,%%ymm2,%%ymm2 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x40(%0),%%ymm2 \n" + "vmovdqu 0x60(%0),%%ymm3 \n" + "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" + "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" + "vpavgb 0x40(%0,%4,1),%%ymm2,%%ymm2 \n" + "vpavgb 0x60(%0,%4,1),%%ymm3,%%ymm3 \n" + "lea 0x80(%0),%0 \n" + "vshufps $0x88,%%ymm1,%%ymm0,%%ymm4 \n" + "vshufps $0xdd,%%ymm1,%%ymm0,%%ymm0 \n" + "vpavgb %%ymm4,%%ymm0,%%ymm0 \n" + "vshufps $0x88,%%ymm3,%%ymm2,%%ymm4 \n" + "vshufps $0xdd,%%ymm3,%%ymm2,%%ymm2 \n" + "vpavgb %%ymm4,%%ymm2,%%ymm2 \n" - "vpmaddubsw %%ymm7,%%ymm0,%%ymm1 \n" - "vpmaddubsw %%ymm7,%%ymm2,%%ymm3 \n" - "vpmaddubsw %%ymm6,%%ymm0,%%ymm0 \n" - "vpmaddubsw %%ymm6,%%ymm2,%%ymm2 \n" - "vphaddw %%ymm3,%%ymm1,%%ymm1 \n" - "vphaddw %%ymm2,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm5,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm5,%%ymm1,%%ymm1 \n" - "vpsraw $0x8,%%ymm1,%%ymm1 \n" - "vpsraw $0x8,%%ymm0,%%ymm0 \n" - "vpacksswb %%ymm0,%%ymm1,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpshufb %8,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm7,%%ymm0,%%ymm1 \n" + "vpmaddubsw %%ymm7,%%ymm2,%%ymm3 \n" + "vpmaddubsw %%ymm6,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm6,%%ymm2,%%ymm2 \n" + "vphaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vphaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm5,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm5,%%ymm1,%%ymm1 \n" + "vpsraw $0x8,%%ymm1,%%ymm1 \n" + "vpsraw $0x8,%%ymm0,%%ymm0 \n" + "vpacksswb %%ymm0,%%ymm1,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpshufb %8,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm0,(%1) \n" "vextractf128 $0x1,%%ymm0,0x0(%1,%2,1) \n" - "lea 
0x10(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb0), // %0 "+r"(dst_u), // %1 @@ -1553,53 +1555,53 @@ void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, uint8_t* dst_v, int width) { asm volatile( - "movdqa %5,%%xmm3 \n" - "movdqa %6,%%xmm4 \n" - "movdqa %7,%%xmm5 \n" - "sub %1,%2 \n" + "movdqa %5,%%xmm3 \n" + "movdqa %6,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x10(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x20(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqu 0x30(%0),%%xmm6 \n" - "movdqu 0x30(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x10(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x20(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "movdqu 0x30(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" - "lea 0x40(%0),%0 \n" - "movdqa %%xmm0,%%xmm7 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqa %%xmm2,%%xmm7 \n" - "shufps $0x88,%%xmm6,%%xmm2 \n" - "shufps $0xdd,%%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm6 \n" - "phaddw %%xmm2,%%xmm0 \n" - "phaddw %%xmm6,%%xmm1 \n" - "paddw %%xmm5,%%xmm0 \n" - "paddw %%xmm5,%%xmm1 \n" - "psraw $0x8,%%xmm0 \n" - "psraw $0x8,%%xmm1 \n" - "packsswb %%xmm1,%%xmm0 \n" - "movlps %%xmm0,(%1) \n" - "movhps %%xmm0,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "lea 0x40(%0),%0 \n" + "movdqa 
%%xmm0,%%xmm7 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqa %%xmm2,%%xmm7 \n" + "shufps $0x88,%%xmm6,%%xmm2 \n" + "shufps $0xdd,%%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "phaddw %%xmm2,%%xmm0 \n" + "phaddw %%xmm6,%%xmm1 \n" + "paddw %%xmm5,%%xmm0 \n" + "paddw %%xmm5,%%xmm1 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm1 \n" + "packsswb %%xmm1,%%xmm0 \n" + "movlps %%xmm0,(%1) \n" + "movhps %%xmm0,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_argb0), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1618,47 +1620,47 @@ void ARGBToUV444Row_SSSE3(const uint8_t* src_argb, uint8_t* dst_v, int width) { asm volatile( - "movdqa %4,%%xmm3 \n" - "movdqa %5,%%xmm4 \n" - "movdqa %6,%%xmm5 \n" - "sub %1,%2 \n" + "movdqa %4,%%xmm3 \n" + "movdqa %5,%%xmm4 \n" + "movdqa %6,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x30(%0),%%xmm6 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm1 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm4,%%xmm6 \n" - "phaddw %%xmm1,%%xmm0 \n" - "phaddw %%xmm6,%%xmm2 \n" - "psraw $0x8,%%xmm0 \n" - "psraw $0x8,%%xmm2 \n" - "packsswb %%xmm2,%%xmm0 \n" - "paddb %%xmm5,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x30(%0),%%xmm6 \n" - "pmaddubsw %%xmm3,%%xmm0 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm2 \n" - "pmaddubsw %%xmm3,%%xmm6 \n" - "phaddw %%xmm1,%%xmm0 \n" - "phaddw %%xmm6,%%xmm2 \n" - "psraw $0x8,%%xmm0 \n" - "psraw $0x8,%%xmm2 \n" - "packsswb %%xmm2,%%xmm0 \n" - "paddb %%xmm5,%%xmm0 \n" - "lea 0x40(%0),%0 \n" - "movdqu %%xmm0,0x00(%1,%2,1) \n" - "lea 
0x10(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm1 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm4,%%xmm6 \n" + "phaddw %%xmm1,%%xmm0 \n" + "phaddw %%xmm6,%%xmm2 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm2 \n" + "packsswb %%xmm2,%%xmm0 \n" + "paddb %%xmm5,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "pmaddubsw %%xmm3,%%xmm0 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm2 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "phaddw %%xmm1,%%xmm0 \n" + "phaddw %%xmm6,%%xmm2 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm2 \n" + "packsswb %%xmm2,%%xmm0 \n" + "paddb %%xmm5,%%xmm0 \n" + "lea 0x40(%0),%0 \n" + "movdqu %%xmm0,0x00(%1,%2,1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1672,9 +1674,9 @@ void ARGBToUV444Row_SSSE3(const uint8_t* src_argb, void BGRAToYRow_SSSE3(const uint8_t* src_bgra, uint8_t* dst_y, int width) { asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" - "movdqa %5,%%xmm7 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" + "movdqa %5,%%xmm7 \n" LABELALIGN RGBTOY(xmm7) : "+r"(src_bgra), // %0 @@ -1693,52 +1695,52 @@ void BGRAToUVRow_SSSE3(const uint8_t* src_bgra0, uint8_t* dst_v, int width) { asm volatile( - "movdqa %5,%%xmm3 \n" - "movdqa %6,%%xmm4 \n" - "movdqa %7,%%xmm5 \n" - "sub %1,%2 \n" + "movdqa %5,%%xmm3 \n" + "movdqa %6,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x10(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x20(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - 
"movdqu 0x30(%0),%%xmm6 \n" - "movdqu 0x30(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x10(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x20(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "movdqu 0x30(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" - "lea 0x40(%0),%0 \n" - "movdqa %%xmm0,%%xmm7 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqa %%xmm2,%%xmm7 \n" - "shufps $0x88,%%xmm6,%%xmm2 \n" - "shufps $0xdd,%%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm6 \n" - "phaddw %%xmm2,%%xmm0 \n" - "phaddw %%xmm6,%%xmm1 \n" - "psraw $0x8,%%xmm0 \n" - "psraw $0x8,%%xmm1 \n" - "packsswb %%xmm1,%%xmm0 \n" - "paddb %%xmm5,%%xmm0 \n" - "movlps %%xmm0,(%1) \n" - "movhps %%xmm0,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "lea 0x40(%0),%0 \n" + "movdqa %%xmm0,%%xmm7 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqa %%xmm2,%%xmm7 \n" + "shufps $0x88,%%xmm6,%%xmm2 \n" + "shufps $0xdd,%%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "phaddw %%xmm2,%%xmm0 \n" + "phaddw %%xmm6,%%xmm1 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm1 \n" + "packsswb %%xmm1,%%xmm0 \n" + "paddb %%xmm5,%%xmm0 \n" + "movlps %%xmm0,(%1) \n" + "movhps %%xmm0,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_bgra0), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1752,9 +1754,9 @@ void 
BGRAToUVRow_SSSE3(const uint8_t* src_bgra0, void ABGRToYRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width) { asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" - "movdqa %5,%%xmm7 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" + "movdqa %5,%%xmm7 \n" LABELALIGN RGBTOY(xmm7) : "+r"(src_abgr), // %0 @@ -1769,9 +1771,9 @@ void ABGRToYRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width) { void RGBAToYRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width) { asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" - "movdqa %5,%%xmm7 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" + "movdqa %5,%%xmm7 \n" LABELALIGN RGBTOY(xmm7) : "+r"(src_rgba), // %0 @@ -1790,52 +1792,52 @@ void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0, uint8_t* dst_v, int width) { asm volatile( - "movdqa %5,%%xmm3 \n" - "movdqa %6,%%xmm4 \n" - "movdqa %7,%%xmm5 \n" - "sub %1,%2 \n" + "movdqa %5,%%xmm3 \n" + "movdqa %6,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x10(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x20(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqu 0x30(%0),%%xmm6 \n" - "movdqu 0x30(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x10(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x20(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "movdqu 0x30(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" - "lea 0x40(%0),%0 \n" - "movdqa %%xmm0,%%xmm7 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqa %%xmm2,%%xmm7 \n" - "shufps $0x88,%%xmm6,%%xmm2 \n" - "shufps $0xdd,%%xmm6,%%xmm7 \n" - "pavgb 
%%xmm7,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm6 \n" - "phaddw %%xmm2,%%xmm0 \n" - "phaddw %%xmm6,%%xmm1 \n" - "psraw $0x8,%%xmm0 \n" - "psraw $0x8,%%xmm1 \n" - "packsswb %%xmm1,%%xmm0 \n" - "paddb %%xmm5,%%xmm0 \n" - "movlps %%xmm0,(%1) \n" - "movhps %%xmm0,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "lea 0x40(%0),%0 \n" + "movdqa %%xmm0,%%xmm7 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqa %%xmm2,%%xmm7 \n" + "shufps $0x88,%%xmm6,%%xmm2 \n" + "shufps $0xdd,%%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "phaddw %%xmm2,%%xmm0 \n" + "phaddw %%xmm6,%%xmm1 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm1 \n" + "packsswb %%xmm1,%%xmm0 \n" + "paddb %%xmm5,%%xmm0 \n" + "movlps %%xmm0,(%1) \n" + "movhps %%xmm0,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_abgr0), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1853,52 +1855,52 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, uint8_t* dst_v, int width) { asm volatile( - "movdqa %5,%%xmm3 \n" - "movdqa %6,%%xmm4 \n" - "movdqa %7,%%xmm5 \n" - "sub %1,%2 \n" + "movdqa %5,%%xmm3 \n" + "movdqa %6,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x10(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x20(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqu 0x30(%0),%%xmm6 \n" - "movdqu 0x30(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 
0x00(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x10(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x20(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "movdqu 0x30(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" - "lea 0x40(%0),%0 \n" - "movdqa %%xmm0,%%xmm7 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqa %%xmm2,%%xmm7 \n" - "shufps $0x88,%%xmm6,%%xmm2 \n" - "shufps $0xdd,%%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm6 \n" - "phaddw %%xmm2,%%xmm0 \n" - "phaddw %%xmm6,%%xmm1 \n" - "psraw $0x8,%%xmm0 \n" - "psraw $0x8,%%xmm1 \n" - "packsswb %%xmm1,%%xmm0 \n" - "paddb %%xmm5,%%xmm0 \n" - "movlps %%xmm0,(%1) \n" - "movhps %%xmm0,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "lea 0x40(%0),%0 \n" + "movdqa %%xmm0,%%xmm7 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqa %%xmm2,%%xmm7 \n" + "shufps $0x88,%%xmm6,%%xmm2 \n" + "shufps $0xdd,%%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "phaddw %%xmm2,%%xmm0 \n" + "phaddw %%xmm6,%%xmm1 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm1 \n" + "packsswb %%xmm1,%%xmm0 \n" + "paddb %%xmm5,%%xmm0 \n" + "movlps %%xmm0,(%1) \n" + "movhps %%xmm0,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_rgba0), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -2115,16 +2117,16 @@ void OMITFP I444ToARGBRow_SSSE3(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) - "sub 
%[u_buf],%[v_buf] \n" - "pcmpeqb %%xmm5,%%xmm5 \n" + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV444 YUVTORGB(yuvconstants) STOREARGB - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2144,27 +2146,27 @@ void OMITFP I422ToRGB24Row_SSSE3(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) - "movdqa %[kShuffleMaskARGBToRGB24_0],%%xmm5 \n" - "movdqa %[kShuffleMaskARGBToRGB24],%%xmm6 \n" - "sub %[u_buf],%[v_buf] \n" + "movdqa %[kShuffleMaskARGBToRGB24_0],%%xmm5 \n" + "movdqa %[kShuffleMaskARGBToRGB24],%%xmm6 \n" + "sub %[u_buf],%[v_buf] \n" LABELALIGN - "1: \n" + "1: \n" READYUV422 YUVTORGB(yuvconstants) - "punpcklbw %%xmm1,%%xmm0 \n" - "punpcklbw %%xmm2,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklwd %%xmm2,%%xmm0 \n" - "punpckhwd %%xmm2,%%xmm1 \n" - "pshufb %%xmm5,%%xmm0 \n" - "pshufb %%xmm6,%%xmm1 \n" - "palignr $0xc,%%xmm0,%%xmm1 \n" - "movq %%xmm0,(%[dst_rgb24]) \n" - "movdqu %%xmm1,0x8(%[dst_rgb24]) \n" - "lea 0x18(%[dst_rgb24]),%[dst_rgb24] \n" - "subl $0x8,%[width] \n" - "jg 1b \n" + "punpcklbw %%xmm1,%%xmm0 \n" + "punpcklbw %%xmm2,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklwd %%xmm2,%%xmm0 \n" + "punpckhwd %%xmm2,%%xmm1 \n" + "pshufb %%xmm5,%%xmm0 \n" + "pshufb %%xmm6,%%xmm1 \n" + "palignr $0xc,%%xmm0,%%xmm1 \n" + "movq %%xmm0,(%[dst_rgb24]) \n" + "movdqu %%xmm1,0x8(%[dst_rgb24]) \n" + "lea 0x18(%[dst_rgb24]),%[dst_rgb24] \n" + "subl $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2190,16 +2192,16 @@ void OMITFP I422ToARGBRow_SSSE3(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "pcmpeqb %%xmm5,%%xmm5 \n" + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV422 
YUVTORGB(yuvconstants) STOREARGB - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2219,21 +2221,21 @@ void OMITFP I422ToAR30Row_SSSE3(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "pcmpeqb %%xmm5,%%xmm5 \n" // AR30 constants - "psrlw $14,%%xmm5 \n" - "psllw $4,%%xmm5 \n" // 2 alpha bits - "pxor %%xmm6,%%xmm6 \n" - "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min - "psrlw $6,%%xmm7 \n" // 1023 for max + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" // AR30 constants + "psrlw $14,%%xmm5 \n" + "psllw $4,%%xmm5 \n" // 2 alpha bits + "pxor %%xmm6,%%xmm6 \n" + "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min + "psrlw $6,%%xmm7 \n" // 1023 for max LABELALIGN - "1: \n" + "1: \n" READYUV422 YUVTORGB16(yuvconstants) STOREAR30 - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2254,16 +2256,16 @@ void OMITFP I210ToARGBRow_SSSE3(const uint16_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "pcmpeqb %%xmm5,%%xmm5 \n" + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV210 YUVTORGB(yuvconstants) STOREARGB - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2284,21 +2286,21 @@ void OMITFP I210ToAR30Row_SSSE3(const uint16_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $14,%%xmm5 \n" - "psllw $4,%%xmm5 \n" // 2 alpha bits - "pxor %%xmm6,%%xmm6 \n" - "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min - "psrlw $6,%%xmm7 \n" // 1023 for max + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb 
%%xmm5,%%xmm5 \n" + "psrlw $14,%%xmm5 \n" + "psllw $4,%%xmm5 \n" // 2 alpha bits + "pxor %%xmm6,%%xmm6 \n" + "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min + "psrlw $6,%%xmm7 \n" // 1023 for max LABELALIGN - "1: \n" + "1: \n" READYUV210 YUVTORGB16(yuvconstants) STOREAR30 - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2321,15 +2323,15 @@ void OMITFP I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf, // clang-format off asm volatile ( YUVTORGB_SETUP(yuvconstants) - "sub %[u_buf],%[v_buf] \n" + "sub %[u_buf],%[v_buf] \n" LABELALIGN - "1: \n" + "1: \n" READYUVA422 YUVTORGB(yuvconstants) STOREARGB - "subl $0x8,%[width] \n" - "jg 1b \n" + "subl $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2356,15 +2358,15 @@ void OMITFP NV12ToARGBRow_SSSE3(const uint8_t* y_buf, // clang-format off asm volatile ( YUVTORGB_SETUP(yuvconstants) - "pcmpeqb %%xmm5,%%xmm5 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READNV12 YUVTORGB(yuvconstants) STOREARGB - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [uv_buf]"+r"(uv_buf), // %[uv_buf] [dst_argb]"+r"(dst_argb), // %[dst_argb] @@ -2384,15 +2386,15 @@ void OMITFP NV21ToARGBRow_SSSE3(const uint8_t* y_buf, // clang-format off asm volatile ( YUVTORGB_SETUP(yuvconstants) - "pcmpeqb %%xmm5,%%xmm5 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READNV21 YUVTORGB(yuvconstants) STOREARGB - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [vu_buf]"+r"(vu_buf), // %[vu_buf] [dst_argb]"+r"(dst_argb), // %[dst_argb] @@ -2412,15 +2414,15 @@ void OMITFP YUY2ToARGBRow_SSSE3(const uint8_t* yuy2_buf, // clang-format off asm volatile ( YUVTORGB_SETUP(yuvconstants) - "pcmpeqb %%xmm5,%%xmm5 \n" + "pcmpeqb 
%%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUY2 YUVTORGB(yuvconstants) STOREARGB - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [yuy2_buf]"+r"(yuy2_buf), // %[yuy2_buf] [dst_argb]"+r"(dst_argb), // %[dst_argb] [width]"+rm"(width) // %[width] @@ -2440,15 +2442,15 @@ void OMITFP UYVYToARGBRow_SSSE3(const uint8_t* uyvy_buf, // clang-format off asm volatile ( YUVTORGB_SETUP(yuvconstants) - "pcmpeqb %%xmm5,%%xmm5 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READUYVY YUVTORGB(yuvconstants) STOREARGB - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [uyvy_buf]"+r"(uyvy_buf), // %[uyvy_buf] [dst_argb]"+r"(dst_argb), // %[dst_argb] [width]"+rm"(width) // %[width] @@ -2469,16 +2471,16 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "pcmpeqb %%xmm5,%%xmm5 \n" + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV422 YUVTORGB(yuvconstants) STORERGBA - "sub $0x8,%[width] \n" - "jg 1b \n" + "sub $0x8,%[width] \n" + "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2693,17 +2695,17 @@ void OMITFP I444ToARGBRow_AVX2(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV444_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" - "vzeroupper \n" + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2727,18 +2729,18 @@ void OMITFP I422ToARGBRow_AVX2(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - 
"vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV422_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" + "sub $0x10,%[width] \n" + "jg 1b \n" - "vzeroupper \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2762,23 +2764,23 @@ void OMITFP I422ToAR30Row_AVX2(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants - "vpsrlw $14,%%ymm5,%%ymm5 \n" - "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits - "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min - "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max - "vpsrlw $6,%%ymm7,%%ymm7 \n" + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants + "vpsrlw $14,%%ymm5,%%ymm5 \n" + "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits + "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min + "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max + "vpsrlw $6,%%ymm7,%%ymm7 \n" LABELALIGN - "1: \n" + "1: \n" READYUV422_AVX2 YUVTORGB16_AVX2(yuvconstants) STOREAR30_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" + "sub $0x10,%[width] \n" + "jg 1b \n" - "vzeroupper \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2802,18 +2804,18 @@ void OMITFP I210ToARGBRow_AVX2(const uint16_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV210_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" + "sub $0x10,%[width] \n" + "jg 1b \n" - "vzeroupper \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // 
%[v_buf] @@ -2837,23 +2839,23 @@ void OMITFP I210ToAR30Row_AVX2(const uint16_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants - "vpsrlw $14,%%ymm5,%%ymm5 \n" - "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits - "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min - "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max - "vpsrlw $6,%%ymm7,%%ymm7 \n" + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants + "vpsrlw $14,%%ymm5,%%ymm5 \n" + "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits + "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min + "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max + "vpsrlw $6,%%ymm7,%%ymm7 \n" LABELALIGN - "1: \n" + "1: \n" READYUV210_AVX2 YUVTORGB16_AVX2(yuvconstants) STOREAR30_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" + "sub $0x10,%[width] \n" + "jg 1b \n" - "vzeroupper \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2879,16 +2881,16 @@ void OMITFP I422AlphaToARGBRow_AVX2(const uint8_t* y_buf, // clang-format off asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "sub %[u_buf],%[v_buf] \n" + "sub %[u_buf],%[v_buf] \n" LABELALIGN - "1: \n" + "1: \n" READYUVA422_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "subl $0x10,%[width] \n" - "jg 1b \n" - "vzeroupper \n" + "subl $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] @@ -2918,11 +2920,11 @@ void OMITFP I422ToRGBARow_AVX2(const uint8_t* y_buf, int width) { asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "sub %[u_buf],%[v_buf] \n" - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUV422_AVX2 YUVTORGB_AVX2(yuvconstants) @@ -2962,16 +2964,16 @@ void OMITFP NV12ToARGBRow_AVX2(const uint8_t* y_buf, // clang-format off 
asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READNV12_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" - "vzeroupper \n" + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [uv_buf]"+r"(uv_buf), // %[uv_buf] [dst_argb]"+r"(dst_argb), // %[dst_argb] @@ -2995,16 +2997,16 @@ void OMITFP NV21ToARGBRow_AVX2(const uint8_t* y_buf, // clang-format off asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READNV21_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" - "vzeroupper \n" + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" : [y_buf]"+r"(y_buf), // %[y_buf] [vu_buf]"+r"(vu_buf), // %[vu_buf] [dst_argb]"+r"(dst_argb), // %[dst_argb] @@ -3028,16 +3030,16 @@ void OMITFP YUY2ToARGBRow_AVX2(const uint8_t* yuy2_buf, // clang-format off asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READYUY2_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" - "vzeroupper \n" + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" : [yuy2_buf]"+r"(yuy2_buf), // %[yuy2_buf] [dst_argb]"+r"(dst_argb), // %[dst_argb] [width]"+rm"(width) // %[width] @@ -3061,16 +3063,16 @@ void OMITFP UYVYToARGBRow_AVX2(const uint8_t* uyvy_buf, // clang-format off asm volatile ( YUVTORGB_SETUP_AVX2(yuvconstants) - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN - "1: \n" + "1: \n" READUYVY_AVX2 YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 - "sub $0x10,%[width] \n" - "jg 1b \n" - "vzeroupper \n" + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" : [uyvy_buf]"+r"(uyvy_buf), // %[uyvy_buf] 
[dst_argb]"+r"(dst_argb), // %[dst_argb] [width]"+rm"(width) // %[width] @@ -3085,17 +3087,15 @@ void OMITFP UYVYToARGBRow_AVX2(const uint8_t* uyvy_buf, #endif // HAS_UYVYTOARGBROW_AVX2 #ifdef HAS_I400TOARGBROW_SSE2 -void I400ToARGBRow_SSE2(const uint8_t* y_buf, uint8_t* dst_argb, int width) { +void I400ToARGBRow_SSE2(const uint8_t* y_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { asm volatile( - "mov $0x4a354a35,%%eax \n" // 4a35 = 18997 = 1.164 - "movd %%eax,%%xmm2 \n" - "pshufd $0x0,%%xmm2,%%xmm2 \n" - "mov $0x04880488,%%eax \n" // 0488 = 1160 = 1.164 * - // 16 - "movd %%eax,%%xmm3 \n" - "pshufd $0x0,%%xmm3,%%xmm3 \n" - "pcmpeqb %%xmm4,%%xmm4 \n" - "pslld $0x18,%%xmm4 \n" + "movdqa 192(%3),%%xmm2 \n" // yg = 18997 = 1.164 + "movdqa 224(%3),%%xmm3 \n" // ygb = 1160 = 1.164 * 16 + "pcmpeqb %%xmm4,%%xmm4 \n" // 0xff000000 + "pslld $0x18,%%xmm4 \n" LABELALIGN "1: \n" @@ -3104,8 +3104,8 @@ void I400ToARGBRow_SSE2(const uint8_t* y_buf, uint8_t* dst_argb, int width) { "lea 0x8(%0),%0 \n" "punpcklbw %%xmm0,%%xmm0 \n" "pmulhuw %%xmm2,%%xmm0 \n" - "psubusw %%xmm3,%%xmm0 \n" - "psrlw $6, %%xmm0 \n" + "paddsw %%xmm3,%%xmm0 \n" + "psraw $6, %%xmm0 \n" "packuswb %%xmm0,%%xmm0 \n" // Step 2: Weave into ARGB @@ -3121,28 +3121,26 @@ void I400ToARGBRow_SSE2(const uint8_t* y_buf, uint8_t* dst_argb, int width) { "sub $0x8,%2 \n" "jg 1b \n" - : "+r"(y_buf), // %0 - "+r"(dst_argb), // %1 - "+rm"(width) // %2 - : - : "memory", "cc", "eax", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); + : "+r"(y_buf), // %0 + "+r"(dst_argb), // %1 + "+rm"(width) // %2 + : "r"(yuvconstants) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); } #endif // HAS_I400TOARGBROW_SSE2 #ifdef HAS_I400TOARGBROW_AVX2 // 16 pixels of Y converted to 16 pixels of ARGB (64 bytes). // note: vpunpcklbw mutates and vpackuswb unmutates. 
-void I400ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* dst_argb, int width) { +void I400ToARGBRow_AVX2(const uint8_t* y_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { asm volatile( - "mov $0x4a354a35,%%eax \n" // 0488 = 1160 = 1.164 * - // 16 - "vmovd %%eax,%%xmm2 \n" - "vbroadcastss %%xmm2,%%ymm2 \n" - "mov $0x4880488,%%eax \n" // 4a35 = 18997 = 1.164 - "vmovd %%eax,%%xmm3 \n" - "vbroadcastss %%xmm3,%%ymm3 \n" - "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" - "vpslld $0x18,%%ymm4,%%ymm4 \n" + "vmovdqa 192(%3),%%ymm2 \n" // yg = 18997 = 1.164 + "vmovdqa 224(%3),%%ymm3 \n" // ygb = -1160 = 1.164*16 + "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" // 0xff000000 + "vpslld $0x18,%%ymm4,%%ymm4 \n" LABELALIGN "1: \n" @@ -3152,8 +3150,8 @@ void I400ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* dst_argb, int width) { "vpermq $0xd8,%%ymm0,%%ymm0 \n" "vpunpcklbw %%ymm0,%%ymm0,%%ymm0 \n" "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" - "vpsubusw %%ymm3,%%ymm0,%%ymm0 \n" - "vpsrlw $0x6,%%ymm0,%%ymm0 \n" + "vpaddsw %%ymm3,%%ymm0,%%ymm0 \n" + "vpsraw $0x6,%%ymm0,%%ymm0 \n" "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" "vpunpcklbw %%ymm0,%%ymm0,%%ymm1 \n" "vpermq $0xd8,%%ymm1,%%ymm1 \n" @@ -3163,15 +3161,15 @@ void I400ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* dst_argb, int width) { "vpor %%ymm4,%%ymm1,%%ymm1 \n" "vmovdqu %%ymm0,(%1) \n" "vmovdqu %%ymm1,0x20(%1) \n" - "lea 0x40(%1),%1 \n" + "lea 0x40(%1),%1 \n" "sub $0x10,%2 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(y_buf), // %0 - "+r"(dst_argb), // %1 - "+rm"(width) // %2 - : - : "memory", "cc", "eax", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); + : "+r"(y_buf), // %0 + "+r"(dst_argb), // %1 + "+rm"(width) // %2 + : "r"(yuvconstants) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); } #endif // HAS_I400TOARGBROW_AVX2 @@ -3184,16 +3182,16 @@ void MirrorRow_SSSE3(const uint8_t* src, uint8_t* dst, int width) { intptr_t temp_width = (intptr_t)(width); asm volatile( - "movdqa %3,%%xmm5 \n" + "movdqa %3,%%xmm5 \n" LABELALIGN 
"1: \n" - "movdqu -0x10(%0,%2,1),%%xmm0 \n" - "pshufb %%xmm5,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu -0x10(%0,%2,1),%%xmm0 \n" + "pshufb %%xmm5,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(temp_width) // %2 @@ -3211,13 +3209,13 @@ void MirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width) { LABELALIGN "1: \n" - "vmovdqu -0x20(%0,%2,1),%%ymm0 \n" - "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" - "vpermq $0x4e,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu -0x20(%0,%2,1),%%ymm0 \n" + "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" + "vpermq $0x4e,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -3228,55 +3226,154 @@ void MirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width) { #endif // HAS_MIRRORROW_AVX2 #ifdef HAS_MIRRORUVROW_SSSE3 -// Shuffle table for reversing the bytes of UV channels. -static const uvec8 kShuffleMirrorUV = {14u, 12u, 10u, 8u, 6u, 4u, 2u, 0u, - 15u, 13u, 11u, 9u, 7u, 5u, 3u, 1u}; -void MirrorUVRow_SSSE3(const uint8_t* src, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +// Shuffle table for reversing the UV. 
+static const uvec8 kShuffleMirrorUV = {14u, 15u, 12u, 13u, 10u, 11u, 8u, 9u, + 6u, 7u, 4u, 5u, 2u, 3u, 0u, 1u}; + +void MirrorUVRow_SSSE3(const uint8_t* src_uv, uint8_t* dst_uv, int width) { intptr_t temp_width = (intptr_t)(width); asm volatile( - "movdqa %4,%%xmm1 \n" - "lea -0x10(%0,%3,2),%0 \n" - "sub %1,%2 \n" + + "movdqa %3,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "lea -0x10(%0),%0 \n" - "pshufb %%xmm1,%%xmm0 \n" - "movlpd %%xmm0,(%1) \n" - "movhpd %%xmm0,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $8,%3 \n" - "jg 1b \n" - : "+r"(src), // %0 - "+r"(dst_u), // %1 - "+r"(dst_v), // %2 - "+r"(temp_width) // %3 - : "m"(kShuffleMirrorUV) // %4 - : "memory", "cc", "xmm0", "xmm1"); + "movdqu -0x10(%0,%2,2),%%xmm0 \n" + "pshufb %%xmm5,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_uv), // %1 + "+r"(temp_width) // %2 + : "m"(kShuffleMirrorUV) // %3 + : "memory", "cc", "xmm0", "xmm5"); } #endif // HAS_MIRRORUVROW_SSSE3 +#ifdef HAS_MIRRORUVROW_AVX2 +void MirrorUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_uv, int width) { + intptr_t temp_width = (intptr_t)(width); + asm volatile( + + "vbroadcastf128 %3,%%ymm5 \n" + + LABELALIGN + "1: \n" + "vmovdqu -0x20(%0,%2,2),%%ymm0 \n" + "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" + "vpermq $0x4e,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_uv), // %0 + "+r"(dst_uv), // %1 + "+r"(temp_width) // %2 + : "m"(kShuffleMirrorUV) // %3 + : "memory", "cc", "xmm0", "xmm5"); +} +#endif // HAS_MIRRORUVROW_AVX2 + +#ifdef HAS_MIRRORSPLITUVROW_SSSE3 +// Shuffle table for reversing the bytes of UV channels. 
+static const uvec8 kShuffleMirrorSplitUV = {14u, 12u, 10u, 8u, 6u, 4u, 2u, 0u, + 15u, 13u, 11u, 9u, 7u, 5u, 3u, 1u}; +void MirrorSplitUVRow_SSSE3(const uint8_t* src, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + intptr_t temp_width = (intptr_t)(width); + asm volatile( + "movdqa %4,%%xmm1 \n" + "lea -0x10(%0,%3,2),%0 \n" + "sub %1,%2 \n" + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "lea -0x10(%0),%0 \n" + "pshufb %%xmm1,%%xmm0 \n" + "movlpd %%xmm0,(%1) \n" + "movhpd %%xmm0,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $8,%3 \n" + "jg 1b \n" + : "+r"(src), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(temp_width) // %3 + : "m"(kShuffleMirrorSplitUV) // %4 + : "memory", "cc", "xmm0", "xmm1"); +} +#endif // HAS_MIRRORSPLITUVROW_SSSE3 + +#ifdef HAS_RGB24MIRRORROW_SSSE3 + +// Shuffle first 5 pixels to last 5 mirrored. first byte zero +static const uvec8 kShuffleMirrorRGB0 = {128u, 12u, 13u, 14u, 9u, 10u, 11u, 6u, + 7u, 8u, 3u, 4u, 5u, 0u, 1u, 2u}; + +// Shuffle last 5 pixels to first 5 mirrored. 
last byte zero +static const uvec8 kShuffleMirrorRGB1 = { + 13u, 14u, 15u, 10u, 11u, 12u, 7u, 8u, 9u, 4u, 5u, 6u, 1u, 2u, 3u, 128u}; + +// Shuffle 5 pixels at a time (15 bytes) +void RGB24MirrorRow_SSSE3(const uint8_t* src_rgb24, + uint8_t* dst_rgb24, + int width) { + intptr_t temp_width = (intptr_t)(width); + src_rgb24 += width * 3 - 48; + asm volatile( + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" // first 5 + "movdqu 15(%0),%%xmm1 \n" // next 5 + "movdqu 30(%0),%%xmm2 \n" // next 5 + "movdqu 32(%0),%%xmm3 \n" // last 1 special + "pshufb %%xmm4,%%xmm0 \n" + "pshufb %%xmm4,%%xmm1 \n" + "pshufb %%xmm4,%%xmm2 \n" + "pshufb %%xmm5,%%xmm3 \n" + "lea -0x30(%0),%0 \n" + "movdqu %%xmm0,32(%1) \n" // last 5 + "movdqu %%xmm1,17(%1) \n" // next 5 + "movdqu %%xmm2,2(%1) \n" // next 5 + "movlpd %%xmm3,0(%1) \n" // first 1 + "lea 0x30(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_rgb24), // %0 + "+r"(dst_rgb24), // %1 + "+r"(temp_width) // %2 + : "m"(kShuffleMirrorRGB0), // %3 + "m"(kShuffleMirrorRGB1) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif // HAS_RGB24MIRRORROW_SSSE3 + #ifdef HAS_ARGBMIRRORROW_SSE2 void ARGBMirrorRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { intptr_t temp_width = (intptr_t)(width); asm volatile( - "lea -0x10(%0,%2,4),%0 \n" + "lea -0x10(%0,%2,4),%0 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "pshufd $0x1b,%%xmm0,%%xmm0 \n" - "lea -0x10(%0),%0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "pshufd $0x1b,%%xmm0,%%xmm0 \n" + "lea -0x10(%0),%0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(temp_width) // %2 @@ -3292,15 +3389,15 @@ void ARGBMirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width) { intptr_t temp_width = (intptr_t)(width); asm volatile( - "vmovdqu %3,%%ymm5 \n" + "vmovdqu 
%3,%%ymm5 \n" LABELALIGN "1: \n" - "vpermd -0x20(%0,%2,4),%%ymm5,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "vpermd -0x20(%0,%2,4),%%ymm5,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -3316,28 +3413,28 @@ void SplitUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_v, int width) { asm volatile( - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpsrlw $0x8,%%ymm5,%%ymm5 \n" - "sub %1,%2 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsrlw $0x8,%%ymm5,%%ymm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpsrlw $0x8,%%ymm0,%%ymm2 \n" - "vpsrlw $0x8,%%ymm1,%%ymm3 \n" - "vpand %%ymm5,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm3,%%ymm2,%%ymm2 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm2,%%ymm2 \n" - "vmovdqu %%ymm0,(%1) \n" - "vmovdqu %%ymm2,0x00(%1,%2,1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpsrlw $0x8,%%ymm0,%%ymm2 \n" + "vpsrlw $0x8,%%ymm1,%%ymm3 \n" + "vpand %%ymm5,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm3,%%ymm2,%%ymm2 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm2,%%ymm2 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm2,0x00(%1,%2,1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_uv), // %0 "+r"(dst_u), // %1 @@ -3354,28 +3451,28 @@ void SplitUVRow_SSE2(const uint8_t* src_uv, uint8_t* dst_v, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $0x8,%%xmm5 \n" - "sub %1,%2 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $0x8,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 
\n" - "movdqa %%xmm0,%%xmm2 \n" - "movdqa %%xmm1,%%xmm3 \n" - "pand %%xmm5,%%xmm0 \n" - "pand %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "psrlw $0x8,%%xmm2 \n" - "psrlw $0x8,%%xmm3 \n" - "packuswb %%xmm3,%%xmm2 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm2,0x00(%1,%2,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + "pand %%xmm5,%%xmm0 \n" + "pand %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "psrlw $0x8,%%xmm2 \n" + "psrlw $0x8,%%xmm3 \n" + "packuswb %%xmm3,%%xmm2 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm2,0x00(%1,%2,1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_uv), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -3392,22 +3489,22 @@ void MergeUVRow_AVX2(const uint8_t* src_u, int width) { asm volatile( - "sub %0,%1 \n" + "sub %0,%1 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x00(%0,%1,1),%%ymm1 \n" - "lea 0x20(%0),%0 \n" - "vpunpcklbw %%ymm1,%%ymm0,%%ymm2 \n" - "vpunpckhbw %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x00(%0,%1,1),%%ymm1 \n" + "lea 0x20(%0),%0 \n" + "vpunpcklbw %%ymm1,%%ymm0,%%ymm2 \n" + "vpunpckhbw %%ymm1,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm2,(%2) \n" "vextractf128 $0x0,%%ymm0,0x10(%2) \n" "vextractf128 $0x1,%%ymm2,0x20(%2) \n" "vextractf128 $0x1,%%ymm0,0x30(%2) \n" - "lea 0x40(%2),%2 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x40(%2),%2 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_u), // %0 "+r"(src_v), // %1 @@ -3425,21 +3522,21 @@ void MergeUVRow_SSE2(const uint8_t* src_u, int width) { asm volatile( - "sub %0,%1 \n" + "sub %0,%1 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%1,1),%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "movdqa %%xmm0,%%xmm2 \n" - "punpcklbw %%xmm1,%%xmm0 \n" - "punpckhbw %%xmm1,%%xmm2 \n" - "movdqu %%xmm0,(%2) \n" - "movdqu %%xmm2,0x10(%2) \n" - "lea 
0x20(%2),%2 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%1,1),%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "movdqa %%xmm0,%%xmm2 \n" + "punpcklbw %%xmm1,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm2 \n" + "movdqu %%xmm0,(%2) \n" + "movdqu %%xmm2,0x10(%2) \n" + "lea 0x20(%2),%2 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_u), // %0 "+r"(src_v), // %1 "+r"(dst_uv), // %2 @@ -3462,30 +3559,30 @@ void MergeUVRow_16_AVX2(const uint16_t* src_u, int width) { // clang-format off asm volatile ( - "vmovd %4,%%xmm3 \n" - "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n" - "vbroadcastss %%xmm3,%%ymm3 \n" - "sub %0,%1 \n" + "vmovd %4,%%xmm3 \n" + "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n" + "vbroadcastss %%xmm3,%%ymm3 \n" + "sub %0,%1 \n" // 16 pixels per loop. LABELALIGN - "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu (%0,%1,1),%%ymm1 \n" - "add $0x20,%0 \n" + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu (%0,%1,1),%%ymm1 \n" + "add $0x20,%0 \n" - "vpmullw %%ymm3,%%ymm0,%%ymm0 \n" - "vpmullw %%ymm3,%%ymm1,%%ymm1 \n" - "vpunpcklwd %%ymm1,%%ymm0,%%ymm2 \n" // mutates - "vpunpckhwd %%ymm1,%%ymm0,%%ymm0 \n" - "vextractf128 $0x0,%%ymm2,(%2) \n" - "vextractf128 $0x0,%%ymm0,0x10(%2) \n" - "vextractf128 $0x1,%%ymm2,0x20(%2) \n" - "vextractf128 $0x1,%%ymm0,0x30(%2) \n" - "add $0x40,%2 \n" - "sub $0x10,%3 \n" - "jg 1b \n" - "vzeroupper \n" + "vpmullw %%ymm3,%%ymm0,%%ymm0 \n" + "vpmullw %%ymm3,%%ymm1,%%ymm1 \n" + "vpunpcklwd %%ymm1,%%ymm0,%%ymm2 \n" // mutates + "vpunpckhwd %%ymm1,%%ymm0,%%ymm0 \n" + "vextractf128 $0x0,%%ymm2,(%2) \n" + "vextractf128 $0x0,%%ymm0,0x10(%2) \n" + "vextractf128 $0x1,%%ymm2,0x20(%2) \n" + "vextractf128 $0x1,%%ymm0,0x30(%2) \n" + "add $0x40,%2 \n" + "sub $0x10,%3 \n" + "jg 1b \n" + "vzeroupper \n" : "+r"(src_u), // %0 "+r"(src_v), // %1 "+r"(dst_uv), // %2 @@ -3508,24 +3605,24 @@ void MultiplyRow_16_AVX2(const uint16_t* src_y, int width) { // clang-format off asm volatile ( - "vmovd %3,%%xmm3 \n" - "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n" - "vbroadcastss 
%%xmm3,%%ymm3 \n" - "sub %0,%1 \n" + "vmovd %3,%%xmm3 \n" + "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n" + "vbroadcastss %%xmm3,%%ymm3 \n" + "sub %0,%1 \n" // 16 pixels per loop. LABELALIGN - "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vpmullw %%ymm3,%%ymm0,%%ymm0 \n" - "vpmullw %%ymm3,%%ymm1,%%ymm1 \n" - "vmovdqu %%ymm0,(%0,%1) \n" - "vmovdqu %%ymm1,0x20(%0,%1) \n" - "add $0x40,%0 \n" - "sub $0x20,%2 \n" - "jg 1b \n" - "vzeroupper \n" + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vpmullw %%ymm3,%%ymm0,%%ymm0 \n" + "vpmullw %%ymm3,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm0,(%0,%1) \n" + "vmovdqu %%ymm1,0x20(%0,%1) \n" + "add $0x40,%0 \n" + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" : "+r"(src_y), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -3546,23 +3643,23 @@ void Convert16To8Row_SSSE3(const uint16_t* src_y, int width) { // clang-format off asm volatile ( - "movd %3,%%xmm2 \n" - "punpcklwd %%xmm2,%%xmm2 \n" - "pshufd $0x0,%%xmm2,%%xmm2 \n" + "movd %3,%%xmm2 \n" + "punpcklwd %%xmm2,%%xmm2 \n" + "pshufd $0x0,%%xmm2,%%xmm2 \n" // 32 pixels per loop. 
LABELALIGN - "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "add $0x20,%0 \n" - "pmulhuw %%xmm2,%%xmm0 \n" - "pmulhuw %%xmm2,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "add $0x10,%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "add $0x20,%0 \n" + "pmulhuw %%xmm2,%%xmm0 \n" + "pmulhuw %%xmm2,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "add $0x10,%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_y), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -3578,25 +3675,25 @@ void Convert16To8Row_AVX2(const uint16_t* src_y, int width) { // clang-format off asm volatile ( - "vmovd %3,%%xmm2 \n" - "vpunpcklwd %%xmm2,%%xmm2,%%xmm2 \n" - "vbroadcastss %%xmm2,%%ymm2 \n" + "vmovd %3,%%xmm2 \n" + "vpunpcklwd %%xmm2,%%xmm2,%%xmm2 \n" + "vbroadcastss %%xmm2,%%ymm2 \n" // 32 pixels per loop. LABELALIGN - "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "add $0x40,%0 \n" - "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" - "vpmulhuw %%ymm2,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" // mutates - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "add $0x20,%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" - "vzeroupper \n" + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "add $0x40,%0 \n" + "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" + "vpmulhuw %%ymm2,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" // mutates + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "add $0x20,%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" : "+r"(src_y), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -3617,25 +3714,25 @@ void Convert8To16Row_SSE2(const uint8_t* src_y, int width) { // clang-format off asm volatile ( - "movd %3,%%xmm2 \n" - "punpcklwd %%xmm2,%%xmm2 \n" - "pshufd $0x0,%%xmm2,%%xmm2 \n" + "movd %3,%%xmm2 \n" + "punpcklwd %%xmm2,%%xmm2 \n" + "pshufd $0x0,%%xmm2,%%xmm2 \n" // 32 pixels per loop. 
LABELALIGN - "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "punpckhbw %%xmm1,%%xmm1 \n" - "add $0x10,%0 \n" - "pmulhuw %%xmm2,%%xmm0 \n" - "pmulhuw %%xmm2,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm1,0x10(%1) \n" - "add $0x20,%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm1 \n" + "add $0x10,%0 \n" + "pmulhuw %%xmm2,%%xmm0 \n" + "pmulhuw %%xmm2,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "add $0x20,%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_y), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -3651,26 +3748,26 @@ void Convert8To16Row_AVX2(const uint8_t* src_y, int width) { // clang-format off asm volatile ( - "vmovd %3,%%xmm2 \n" - "vpunpcklwd %%xmm2,%%xmm2,%%xmm2 \n" - "vbroadcastss %%xmm2,%%ymm2 \n" + "vmovd %3,%%xmm2 \n" + "vpunpcklwd %%xmm2,%%xmm2,%%xmm2 \n" + "vbroadcastss %%xmm2,%%ymm2 \n" // 32 pixels per loop. 
LABELALIGN - "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "add $0x20,%0 \n" - "vpunpckhbw %%ymm0,%%ymm0,%%ymm1 \n" - "vpunpcklbw %%ymm0,%%ymm0,%%ymm0 \n" - "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" - "vpmulhuw %%ymm2,%%ymm1,%%ymm1 \n" - "vmovdqu %%ymm0,(%1) \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "add $0x40,%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" - "vzeroupper \n" + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "add $0x20,%0 \n" + "vpunpckhbw %%ymm0,%%ymm0,%%ymm1 \n" + "vpunpcklbw %%ymm0,%%ymm0,%%ymm0 \n" + "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" + "vpmulhuw %%ymm2,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "add $0x40,%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" : "+r"(src_y), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -3722,41 +3819,41 @@ void SplitRGBRow_SSSE3(const uint8_t* src_rgb, LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "pshufb %5, %%xmm0 \n" - "pshufb %6, %%xmm1 \n" - "pshufb %7, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "pshufb %5, %%xmm0 \n" + "pshufb %6, %%xmm1 \n" + "pshufb %7, %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "pshufb %8, %%xmm0 \n" - "pshufb %9, %%xmm1 \n" - "pshufb %10, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "pshufb %8, %%xmm0 \n" + "pshufb %9, %%xmm1 \n" + "pshufb %10, %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 
\n" - "movdqu 0x20(%0),%%xmm2 \n" - "pshufb %11, %%xmm0 \n" - "pshufb %12, %%xmm1 \n" - "pshufb %13, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%3) \n" - "lea 0x10(%3),%3 \n" - "lea 0x30(%0),%0 \n" - "sub $0x10,%4 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "pshufb %11, %%xmm0 \n" + "pshufb %12, %%xmm1 \n" + "pshufb %13, %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%3) \n" + "lea 0x10(%3),%3 \n" + "lea 0x30(%0),%0 \n" + "sub $0x10,%4 \n" + "jg 1b \n" : "+r"(src_rgb), // %0 "+r"(dst_r), // %1 "+r"(dst_g), // %2 @@ -3817,42 +3914,42 @@ void MergeRGBRow_SSSE3(const uint8_t* src_r, LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu (%1),%%xmm1 \n" - "movdqu (%2),%%xmm2 \n" - "pshufb %5, %%xmm0 \n" - "pshufb %6, %%xmm1 \n" - "pshufb %7, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%3) \n" + "movdqu (%0),%%xmm0 \n" + "movdqu (%1),%%xmm1 \n" + "movdqu (%2),%%xmm2 \n" + "pshufb %5, %%xmm0 \n" + "pshufb %6, %%xmm1 \n" + "pshufb %7, %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%3) \n" - "movdqu (%0),%%xmm0 \n" - "movdqu (%1),%%xmm1 \n" - "movdqu (%2),%%xmm2 \n" - "pshufb %8, %%xmm0 \n" - "pshufb %9, %%xmm1 \n" - "pshufb %10, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,16(%3) \n" + "movdqu (%0),%%xmm0 \n" + "movdqu (%1),%%xmm1 \n" + "movdqu (%2),%%xmm2 \n" + "pshufb %8, %%xmm0 \n" + "pshufb %9, %%xmm1 \n" + "pshufb %10, %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,16(%3) \n" - "movdqu (%0),%%xmm0 \n" - "movdqu (%1),%%xmm1 \n" - "movdqu (%2),%%xmm2 \n" - "pshufb %11, %%xmm0 \n" - "pshufb %12, %%xmm1 \n" - "pshufb %13, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,32(%3) \n" + "movdqu (%0),%%xmm0 \n" + "movdqu (%1),%%xmm1 \n" + "movdqu (%2),%%xmm2 \n" + "pshufb %11, %%xmm0 \n" + 
"pshufb %12, %%xmm1 \n" + "pshufb %13, %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,32(%3) \n" - "lea 0x10(%0),%0 \n" - "lea 0x10(%1),%1 \n" - "lea 0x10(%2),%2 \n" - "lea 0x30(%3),%3 \n" - "sub $0x10,%4 \n" - "jg 1b \n" + "lea 0x10(%0),%0 \n" + "lea 0x10(%1),%1 \n" + "lea 0x10(%2),%2 \n" + "lea 0x30(%3),%3 \n" + "sub $0x10,%4 \n" + "jg 1b \n" : "+r"(src_r), // %0 "+r"(src_g), // %1 "+r"(src_b), // %2 @@ -3874,35 +3971,35 @@ void MergeRGBRow_SSSE3(const uint8_t* src_r, #ifdef HAS_COPYROW_SSE2 void CopyRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "test $0xf,%0 \n" - "jne 2f \n" - "test $0xf,%1 \n" - "jne 2f \n" + "test $0xf,%0 \n" + "jne 2f \n" + "test $0xf,%1 \n" + "jne 2f \n" LABELALIGN "1: \n" - "movdqa (%0),%%xmm0 \n" - "movdqa 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "movdqa %%xmm0,(%1) \n" - "movdqa %%xmm1,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" - "jmp 9f \n" + "movdqa (%0),%%xmm0 \n" + "movdqa 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "movdqa %%xmm0,(%1) \n" + "movdqa %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" + "jmp 9f \n" LABELALIGN "2: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm1,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 2b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 2b \n" - LABELALIGN "9: \n" + LABELALIGN "9: \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -3917,14 +4014,14 @@ void CopyRow_AVX(const uint8_t* src, uint8_t* dst, int width) { LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vmovdqu %%ymm0,(%1) \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x40,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" 
+ "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x40,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -3939,7 +4036,7 @@ void CopyRow_ERMS(const uint8_t* src, uint8_t* dst, int width) { size_t width_tmp = (size_t)(width); asm volatile( - "rep movsb \n" + "rep movsb \n" : "+S"(src), // %0 "+D"(dst), // %1 "+c"(width_tmp) // %2 @@ -3952,29 +4049,29 @@ void CopyRow_ERMS(const uint8_t* src, uint8_t* dst, int width) { // width in pixels void ARGBCopyAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "pcmpeqb %%xmm0,%%xmm0 \n" - "pslld $0x18,%%xmm0 \n" - "pcmpeqb %%xmm1,%%xmm1 \n" - "psrld $0x8,%%xmm1 \n" + "pcmpeqb %%xmm0,%%xmm0 \n" + "pslld $0x18,%%xmm0 \n" + "pcmpeqb %%xmm1,%%xmm1 \n" + "psrld $0x8,%%xmm1 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm2 \n" - "movdqu 0x10(%0),%%xmm3 \n" - "lea 0x20(%0),%0 \n" - "movdqu (%1),%%xmm4 \n" - "movdqu 0x10(%1),%%xmm5 \n" - "pand %%xmm0,%%xmm2 \n" - "pand %%xmm0,%%xmm3 \n" - "pand %%xmm1,%%xmm4 \n" - "pand %%xmm1,%%xmm5 \n" - "por %%xmm4,%%xmm2 \n" - "por %%xmm5,%%xmm3 \n" - "movdqu %%xmm2,(%1) \n" - "movdqu %%xmm3,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm2 \n" + "movdqu 0x10(%0),%%xmm3 \n" + "lea 0x20(%0),%0 \n" + "movdqu (%1),%%xmm4 \n" + "movdqu 0x10(%1),%%xmm5 \n" + "pand %%xmm0,%%xmm2 \n" + "pand %%xmm0,%%xmm3 \n" + "pand %%xmm1,%%xmm4 \n" + "pand %%xmm1,%%xmm5 \n" + "por %%xmm4,%%xmm2 \n" + "por %%xmm5,%%xmm3 \n" + "movdqu %%xmm2,(%1) \n" + "movdqu %%xmm3,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -3987,21 +4084,21 @@ void ARGBCopyAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { // width in pixels void ARGBCopyAlphaRow_AVX2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "vpcmpeqb %%ymm0,%%ymm0,%%ymm0 \n" - "vpsrld 
$0x8,%%ymm0,%%ymm0 \n" + "vpcmpeqb %%ymm0,%%ymm0,%%ymm0 \n" + "vpsrld $0x8,%%ymm0,%%ymm0 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm1 \n" - "vmovdqu 0x20(%0),%%ymm2 \n" - "lea 0x40(%0),%0 \n" - "vpblendvb %%ymm0,(%1),%%ymm1,%%ymm1 \n" - "vpblendvb %%ymm0,0x20(%1),%%ymm2,%%ymm2 \n" - "vmovdqu %%ymm1,(%1) \n" - "vmovdqu %%ymm2,0x20(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm1 \n" + "vmovdqu 0x20(%0),%%ymm2 \n" + "lea 0x40(%0),%0 \n" + "vpblendvb %%ymm0,(%1),%%ymm1,%%ymm1 \n" + "vpblendvb %%ymm0,0x20(%1),%%ymm2,%%ymm2 \n" + "vmovdqu %%ymm1,(%1) \n" + "vmovdqu %%ymm2,0x20(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -4020,17 +4117,17 @@ void ARGBExtractAlphaRow_SSE2(const uint8_t* src_argb, LABELALIGN "1: \n" - "movdqu (%0), %%xmm0 \n" - "movdqu 0x10(%0), %%xmm1 \n" - "lea 0x20(%0), %0 \n" - "psrld $0x18, %%xmm0 \n" - "psrld $0x18, %%xmm1 \n" - "packssdw %%xmm1, %%xmm0 \n" - "packuswb %%xmm0, %%xmm0 \n" - "movq %%xmm0,(%1) \n" - "lea 0x8(%1), %1 \n" - "sub $0x8, %2 \n" - "jg 1b \n" + "movdqu (%0), %%xmm0 \n" + "movdqu 0x10(%0), %%xmm1 \n" + "lea 0x20(%0), %0 \n" + "psrld $0x18, %%xmm0 \n" + "psrld $0x18, %%xmm1 \n" + "packssdw %%xmm1, %%xmm0 \n" + "packuswb %%xmm0, %%xmm0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1), %1 \n" + "sub $0x8, %2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_a), // %1 "+rm"(width) // %2 @@ -4048,28 +4145,28 @@ void ARGBExtractAlphaRow_AVX2(const uint8_t* src_argb, uint8_t* dst_a, int width) { asm volatile( - "vmovdqa %3,%%ymm4 \n" + "vmovdqa %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" LABELALIGN "1: \n" - "vmovdqu (%0), %%ymm0 \n" - "vmovdqu 0x20(%0), %%ymm1 \n" - "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" // vpsrld $0x18, %%ymm0 - "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" - "vmovdqu 0x40(%0), %%ymm2 \n" - "vmovdqu 0x60(%0), %%ymm3 \n" - "lea 0x80(%0), %0 \n" - "vpackssdw %%ymm1, %%ymm0, %%ymm0 \n" // mutates - "vpshufb %%ymm5,%%ymm2,%%ymm2 
\n" - "vpshufb %%ymm5,%%ymm3,%%ymm3 \n" - "vpackssdw %%ymm3, %%ymm2, %%ymm2 \n" // mutates - "vpackuswb %%ymm2,%%ymm0,%%ymm0 \n" // mutates. - "vpermd %%ymm0,%%ymm4,%%ymm0 \n" // unmutate. - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20, %2 \n" - "jg 1b \n" + "vmovdqu (%0), %%ymm0 \n" + "vmovdqu 0x20(%0), %%ymm1 \n" + "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" // vpsrld $0x18, %%ymm0 + "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" + "vmovdqu 0x40(%0), %%ymm2 \n" + "vmovdqu 0x60(%0), %%ymm3 \n" + "lea 0x80(%0), %0 \n" + "vpackssdw %%ymm1, %%ymm0, %%ymm0 \n" // mutates + "vpshufb %%ymm5,%%ymm2,%%ymm2 \n" + "vpshufb %%ymm5,%%ymm3,%%ymm3 \n" + "vpackssdw %%ymm3, %%ymm2, %%ymm2 \n" // mutates + "vpackuswb %%ymm2,%%ymm0,%%ymm0 \n" // mutates. + "vpermd %%ymm0,%%ymm4,%%ymm0 \n" // unmutate. + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20, %2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb), // %0 "+r"(dst_a), // %1 @@ -4084,31 +4181,31 @@ void ARGBExtractAlphaRow_AVX2(const uint8_t* src_argb, // width in pixels void ARGBCopyYToAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "pcmpeqb %%xmm0,%%xmm0 \n" - "pslld $0x18,%%xmm0 \n" - "pcmpeqb %%xmm1,%%xmm1 \n" - "psrld $0x8,%%xmm1 \n" + "pcmpeqb %%xmm0,%%xmm0 \n" + "pslld $0x18,%%xmm0 \n" + "pcmpeqb %%xmm1,%%xmm1 \n" + "psrld $0x8,%%xmm1 \n" LABELALIGN "1: \n" - "movq (%0),%%xmm2 \n" - "lea 0x8(%0),%0 \n" - "punpcklbw %%xmm2,%%xmm2 \n" - "punpckhwd %%xmm2,%%xmm3 \n" - "punpcklwd %%xmm2,%%xmm2 \n" - "movdqu (%1),%%xmm4 \n" - "movdqu 0x10(%1),%%xmm5 \n" - "pand %%xmm0,%%xmm2 \n" - "pand %%xmm0,%%xmm3 \n" - "pand %%xmm1,%%xmm4 \n" - "pand %%xmm1,%%xmm5 \n" - "por %%xmm4,%%xmm2 \n" - "por %%xmm5,%%xmm3 \n" - "movdqu %%xmm2,(%1) \n" - "movdqu %%xmm3,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movq (%0),%%xmm2 \n" + "lea 0x8(%0),%0 \n" + "punpcklbw %%xmm2,%%xmm2 \n" + "punpckhwd %%xmm2,%%xmm3 \n" + "punpcklwd %%xmm2,%%xmm2 \n" + "movdqu (%1),%%xmm4 \n" + "movdqu 
0x10(%1),%%xmm5 \n" + "pand %%xmm0,%%xmm2 \n" + "pand %%xmm0,%%xmm3 \n" + "pand %%xmm1,%%xmm4 \n" + "pand %%xmm1,%%xmm5 \n" + "por %%xmm4,%%xmm2 \n" + "por %%xmm5,%%xmm3 \n" + "movdqu %%xmm2,(%1) \n" + "movdqu %%xmm3,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -4121,23 +4218,23 @@ void ARGBCopyYToAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { // width in pixels void ARGBCopyYToAlphaRow_AVX2(const uint8_t* src, uint8_t* dst, int width) { asm volatile( - "vpcmpeqb %%ymm0,%%ymm0,%%ymm0 \n" - "vpsrld $0x8,%%ymm0,%%ymm0 \n" + "vpcmpeqb %%ymm0,%%ymm0,%%ymm0 \n" + "vpsrld $0x8,%%ymm0,%%ymm0 \n" LABELALIGN "1: \n" - "vpmovzxbd (%0),%%ymm1 \n" - "vpmovzxbd 0x8(%0),%%ymm2 \n" - "lea 0x10(%0),%0 \n" - "vpslld $0x18,%%ymm1,%%ymm1 \n" - "vpslld $0x18,%%ymm2,%%ymm2 \n" - "vpblendvb %%ymm0,(%1),%%ymm1,%%ymm1 \n" - "vpblendvb %%ymm0,0x20(%1),%%ymm2,%%ymm2 \n" - "vmovdqu %%ymm1,(%1) \n" - "vmovdqu %%ymm2,0x20(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "vpmovzxbd (%0),%%ymm1 \n" + "vpmovzxbd 0x8(%0),%%ymm2 \n" + "lea 0x10(%0),%0 \n" + "vpslld $0x18,%%ymm1,%%ymm1 \n" + "vpslld $0x18,%%ymm2,%%ymm2 \n" + "vpblendvb %%ymm0,(%1),%%ymm1,%%ymm1 \n" + "vpblendvb %%ymm0,0x20(%1),%%ymm2,%%ymm2 \n" + "vmovdqu %%ymm1,(%1) \n" + "vmovdqu %%ymm2,0x20(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 @@ -4153,7 +4250,7 @@ void SetRow_X86(uint8_t* dst, uint8_t v8, int width) { const uint32_t v32 = v8 * 0x01010101u; // Duplicate byte to all bytes. 
asm volatile( - "rep stosl \n" + "rep stosl \n" : "+D"(dst), // %0 "+c"(width_tmp) // %1 : "a"(v32) // %2 @@ -4164,7 +4261,7 @@ void SetRow_ERMS(uint8_t* dst, uint8_t v8, int width) { size_t width_tmp = (size_t)(width); asm volatile( - "rep stosb \n" + "rep stosb \n" : "+D"(dst), // %0 "+c"(width_tmp) // %1 : "a"(v8) // %2 @@ -4175,7 +4272,7 @@ void ARGBSetRow_X86(uint8_t* dst_argb, uint32_t v32, int width) { size_t width_tmp = (size_t)(width); asm volatile( - "rep stosl \n" + "rep stosl \n" : "+D"(dst_argb), // %0 "+c"(width_tmp) // %1 : "a"(v32) // %2 @@ -4186,21 +4283,21 @@ void ARGBSetRow_X86(uint8_t* dst_argb, uint32_t v32, int width) { #ifdef HAS_YUY2TOYROW_SSE2 void YUY2ToYRow_SSE2(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $0x8,%%xmm5 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $0x8,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "pand %%xmm5,%%xmm0 \n" - "pand %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "pand %%xmm5,%%xmm0 \n" + "pand %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -4214,32 +4311,32 @@ void YUY2ToUVRow_SSE2(const uint8_t* src_yuy2, uint8_t* dst_v, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $0x8,%%xmm5 \n" - "sub %1,%2 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $0x8,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x00(%0,%4,1),%%xmm2 \n" - "movdqu 0x10(%0,%4,1),%%xmm3 \n" - "lea 0x20(%0),%0 \n" - "pavgb %%xmm2,%%xmm0 \n" - "pavgb %%xmm3,%%xmm1 \n" - "psrlw $0x8,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - 
"movdqa %%xmm0,%%xmm1 \n" - "pand %%xmm5,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm1 \n" - "movq %%xmm0,(%1) \n" - "movq %%xmm1,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x00(%0,%4,1),%%xmm2 \n" + "movdqu 0x10(%0,%4,1),%%xmm3 \n" + "lea 0x20(%0),%0 \n" + "pavgb %%xmm2,%%xmm0 \n" + "pavgb %%xmm3,%%xmm1 \n" + "psrlw $0x8,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pand %%xmm5,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm1 \n" + "movq %%xmm0,(%1) \n" + "movq %%xmm1,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -4253,28 +4350,28 @@ void YUY2ToUV422Row_SSE2(const uint8_t* src_yuy2, uint8_t* dst_v, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $0x8,%%xmm5 \n" - "sub %1,%2 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $0x8,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "psrlw $0x8,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "pand %%xmm5,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm1 \n" - "movq %%xmm0,(%1) \n" - "movq %%xmm1,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "psrlw $0x8,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pand %%xmm5,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm1 \n" + "movq %%xmm0,(%1) \n" + "movq %%xmm1,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_u), // %1 "+r"(dst_v), 
// %2 @@ -4288,16 +4385,16 @@ void UYVYToYRow_SSE2(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "psrlw $0x8,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "psrlw $0x8,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -4311,32 +4408,32 @@ void UYVYToUVRow_SSE2(const uint8_t* src_uyvy, uint8_t* dst_v, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $0x8,%%xmm5 \n" - "sub %1,%2 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $0x8,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x00(%0,%4,1),%%xmm2 \n" - "movdqu 0x10(%0,%4,1),%%xmm3 \n" - "lea 0x20(%0),%0 \n" - "pavgb %%xmm2,%%xmm0 \n" - "pavgb %%xmm3,%%xmm1 \n" - "pand %%xmm5,%%xmm0 \n" - "pand %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "pand %%xmm5,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm1 \n" - "movq %%xmm0,(%1) \n" - "movq %%xmm1,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x00(%0,%4,1),%%xmm2 \n" + "movdqu 0x10(%0,%4,1),%%xmm3 \n" + "lea 0x20(%0),%0 \n" + "pavgb %%xmm2,%%xmm0 \n" + "pavgb %%xmm3,%%xmm1 \n" + "pand %%xmm5,%%xmm0 \n" + "pand %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pand %%xmm5,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm1 \n" + "movq %%xmm0,(%1) \n" + "movq %%xmm1,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 
\n" + "jg 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -4350,28 +4447,28 @@ void UYVYToUV422Row_SSE2(const uint8_t* src_uyvy, uint8_t* dst_v, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $0x8,%%xmm5 \n" - "sub %1,%2 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $0x8,%%xmm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "pand %%xmm5,%%xmm0 \n" - "pand %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "pand %%xmm5,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm1 \n" - "movq %%xmm0,(%1) \n" - "movq %%xmm1,0x00(%1,%2,1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "pand %%xmm5,%%xmm0 \n" + "pand %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pand %%xmm5,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm1 \n" + "movq %%xmm0,(%1) \n" + "movq %%xmm1,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -4384,22 +4481,22 @@ void UYVYToUV422Row_SSE2(const uint8_t* src_uyvy, #ifdef HAS_YUY2TOYROW_AVX2 void YUY2ToYRow_AVX2(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { asm volatile( - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpsrlw $0x8,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsrlw $0x8,%%ymm5,%%ymm5 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpand %%ymm5,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpand 
%%ymm5,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_yuy2), // %0 "+r"(dst_y), // %1 @@ -4414,32 +4511,32 @@ void YUY2ToUVRow_AVX2(const uint8_t* src_yuy2, uint8_t* dst_v, int width) { asm volatile( - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpsrlw $0x8,%%ymm5,%%ymm5 \n" - "sub %1,%2 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsrlw $0x8,%%ymm5,%%ymm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" - "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm0,%%ymm1 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm1,%%ymm1 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" + "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm0,%%ymm1 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm1,(%1) \n" "vextractf128 $0x0,%%ymm0,0x00(%1,%2,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_yuy2), // %0 "+r"(dst_u), // %1 @@ -4454,30 +4551,30 @@ void YUY2ToUV422Row_AVX2(const uint8_t* src_yuy2, uint8_t* dst_v, int width) { asm volatile( - "vpcmpeqb 
%%ymm5,%%ymm5,%%ymm5 \n" - "vpsrlw $0x8,%%ymm5,%%ymm5 \n" - "sub %1,%2 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsrlw $0x8,%%ymm5,%%ymm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm0,%%ymm1 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm1,%%ymm1 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm0,%%ymm1 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm1,(%1) \n" "vextractf128 $0x0,%%ymm0,0x00(%1,%2,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_yuy2), // %0 "+r"(dst_u), // %1 @@ -4492,17 +4589,17 @@ void UYVYToYRow_AVX2(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" 
+ "jg 1b \n" "vzeroupper \n" : "+r"(src_uyvy), // %0 "+r"(dst_y), // %1 @@ -4516,32 +4613,32 @@ void UYVYToUVRow_AVX2(const uint8_t* src_uyvy, uint8_t* dst_v, int width) { asm volatile( - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpsrlw $0x8,%%ymm5,%%ymm5 \n" - "sub %1,%2 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsrlw $0x8,%%ymm5,%%ymm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" - "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpand %%ymm5,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm0,%%ymm1 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm1,%%ymm1 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" + "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpand %%ymm5,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm0,%%ymm1 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm1,(%1) \n" "vextractf128 $0x0,%%ymm0,0x00(%1,%2,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_uyvy), // %0 "+r"(dst_u), // %1 @@ -4556,30 +4653,30 @@ void UYVYToUV422Row_AVX2(const uint8_t* src_uyvy, uint8_t* dst_v, int width) { asm volatile( - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpsrlw $0x8,%%ymm5,%%ymm5 \n" - "sub %1,%2 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsrlw $0x8,%%ymm5,%%ymm5 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - 
"vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpand %%ymm5,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm0,%%ymm1 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm1,%%ymm1 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpand %%ymm5,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm0,%%ymm1 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm1,(%1) \n" "vextractf128 $0x0,%%ymm0,0x00(%1,%2,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x20,%3 \n" - "jg 1b \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_uyvy), // %0 "+r"(dst_u), // %1 @@ -4601,71 +4698,71 @@ void ARGBBlendRow_SSSE3(const uint8_t* src_argb0, uint8_t* dst_argb, int width) { asm volatile( - "pcmpeqb %%xmm7,%%xmm7 \n" - "psrlw $0xf,%%xmm7 \n" - "pcmpeqb %%xmm6,%%xmm6 \n" - "psrlw $0x8,%%xmm6 \n" - "pcmpeqb %%xmm5,%%xmm5 \n" - "psllw $0x8,%%xmm5 \n" - "pcmpeqb %%xmm4,%%xmm4 \n" - "pslld $0x18,%%xmm4 \n" - "sub $0x4,%3 \n" - "jl 49f \n" + "pcmpeqb %%xmm7,%%xmm7 \n" + "psrlw $0xf,%%xmm7 \n" + "pcmpeqb %%xmm6,%%xmm6 \n" + "psrlw $0x8,%%xmm6 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psllw $0x8,%%xmm5 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "pslld $0x18,%%xmm4 \n" + "sub $0x4,%3 \n" + "jl 49f \n" // 4 pixel loop. 
LABELALIGN "40: \n" - "movdqu (%0),%%xmm3 \n" - "lea 0x10(%0),%0 \n" - "movdqa %%xmm3,%%xmm0 \n" - "pxor %%xmm4,%%xmm3 \n" - "movdqu (%1),%%xmm2 \n" - "pshufb %4,%%xmm3 \n" - "pand %%xmm6,%%xmm2 \n" - "paddw %%xmm7,%%xmm3 \n" - "pmullw %%xmm3,%%xmm2 \n" - "movdqu (%1),%%xmm1 \n" - "lea 0x10(%1),%1 \n" - "psrlw $0x8,%%xmm1 \n" - "por %%xmm4,%%xmm0 \n" - "pmullw %%xmm3,%%xmm1 \n" - "psrlw $0x8,%%xmm2 \n" - "paddusb %%xmm2,%%xmm0 \n" - "pand %%xmm5,%%xmm1 \n" - "paddusb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jge 40b \n" + "movdqu (%0),%%xmm3 \n" + "lea 0x10(%0),%0 \n" + "movdqa %%xmm3,%%xmm0 \n" + "pxor %%xmm4,%%xmm3 \n" + "movdqu (%1),%%xmm2 \n" + "pshufb %4,%%xmm3 \n" + "pand %%xmm6,%%xmm2 \n" + "paddw %%xmm7,%%xmm3 \n" + "pmullw %%xmm3,%%xmm2 \n" + "movdqu (%1),%%xmm1 \n" + "lea 0x10(%1),%1 \n" + "psrlw $0x8,%%xmm1 \n" + "por %%xmm4,%%xmm0 \n" + "pmullw %%xmm3,%%xmm1 \n" + "psrlw $0x8,%%xmm2 \n" + "paddusb %%xmm2,%%xmm0 \n" + "pand %%xmm5,%%xmm1 \n" + "paddusb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jge 40b \n" "49: \n" - "add $0x3,%3 \n" - "jl 99f \n" + "add $0x3,%3 \n" + "jl 99f \n" // 1 pixel loop. 
"91: \n" - "movd (%0),%%xmm3 \n" - "lea 0x4(%0),%0 \n" - "movdqa %%xmm3,%%xmm0 \n" - "pxor %%xmm4,%%xmm3 \n" - "movd (%1),%%xmm2 \n" - "pshufb %4,%%xmm3 \n" - "pand %%xmm6,%%xmm2 \n" - "paddw %%xmm7,%%xmm3 \n" - "pmullw %%xmm3,%%xmm2 \n" - "movd (%1),%%xmm1 \n" - "lea 0x4(%1),%1 \n" - "psrlw $0x8,%%xmm1 \n" - "por %%xmm4,%%xmm0 \n" - "pmullw %%xmm3,%%xmm1 \n" - "psrlw $0x8,%%xmm2 \n" - "paddusb %%xmm2,%%xmm0 \n" - "pand %%xmm5,%%xmm1 \n" - "paddusb %%xmm1,%%xmm0 \n" - "movd %%xmm0,(%2) \n" - "lea 0x4(%2),%2 \n" - "sub $0x1,%3 \n" - "jge 91b \n" + "movd (%0),%%xmm3 \n" + "lea 0x4(%0),%0 \n" + "movdqa %%xmm3,%%xmm0 \n" + "pxor %%xmm4,%%xmm3 \n" + "movd (%1),%%xmm2 \n" + "pshufb %4,%%xmm3 \n" + "pand %%xmm6,%%xmm2 \n" + "paddw %%xmm7,%%xmm3 \n" + "pmullw %%xmm3,%%xmm2 \n" + "movd (%1),%%xmm1 \n" + "lea 0x4(%1),%1 \n" + "psrlw $0x8,%%xmm1 \n" + "por %%xmm4,%%xmm0 \n" + "pmullw %%xmm3,%%xmm1 \n" + "psrlw $0x8,%%xmm2 \n" + "paddusb %%xmm2,%%xmm0 \n" + "pand %%xmm5,%%xmm1 \n" + "paddusb %%xmm1,%%xmm0 \n" + "movd %%xmm0,(%2) \n" + "lea 0x4(%2),%2 \n" + "sub $0x1,%3 \n" + "jge 91b \n" "99: \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 @@ -4689,36 +4786,36 @@ void BlendPlaneRow_SSSE3(const uint8_t* src0, uint8_t* dst, int width) { asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psllw $0x8,%%xmm5 \n" - "mov $0x80808080,%%eax \n" - "movd %%eax,%%xmm6 \n" - "pshufd $0x0,%%xmm6,%%xmm6 \n" - "mov $0x807f807f,%%eax \n" - "movd %%eax,%%xmm7 \n" - "pshufd $0x0,%%xmm7,%%xmm7 \n" - "sub %2,%0 \n" - "sub %2,%1 \n" - "sub %2,%3 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psllw $0x8,%%xmm5 \n" + "mov $0x80808080,%%eax \n" + "movd %%eax,%%xmm6 \n" + "pshufd $0x0,%%xmm6,%%xmm6 \n" + "mov $0x807f807f,%%eax \n" + "movd %%eax,%%xmm7 \n" + "pshufd $0x0,%%xmm7,%%xmm7 \n" + "sub %2,%0 \n" + "sub %2,%1 \n" + "sub %2,%3 \n" // 8 pixel loop. 
LABELALIGN "1: \n" - "movq (%2),%%xmm0 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "pxor %%xmm5,%%xmm0 \n" - "movq (%0,%2,1),%%xmm1 \n" - "movq (%1,%2,1),%%xmm2 \n" - "punpcklbw %%xmm2,%%xmm1 \n" - "psubb %%xmm6,%%xmm1 \n" - "pmaddubsw %%xmm1,%%xmm0 \n" - "paddw %%xmm7,%%xmm0 \n" - "psrlw $0x8,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movq %%xmm0,(%3,%2,1) \n" - "lea 0x8(%2),%2 \n" - "sub $0x8,%4 \n" - "jg 1b \n" + "movq (%2),%%xmm0 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "pxor %%xmm5,%%xmm0 \n" + "movq (%0,%2,1),%%xmm1 \n" + "movq (%1,%2,1),%%xmm2 \n" + "punpcklbw %%xmm2,%%xmm1 \n" + "psubb %%xmm6,%%xmm1 \n" + "pmaddubsw %%xmm1,%%xmm0 \n" + "paddw %%xmm7,%%xmm0 \n" + "psrlw $0x8,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movq %%xmm0,(%3,%2,1) \n" + "lea 0x8(%2),%2 \n" + "sub $0x8,%4 \n" + "jg 1b \n" : "+r"(src0), // %0 "+r"(src1), // %1 "+r"(alpha), // %2 @@ -4741,43 +4838,43 @@ void BlendPlaneRow_AVX2(const uint8_t* src0, uint8_t* dst, int width) { asm volatile( - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpsllw $0x8,%%ymm5,%%ymm5 \n" - "mov $0x80808080,%%eax \n" - "vmovd %%eax,%%xmm6 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsllw $0x8,%%ymm5,%%ymm5 \n" + "mov $0x80808080,%%eax \n" + "vmovd %%eax,%%xmm6 \n" "vbroadcastss %%xmm6,%%ymm6 \n" - "mov $0x807f807f,%%eax \n" - "vmovd %%eax,%%xmm7 \n" + "mov $0x807f807f,%%eax \n" + "vmovd %%eax,%%xmm7 \n" "vbroadcastss %%xmm7,%%ymm7 \n" - "sub %2,%0 \n" - "sub %2,%1 \n" - "sub %2,%3 \n" + "sub %2,%0 \n" + "sub %2,%1 \n" + "sub %2,%3 \n" // 32 pixel loop. 
LABELALIGN "1: \n" - "vmovdqu (%2),%%ymm0 \n" - "vpunpckhbw %%ymm0,%%ymm0,%%ymm3 \n" - "vpunpcklbw %%ymm0,%%ymm0,%%ymm0 \n" - "vpxor %%ymm5,%%ymm3,%%ymm3 \n" - "vpxor %%ymm5,%%ymm0,%%ymm0 \n" - "vmovdqu (%0,%2,1),%%ymm1 \n" - "vmovdqu (%1,%2,1),%%ymm2 \n" - "vpunpckhbw %%ymm2,%%ymm1,%%ymm4 \n" - "vpunpcklbw %%ymm2,%%ymm1,%%ymm1 \n" - "vpsubb %%ymm6,%%ymm4,%%ymm4 \n" - "vpsubb %%ymm6,%%ymm1,%%ymm1 \n" - "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" - "vpmaddubsw %%ymm1,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm7,%%ymm3,%%ymm3 \n" - "vpaddw %%ymm7,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm3,%%ymm3 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm3,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%3,%2,1) \n" - "lea 0x20(%2),%2 \n" - "sub $0x20,%4 \n" - "jg 1b \n" + "vmovdqu (%2),%%ymm0 \n" + "vpunpckhbw %%ymm0,%%ymm0,%%ymm3 \n" + "vpunpcklbw %%ymm0,%%ymm0,%%ymm0 \n" + "vpxor %%ymm5,%%ymm3,%%ymm3 \n" + "vpxor %%ymm5,%%ymm0,%%ymm0 \n" + "vmovdqu (%0,%2,1),%%ymm1 \n" + "vmovdqu (%1,%2,1),%%ymm2 \n" + "vpunpckhbw %%ymm2,%%ymm1,%%ymm4 \n" + "vpunpcklbw %%ymm2,%%ymm1,%%ymm1 \n" + "vpsubb %%ymm6,%%ymm4,%%ymm4 \n" + "vpsubb %%ymm6,%%ymm1,%%ymm1 \n" + "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" + "vpmaddubsw %%ymm1,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm7,%%ymm3,%%ymm3 \n" + "vpaddw %%ymm7,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm3,%%ymm3 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm3,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%3,%2,1) \n" + "lea 0x20(%2),%2 \n" + "sub $0x20,%4 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src0), // %0 "+r"(src1), // %1 @@ -4791,7 +4888,7 @@ void BlendPlaneRow_AVX2(const uint8_t* src0, #endif // HAS_BLENDPLANEROW_AVX2 #ifdef HAS_ARGBATTENUATEROW_SSSE3 -// Shuffle table duplicating alpha +// Shuffle table duplicating alpha. 
static const uvec8 kShuffleAlpha0 = {3u, 3u, 3u, 3u, 3u, 3u, 128u, 128u, 7u, 7u, 7u, 7u, 7u, 7u, 128u, 128u}; static const uvec8 kShuffleAlpha1 = {11u, 11u, 11u, 11u, 11u, 11u, 128u, 128u, @@ -4801,35 +4898,35 @@ void ARGBAttenuateRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_argb, int width) { asm volatile( - "pcmpeqb %%xmm3,%%xmm3 \n" - "pslld $0x18,%%xmm3 \n" - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" + "pcmpeqb %%xmm3,%%xmm3 \n" + "pslld $0x18,%%xmm3 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" // 4 pixel loop. LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "pshufb %%xmm4,%%xmm0 \n" - "movdqu (%0),%%xmm1 \n" - "punpcklbw %%xmm1,%%xmm1 \n" - "pmulhuw %%xmm1,%%xmm0 \n" - "movdqu (%0),%%xmm1 \n" - "pshufb %%xmm5,%%xmm1 \n" - "movdqu (%0),%%xmm2 \n" - "punpckhbw %%xmm2,%%xmm2 \n" - "pmulhuw %%xmm2,%%xmm1 \n" - "movdqu (%0),%%xmm2 \n" - "lea 0x10(%0),%0 \n" - "pand %%xmm3,%%xmm2 \n" - "psrlw $0x8,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "pshufb %%xmm4,%%xmm0 \n" + "movdqu (%0),%%xmm1 \n" + "punpcklbw %%xmm1,%%xmm1 \n" + "pmulhuw %%xmm1,%%xmm0 \n" + "movdqu (%0),%%xmm1 \n" + "pshufb %%xmm5,%%xmm1 \n" + "movdqu (%0),%%xmm2 \n" + "punpckhbw %%xmm2,%%xmm2 \n" + "pmulhuw %%xmm2,%%xmm1 \n" + "movdqu (%0),%%xmm2 \n" + "lea 0x10(%0),%0 \n" + "pand %%xmm3,%%xmm2 \n" + "psrlw $0x8,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -4850,29 +4947,29 @@ void ARGBAttenuateRow_AVX2(const uint8_t* src_argb, int width) { asm volatile( "vbroadcastf128 %3,%%ymm4 \n" - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpslld $0x18,%%ymm5,%%ymm5 \n" - "sub %0,%1 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpslld $0x18,%%ymm5,%%ymm5 \n" + "sub 
%0,%1 \n" // 8 pixel loop. LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm6 \n" - "vpunpcklbw %%ymm6,%%ymm6,%%ymm0 \n" - "vpunpckhbw %%ymm6,%%ymm6,%%ymm1 \n" - "vpshufb %%ymm4,%%ymm0,%%ymm2 \n" - "vpshufb %%ymm4,%%ymm1,%%ymm3 \n" - "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" - "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" - "vpand %%ymm5,%%ymm6,%%ymm6 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpor %%ymm6,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,0x00(%0,%1,1) \n" - "lea 0x20(%0),%0 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm6 \n" + "vpunpcklbw %%ymm6,%%ymm6,%%ymm0 \n" + "vpunpckhbw %%ymm6,%%ymm6,%%ymm1 \n" + "vpshufb %%ymm4,%%ymm0,%%ymm2 \n" + "vpshufb %%ymm4,%%ymm1,%%ymm3 \n" + "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" + "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" + "vpand %%ymm5,%%ymm6,%%ymm6 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpor %%ymm6,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,0x00(%0,%1,1) \n" + "lea 0x20(%0),%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 @@ -4892,32 +4989,32 @@ void ARGBUnattenuateRow_SSE2(const uint8_t* src_argb, // 4 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movzb 0x03(%0),%3 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "movd 0x00(%4,%3,4),%%xmm2 \n" - "movzb 0x07(%0),%3 \n" - "movd 0x00(%4,%3,4),%%xmm3 \n" - "pshuflw $0x40,%%xmm2,%%xmm2 \n" - "pshuflw $0x40,%%xmm3,%%xmm3 \n" - "movlhps %%xmm3,%%xmm2 \n" - "pmulhuw %%xmm2,%%xmm0 \n" - "movdqu (%0),%%xmm1 \n" - "movzb 0x0b(%0),%3 \n" - "punpckhbw %%xmm1,%%xmm1 \n" - "movd 0x00(%4,%3,4),%%xmm2 \n" - "movzb 0x0f(%0),%3 \n" - "movd 0x00(%4,%3,4),%%xmm3 \n" - "pshuflw $0x40,%%xmm2,%%xmm2 \n" - "pshuflw $0x40,%%xmm3,%%xmm3 \n" - "movlhps %%xmm3,%%xmm2 \n" - "pmulhuw %%xmm2,%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movzb 0x03(%0),%3 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "movd 0x00(%4,%3,4),%%xmm2 \n" + "movzb 0x07(%0),%3 \n" + "movd 0x00(%4,%3,4),%%xmm3 \n" + "pshuflw $0x40,%%xmm2,%%xmm2 \n" + "pshuflw $0x40,%%xmm3,%%xmm3 \n" + "movlhps %%xmm3,%%xmm2 \n" + "pmulhuw %%xmm2,%%xmm0 \n" + "movdqu (%0),%%xmm1 \n" + "movzb 0x0b(%0),%3 \n" + "punpckhbw %%xmm1,%%xmm1 \n" + "movd 0x00(%4,%3,4),%%xmm2 \n" + "movzb 0x0f(%0),%3 \n" + "movd 0x00(%4,%3,4),%%xmm3 \n" + "pshuflw $0x40,%%xmm2,%%xmm2 \n" + "pshuflw $0x40,%%xmm3,%%xmm3 \n" + "movlhps %%xmm3,%%xmm2 \n" + "pmulhuw %%xmm2,%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width), // %2 @@ -4937,52 +5034,52 @@ void ARGBUnattenuateRow_AVX2(const uint8_t* src_argb, int width) { uintptr_t alpha; asm volatile( - "sub %0,%1 \n" + "sub %0,%1 \n" "vbroadcastf128 %5,%%ymm5 \n" // 8 pixel loop. 
LABELALIGN "1: \n" // replace VPGATHER - "movzb 0x03(%0),%3 \n" - "vmovd 0x00(%4,%3,4),%%xmm0 \n" - "movzb 0x07(%0),%3 \n" - "vmovd 0x00(%4,%3,4),%%xmm1 \n" - "movzb 0x0b(%0),%3 \n" - "vpunpckldq %%xmm1,%%xmm0,%%xmm6 \n" - "vmovd 0x00(%4,%3,4),%%xmm2 \n" - "movzb 0x0f(%0),%3 \n" - "vmovd 0x00(%4,%3,4),%%xmm3 \n" - "movzb 0x13(%0),%3 \n" - "vpunpckldq %%xmm3,%%xmm2,%%xmm7 \n" - "vmovd 0x00(%4,%3,4),%%xmm0 \n" - "movzb 0x17(%0),%3 \n" - "vmovd 0x00(%4,%3,4),%%xmm1 \n" - "movzb 0x1b(%0),%3 \n" - "vpunpckldq %%xmm1,%%xmm0,%%xmm0 \n" - "vmovd 0x00(%4,%3,4),%%xmm2 \n" - "movzb 0x1f(%0),%3 \n" - "vmovd 0x00(%4,%3,4),%%xmm3 \n" - "vpunpckldq %%xmm3,%%xmm2,%%xmm2 \n" + "movzb 0x03(%0),%3 \n" + "vmovd 0x00(%4,%3,4),%%xmm0 \n" + "movzb 0x07(%0),%3 \n" + "vmovd 0x00(%4,%3,4),%%xmm1 \n" + "movzb 0x0b(%0),%3 \n" + "vpunpckldq %%xmm1,%%xmm0,%%xmm6 \n" + "vmovd 0x00(%4,%3,4),%%xmm2 \n" + "movzb 0x0f(%0),%3 \n" + "vmovd 0x00(%4,%3,4),%%xmm3 \n" + "movzb 0x13(%0),%3 \n" + "vpunpckldq %%xmm3,%%xmm2,%%xmm7 \n" + "vmovd 0x00(%4,%3,4),%%xmm0 \n" + "movzb 0x17(%0),%3 \n" + "vmovd 0x00(%4,%3,4),%%xmm1 \n" + "movzb 0x1b(%0),%3 \n" + "vpunpckldq %%xmm1,%%xmm0,%%xmm0 \n" + "vmovd 0x00(%4,%3,4),%%xmm2 \n" + "movzb 0x1f(%0),%3 \n" + "vmovd 0x00(%4,%3,4),%%xmm3 \n" + "vpunpckldq %%xmm3,%%xmm2,%%xmm2 \n" "vpunpcklqdq %%xmm7,%%xmm6,%%xmm3 \n" "vpunpcklqdq %%xmm2,%%xmm0,%%xmm0 \n" "vinserti128 $0x1,%%xmm0,%%ymm3,%%ymm3 \n" // end of VPGATHER - "vmovdqu (%0),%%ymm6 \n" - "vpunpcklbw %%ymm6,%%ymm6,%%ymm0 \n" - "vpunpckhbw %%ymm6,%%ymm6,%%ymm1 \n" - "vpunpcklwd %%ymm3,%%ymm3,%%ymm2 \n" - "vpunpckhwd %%ymm3,%%ymm3,%%ymm3 \n" - "vpshufb %%ymm5,%%ymm2,%%ymm2 \n" - "vpshufb %%ymm5,%%ymm3,%%ymm3 \n" - "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" - "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,0x00(%0,%1,1) \n" - "lea 0x20(%0),%0 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm6 \n" + "vpunpcklbw %%ymm6,%%ymm6,%%ymm0 \n" + "vpunpckhbw %%ymm6,%%ymm6,%%ymm1 \n" + 
"vpunpcklwd %%ymm3,%%ymm3,%%ymm2 \n" + "vpunpckhwd %%ymm3,%%ymm3,%%ymm3 \n" + "vpshufb %%ymm5,%%ymm2,%%ymm2 \n" + "vpshufb %%ymm5,%%ymm3,%%ymm3 \n" + "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" + "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,0x00(%0,%1,1) \n" + "lea 0x20(%0),%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 @@ -4999,42 +5096,42 @@ void ARGBUnattenuateRow_AVX2(const uint8_t* src_argb, // Convert 8 ARGB pixels (64 bytes) to 8 Gray ARGB pixels void ARGBGrayRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_argb, int width) { asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" // 8 pixel loop. LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "psubb %%xmm5,%%xmm0 \n" - "psubb %%xmm5,%%xmm1 \n" - "movdqu %%xmm4,%%xmm6 \n" - "pmaddubsw %%xmm0,%%xmm6 \n" - "movdqu %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm1,%%xmm0 \n" - "phaddw %%xmm0,%%xmm6 \n" - "paddw %%xmm5,%%xmm6 \n" - "psrlw $0x8,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movdqu (%0),%%xmm2 \n" - "movdqu 0x10(%0),%%xmm3 \n" - "lea 0x20(%0),%0 \n" - "psrld $0x18,%%xmm2 \n" - "psrld $0x18,%%xmm3 \n" - "packuswb %%xmm3,%%xmm2 \n" - "packuswb %%xmm2,%%xmm2 \n" - "movdqa %%xmm6,%%xmm3 \n" - "punpcklbw %%xmm6,%%xmm6 \n" - "punpcklbw %%xmm2,%%xmm3 \n" - "movdqa %%xmm6,%%xmm1 \n" - "punpcklwd %%xmm3,%%xmm6 \n" - "punpckhwd %%xmm3,%%xmm1 \n" - "movdqu %%xmm6,(%1) \n" - "movdqu %%xmm1,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "psubb %%xmm5,%%xmm0 \n" + "psubb %%xmm5,%%xmm1 \n" + "movdqu %%xmm4,%%xmm6 \n" + "pmaddubsw %%xmm0,%%xmm6 \n" + "movdqu %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm1,%%xmm0 \n" + "phaddw %%xmm0,%%xmm6 \n" + "paddw %%xmm5,%%xmm6 \n" + "psrlw $0x8,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movdqu (%0),%%xmm2 \n" + "movdqu 0x10(%0),%%xmm3 \n" + "lea 
0x20(%0),%0 \n" + "psrld $0x18,%%xmm2 \n" + "psrld $0x18,%%xmm3 \n" + "packuswb %%xmm3,%%xmm2 \n" + "packuswb %%xmm2,%%xmm2 \n" + "movdqa %%xmm6,%%xmm3 \n" + "punpcklbw %%xmm6,%%xmm6 \n" + "punpcklbw %%xmm2,%%xmm3 \n" + "movdqa %%xmm6,%%xmm1 \n" + "punpcklwd %%xmm3,%%xmm6 \n" + "punpckhwd %%xmm3,%%xmm1 \n" + "movdqu %%xmm6,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -5061,50 +5158,50 @@ static const vec8 kARGBToSepiaR = {24, 98, 50, 0, 24, 98, 50, 0, // Convert 8 ARGB pixels (32 bytes) to 8 Sepia ARGB pixels. void ARGBSepiaRow_SSSE3(uint8_t* dst_argb, int width) { asm volatile( - "movdqa %2,%%xmm2 \n" - "movdqa %3,%%xmm3 \n" - "movdqa %4,%%xmm4 \n" + "movdqa %2,%%xmm2 \n" + "movdqa %3,%%xmm3 \n" + "movdqa %4,%%xmm4 \n" // 8 pixel loop. LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm6 \n" - "pmaddubsw %%xmm2,%%xmm0 \n" - "pmaddubsw %%xmm2,%%xmm6 \n" - "phaddw %%xmm6,%%xmm0 \n" - "psrlw $0x7,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movdqu (%0),%%xmm5 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm5 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "phaddw %%xmm1,%%xmm5 \n" - "psrlw $0x7,%%xmm5 \n" - "packuswb %%xmm5,%%xmm5 \n" - "punpcklbw %%xmm5,%%xmm0 \n" - "movdqu (%0),%%xmm5 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "pmaddubsw %%xmm4,%%xmm5 \n" - "pmaddubsw %%xmm4,%%xmm1 \n" - "phaddw %%xmm1,%%xmm5 \n" - "psrlw $0x7,%%xmm5 \n" - "packuswb %%xmm5,%%xmm5 \n" - "movdqu (%0),%%xmm6 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "psrld $0x18,%%xmm6 \n" - "psrld $0x18,%%xmm1 \n" - "packuswb %%xmm1,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "punpcklbw %%xmm6,%%xmm5 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklwd %%xmm5,%%xmm0 \n" - "punpckhwd %%xmm5,%%xmm1 \n" - "movdqu %%xmm0,(%0) \n" - "movdqu %%xmm1,0x10(%0) \n" - "lea 0x20(%0),%0 \n" - "sub $0x8,%1 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm6 \n" + "pmaddubsw %%xmm2,%%xmm0 
\n" + "pmaddubsw %%xmm2,%%xmm6 \n" + "phaddw %%xmm6,%%xmm0 \n" + "psrlw $0x7,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movdqu (%0),%%xmm5 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm5 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "phaddw %%xmm1,%%xmm5 \n" + "psrlw $0x7,%%xmm5 \n" + "packuswb %%xmm5,%%xmm5 \n" + "punpcklbw %%xmm5,%%xmm0 \n" + "movdqu (%0),%%xmm5 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "pmaddubsw %%xmm4,%%xmm5 \n" + "pmaddubsw %%xmm4,%%xmm1 \n" + "phaddw %%xmm1,%%xmm5 \n" + "psrlw $0x7,%%xmm5 \n" + "packuswb %%xmm5,%%xmm5 \n" + "movdqu (%0),%%xmm6 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "psrld $0x18,%%xmm6 \n" + "psrld $0x18,%%xmm1 \n" + "packuswb %%xmm1,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "punpcklbw %%xmm6,%%xmm5 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklwd %%xmm5,%%xmm0 \n" + "punpckhwd %%xmm5,%%xmm1 \n" + "movdqu %%xmm0,(%0) \n" + "movdqu %%xmm1,0x10(%0) \n" + "lea 0x20(%0),%0 \n" + "sub $0x8,%1 \n" + "jg 1b \n" : "+r"(dst_argb), // %0 "+r"(width) // %1 : "m"(kARGBToSepiaB), // %2 @@ -5122,54 +5219,54 @@ void ARGBColorMatrixRow_SSSE3(const uint8_t* src_argb, const int8_t* matrix_argb, int width) { asm volatile( - "movdqu (%3),%%xmm5 \n" - "pshufd $0x00,%%xmm5,%%xmm2 \n" - "pshufd $0x55,%%xmm5,%%xmm3 \n" - "pshufd $0xaa,%%xmm5,%%xmm4 \n" - "pshufd $0xff,%%xmm5,%%xmm5 \n" + "movdqu (%3),%%xmm5 \n" + "pshufd $0x00,%%xmm5,%%xmm2 \n" + "pshufd $0x55,%%xmm5,%%xmm3 \n" + "pshufd $0xaa,%%xmm5,%%xmm4 \n" + "pshufd $0xff,%%xmm5,%%xmm5 \n" // 8 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm7 \n" - "pmaddubsw %%xmm2,%%xmm0 \n" - "pmaddubsw %%xmm2,%%xmm7 \n" - "movdqu (%0),%%xmm6 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "pmaddubsw %%xmm3,%%xmm6 \n" - "pmaddubsw %%xmm3,%%xmm1 \n" - "phaddsw %%xmm7,%%xmm0 \n" - "phaddsw %%xmm1,%%xmm6 \n" - "psraw $0x6,%%xmm0 \n" - "psraw $0x6,%%xmm6 \n" - "packuswb %%xmm0,%%xmm0 \n" - "packuswb %%xmm6,%%xmm6 \n" - "punpcklbw %%xmm6,%%xmm0 \n" - "movdqu (%0),%%xmm1 \n" - "movdqu 0x10(%0),%%xmm7 \n" - "pmaddubsw %%xmm4,%%xmm1 \n" - "pmaddubsw %%xmm4,%%xmm7 \n" - "phaddsw %%xmm7,%%xmm1 \n" - "movdqu (%0),%%xmm6 \n" - "movdqu 0x10(%0),%%xmm7 \n" - "pmaddubsw %%xmm5,%%xmm6 \n" - "pmaddubsw %%xmm5,%%xmm7 \n" - "phaddsw %%xmm7,%%xmm6 \n" - "psraw $0x6,%%xmm1 \n" - "psraw $0x6,%%xmm6 \n" - "packuswb %%xmm1,%%xmm1 \n" - "packuswb %%xmm6,%%xmm6 \n" - "punpcklbw %%xmm6,%%xmm1 \n" - "movdqa %%xmm0,%%xmm6 \n" - "punpcklwd %%xmm1,%%xmm0 \n" - "punpckhwd %%xmm1,%%xmm6 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm6,0x10(%1) \n" - "lea 0x20(%0),%0 \n" - "lea 0x20(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm7 \n" + "pmaddubsw %%xmm2,%%xmm0 \n" + "pmaddubsw %%xmm2,%%xmm7 \n" + "movdqu (%0),%%xmm6 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "phaddsw %%xmm7,%%xmm0 \n" + "phaddsw %%xmm1,%%xmm6 \n" + "psraw $0x6,%%xmm0 \n" + "psraw $0x6,%%xmm6 \n" + "packuswb %%xmm0,%%xmm0 \n" + "packuswb %%xmm6,%%xmm6 \n" + "punpcklbw %%xmm6,%%xmm0 \n" + "movdqu (%0),%%xmm1 \n" + "movdqu 0x10(%0),%%xmm7 \n" + "pmaddubsw %%xmm4,%%xmm1 \n" + "pmaddubsw %%xmm4,%%xmm7 \n" + "phaddsw %%xmm7,%%xmm1 \n" + "movdqu (%0),%%xmm6 \n" + "movdqu 0x10(%0),%%xmm7 \n" + "pmaddubsw %%xmm5,%%xmm6 \n" + "pmaddubsw %%xmm5,%%xmm7 \n" + "phaddsw %%xmm7,%%xmm6 \n" + "psraw $0x6,%%xmm1 \n" + "psraw $0x6,%%xmm6 \n" + "packuswb %%xmm1,%%xmm1 \n" + "packuswb %%xmm6,%%xmm6 \n" + "punpcklbw %%xmm6,%%xmm1 \n" + "movdqa 
%%xmm0,%%xmm6 \n" + "punpcklwd %%xmm1,%%xmm0 \n" + "punpckhwd %%xmm1,%%xmm6 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm6,0x10(%1) \n" + "lea 0x20(%0),%0 \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -5187,40 +5284,40 @@ void ARGBQuantizeRow_SSE2(uint8_t* dst_argb, int interval_offset, int width) { asm volatile( - "movd %2,%%xmm2 \n" - "movd %3,%%xmm3 \n" - "movd %4,%%xmm4 \n" - "pshuflw $0x40,%%xmm2,%%xmm2 \n" - "pshufd $0x44,%%xmm2,%%xmm2 \n" - "pshuflw $0x40,%%xmm3,%%xmm3 \n" - "pshufd $0x44,%%xmm3,%%xmm3 \n" - "pshuflw $0x40,%%xmm4,%%xmm4 \n" - "pshufd $0x44,%%xmm4,%%xmm4 \n" - "pxor %%xmm5,%%xmm5 \n" - "pcmpeqb %%xmm6,%%xmm6 \n" - "pslld $0x18,%%xmm6 \n" + "movd %2,%%xmm2 \n" + "movd %3,%%xmm3 \n" + "movd %4,%%xmm4 \n" + "pshuflw $0x40,%%xmm2,%%xmm2 \n" + "pshufd $0x44,%%xmm2,%%xmm2 \n" + "pshuflw $0x40,%%xmm3,%%xmm3 \n" + "pshufd $0x44,%%xmm3,%%xmm3 \n" + "pshuflw $0x40,%%xmm4,%%xmm4 \n" + "pshufd $0x44,%%xmm4,%%xmm4 \n" + "pxor %%xmm5,%%xmm5 \n" + "pcmpeqb %%xmm6,%%xmm6 \n" + "pslld $0x18,%%xmm6 \n" // 4 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "punpcklbw %%xmm5,%%xmm0 \n" - "pmulhuw %%xmm2,%%xmm0 \n" - "movdqu (%0),%%xmm1 \n" - "punpckhbw %%xmm5,%%xmm1 \n" - "pmulhuw %%xmm2,%%xmm1 \n" - "pmullw %%xmm3,%%xmm0 \n" - "movdqu (%0),%%xmm7 \n" - "pmullw %%xmm3,%%xmm1 \n" - "pand %%xmm6,%%xmm7 \n" - "paddw %%xmm4,%%xmm0 \n" - "paddw %%xmm4,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "por %%xmm7,%%xmm0 \n" - "movdqu %%xmm0,(%0) \n" - "lea 0x10(%0),%0 \n" - "sub $0x4,%1 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "punpcklbw %%xmm5,%%xmm0 \n" + "pmulhuw %%xmm2,%%xmm0 \n" + "movdqu (%0),%%xmm1 \n" + "punpckhbw %%xmm5,%%xmm1 \n" + "pmulhuw %%xmm2,%%xmm1 \n" + "pmullw %%xmm3,%%xmm0 \n" + "movdqu (%0),%%xmm7 \n" + "pmullw %%xmm3,%%xmm1 \n" + "pand %%xmm6,%%xmm7 \n" + "paddw %%xmm4,%%xmm0 \n" + "paddw %%xmm4,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "por %%xmm7,%%xmm0 \n" + "movdqu %%xmm0,(%0) \n" + "lea 0x10(%0),%0 \n" + "sub $0x4,%1 \n" + "jg 1b \n" : "+r"(dst_argb), // %0 "+r"(width) // %1 : "r"(scale), // %2 @@ -5238,27 +5335,27 @@ void ARGBShadeRow_SSE2(const uint8_t* src_argb, int width, uint32_t value) { asm volatile( - "movd %3,%%xmm2 \n" - "punpcklbw %%xmm2,%%xmm2 \n" - "punpcklqdq %%xmm2,%%xmm2 \n" + "movd %3,%%xmm2 \n" + "punpcklbw %%xmm2,%%xmm2 \n" + "punpcklqdq %%xmm2,%%xmm2 \n" // 4 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "punpckhbw %%xmm1,%%xmm1 \n" - "pmulhuw %%xmm2,%%xmm0 \n" - "pmulhuw %%xmm2,%%xmm1 \n" - "psrlw $0x8,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm1 \n" + "pmulhuw %%xmm2,%%xmm0 \n" + "pmulhuw %%xmm2,%%xmm1 \n" + "psrlw $0x8,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -5275,28 +5372,28 @@ void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, int width) { asm volatile( - "pxor %%xmm5,%%xmm5 \n" + "pxor %%xmm5,%%xmm5 \n" // 4 pixel loop. LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movdqu (%1),%%xmm2 \n" - "lea 0x10(%1),%1 \n" - "movdqu %%xmm0,%%xmm1 \n" - "movdqu %%xmm2,%%xmm3 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "punpckhbw %%xmm1,%%xmm1 \n" - "punpcklbw %%xmm5,%%xmm2 \n" - "punpckhbw %%xmm5,%%xmm3 \n" - "pmulhuw %%xmm2,%%xmm0 \n" - "pmulhuw %%xmm3,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "movdqu (%1),%%xmm2 \n" + "lea 0x10(%1),%1 \n" + "movdqu %%xmm0,%%xmm1 \n" + "movdqu %%xmm2,%%xmm3 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm1 \n" + "punpcklbw %%xmm5,%%xmm2 \n" + "punpckhbw %%xmm5,%%xmm3 \n" + "pmulhuw %%xmm2,%%xmm0 \n" + "pmulhuw %%xmm3,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jg 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -5314,26 +5411,26 @@ void 
ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, int width) { asm volatile( - "vpxor %%ymm5,%%ymm5,%%ymm5 \n" + "vpxor %%ymm5,%%ymm5,%%ymm5 \n" // 4 pixel loop. LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm1 \n" - "lea 0x20(%0),%0 \n" - "vmovdqu (%1),%%ymm3 \n" - "lea 0x20(%1),%1 \n" - "vpunpcklbw %%ymm1,%%ymm1,%%ymm0 \n" - "vpunpckhbw %%ymm1,%%ymm1,%%ymm1 \n" - "vpunpcklbw %%ymm5,%%ymm3,%%ymm2 \n" - "vpunpckhbw %%ymm5,%%ymm3,%%ymm3 \n" - "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" - "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%2) \n" - "lea 0x20(%2),%2 \n" - "sub $0x8,%3 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm1 \n" + "lea 0x20(%0),%0 \n" + "vmovdqu (%1),%%ymm3 \n" + "lea 0x20(%1),%1 \n" + "vpunpcklbw %%ymm1,%%ymm1,%%ymm0 \n" + "vpunpckhbw %%ymm1,%%ymm1,%%ymm1 \n" + "vpunpcklbw %%ymm5,%%ymm3,%%ymm2 \n" + "vpunpckhbw %%ymm5,%%ymm3,%%ymm3 \n" + "vpmulhuw %%ymm2,%%ymm0,%%ymm0 \n" + "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%2) \n" + "lea 0x20(%2),%2 \n" + "sub $0x8,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 @@ -5359,15 +5456,15 @@ void ARGBAddRow_SSE2(const uint8_t* src_argb0, // 4 pixel loop. LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movdqu (%1),%%xmm1 \n" - "lea 0x10(%1),%1 \n" - "paddusb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "movdqu (%1),%%xmm1 \n" + "lea 0x10(%1),%1 \n" + "paddusb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jg 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -5387,14 +5484,14 @@ void ARGBAddRow_AVX2(const uint8_t* src_argb0, // 4 pixel loop. 
LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "lea 0x20(%0),%0 \n" - "vpaddusb (%1),%%ymm0,%%ymm0 \n" - "lea 0x20(%1),%1 \n" - "vmovdqu %%ymm0,(%2) \n" - "lea 0x20(%2),%2 \n" - "sub $0x8,%3 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "lea 0x20(%0),%0 \n" + "vpaddusb (%1),%%ymm0,%%ymm0 \n" + "lea 0x20(%1),%1 \n" + "vmovdqu %%ymm0,(%2) \n" + "lea 0x20(%2),%2 \n" + "sub $0x8,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 @@ -5415,15 +5512,15 @@ void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, // 4 pixel loop. LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "movdqu (%1),%%xmm1 \n" - "lea 0x10(%1),%1 \n" - "psubusb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "movdqu (%1),%%xmm1 \n" + "lea 0x10(%1),%1 \n" + "psubusb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jg 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -5443,14 +5540,14 @@ void ARGBSubtractRow_AVX2(const uint8_t* src_argb0, // 4 pixel loop. LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "lea 0x20(%0),%0 \n" - "vpsubusb (%1),%%ymm0,%%ymm0 \n" - "lea 0x20(%1),%1 \n" - "vmovdqu %%ymm0,(%2) \n" - "lea 0x20(%2),%2 \n" - "sub $0x8,%3 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "lea 0x20(%0),%0 \n" + "vpsubusb (%1),%%ymm0,%%ymm0 \n" + "lea 0x20(%1),%1 \n" + "vmovdqu %%ymm0,(%2) \n" + "lea 0x20(%2),%2 \n" + "sub $0x8,%3 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 @@ -5472,40 +5569,40 @@ void SobelXRow_SSE2(const uint8_t* src_y0, uint8_t* dst_sobelx, int width) { asm volatile( - "sub %0,%1 \n" - "sub %0,%2 \n" - "sub %0,%3 \n" - "pxor %%xmm5,%%xmm5 \n" + "sub %0,%1 \n" + "sub %0,%2 \n" + "sub %0,%3 \n" + "pxor %%xmm5,%%xmm5 \n" // 8 pixel loop. 
LABELALIGN "1: \n" - "movq (%0),%%xmm0 \n" - "movq 0x2(%0),%%xmm1 \n" - "punpcklbw %%xmm5,%%xmm0 \n" - "punpcklbw %%xmm5,%%xmm1 \n" - "psubw %%xmm1,%%xmm0 \n" - "movq 0x00(%0,%1,1),%%xmm1 \n" - "movq 0x02(%0,%1,1),%%xmm2 \n" - "punpcklbw %%xmm5,%%xmm1 \n" - "punpcklbw %%xmm5,%%xmm2 \n" - "psubw %%xmm2,%%xmm1 \n" - "movq 0x00(%0,%2,1),%%xmm2 \n" - "movq 0x02(%0,%2,1),%%xmm3 \n" - "punpcklbw %%xmm5,%%xmm2 \n" - "punpcklbw %%xmm5,%%xmm3 \n" - "psubw %%xmm3,%%xmm2 \n" - "paddw %%xmm2,%%xmm0 \n" - "paddw %%xmm1,%%xmm0 \n" - "paddw %%xmm1,%%xmm0 \n" - "pxor %%xmm1,%%xmm1 \n" - "psubw %%xmm0,%%xmm1 \n" - "pmaxsw %%xmm1,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movq %%xmm0,0x00(%0,%3,1) \n" - "lea 0x8(%0),%0 \n" - "sub $0x8,%4 \n" - "jg 1b \n" + "movq (%0),%%xmm0 \n" + "movq 0x2(%0),%%xmm1 \n" + "punpcklbw %%xmm5,%%xmm0 \n" + "punpcklbw %%xmm5,%%xmm1 \n" + "psubw %%xmm1,%%xmm0 \n" + "movq 0x00(%0,%1,1),%%xmm1 \n" + "movq 0x02(%0,%1,1),%%xmm2 \n" + "punpcklbw %%xmm5,%%xmm1 \n" + "punpcklbw %%xmm5,%%xmm2 \n" + "psubw %%xmm2,%%xmm1 \n" + "movq 0x00(%0,%2,1),%%xmm2 \n" + "movq 0x02(%0,%2,1),%%xmm3 \n" + "punpcklbw %%xmm5,%%xmm2 \n" + "punpcklbw %%xmm5,%%xmm3 \n" + "psubw %%xmm3,%%xmm2 \n" + "paddw %%xmm2,%%xmm0 \n" + "paddw %%xmm1,%%xmm0 \n" + "paddw %%xmm1,%%xmm0 \n" + "pxor %%xmm1,%%xmm1 \n" + "psubw %%xmm0,%%xmm1 \n" + "pmaxsw %%xmm1,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movq %%xmm0,0x00(%0,%3,1) \n" + "lea 0x8(%0),%0 \n" + "sub $0x8,%4 \n" + "jg 1b \n" : "+r"(src_y0), // %0 "+r"(src_y1), // %1 "+r"(src_y2), // %2 @@ -5526,39 +5623,39 @@ void SobelYRow_SSE2(const uint8_t* src_y0, uint8_t* dst_sobely, int width) { asm volatile( - "sub %0,%1 \n" - "sub %0,%2 \n" - "pxor %%xmm5,%%xmm5 \n" + "sub %0,%1 \n" + "sub %0,%2 \n" + "pxor %%xmm5,%%xmm5 \n" // 8 pixel loop. 
LABELALIGN "1: \n" - "movq (%0),%%xmm0 \n" - "movq 0x00(%0,%1,1),%%xmm1 \n" - "punpcklbw %%xmm5,%%xmm0 \n" - "punpcklbw %%xmm5,%%xmm1 \n" - "psubw %%xmm1,%%xmm0 \n" - "movq 0x1(%0),%%xmm1 \n" - "movq 0x01(%0,%1,1),%%xmm2 \n" - "punpcklbw %%xmm5,%%xmm1 \n" - "punpcklbw %%xmm5,%%xmm2 \n" - "psubw %%xmm2,%%xmm1 \n" - "movq 0x2(%0),%%xmm2 \n" - "movq 0x02(%0,%1,1),%%xmm3 \n" - "punpcklbw %%xmm5,%%xmm2 \n" - "punpcklbw %%xmm5,%%xmm3 \n" - "psubw %%xmm3,%%xmm2 \n" - "paddw %%xmm2,%%xmm0 \n" - "paddw %%xmm1,%%xmm0 \n" - "paddw %%xmm1,%%xmm0 \n" - "pxor %%xmm1,%%xmm1 \n" - "psubw %%xmm0,%%xmm1 \n" - "pmaxsw %%xmm1,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movq %%xmm0,0x00(%0,%2,1) \n" - "lea 0x8(%0),%0 \n" - "sub $0x8,%3 \n" - "jg 1b \n" + "movq (%0),%%xmm0 \n" + "movq 0x00(%0,%1,1),%%xmm1 \n" + "punpcklbw %%xmm5,%%xmm0 \n" + "punpcklbw %%xmm5,%%xmm1 \n" + "psubw %%xmm1,%%xmm0 \n" + "movq 0x1(%0),%%xmm1 \n" + "movq 0x01(%0,%1,1),%%xmm2 \n" + "punpcklbw %%xmm5,%%xmm1 \n" + "punpcklbw %%xmm5,%%xmm2 \n" + "psubw %%xmm2,%%xmm1 \n" + "movq 0x2(%0),%%xmm2 \n" + "movq 0x02(%0,%1,1),%%xmm3 \n" + "punpcklbw %%xmm5,%%xmm2 \n" + "punpcklbw %%xmm5,%%xmm3 \n" + "psubw %%xmm3,%%xmm2 \n" + "paddw %%xmm2,%%xmm0 \n" + "paddw %%xmm1,%%xmm0 \n" + "paddw %%xmm1,%%xmm0 \n" + "pxor %%xmm1,%%xmm1 \n" + "psubw %%xmm0,%%xmm1 \n" + "pmaxsw %%xmm1,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movq %%xmm0,0x00(%0,%2,1) \n" + "lea 0x8(%0),%0 \n" + "sub $0x8,%3 \n" + "jg 1b \n" : "+r"(src_y0), // %0 "+r"(src_y1), // %1 "+r"(dst_sobely), // %2 @@ -5579,37 +5676,37 @@ void SobelRow_SSE2(const uint8_t* src_sobelx, uint8_t* dst_argb, int width) { asm volatile( - "sub %0,%1 \n" - "pcmpeqb %%xmm5,%%xmm5 \n" - "pslld $0x18,%%xmm5 \n" + "sub %0,%1 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "pslld $0x18,%%xmm5 \n" // 8 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%1,1),%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "paddusb %%xmm1,%%xmm0 \n" - "movdqa %%xmm0,%%xmm2 \n" - "punpcklbw %%xmm0,%%xmm2 \n" - "punpckhbw %%xmm0,%%xmm0 \n" - "movdqa %%xmm2,%%xmm1 \n" - "punpcklwd %%xmm2,%%xmm1 \n" - "punpckhwd %%xmm2,%%xmm2 \n" - "por %%xmm5,%%xmm1 \n" - "por %%xmm5,%%xmm2 \n" - "movdqa %%xmm0,%%xmm3 \n" - "punpcklwd %%xmm0,%%xmm3 \n" - "punpckhwd %%xmm0,%%xmm0 \n" - "por %%xmm5,%%xmm3 \n" - "por %%xmm5,%%xmm0 \n" - "movdqu %%xmm1,(%2) \n" - "movdqu %%xmm2,0x10(%2) \n" - "movdqu %%xmm3,0x20(%2) \n" - "movdqu %%xmm0,0x30(%2) \n" - "lea 0x40(%2),%2 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%1,1),%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "paddusb %%xmm1,%%xmm0 \n" + "movdqa %%xmm0,%%xmm2 \n" + "punpcklbw %%xmm0,%%xmm2 \n" + "punpckhbw %%xmm0,%%xmm0 \n" + "movdqa %%xmm2,%%xmm1 \n" + "punpcklwd %%xmm2,%%xmm1 \n" + "punpckhwd %%xmm2,%%xmm2 \n" + "por %%xmm5,%%xmm1 \n" + "por %%xmm5,%%xmm2 \n" + "movdqa %%xmm0,%%xmm3 \n" + "punpcklwd %%xmm0,%%xmm3 \n" + "punpckhwd %%xmm0,%%xmm0 \n" + "por %%xmm5,%%xmm3 \n" + "por %%xmm5,%%xmm0 \n" + "movdqu %%xmm1,(%2) \n" + "movdqu %%xmm2,0x10(%2) \n" + "movdqu %%xmm3,0x20(%2) \n" + "movdqu %%xmm0,0x30(%2) \n" + "lea 0x40(%2),%2 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_argb), // %2 @@ -5626,21 +5723,21 @@ void SobelToPlaneRow_SSE2(const uint8_t* src_sobelx, uint8_t* dst_y, int width) { asm volatile( - "sub %0,%1 \n" - "pcmpeqb %%xmm5,%%xmm5 \n" - "pslld $0x18,%%xmm5 \n" + "sub %0,%1 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "pslld $0x18,%%xmm5 \n" // 8 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%1,1),%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "paddusb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%1,1),%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "paddusb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_y), // %2 @@ -5661,36 +5758,36 @@ void SobelXYRow_SSE2(const uint8_t* src_sobelx, uint8_t* dst_argb, int width) { asm volatile( - "sub %0,%1 \n" - "pcmpeqb %%xmm5,%%xmm5 \n" + "sub %0,%1 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" // 8 pixel loop. LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%1,1),%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "movdqa %%xmm0,%%xmm2 \n" - "paddusb %%xmm1,%%xmm2 \n" - "movdqa %%xmm0,%%xmm3 \n" - "punpcklbw %%xmm5,%%xmm3 \n" - "punpckhbw %%xmm5,%%xmm0 \n" - "movdqa %%xmm1,%%xmm4 \n" - "punpcklbw %%xmm2,%%xmm4 \n" - "punpckhbw %%xmm2,%%xmm1 \n" - "movdqa %%xmm4,%%xmm6 \n" - "punpcklwd %%xmm3,%%xmm6 \n" - "punpckhwd %%xmm3,%%xmm4 \n" - "movdqa %%xmm1,%%xmm7 \n" - "punpcklwd %%xmm0,%%xmm7 \n" - "punpckhwd %%xmm0,%%xmm1 \n" - "movdqu %%xmm6,(%2) \n" - "movdqu %%xmm4,0x10(%2) \n" - "movdqu %%xmm7,0x20(%2) \n" - "movdqu %%xmm1,0x30(%2) \n" - "lea 0x40(%2),%2 \n" - "sub $0x10,%3 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%1,1),%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "movdqa %%xmm0,%%xmm2 \n" + "paddusb %%xmm1,%%xmm2 \n" + "movdqa %%xmm0,%%xmm3 \n" + "punpcklbw %%xmm5,%%xmm3 \n" + "punpckhbw %%xmm5,%%xmm0 \n" + "movdqa %%xmm1,%%xmm4 \n" + "punpcklbw %%xmm2,%%xmm4 \n" + "punpckhbw %%xmm2,%%xmm1 \n" + "movdqa %%xmm4,%%xmm6 \n" + "punpcklwd %%xmm3,%%xmm6 \n" + "punpckhwd %%xmm3,%%xmm4 \n" + "movdqa %%xmm1,%%xmm7 \n" + "punpcklwd %%xmm0,%%xmm7 \n" + "punpckhwd %%xmm0,%%xmm1 \n" + "movdqu %%xmm6,(%2) \n" + "movdqu %%xmm4,0x10(%2) \n" + "movdqu %%xmm7,0x20(%2) \n" + "movdqu 
%%xmm1,0x30(%2) \n" + "lea 0x40(%2),%2 \n" + "sub $0x10,%3 \n" + "jg 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_argb), // %2 @@ -5709,67 +5806,67 @@ void ComputeCumulativeSumRow_SSE2(const uint8_t* row, const int32_t* previous_cumsum, int width) { asm volatile( - "pxor %%xmm0,%%xmm0 \n" - "pxor %%xmm1,%%xmm1 \n" - "sub $0x4,%3 \n" - "jl 49f \n" - "test $0xf,%1 \n" - "jne 49f \n" + "pxor %%xmm0,%%xmm0 \n" + "pxor %%xmm1,%%xmm1 \n" + "sub $0x4,%3 \n" + "jl 49f \n" + "test $0xf,%1 \n" + "jne 49f \n" // 4 pixel loop. LABELALIGN "40: \n" - "movdqu (%0),%%xmm2 \n" - "lea 0x10(%0),%0 \n" - "movdqa %%xmm2,%%xmm4 \n" - "punpcklbw %%xmm1,%%xmm2 \n" - "movdqa %%xmm2,%%xmm3 \n" - "punpcklwd %%xmm1,%%xmm2 \n" - "punpckhwd %%xmm1,%%xmm3 \n" - "punpckhbw %%xmm1,%%xmm4 \n" - "movdqa %%xmm4,%%xmm5 \n" - "punpcklwd %%xmm1,%%xmm4 \n" - "punpckhwd %%xmm1,%%xmm5 \n" - "paddd %%xmm2,%%xmm0 \n" - "movdqu (%2),%%xmm2 \n" - "paddd %%xmm0,%%xmm2 \n" - "paddd %%xmm3,%%xmm0 \n" - "movdqu 0x10(%2),%%xmm3 \n" - "paddd %%xmm0,%%xmm3 \n" - "paddd %%xmm4,%%xmm0 \n" - "movdqu 0x20(%2),%%xmm4 \n" - "paddd %%xmm0,%%xmm4 \n" - "paddd %%xmm5,%%xmm0 \n" - "movdqu 0x30(%2),%%xmm5 \n" - "lea 0x40(%2),%2 \n" - "paddd %%xmm0,%%xmm5 \n" - "movdqu %%xmm2,(%1) \n" - "movdqu %%xmm3,0x10(%1) \n" - "movdqu %%xmm4,0x20(%1) \n" - "movdqu %%xmm5,0x30(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x4,%3 \n" - "jge 40b \n" + "movdqu (%0),%%xmm2 \n" + "lea 0x10(%0),%0 \n" + "movdqa %%xmm2,%%xmm4 \n" + "punpcklbw %%xmm1,%%xmm2 \n" + "movdqa %%xmm2,%%xmm3 \n" + "punpcklwd %%xmm1,%%xmm2 \n" + "punpckhwd %%xmm1,%%xmm3 \n" + "punpckhbw %%xmm1,%%xmm4 \n" + "movdqa %%xmm4,%%xmm5 \n" + "punpcklwd %%xmm1,%%xmm4 \n" + "punpckhwd %%xmm1,%%xmm5 \n" + "paddd %%xmm2,%%xmm0 \n" + "movdqu (%2),%%xmm2 \n" + "paddd %%xmm0,%%xmm2 \n" + "paddd %%xmm3,%%xmm0 \n" + "movdqu 0x10(%2),%%xmm3 \n" + "paddd %%xmm0,%%xmm3 \n" + "paddd %%xmm4,%%xmm0 \n" + "movdqu 0x20(%2),%%xmm4 \n" + "paddd %%xmm0,%%xmm4 \n" + "paddd %%xmm5,%%xmm0 
\n" + "movdqu 0x30(%2),%%xmm5 \n" + "lea 0x40(%2),%2 \n" + "paddd %%xmm0,%%xmm5 \n" + "movdqu %%xmm2,(%1) \n" + "movdqu %%xmm3,0x10(%1) \n" + "movdqu %%xmm4,0x20(%1) \n" + "movdqu %%xmm5,0x30(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x4,%3 \n" + "jge 40b \n" "49: \n" - "add $0x3,%3 \n" - "jl 19f \n" + "add $0x3,%3 \n" + "jl 19f \n" // 1 pixel loop. LABELALIGN "10: \n" - "movd (%0),%%xmm2 \n" - "lea 0x4(%0),%0 \n" - "punpcklbw %%xmm1,%%xmm2 \n" - "punpcklwd %%xmm1,%%xmm2 \n" - "paddd %%xmm2,%%xmm0 \n" - "movdqu (%2),%%xmm2 \n" - "lea 0x10(%2),%2 \n" - "paddd %%xmm0,%%xmm2 \n" - "movdqu %%xmm2,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x1,%3 \n" - "jge 10b \n" + "movd (%0),%%xmm2 \n" + "lea 0x4(%0),%0 \n" + "punpcklbw %%xmm1,%%xmm2 \n" + "punpcklwd %%xmm1,%%xmm2 \n" + "paddd %%xmm2,%%xmm0 \n" + "movdqu (%2),%%xmm2 \n" + "lea 0x10(%2),%2 \n" + "paddd %%xmm0,%%xmm2 \n" + "movdqu %%xmm2,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x1,%3 \n" + "jge 10b \n" "19: \n" : "+r"(row), // %0 @@ -5789,119 +5886,119 @@ void CumulativeSumToAverageRow_SSE2(const int32_t* topleft, uint8_t* dst, int count) { asm volatile( - "movd %5,%%xmm5 \n" - "cvtdq2ps %%xmm5,%%xmm5 \n" - "rcpss %%xmm5,%%xmm4 \n" - "pshufd $0x0,%%xmm4,%%xmm4 \n" - "sub $0x4,%3 \n" - "jl 49f \n" - "cmpl $0x80,%5 \n" - "ja 40f \n" + "movd %5,%%xmm5 \n" + "cvtdq2ps %%xmm5,%%xmm5 \n" + "rcpss %%xmm5,%%xmm4 \n" + "pshufd $0x0,%%xmm4,%%xmm4 \n" + "sub $0x4,%3 \n" + "jl 49f \n" + "cmpl $0x80,%5 \n" + "ja 40f \n" - "pshufd $0x0,%%xmm5,%%xmm5 \n" - "pcmpeqb %%xmm6,%%xmm6 \n" - "psrld $0x10,%%xmm6 \n" - "cvtdq2ps %%xmm6,%%xmm6 \n" - "addps %%xmm6,%%xmm5 \n" - "mulps %%xmm4,%%xmm5 \n" - "cvtps2dq %%xmm5,%%xmm5 \n" - "packssdw %%xmm5,%%xmm5 \n" + "pshufd $0x0,%%xmm5,%%xmm5 \n" + "pcmpeqb %%xmm6,%%xmm6 \n" + "psrld $0x10,%%xmm6 \n" + "cvtdq2ps %%xmm6,%%xmm6 \n" + "addps %%xmm6,%%xmm5 \n" + "mulps %%xmm4,%%xmm5 \n" + "cvtps2dq %%xmm5,%%xmm5 \n" + "packssdw %%xmm5,%%xmm5 \n" // 4 pixel small loop. 
LABELALIGN "4: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x30(%0),%%xmm3 \n" - "psubd 0x00(%0,%4,4),%%xmm0 \n" - "psubd 0x10(%0,%4,4),%%xmm1 \n" - "psubd 0x20(%0,%4,4),%%xmm2 \n" - "psubd 0x30(%0,%4,4),%%xmm3 \n" - "lea 0x40(%0),%0 \n" - "psubd (%1),%%xmm0 \n" - "psubd 0x10(%1),%%xmm1 \n" - "psubd 0x20(%1),%%xmm2 \n" - "psubd 0x30(%1),%%xmm3 \n" - "paddd 0x00(%1,%4,4),%%xmm0 \n" - "paddd 0x10(%1,%4,4),%%xmm1 \n" - "paddd 0x20(%1,%4,4),%%xmm2 \n" - "paddd 0x30(%1,%4,4),%%xmm3 \n" - "lea 0x40(%1),%1 \n" - "packssdw %%xmm1,%%xmm0 \n" - "packssdw %%xmm3,%%xmm2 \n" - "pmulhuw %%xmm5,%%xmm0 \n" - "pmulhuw %%xmm5,%%xmm2 \n" - "packuswb %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jge 4b \n" - "jmp 49f \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x30(%0),%%xmm3 \n" + "psubd 0x00(%0,%4,4),%%xmm0 \n" + "psubd 0x10(%0,%4,4),%%xmm1 \n" + "psubd 0x20(%0,%4,4),%%xmm2 \n" + "psubd 0x30(%0,%4,4),%%xmm3 \n" + "lea 0x40(%0),%0 \n" + "psubd (%1),%%xmm0 \n" + "psubd 0x10(%1),%%xmm1 \n" + "psubd 0x20(%1),%%xmm2 \n" + "psubd 0x30(%1),%%xmm3 \n" + "paddd 0x00(%1,%4,4),%%xmm0 \n" + "paddd 0x10(%1,%4,4),%%xmm1 \n" + "paddd 0x20(%1,%4,4),%%xmm2 \n" + "paddd 0x30(%1,%4,4),%%xmm3 \n" + "lea 0x40(%1),%1 \n" + "packssdw %%xmm1,%%xmm0 \n" + "packssdw %%xmm3,%%xmm2 \n" + "pmulhuw %%xmm5,%%xmm0 \n" + "pmulhuw %%xmm5,%%xmm2 \n" + "packuswb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jge 4b \n" + "jmp 49f \n" // 4 pixel loop LABELALIGN "40: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x30(%0),%%xmm3 \n" - "psubd 0x00(%0,%4,4),%%xmm0 \n" - "psubd 0x10(%0,%4,4),%%xmm1 \n" - "psubd 0x20(%0,%4,4),%%xmm2 \n" - "psubd 0x30(%0,%4,4),%%xmm3 \n" - "lea 0x40(%0),%0 \n" - "psubd (%1),%%xmm0 \n" - "psubd 0x10(%1),%%xmm1 \n" - "psubd 0x20(%1),%%xmm2 \n" - 
"psubd 0x30(%1),%%xmm3 \n" - "paddd 0x00(%1,%4,4),%%xmm0 \n" - "paddd 0x10(%1,%4,4),%%xmm1 \n" - "paddd 0x20(%1,%4,4),%%xmm2 \n" - "paddd 0x30(%1,%4,4),%%xmm3 \n" - "lea 0x40(%1),%1 \n" - "cvtdq2ps %%xmm0,%%xmm0 \n" - "cvtdq2ps %%xmm1,%%xmm1 \n" - "mulps %%xmm4,%%xmm0 \n" - "mulps %%xmm4,%%xmm1 \n" - "cvtdq2ps %%xmm2,%%xmm2 \n" - "cvtdq2ps %%xmm3,%%xmm3 \n" - "mulps %%xmm4,%%xmm2 \n" - "mulps %%xmm4,%%xmm3 \n" - "cvtps2dq %%xmm0,%%xmm0 \n" - "cvtps2dq %%xmm1,%%xmm1 \n" - "cvtps2dq %%xmm2,%%xmm2 \n" - "cvtps2dq %%xmm3,%%xmm3 \n" - "packssdw %%xmm1,%%xmm0 \n" - "packssdw %%xmm3,%%xmm2 \n" - "packuswb %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jge 40b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x30(%0),%%xmm3 \n" + "psubd 0x00(%0,%4,4),%%xmm0 \n" + "psubd 0x10(%0,%4,4),%%xmm1 \n" + "psubd 0x20(%0,%4,4),%%xmm2 \n" + "psubd 0x30(%0,%4,4),%%xmm3 \n" + "lea 0x40(%0),%0 \n" + "psubd (%1),%%xmm0 \n" + "psubd 0x10(%1),%%xmm1 \n" + "psubd 0x20(%1),%%xmm2 \n" + "psubd 0x30(%1),%%xmm3 \n" + "paddd 0x00(%1,%4,4),%%xmm0 \n" + "paddd 0x10(%1,%4,4),%%xmm1 \n" + "paddd 0x20(%1,%4,4),%%xmm2 \n" + "paddd 0x30(%1,%4,4),%%xmm3 \n" + "lea 0x40(%1),%1 \n" + "cvtdq2ps %%xmm0,%%xmm0 \n" + "cvtdq2ps %%xmm1,%%xmm1 \n" + "mulps %%xmm4,%%xmm0 \n" + "mulps %%xmm4,%%xmm1 \n" + "cvtdq2ps %%xmm2,%%xmm2 \n" + "cvtdq2ps %%xmm3,%%xmm3 \n" + "mulps %%xmm4,%%xmm2 \n" + "mulps %%xmm4,%%xmm3 \n" + "cvtps2dq %%xmm0,%%xmm0 \n" + "cvtps2dq %%xmm1,%%xmm1 \n" + "cvtps2dq %%xmm2,%%xmm2 \n" + "cvtps2dq %%xmm3,%%xmm3 \n" + "packssdw %%xmm1,%%xmm0 \n" + "packssdw %%xmm3,%%xmm2 \n" + "packuswb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jge 40b \n" "49: \n" - "add $0x3,%3 \n" - "jl 19f \n" + "add $0x3,%3 \n" + "jl 19f \n" // 1 pixel loop LABELALIGN "10: \n" - "movdqu (%0),%%xmm0 \n" - "psubd 0x00(%0,%4,4),%%xmm0 \n" - "lea 0x10(%0),%0 \n" - "psubd (%1),%%xmm0 \n" - 
"paddd 0x00(%1,%4,4),%%xmm0 \n" - "lea 0x10(%1),%1 \n" - "cvtdq2ps %%xmm0,%%xmm0 \n" - "mulps %%xmm4,%%xmm0 \n" - "cvtps2dq %%xmm0,%%xmm0 \n" - "packssdw %%xmm0,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movd %%xmm0,(%2) \n" - "lea 0x4(%2),%2 \n" - "sub $0x1,%3 \n" - "jge 10b \n" + "movdqu (%0),%%xmm0 \n" + "psubd 0x00(%0,%4,4),%%xmm0 \n" + "lea 0x10(%0),%0 \n" + "psubd (%1),%%xmm0 \n" + "paddd 0x00(%1,%4,4),%%xmm0 \n" + "lea 0x10(%1),%1 \n" + "cvtdq2ps %%xmm0,%%xmm0 \n" + "mulps %%xmm4,%%xmm0 \n" + "cvtps2dq %%xmm0,%%xmm0 \n" + "packssdw %%xmm0,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movd %%xmm0,(%2) \n" + "lea 0x4(%2),%2 \n" + "sub $0x1,%3 \n" + "jge 10b \n" "19: \n" : "+r"(topleft), // %0 "+r"(botleft), // %1 @@ -5924,70 +6021,70 @@ void ARGBAffineRow_SSE2(const uint8_t* src_argb, intptr_t src_argb_stride_temp = src_argb_stride; intptr_t temp; asm volatile( - "movq (%3),%%xmm2 \n" - "movq 0x08(%3),%%xmm7 \n" - "shl $0x10,%1 \n" - "add $0x4,%1 \n" - "movd %1,%%xmm5 \n" - "sub $0x4,%4 \n" - "jl 49f \n" + "movq (%3),%%xmm2 \n" + "movq 0x08(%3),%%xmm7 \n" + "shl $0x10,%1 \n" + "add $0x4,%1 \n" + "movd %1,%%xmm5 \n" + "sub $0x4,%4 \n" + "jl 49f \n" - "pshufd $0x44,%%xmm7,%%xmm7 \n" - "pshufd $0x0,%%xmm5,%%xmm5 \n" - "movdqa %%xmm2,%%xmm0 \n" - "addps %%xmm7,%%xmm0 \n" - "movlhps %%xmm0,%%xmm2 \n" - "movdqa %%xmm7,%%xmm4 \n" - "addps %%xmm4,%%xmm4 \n" - "movdqa %%xmm2,%%xmm3 \n" - "addps %%xmm4,%%xmm3 \n" - "addps %%xmm4,%%xmm4 \n" + "pshufd $0x44,%%xmm7,%%xmm7 \n" + "pshufd $0x0,%%xmm5,%%xmm5 \n" + "movdqa %%xmm2,%%xmm0 \n" + "addps %%xmm7,%%xmm0 \n" + "movlhps %%xmm0,%%xmm2 \n" + "movdqa %%xmm7,%%xmm4 \n" + "addps %%xmm4,%%xmm4 \n" + "movdqa %%xmm2,%%xmm3 \n" + "addps %%xmm4,%%xmm3 \n" + "addps %%xmm4,%%xmm4 \n" // 4 pixel loop LABELALIGN "40: \n" - "cvttps2dq %%xmm2,%%xmm0 \n" // x,y float->int first 2 - "cvttps2dq %%xmm3,%%xmm1 \n" // x,y float->int next 2 - "packssdw %%xmm1,%%xmm0 \n" // x, y as 8 shorts - "pmaddwd %%xmm5,%%xmm0 \n" // off = x*4 + y*stride - 
"movd %%xmm0,%k1 \n" - "pshufd $0x39,%%xmm0,%%xmm0 \n" - "movd %%xmm0,%k5 \n" - "pshufd $0x39,%%xmm0,%%xmm0 \n" - "movd 0x00(%0,%1,1),%%xmm1 \n" - "movd 0x00(%0,%5,1),%%xmm6 \n" - "punpckldq %%xmm6,%%xmm1 \n" - "addps %%xmm4,%%xmm2 \n" - "movq %%xmm1,(%2) \n" - "movd %%xmm0,%k1 \n" - "pshufd $0x39,%%xmm0,%%xmm0 \n" - "movd %%xmm0,%k5 \n" - "movd 0x00(%0,%1,1),%%xmm0 \n" - "movd 0x00(%0,%5,1),%%xmm6 \n" - "punpckldq %%xmm6,%%xmm0 \n" - "addps %%xmm4,%%xmm3 \n" - "movq %%xmm0,0x08(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%4 \n" - "jge 40b \n" + "cvttps2dq %%xmm2,%%xmm0 \n" // x,y float->int first 2 + "cvttps2dq %%xmm3,%%xmm1 \n" // x,y float->int next 2 + "packssdw %%xmm1,%%xmm0 \n" // x, y as 8 shorts + "pmaddwd %%xmm5,%%xmm0 \n" // off = x*4 + y*stride + "movd %%xmm0,%k1 \n" + "pshufd $0x39,%%xmm0,%%xmm0 \n" + "movd %%xmm0,%k5 \n" + "pshufd $0x39,%%xmm0,%%xmm0 \n" + "movd 0x00(%0,%1,1),%%xmm1 \n" + "movd 0x00(%0,%5,1),%%xmm6 \n" + "punpckldq %%xmm6,%%xmm1 \n" + "addps %%xmm4,%%xmm2 \n" + "movq %%xmm1,(%2) \n" + "movd %%xmm0,%k1 \n" + "pshufd $0x39,%%xmm0,%%xmm0 \n" + "movd %%xmm0,%k5 \n" + "movd 0x00(%0,%1,1),%%xmm0 \n" + "movd 0x00(%0,%5,1),%%xmm6 \n" + "punpckldq %%xmm6,%%xmm0 \n" + "addps %%xmm4,%%xmm3 \n" + "movq %%xmm0,0x08(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%4 \n" + "jge 40b \n" "49: \n" - "add $0x3,%4 \n" - "jl 19f \n" + "add $0x3,%4 \n" + "jl 19f \n" // 1 pixel loop LABELALIGN "10: \n" - "cvttps2dq %%xmm2,%%xmm0 \n" - "packssdw %%xmm0,%%xmm0 \n" - "pmaddwd %%xmm5,%%xmm0 \n" - "addps %%xmm7,%%xmm2 \n" - "movd %%xmm0,%k1 \n" - "movd 0x00(%0,%1,1),%%xmm0 \n" - "movd %%xmm0,(%2) \n" - "lea 0x04(%2),%2 \n" - "sub $0x1,%4 \n" - "jge 10b \n" + "cvttps2dq %%xmm2,%%xmm0 \n" + "packssdw %%xmm0,%%xmm0 \n" + "pmaddwd %%xmm5,%%xmm0 \n" + "addps %%xmm7,%%xmm2 \n" + "movd %%xmm0,%k1 \n" + "movd 0x00(%0,%1,1),%%xmm0 \n" + "movd %%xmm0,(%2) \n" + "lea 0x04(%2),%2 \n" + "sub $0x1,%4 \n" + "jge 10b \n" "19: \n" : "+r"(src_argb), // %0 "+r"(src_argb_stride_temp), // 
%1 @@ -6009,68 +6106,68 @@ void InterpolateRow_SSSE3(uint8_t* dst_ptr, int dst_width, int source_y_fraction) { asm volatile( - "sub %1,%0 \n" - "cmp $0x0,%3 \n" - "je 100f \n" - "cmp $0x80,%3 \n" - "je 50f \n" + "sub %1,%0 \n" + "cmp $0x0,%3 \n" + "je 100f \n" + "cmp $0x80,%3 \n" + "je 50f \n" - "movd %3,%%xmm0 \n" - "neg %3 \n" - "add $0x100,%3 \n" - "movd %3,%%xmm5 \n" - "punpcklbw %%xmm0,%%xmm5 \n" - "punpcklwd %%xmm5,%%xmm5 \n" - "pshufd $0x0,%%xmm5,%%xmm5 \n" - "mov $0x80808080,%%eax \n" - "movd %%eax,%%xmm4 \n" - "pshufd $0x0,%%xmm4,%%xmm4 \n" + "movd %3,%%xmm0 \n" + "neg %3 \n" + "add $0x100,%3 \n" + "movd %3,%%xmm5 \n" + "punpcklbw %%xmm0,%%xmm5 \n" + "punpcklwd %%xmm5,%%xmm5 \n" + "pshufd $0x0,%%xmm5,%%xmm5 \n" + "mov $0x80808080,%%eax \n" + "movd %%eax,%%xmm4 \n" + "pshufd $0x0,%%xmm4,%%xmm4 \n" // General purpose row blend. LABELALIGN "1: \n" - "movdqu (%1),%%xmm0 \n" - "movdqu 0x00(%1,%4,1),%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklbw %%xmm2,%%xmm0 \n" - "punpckhbw %%xmm2,%%xmm1 \n" - "psubb %%xmm4,%%xmm0 \n" - "psubb %%xmm4,%%xmm1 \n" - "movdqa %%xmm5,%%xmm2 \n" - "movdqa %%xmm5,%%xmm3 \n" - "pmaddubsw %%xmm0,%%xmm2 \n" - "pmaddubsw %%xmm1,%%xmm3 \n" - "paddw %%xmm4,%%xmm2 \n" - "paddw %%xmm4,%%xmm3 \n" - "psrlw $0x8,%%xmm2 \n" - "psrlw $0x8,%%xmm3 \n" - "packuswb %%xmm3,%%xmm2 \n" - "movdqu %%xmm2,0x00(%1,%0,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" - "jmp 99f \n" + "movdqu (%1),%%xmm0 \n" + "movdqu 0x00(%1,%4,1),%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm2,%%xmm0 \n" + "punpckhbw %%xmm2,%%xmm1 \n" + "psubb %%xmm4,%%xmm0 \n" + "psubb %%xmm4,%%xmm1 \n" + "movdqa %%xmm5,%%xmm2 \n" + "movdqa %%xmm5,%%xmm3 \n" + "pmaddubsw %%xmm0,%%xmm2 \n" + "pmaddubsw %%xmm1,%%xmm3 \n" + "paddw %%xmm4,%%xmm2 \n" + "paddw %%xmm4,%%xmm3 \n" + "psrlw $0x8,%%xmm2 \n" + "psrlw $0x8,%%xmm3 \n" + "packuswb %%xmm3,%%xmm2 \n" + "movdqu %%xmm2,0x00(%1,%0,1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" + "jmp 99f \n" // 
Blend 50 / 50. LABELALIGN "50: \n" - "movdqu (%1),%%xmm0 \n" - "movdqu 0x00(%1,%4,1),%%xmm1 \n" - "pavgb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,0x00(%1,%0,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 50b \n" - "jmp 99f \n" + "movdqu (%1),%%xmm0 \n" + "movdqu 0x00(%1,%4,1),%%xmm1 \n" + "pavgb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,0x00(%1,%0,1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 50b \n" + "jmp 99f \n" // Blend 100 / 0 - Copy row unchanged. LABELALIGN "100: \n" - "movdqu (%1),%%xmm0 \n" - "movdqu %%xmm0,0x00(%1,%0,1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 100b \n" + "movdqu (%1),%%xmm0 \n" + "movdqu %%xmm0,0x00(%1,%0,1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 100b \n" "99: \n" : "+r"(dst_ptr), // %0 @@ -6090,61 +6187,61 @@ void InterpolateRow_AVX2(uint8_t* dst_ptr, int dst_width, int source_y_fraction) { asm volatile( - "cmp $0x0,%3 \n" - "je 100f \n" - "sub %1,%0 \n" - "cmp $0x80,%3 \n" - "je 50f \n" + "cmp $0x0,%3 \n" + "je 100f \n" + "sub %1,%0 \n" + "cmp $0x80,%3 \n" + "je 50f \n" - "vmovd %3,%%xmm0 \n" - "neg %3 \n" - "add $0x100,%3 \n" - "vmovd %3,%%xmm5 \n" - "vpunpcklbw %%xmm0,%%xmm5,%%xmm5 \n" - "vpunpcklwd %%xmm5,%%xmm5,%%xmm5 \n" + "vmovd %3,%%xmm0 \n" + "neg %3 \n" + "add $0x100,%3 \n" + "vmovd %3,%%xmm5 \n" + "vpunpcklbw %%xmm0,%%xmm5,%%xmm5 \n" + "vpunpcklwd %%xmm5,%%xmm5,%%xmm5 \n" "vbroadcastss %%xmm5,%%ymm5 \n" - "mov $0x80808080,%%eax \n" - "vmovd %%eax,%%xmm4 \n" + "mov $0x80808080,%%eax \n" + "vmovd %%eax,%%xmm4 \n" "vbroadcastss %%xmm4,%%ymm4 \n" // General purpose row blend. 
LABELALIGN "1: \n" - "vmovdqu (%1),%%ymm0 \n" - "vmovdqu 0x00(%1,%4,1),%%ymm2 \n" - "vpunpckhbw %%ymm2,%%ymm0,%%ymm1 \n" - "vpunpcklbw %%ymm2,%%ymm0,%%ymm0 \n" - "vpsubb %%ymm4,%%ymm1,%%ymm1 \n" - "vpsubb %%ymm4,%%ymm0,%%ymm0 \n" - "vpmaddubsw %%ymm1,%%ymm5,%%ymm1 \n" - "vpmaddubsw %%ymm0,%%ymm5,%%ymm0 \n" - "vpaddw %%ymm4,%%ymm1,%%ymm1 \n" - "vpaddw %%ymm4,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm1,%%ymm1 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,0x00(%1,%0,1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" - "jmp 99f \n" + "vmovdqu (%1),%%ymm0 \n" + "vmovdqu 0x00(%1,%4,1),%%ymm2 \n" + "vpunpckhbw %%ymm2,%%ymm0,%%ymm1 \n" + "vpunpcklbw %%ymm2,%%ymm0,%%ymm0 \n" + "vpsubb %%ymm4,%%ymm1,%%ymm1 \n" + "vpsubb %%ymm4,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm1,%%ymm5,%%ymm1 \n" + "vpmaddubsw %%ymm0,%%ymm5,%%ymm0 \n" + "vpaddw %%ymm4,%%ymm1,%%ymm1 \n" + "vpaddw %%ymm4,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,0x00(%1,%0,1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" + "jmp 99f \n" // Blend 50 / 50. LABELALIGN "50: \n" - "vmovdqu (%1),%%ymm0 \n" - "vpavgb 0x00(%1,%4,1),%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,0x00(%1,%0,1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 50b \n" - "jmp 99f \n" + "vmovdqu (%1),%%ymm0 \n" + "vpavgb 0x00(%1,%4,1),%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,0x00(%1,%0,1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 50b \n" + "jmp 99f \n" // Blend 100 / 0 - Copy row unchanged. 
LABELALIGN "100: \n" - "rep movsb \n" - "jmp 999f \n" + "rep movsb \n" + "jmp 999f \n" "99: \n" "vzeroupper \n" @@ -6166,20 +6263,20 @@ void ARGBShuffleRow_SSSE3(const uint8_t* src_argb, int width) { asm volatile( - "movdqu (%3),%%xmm5 \n" + "movdqu (%3),%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "pshufb %%xmm5,%%xmm0 \n" - "pshufb %%xmm5,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm1,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "pshufb %%xmm5,%%xmm0 \n" + "pshufb %%xmm5,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -6200,16 +6297,16 @@ void ARGBShuffleRow_AVX2(const uint8_t* src_argb, LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" - "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" - "vmovdqu %%ymm0,(%1) \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 @@ -6227,24 +6324,24 @@ void I422ToYUY2Row_SSE2(const uint8_t* src_y, int width) { asm volatile( - "sub %1,%2 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movq (%1),%%xmm2 \n" - "movq 0x00(%1,%2,1),%%xmm1 \n" - "add $0x8,%1 \n" - "punpcklbw %%xmm1,%%xmm2 \n" - "movdqu (%0),%%xmm0 \n" - "add $0x10,%0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklbw %%xmm2,%%xmm0 \n" - "punpckhbw %%xmm2,%%xmm1 \n" - "movdqu %%xmm0,(%3) \n" - "movdqu %%xmm1,0x10(%3) \n" - "lea 0x20(%3),%3 
\n" - "sub $0x10,%4 \n" - "jg 1b \n" + "movq (%1),%%xmm2 \n" + "movq 0x00(%1,%2,1),%%xmm1 \n" + "add $0x8,%1 \n" + "punpcklbw %%xmm1,%%xmm2 \n" + "movdqu (%0),%%xmm0 \n" + "add $0x10,%0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm2,%%xmm0 \n" + "punpckhbw %%xmm2,%%xmm1 \n" + "movdqu %%xmm0,(%3) \n" + "movdqu %%xmm1,0x10(%3) \n" + "lea 0x20(%3),%3 \n" + "sub $0x10,%4 \n" + "jg 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -6263,24 +6360,24 @@ void I422ToUYVYRow_SSE2(const uint8_t* src_y, int width) { asm volatile( - "sub %1,%2 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "movq (%1),%%xmm2 \n" - "movq 0x00(%1,%2,1),%%xmm1 \n" - "add $0x8,%1 \n" - "punpcklbw %%xmm1,%%xmm2 \n" - "movdqu (%0),%%xmm0 \n" - "movdqa %%xmm2,%%xmm1 \n" - "add $0x10,%0 \n" - "punpcklbw %%xmm0,%%xmm1 \n" - "punpckhbw %%xmm0,%%xmm2 \n" - "movdqu %%xmm1,(%3) \n" - "movdqu %%xmm2,0x10(%3) \n" - "lea 0x20(%3),%3 \n" - "sub $0x10,%4 \n" - "jg 1b \n" + "movq (%1),%%xmm2 \n" + "movq 0x00(%1,%2,1),%%xmm1 \n" + "add $0x8,%1 \n" + "punpcklbw %%xmm1,%%xmm2 \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm2,%%xmm1 \n" + "add $0x10,%0 \n" + "punpcklbw %%xmm0,%%xmm1 \n" + "punpckhbw %%xmm0,%%xmm2 \n" + "movdqu %%xmm1,(%3) \n" + "movdqu %%xmm2,0x10(%3) \n" + "lea 0x20(%3),%3 \n" + "sub $0x10,%4 \n" + "jg 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -6299,26 +6396,26 @@ void I422ToYUY2Row_AVX2(const uint8_t* src_y, int width) { asm volatile( - "sub %1,%2 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vpmovzxbw (%1),%%ymm1 \n" - "vpmovzxbw 0x00(%1,%2,1),%%ymm2 \n" - "add $0x10,%1 \n" - "vpsllw $0x8,%%ymm2,%%ymm2 \n" - "vpor %%ymm1,%%ymm2,%%ymm2 \n" - "vmovdqu (%0),%%ymm0 \n" - "add $0x20,%0 \n" - "vpunpcklbw %%ymm2,%%ymm0,%%ymm1 \n" - "vpunpckhbw %%ymm2,%%ymm0,%%ymm2 \n" + "vpmovzxbw (%1),%%ymm1 \n" + "vpmovzxbw 0x00(%1,%2,1),%%ymm2 \n" + "add $0x10,%1 \n" + "vpsllw $0x8,%%ymm2,%%ymm2 \n" + "vpor %%ymm1,%%ymm2,%%ymm2 \n" + "vmovdqu (%0),%%ymm0 \n" + "add $0x20,%0 \n" + 
"vpunpcklbw %%ymm2,%%ymm0,%%ymm1 \n" + "vpunpckhbw %%ymm2,%%ymm0,%%ymm2 \n" "vextractf128 $0x0,%%ymm1,(%3) \n" "vextractf128 $0x0,%%ymm2,0x10(%3) \n" "vextractf128 $0x1,%%ymm1,0x20(%3) \n" "vextractf128 $0x1,%%ymm2,0x30(%3) \n" - "lea 0x40(%3),%3 \n" - "sub $0x20,%4 \n" - "jg 1b \n" + "lea 0x40(%3),%3 \n" + "sub $0x20,%4 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 @@ -6338,26 +6435,26 @@ void I422ToUYVYRow_AVX2(const uint8_t* src_y, int width) { asm volatile( - "sub %1,%2 \n" + "sub %1,%2 \n" LABELALIGN "1: \n" - "vpmovzxbw (%1),%%ymm1 \n" - "vpmovzxbw 0x00(%1,%2,1),%%ymm2 \n" - "add $0x10,%1 \n" - "vpsllw $0x8,%%ymm2,%%ymm2 \n" - "vpor %%ymm1,%%ymm2,%%ymm2 \n" - "vmovdqu (%0),%%ymm0 \n" - "add $0x20,%0 \n" - "vpunpcklbw %%ymm0,%%ymm2,%%ymm1 \n" - "vpunpckhbw %%ymm0,%%ymm2,%%ymm2 \n" + "vpmovzxbw (%1),%%ymm1 \n" + "vpmovzxbw 0x00(%1,%2,1),%%ymm2 \n" + "add $0x10,%1 \n" + "vpsllw $0x8,%%ymm2,%%ymm2 \n" + "vpor %%ymm1,%%ymm2,%%ymm2 \n" + "vmovdqu (%0),%%ymm0 \n" + "add $0x20,%0 \n" + "vpunpcklbw %%ymm0,%%ymm2,%%ymm1 \n" + "vpunpckhbw %%ymm0,%%ymm2,%%ymm2 \n" "vextractf128 $0x0,%%ymm1,(%3) \n" "vextractf128 $0x0,%%ymm2,0x10(%3) \n" "vextractf128 $0x1,%%ymm1,0x20(%3) \n" "vextractf128 $0x1,%%ymm2,0x30(%3) \n" - "lea 0x40(%3),%3 \n" - "sub $0x20,%4 \n" - "jg 1b \n" + "lea 0x40(%3),%3 \n" + "sub $0x20,%4 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 @@ -6376,47 +6473,47 @@ void ARGBPolynomialRow_SSE2(const uint8_t* src_argb, int width) { asm volatile( - "pxor %%xmm3,%%xmm3 \n" + "pxor %%xmm3,%%xmm3 \n" // 2 pixel loop. 
LABELALIGN "1: \n" - "movq (%0),%%xmm0 \n" - "lea 0x8(%0),%0 \n" - "punpcklbw %%xmm3,%%xmm0 \n" - "movdqa %%xmm0,%%xmm4 \n" - "punpcklwd %%xmm3,%%xmm0 \n" - "punpckhwd %%xmm3,%%xmm4 \n" - "cvtdq2ps %%xmm0,%%xmm0 \n" - "cvtdq2ps %%xmm4,%%xmm4 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm4,%%xmm5 \n" - "mulps 0x10(%3),%%xmm0 \n" - "mulps 0x10(%3),%%xmm4 \n" - "addps (%3),%%xmm0 \n" - "addps (%3),%%xmm4 \n" - "movdqa %%xmm1,%%xmm2 \n" - "movdqa %%xmm5,%%xmm6 \n" - "mulps %%xmm1,%%xmm2 \n" - "mulps %%xmm5,%%xmm6 \n" - "mulps %%xmm2,%%xmm1 \n" - "mulps %%xmm6,%%xmm5 \n" - "mulps 0x20(%3),%%xmm2 \n" - "mulps 0x20(%3),%%xmm6 \n" - "mulps 0x30(%3),%%xmm1 \n" - "mulps 0x30(%3),%%xmm5 \n" - "addps %%xmm2,%%xmm0 \n" - "addps %%xmm6,%%xmm4 \n" - "addps %%xmm1,%%xmm0 \n" - "addps %%xmm5,%%xmm4 \n" - "cvttps2dq %%xmm0,%%xmm0 \n" - "cvttps2dq %%xmm4,%%xmm4 \n" - "packuswb %%xmm4,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movq %%xmm0,(%1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x2,%2 \n" - "jg 1b \n" + "movq (%0),%%xmm0 \n" + "lea 0x8(%0),%0 \n" + "punpcklbw %%xmm3,%%xmm0 \n" + "movdqa %%xmm0,%%xmm4 \n" + "punpcklwd %%xmm3,%%xmm0 \n" + "punpckhwd %%xmm3,%%xmm4 \n" + "cvtdq2ps %%xmm0,%%xmm0 \n" + "cvtdq2ps %%xmm4,%%xmm4 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm4,%%xmm5 \n" + "mulps 0x10(%3),%%xmm0 \n" + "mulps 0x10(%3),%%xmm4 \n" + "addps (%3),%%xmm0 \n" + "addps (%3),%%xmm4 \n" + "movdqa %%xmm1,%%xmm2 \n" + "movdqa %%xmm5,%%xmm6 \n" + "mulps %%xmm1,%%xmm2 \n" + "mulps %%xmm5,%%xmm6 \n" + "mulps %%xmm2,%%xmm1 \n" + "mulps %%xmm6,%%xmm5 \n" + "mulps 0x20(%3),%%xmm2 \n" + "mulps 0x20(%3),%%xmm6 \n" + "mulps 0x30(%3),%%xmm1 \n" + "mulps 0x30(%3),%%xmm5 \n" + "addps %%xmm2,%%xmm0 \n" + "addps %%xmm6,%%xmm4 \n" + "addps %%xmm1,%%xmm0 \n" + "addps %%xmm5,%%xmm4 \n" + "cvttps2dq %%xmm0,%%xmm0 \n" + "cvttps2dq %%xmm4,%%xmm4 \n" + "packuswb %%xmm4,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x2,%2 \n" + "jg 1b \n" : "+r"(src_argb), // 
%0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -6512,27 +6609,27 @@ void HalfFloatRow_AVX2(const uint16_t* src, int width) { scale *= kScaleBias; asm volatile( - "vbroadcastss %3, %%ymm4 \n" - "vpxor %%ymm5,%%ymm5,%%ymm5 \n" - "sub %0,%1 \n" + "vbroadcastss %3, %%ymm4 \n" + "vpxor %%ymm5,%%ymm5,%%ymm5 \n" + "sub %0,%1 \n" // 16 pixel loop. LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm2 \n" // 16 shorts - "add $0x20,%0 \n" - "vpunpckhwd %%ymm5,%%ymm2,%%ymm3 \n" // mutates - "vpunpcklwd %%ymm5,%%ymm2,%%ymm2 \n" - "vcvtdq2ps %%ymm3,%%ymm3 \n" - "vcvtdq2ps %%ymm2,%%ymm2 \n" - "vmulps %%ymm3,%%ymm4,%%ymm3 \n" - "vmulps %%ymm2,%%ymm4,%%ymm2 \n" - "vpsrld $0xd,%%ymm3,%%ymm3 \n" - "vpsrld $0xd,%%ymm2,%%ymm2 \n" - "vpackssdw %%ymm3, %%ymm2, %%ymm2 \n" // unmutates - "vmovdqu %%ymm2,-0x20(%0,%1,1) \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm2 \n" // 16 shorts + "add $0x20,%0 \n" + "vpunpckhwd %%ymm5,%%ymm2,%%ymm3 \n" // mutates + "vpunpcklwd %%ymm5,%%ymm2,%%ymm2 \n" + "vcvtdq2ps %%ymm3,%%ymm3 \n" + "vcvtdq2ps %%ymm2,%%ymm2 \n" + "vmulps %%ymm3,%%ymm4,%%ymm3 \n" + "vmulps %%ymm2,%%ymm4,%%ymm2 \n" + "vpsrld $0xd,%%ymm3,%%ymm3 \n" + "vpsrld $0xd,%%ymm2,%%ymm2 \n" + "vpackssdw %%ymm3, %%ymm2, %%ymm2 \n" // unmutates + "vmovdqu %%ymm2,-0x20(%0,%1,1) \n" + "sub $0x10,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src), // %0 @@ -6553,8 +6650,8 @@ void HalfFloatRow_F16C(const uint16_t* src, float scale, int width) { asm volatile( - "vbroadcastss %3, %%ymm4 \n" - "sub %0,%1 \n" + "vbroadcastss %3, %%ymm4 \n" + "sub %0,%1 \n" // 16 pixel loop. LABELALIGN @@ -6588,7 +6685,7 @@ void HalfFloatRow_F16C(const uint16_t* src, #ifdef HAS_HALFFLOATROW_F16C void HalfFloat1Row_F16C(const uint16_t* src, uint16_t* dst, float, int width) { asm volatile( - "sub %0,%1 \n" + "sub %0,%1 \n" // 16 pixel loop. LABELALIGN "1: \n" @@ -6622,21 +6719,21 @@ void ARGBColorTableRow_X86(uint8_t* dst_argb, // 1 pixel loop. 
LABELALIGN "1: \n" - "movzb (%0),%1 \n" - "lea 0x4(%0),%0 \n" - "movzb 0x00(%3,%1,4),%1 \n" - "mov %b1,-0x4(%0) \n" - "movzb -0x3(%0),%1 \n" - "movzb 0x01(%3,%1,4),%1 \n" - "mov %b1,-0x3(%0) \n" - "movzb -0x2(%0),%1 \n" - "movzb 0x02(%3,%1,4),%1 \n" - "mov %b1,-0x2(%0) \n" - "movzb -0x1(%0),%1 \n" - "movzb 0x03(%3,%1,4),%1 \n" - "mov %b1,-0x1(%0) \n" - "dec %2 \n" - "jg 1b \n" + "movzb (%0),%1 \n" + "lea 0x4(%0),%0 \n" + "movzb 0x00(%3,%1,4),%1 \n" + "mov %b1,-0x4(%0) \n" + "movzb -0x3(%0),%1 \n" + "movzb 0x01(%3,%1,4),%1 \n" + "mov %b1,-0x3(%0) \n" + "movzb -0x2(%0),%1 \n" + "movzb 0x02(%3,%1,4),%1 \n" + "mov %b1,-0x2(%0) \n" + "movzb -0x1(%0),%1 \n" + "movzb 0x03(%3,%1,4),%1 \n" + "mov %b1,-0x1(%0) \n" + "dec %2 \n" + "jg 1b \n" : "+r"(dst_argb), // %0 "=&d"(pixel_temp), // %1 "+r"(width) // %2 @@ -6655,18 +6752,18 @@ void RGBColorTableRow_X86(uint8_t* dst_argb, // 1 pixel loop. LABELALIGN "1: \n" - "movzb (%0),%1 \n" - "lea 0x4(%0),%0 \n" - "movzb 0x00(%3,%1,4),%1 \n" - "mov %b1,-0x4(%0) \n" - "movzb -0x3(%0),%1 \n" - "movzb 0x01(%3,%1,4),%1 \n" - "mov %b1,-0x3(%0) \n" - "movzb -0x2(%0),%1 \n" - "movzb 0x02(%3,%1,4),%1 \n" - "mov %b1,-0x2(%0) \n" - "dec %2 \n" - "jg 1b \n" + "movzb (%0),%1 \n" + "lea 0x4(%0),%0 \n" + "movzb 0x00(%3,%1,4),%1 \n" + "mov %b1,-0x4(%0) \n" + "movzb -0x3(%0),%1 \n" + "movzb 0x01(%3,%1,4),%1 \n" + "mov %b1,-0x3(%0) \n" + "movzb -0x2(%0),%1 \n" + "movzb 0x02(%3,%1,4),%1 \n" + "mov %b1,-0x2(%0) \n" + "dec %2 \n" + "jg 1b \n" : "+r"(dst_argb), // %0 "=&d"(pixel_temp), // %1 "+r"(width) // %2 @@ -6685,86 +6782,86 @@ void ARGBLumaColorTableRow_SSSE3(const uint8_t* src_argb, uintptr_t pixel_temp; uintptr_t table_temp; asm volatile( - "movd %6,%%xmm3 \n" - "pshufd $0x0,%%xmm3,%%xmm3 \n" - "pcmpeqb %%xmm4,%%xmm4 \n" - "psllw $0x8,%%xmm4 \n" - "pxor %%xmm5,%%xmm5 \n" + "movd %6,%%xmm3 \n" + "pshufd $0x0,%%xmm3,%%xmm3 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psllw $0x8,%%xmm4 \n" + "pxor %%xmm5,%%xmm5 \n" // 4 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%2),%%xmm0 \n" - "pmaddubsw %%xmm3,%%xmm0 \n" - "phaddw %%xmm0,%%xmm0 \n" - "pand %%xmm4,%%xmm0 \n" - "punpcklwd %%xmm5,%%xmm0 \n" - "movd %%xmm0,%k1 \n" // 32 bit offset - "add %5,%1 \n" - "pshufd $0x39,%%xmm0,%%xmm0 \n" + "movdqu (%2),%%xmm0 \n" + "pmaddubsw %%xmm3,%%xmm0 \n" + "phaddw %%xmm0,%%xmm0 \n" + "pand %%xmm4,%%xmm0 \n" + "punpcklwd %%xmm5,%%xmm0 \n" + "movd %%xmm0,%k1 \n" // 32 bit offset + "add %5,%1 \n" + "pshufd $0x39,%%xmm0,%%xmm0 \n" - "movzb (%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,(%3) \n" - "movzb 0x1(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0x1(%3) \n" - "movzb 0x2(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0x2(%3) \n" - "movzb 0x3(%2),%0 \n" - "mov %b0,0x3(%3) \n" + "movzb (%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,(%3) \n" + "movzb 0x1(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0x1(%3) \n" + "movzb 0x2(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0x2(%3) \n" + "movzb 0x3(%2),%0 \n" + "mov %b0,0x3(%3) \n" - "movd %%xmm0,%k1 \n" // 32 bit offset - "add %5,%1 \n" - "pshufd $0x39,%%xmm0,%%xmm0 \n" + "movd %%xmm0,%k1 \n" // 32 bit offset + "add %5,%1 \n" + "pshufd $0x39,%%xmm0,%%xmm0 \n" - "movzb 0x4(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0x4(%3) \n" - "movzb 0x5(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0x5(%3) \n" - "movzb 0x6(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0x6(%3) \n" - "movzb 0x7(%2),%0 \n" - "mov %b0,0x7(%3) \n" + "movzb 0x4(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0x4(%3) \n" + "movzb 0x5(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0x5(%3) \n" + "movzb 0x6(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0x6(%3) \n" + "movzb 0x7(%2),%0 \n" + "mov %b0,0x7(%3) \n" - "movd %%xmm0,%k1 \n" // 32 bit offset - "add %5,%1 \n" - "pshufd $0x39,%%xmm0,%%xmm0 \n" + "movd %%xmm0,%k1 \n" // 32 bit offset + "add %5,%1 \n" + "pshufd $0x39,%%xmm0,%%xmm0 \n" - "movzb 0x8(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 
\n" - "mov %b0,0x8(%3) \n" - "movzb 0x9(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0x9(%3) \n" - "movzb 0xa(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0xa(%3) \n" - "movzb 0xb(%2),%0 \n" - "mov %b0,0xb(%3) \n" + "movzb 0x8(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0x8(%3) \n" + "movzb 0x9(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0x9(%3) \n" + "movzb 0xa(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0xa(%3) \n" + "movzb 0xb(%2),%0 \n" + "mov %b0,0xb(%3) \n" - "movd %%xmm0,%k1 \n" // 32 bit offset - "add %5,%1 \n" + "movd %%xmm0,%k1 \n" // 32 bit offset + "add %5,%1 \n" - "movzb 0xc(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0xc(%3) \n" - "movzb 0xd(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0xd(%3) \n" - "movzb 0xe(%2),%0 \n" - "movzb 0x00(%1,%0,1),%0 \n" - "mov %b0,0xe(%3) \n" - "movzb 0xf(%2),%0 \n" - "mov %b0,0xf(%3) \n" - "lea 0x10(%2),%2 \n" - "lea 0x10(%3),%3 \n" - "sub $0x4,%4 \n" - "jg 1b \n" + "movzb 0xc(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0xc(%3) \n" + "movzb 0xd(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0xd(%3) \n" + "movzb 0xe(%2),%0 \n" + "movzb 0x00(%1,%0,1),%0 \n" + "mov %b0,0xe(%3) \n" + "movzb 0xf(%2),%0 \n" + "mov %b0,0xf(%3) \n" + "lea 0x10(%2),%2 \n" + "lea 0x10(%3),%3 \n" + "sub $0x4,%4 \n" + "jg 1b \n" : "=&d"(pixel_temp), // %0 "=&a"(table_temp), // %1 "+r"(src_argb), // %2 @@ -6837,46 +6934,47 @@ void NV21ToYUV24Row_AVX2(const uint8_t* src_y, src_y_ptr = (uint8_t*)src_y; asm volatile( - "vmovdqu %5, %%ymm0 \n" // init blend value - "vmovdqu %6, %%ymm1 \n" // init blend value - "vmovdqu %7, %%ymm2 \n" // init blend value - // "sub $0x20, %3 \n" //sub 32 from width for final loop + "vmovdqu %5, %%ymm0 \n" // init blend value + "vmovdqu %6, %%ymm1 \n" // init blend value + "vmovdqu %7, %%ymm2 \n" // init blend value + // "sub $0x20, %3 \n" //sub 32 from + // width for final loop LABELALIGN - "1: \n" // label 1 - "vmovdqu (%0,%4), %%ymm3 \n" // src_y - 
"vmovdqu 1(%1,%4), %%ymm4 \n" // src_uv+1 - "vmovdqu (%1), %%ymm5 \n" // src_uv - "vpshufb %8, %%ymm3, %%ymm13 \n" // y, kSHUF0 for shuf - "vpshufb %9, %%ymm4, %%ymm14 \n" // uv+1, kSHUF1 for - // shuf - "vpshufb %10, %%ymm5, %%ymm15 \n" // uv, kSHUF2 for - // shuf - "vpshufb %11, %%ymm3, %%ymm3 \n" // y kSHUF3 for shuf - "vpshufb %12, %%ymm4, %%ymm4 \n" // uv+1 kSHUF4 for - // shuf - "vpblendvb %%ymm0, %%ymm14, %%ymm13, %%ymm12 \n" // blend 0 - "vpblendvb %%ymm0, %%ymm13, %%ymm14, %%ymm14 \n" // blend 0 - "vpblendvb %%ymm2, %%ymm15, %%ymm12, %%ymm12 \n" // blend 2 - "vpblendvb %%ymm1, %%ymm15, %%ymm14, %%ymm13 \n" // blend 1 - "vpshufb %13, %%ymm5, %%ymm15 \n" // shuffle const - "vpor %%ymm4, %%ymm3, %%ymm5 \n" // get results - "vmovdqu %%ymm12, 0x20(%2) \n" // store dst_yuv+20h - "vpor %%ymm15, %%ymm5, %%ymm3 \n" // get results - "add $0x20, %4 \n" // add to src buffer - // ptr - "vinserti128 $0x1, %%xmm3, %%ymm13, %%ymm4 \n" // insert - "vperm2i128 $0x31, %%ymm13, %%ymm3, %%ymm5 \n" // insert - "vmovdqu %%ymm4, (%2) \n" // store dst_yuv - "vmovdqu %%ymm5, 0x40(%2) \n" // store dst_yuv+40h - "add $0x60,%2 \n" // add to dst buffer - // ptr - // "cmp %3, %4 \n" //(width64 - + "1: \n" // label 1 + "vmovdqu (%0,%4), %%ymm3 \n" // src_y + "vmovdqu 1(%1,%4), %%ymm4 \n" // src_uv+1 + "vmovdqu (%1), %%ymm5 \n" // src_uv + "vpshufb %8, %%ymm3, %%ymm13 \n" // y, kSHUF0 for shuf + "vpshufb %9, %%ymm4, %%ymm14 \n" // uv+1, kSHUF1 for + // shuf + "vpshufb %10, %%ymm5, %%ymm15 \n" // uv, kSHUF2 for + // shuf + "vpshufb %11, %%ymm3, %%ymm3 \n" // y kSHUF3 for shuf + "vpshufb %12, %%ymm4, %%ymm4 \n" // uv+1 kSHUF4 for + // shuf + "vpblendvb %%ymm0, %%ymm14, %%ymm13, %%ymm12 \n" // blend 0 + "vpblendvb %%ymm0, %%ymm13, %%ymm14, %%ymm14 \n" // blend 0 + "vpblendvb %%ymm2, %%ymm15, %%ymm12, %%ymm12 \n" // blend 2 + "vpblendvb %%ymm1, %%ymm15, %%ymm14, %%ymm13 \n" // blend 1 + "vpshufb %13, %%ymm5, %%ymm15 \n" // shuffle const + "vpor %%ymm4, %%ymm3, %%ymm5 \n" // get results + 
"vmovdqu %%ymm12, 0x20(%2) \n" // store dst_yuv+20h + "vpor %%ymm15, %%ymm5, %%ymm3 \n" // get results + "add $0x20, %4 \n" // add to src buffer + // ptr + "vinserti128 $0x1, %%xmm3, %%ymm13, %%ymm4 \n" // insert + "vperm2i128 $0x31, %%ymm13, %%ymm3, %%ymm5 \n" // insert + "vmovdqu %%ymm4, (%2) \n" // store dst_yuv + "vmovdqu %%ymm5, 0x40(%2) \n" // store dst_yuv+40h + "add $0x60,%2 \n" // add to dst buffer + // ptr + // "cmp %3, %4 \n" //(width64 - // 32 bytes) and src_offset - "sub $0x20,%3 \n" // 32 pixels per loop - "jg 1b \n" - "vzeroupper \n" // sse-avx2 - // transistions + "sub $0x20,%3 \n" // 32 pixels per loop + "jg 1b \n" + "vzeroupper \n" // sse-avx2 + // transistions : "+r"(src_y), //%0 "+r"(src_vu), //%1 @@ -6907,20 +7005,20 @@ static const uvec8 kShuffleUVToVU = {1u, 0u, 3u, 2u, 5u, 4u, 7u, 6u, void SwapUVRow_SSSE3(const uint8_t* src_uv, uint8_t* dst_vu, int width) { asm volatile( - "movdqu %3,%%xmm5 \n" + "movdqu %3,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "pshufb %%xmm5,%%xmm0 \n" - "pshufb %%xmm5,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm1,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "pshufb %%xmm5,%%xmm0 \n" + "pshufb %%xmm5,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_uv), // %0 "+r"(dst_vu), // %1 "+r"(width) // %2 @@ -6937,16 +7035,16 @@ void SwapUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_vu, int width) { LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" - "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" - "vmovdqu %%ymm0,(%1) \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + 
"vpshufb %%ymm5,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_uv), // %0 "+r"(dst_vu), // %1 @@ -6956,6 +7054,119 @@ void SwapUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_vu, int width) { } #endif // HAS_SWAPUVROW_AVX2 +void HalfMergeUVRow_SSSE3(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width) { + asm volatile( + "pcmpeqb %%xmm4,%%xmm4 \n" + "psrlw $0xf,%%xmm4 \n" + "packuswb %%xmm4,%%xmm4 \n" + "pxor %%xmm5,%%xmm5 \n" + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" // load 16 U values + "movdqu (%1),%%xmm1 \n" // load 16 V values + "movdqu 0(%0,%4,1),%%xmm2 \n" // 16 from next row + "movdqu 0(%1,%5,1),%%xmm3 \n" + "lea 0x10(%0),%0 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" // half size + "pmaddubsw %%xmm4,%%xmm1 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm4,%%xmm3 \n" + "lea 0x10(%1),%1 \n" + "paddw %%xmm2,%%xmm0 \n" + "paddw %%xmm3,%%xmm1 \n" + "psrlw $0x1,%%xmm0 \n" + "psrlw $0x1,%%xmm1 \n" + "pavgw %%xmm5,%%xmm0 \n" + "pavgw %%xmm5,%%xmm1 \n" + "packuswb %%xmm0,%%xmm0 \n" + "packuswb %%xmm1,%%xmm1 \n" + "punpcklbw %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" // store 8 UV pixels + "lea 0x10(%2),%2 \n" + "sub $0x10,%3 \n" // 16 src pixels per loop + "jg 1b \n" + : "+r"(src_u), // %0 + "+r"(src_v), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 + : "r"((intptr_t)(src_stride_u)), // %4 + "r"((intptr_t)(src_stride_v)) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} + +void HalfMergeUVRow_AVX2(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width) { + asm volatile( + "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $0xf,%%ymm4,%%ymm4 \n" + "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" + "vpxor %%ymm5,%%ymm5,%%ymm5 \n" + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" 
// load 32 U values + "vmovdqu (%1),%%ymm1 \n" // load 32 V values + "vmovdqu 0(%0,%4,1),%%ymm2 \n" // 32 from next row + "vmovdqu 0(%1,%5,1),%%ymm3 \n" + "lea 0x20(%0),%0 \n" + "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" // half size + "vpmaddubsw %%ymm4,%%ymm1,%%ymm1 \n" + "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" + "lea 0x20(%1),%1 \n" + "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vpsrlw $0x1,%%ymm0,%%ymm0 \n" + "vpsrlw $0x1,%%ymm1,%%ymm1 \n" + "vpavgw %%ymm5,%%ymm0,%%ymm0 \n" + "vpavgw %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" + "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%2) \n" // store 16 UV pixels + "lea 0x20(%2),%2 \n" + "sub $0x20,%3 \n" // 32 src pixels per loop + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_u), // %0 + "+r"(src_v), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 + : "r"((intptr_t)(src_stride_u)), // %4 + "r"((intptr_t)(src_stride_v)) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} + +void ClampFloatToZero_SSE2(const float* src_x, float* dst_y, int width) { + asm volatile( + "pxor %%xmm1,%%xmm1 \n" + + LABELALIGN + "1: \n" + "movd (%0),%%xmm0 \n" // load float + "maxss %%xmm1, %%xmm0 \n" // clamp to zero + "add 4, %0 \n" + "movd %%xmm0, (%1) \n" // store float + "add 4, %1 \n" + "sub $0x4,%2 \n" // 1 float per loop + "jg 1b \n" + : "+r"(src_x), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : + : "memory", "cc", "xmm0", "xmm1"); +} + #endif // defined(__x86_64__) || defined(__i386__) #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc b/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc index 50cfca726..9a8e2cb2d 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc @@ -21,6 +21,8 @@ extern "C" { // This module is for Mips MMI. 
#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) +// clang-format off + void RGB24ToARGBRow_MMI(const uint8_t* src_rgb24, uint8_t* dst_argb, int width) { @@ -688,12 +690,15 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, uint8_t* dst_v, int width) { uint64_t src_rgb1; - uint64_t ftmp[12]; + uint64_t ftmp[13]; + uint64_t tmp[1]; const uint64_t value = 0x4040; - const uint64_t mask_u = 0x0026004a00700002; - const uint64_t mask_v = 0x00020070005e0012; + const uint64_t mask_u = 0x0013002500380002; + const uint64_t mask_v = 0x00020038002f0009; __asm__ volatile( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" @@ -707,7 +712,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[dest0_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest0_u], %[dest0_u], %[value] \n\t" "pinsrh_3 %[dest0_v], %[src0], %[value] \n\t" @@ -725,7 +731,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -752,7 +759,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll 
%[dest1_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest1_u], %[dest1_u], %[value] \n\t" "pinsrh_3 %[dest1_v], %[src0], %[value] \n\t" @@ -770,7 +778,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -797,7 +806,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[dest2_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest2_u], %[dest2_u], %[value] \n\t" "pinsrh_3 %[dest2_v], %[src0], %[value] \n\t" @@ -815,7 +825,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -842,7 +853,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[dest3_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest3_u], %[dest3_u], %[value] \n\t" "pinsrh_3 %[dest3_v], %[src0], %[value] \n\t" @@ -860,7 +872,8 @@ void ARGBToUVRow_MMI(const 
uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -898,11 +911,12 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, [dest0_u] "=&f"(ftmp[4]), [dest0_v] "=&f"(ftmp[5]), [dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]), [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), - [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]) + [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), + [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), - [zero] "f"(0x00), [eight] "f"(0x08), [two] "f"(0x02), + [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), [sixteen] "f"(0x10) : "memory"); } @@ -992,12 +1006,15 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, uint8_t* dst_v, int width) { uint64_t src_rgb1; - uint64_t ftmp[12]; + uint64_t ftmp[13]; + uint64_t tmp[1]; const uint64_t value = 0x4040; - const uint64_t mask_u = 0x00020070004a0026; - const uint64_t mask_v = 0x0012005e00700002; + const uint64_t mask_u = 0x0002003800250013; + const uint64_t mask_v = 0x0009002f00380002; __asm__ volatile( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" @@ -1011,7 +1028,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + 
"psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[dest0_u], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest0_u], %[dest0_u], %[value] \n\t" "pinsrh_0 %[dest0_v], %[src0], %[value] \n\t" @@ -1029,7 +1047,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_0 %[src_hi], %[src0], %[value] \n\t" @@ -1056,7 +1075,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[dest1_u], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest1_u], %[dest1_u], %[value] \n\t" "pinsrh_0 %[dest1_v], %[src0], %[value] \n\t" @@ -1074,7 +1094,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_0 %[src_hi], %[src0], %[value] \n\t" @@ -1101,7 +1122,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[dest2_u], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest2_u], %[dest2_u], %[value] \n\t" "pinsrh_0 %[dest2_v], %[src0], %[value] \n\t" @@ 
-1119,7 +1141,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_0 %[src_hi], %[src0], %[value] \n\t" @@ -1146,7 +1169,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[dest3_u], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest3_u], %[dest3_u], %[value] \n\t" "pinsrh_0 %[dest3_v], %[src0], %[value] \n\t" @@ -1164,7 +1188,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsrl %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_0 %[src_hi], %[src0], %[value] \n\t" @@ -1202,11 +1227,12 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, [dest0_u] "=&f"(ftmp[4]), [dest0_v] "=&f"(ftmp[5]), [dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]), [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), - [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]) + [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), + [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), - [zero] "f"(0x00), [eight] "f"(0x08), [two] "f"(0x02), + 
[zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), [sixteen] "f"(0x10) : "memory"); } @@ -1296,12 +1322,15 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, uint8_t* dst_v, int width) { uint64_t src_rgb1; - uint64_t ftmp[12]; + uint64_t ftmp[13]; + uint64_t tmp[1]; const uint64_t value = 0x4040; - const uint64_t mask_u = 0x00020070004a0026; - const uint64_t mask_v = 0x0012005e00700002; + const uint64_t mask_u = 0x0002003800250013; + const uint64_t mask_v = 0x0009002F00380002; __asm__ volatile( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" @@ -1315,7 +1344,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest0_u], %[src0], %[value] \n\t" "dsll %[dest0_v], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest0_v], %[dest0_v], %[value] \n\t" @@ -1333,7 +1363,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -1360,7 +1391,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest1_u], %[src0], %[value] \n\t" "dsll %[dest1_v], %[src0], %[sixteen] 
\n\t" "pinsrh_0 %[dest1_v], %[dest1_v], %[value] \n\t" @@ -1378,7 +1410,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -1405,7 +1438,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest2_u], %[src0], %[value] \n\t" "dsll %[dest2_v], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest2_v], %[dest2_v], %[value] \n\t" @@ -1423,7 +1457,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -1450,7 +1485,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest3_u], %[src0], %[value] \n\t" "dsll %[dest3_v], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest3_v], %[dest3_v], %[value] \n\t" @@ -1468,7 +1504,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh 
%[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -1506,11 +1543,12 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, [dest0_u] "=&f"(ftmp[4]), [dest0_v] "=&f"(ftmp[5]), [dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]), [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), - [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]) + [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), + [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), - [zero] "f"(0x00), [eight] "f"(0x08), [two] "f"(0x02), + [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), [sixteen] "f"(0x10) : "memory"); } @@ -1600,12 +1638,15 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, uint8_t* dst_v, int width) { uint64_t src_rgb1; - uint64_t ftmp[12]; + uint64_t ftmp[13]; + uint64_t tmp[1]; const uint64_t value = 0x4040; - const uint64_t mask_u = 0x0026004a00700002; - const uint64_t mask_v = 0x00020070005e0012; + const uint64_t mask_u = 0x0013002500380002; + const uint64_t mask_v = 0x00020038002f0009; __asm__ volatile( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" @@ -1619,7 +1660,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[dest0_u], 
%[src0], %[value] \n\t" "dsrl %[dest0_v], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest0_v], %[dest0_v], %[value] \n\t" @@ -1637,7 +1679,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[src_lo], %[src0], %[value] \n\t" "dsrl %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_hi], %[src_hi], %[value] \n\t" @@ -1664,7 +1707,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[dest1_u], %[src0], %[value] \n\t" "dsrl %[dest1_v], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest1_v], %[dest1_v], %[value] \n\t" @@ -1682,7 +1726,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[src_lo], %[src0], %[value] \n\t" "dsrl %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_hi], %[src_hi], %[value] \n\t" @@ -1709,7 +1754,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[dest2_u], %[src0], %[value] \n\t" "dsrl %[dest2_v], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest2_v], %[dest2_v], %[value] \n\t" @@ -1727,7 +1773,8 @@ void RGBAToUVRow_MMI(const uint8_t* 
src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[src_lo], %[src0], %[value] \n\t" "dsrl %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_hi], %[src_hi], %[value] \n\t" @@ -1754,7 +1801,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[dest3_u], %[src0], %[value] \n\t" "dsrl %[dest3_v], %[src0], %[sixteen] \n\t" "pinsrh_3 %[dest3_v], %[dest3_v], %[value] \n\t" @@ -1772,7 +1820,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "paddh %[src0], %[src0], %[src_lo] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_0 %[src_lo], %[src0], %[value] \n\t" "dsrl %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_3 %[src_hi], %[src_hi], %[value] \n\t" @@ -1810,11 +1859,12 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, [dest0_u] "=&f"(ftmp[4]), [dest0_v] "=&f"(ftmp[5]), [dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]), [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), - [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]) + [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), + [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), - [zero] "f"(0x00), [eight] "f"(0x08), [two] "f"(0x02), + [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), 
[sixteen] "f"(0x10) : "memory"); } @@ -1908,12 +1958,15 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, uint8_t* dst_v, int width) { uint64_t src_rgb1; - uint64_t ftmp[12]; + uint64_t ftmp[13]; + uint64_t tmp[1]; const uint64_t value = 0x4040; - const uint64_t mask_u = 0x0026004a00700002; - const uint64_t mask_v = 0x00020070005e0012; + const uint64_t mask_u = 0x0013002500380002; + const uint64_t mask_v = 0x00020038002f0009; __asm__ volatile( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" @@ -1929,7 +1982,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[dest0_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest0_u], %[dest0_u], %[value] \n\t" "pinsrh_3 %[dest0_v], %[src0], %[value] \n\t" @@ -1949,7 +2003,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -1978,7 +2033,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[dest1_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest1_u], %[dest1_u], %[value] \n\t" "pinsrh_3 %[dest1_v], %[src0], %[value] \n\t" @@ 
-1998,7 +2054,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -2027,7 +2084,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[dest2_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest2_u], %[dest2_u], %[value] \n\t" "pinsrh_3 %[dest2_v], %[src0], %[value] \n\t" @@ -2047,7 +2105,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -2076,7 +2135,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[dest3_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest3_u], %[dest3_u], %[value] \n\t" "pinsrh_3 %[dest3_v], %[src0], %[value] \n\t" @@ -2096,7 +2156,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], 
%[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pinsrh_3 %[src_hi], %[src0], %[value] \n\t" @@ -2134,11 +2195,12 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, [dest0_u] "=&f"(ftmp[4]), [dest0_v] "=&f"(ftmp[5]), [dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]), [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), - [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]) + [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), + [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), - [zero] "f"(0x00), [eight] "f"(0x08), [two] "f"(0x02), + [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), [sixteen] "f"(0x10) : "memory"); } @@ -2232,12 +2294,15 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, uint8_t* dst_v, int width) { uint64_t src_rgb1; - uint64_t ftmp[12]; + uint64_t ftmp[13]; + uint64_t tmp[1]; const uint64_t value = 0x4040; - const uint64_t mask_u = 0x00020070004a0026; - const uint64_t mask_v = 0x0012005e00700002; + const uint64_t mask_u = 0x0002003800250013; + const uint64_t mask_v = 0x0009002f00380002; __asm__ volatile( + "dli %[tmp0], 0x0001000100010001 \n\t" + "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" @@ -2253,7 +2318,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest0_u], %[src0], %[value] \n\t" "dsll %[dest0_v], %[src0], %[sixteen] \n\t" 
"pinsrh_0 %[dest0_v], %[dest0_v], %[value] \n\t" @@ -2273,7 +2339,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -2302,7 +2369,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest1_u], %[src0], %[value] \n\t" "dsll %[dest1_v], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest1_v], %[dest1_v], %[value] \n\t" @@ -2322,7 +2390,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -2351,7 +2420,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest2_u], %[src0], %[value] \n\t" "dsll %[dest2_v], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest2_v], %[dest2_v], %[value] \n\t" @@ -2371,7 +2441,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], 
%[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -2400,7 +2471,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[dest3_u], %[src0], %[value] \n\t" "dsll %[dest3_v], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest3_v], %[dest3_v], %[value] \n\t" @@ -2420,7 +2492,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dsll %[src1], %[src1], %[eight] \n\t" "punpckhbh %[src_hi], %[src1], %[zero] \n\t" "paddh %[src0], %[src0], %[src_hi] \n\t" - "psrlh %[src0], %[src0], %[two] \n\t" + "paddh %[src0], %[src0], %[ftmp12] \n\t" + "psrlh %[src0], %[src0], %[one] \n\t" "pinsrh_3 %[src_lo], %[src0], %[value] \n\t" "dsll %[src_hi], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_hi], %[src_hi], %[value] \n\t" @@ -2458,11 +2531,12 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, [dest0_u] "=&f"(ftmp[4]), [dest0_v] "=&f"(ftmp[5]), [dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]), [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), - [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]) + [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), + [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), - [zero] "f"(0x00), [eight] "f"(0x08), [two] "f"(0x02), + [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), [sixteen] "f"(0x10) : "memory"); } @@ -2471,10 +2545,10 @@ void ARGBToYJRow_MMI(const uint8_t* 
src_argb0, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest, dest0, dest1, dest2, dest3; uint64_t tmp0, tmp1; - const uint64_t shift = 0x07; - const uint64_t value = 0x0040; + const uint64_t shift = 0x08; + const uint64_t value = 0x80; const uint64_t mask0 = 0x0; - const uint64_t mask1 = 0x00010026004B000FULL; + const uint64_t mask1 = 0x0001004D0096001DULL; __asm__ volatile( "1: \n\t" @@ -2558,8 +2632,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, uint64_t src_rgb1; uint64_t ftmp[12]; const uint64_t value = 0x4040; - const uint64_t mask_u = 0x002b0054007f0002; - const uint64_t mask_v = 0x0002007f006b0014; + const uint64_t mask_u = 0x0015002a003f0002; + const uint64_t mask_v = 0x0002003f0035000a; __asm__ volatile( "1: \n\t" @@ -2572,8 +2646,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], %[src_lo], %[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[dest0_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest0_u], %[dest0_u], %[value] \n\t" @@ -2589,8 +2663,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], %[src_lo], %[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" @@ -2615,8 +2689,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], %[src_lo], 
%[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[dest1_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest1_u], %[dest1_u], %[value] \n\t" @@ -2632,8 +2706,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], %[src_lo], %[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" @@ -2658,8 +2732,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], %[src_lo], %[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[dest2_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest2_u], %[dest2_u], %[value] \n\t" @@ -2675,8 +2749,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], %[src_lo], %[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" @@ -2701,8 +2775,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], 
%[src_lo], %[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[dest3_u], %[src0], %[sixteen] \n\t" "pinsrh_0 %[dest3_u], %[dest3_u], %[value] \n\t" @@ -2718,8 +2792,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "punpckhbh %[src_hi], %[src0], %[zero] \n\t" "punpcklbh %[src0], %[src1], %[zero] \n\t" "punpckhbh %[src1], %[src1], %[zero] \n\t" - "pavgh %[src0], %[src_lo], %[src0] \n\t" - "pavgh %[src1], %[src_hi], %[src1] \n\t" + "paddh %[src0], %[src_lo], %[src0] \n\t" + "paddh %[src1], %[src_hi], %[src1] \n\t" "pavgh %[src0], %[src0], %[src1] \n\t" "dsll %[src_lo], %[src0], %[sixteen] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" @@ -2762,7 +2836,7 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), - [zero] "f"(0x00), [eight] "f"(0x08), [two] "f"(0x02), + [zero] "f"(0x00), [eight] "f"(0x08), [sixteen] "f"(0x10) : "memory"); } @@ -4052,10 +4126,10 @@ void ARGBGrayRow_MMI(const uint8_t* src_argb, uint8_t* dst_argb, int width) { uint64_t tmp0, tmp1; const uint64_t mask0 = 0x0; const uint64_t mask1 = 0x01; - const uint64_t mask2 = 0x00400026004B000FULL; + const uint64_t mask2 = 0x0080004D0096001DULL; const uint64_t mask3 = 0xFF000000FF000000ULL; const uint64_t mask4 = ~mask3; - const uint64_t shift = 0x07; + const uint64_t shift = 0x08; __asm__ volatile( "1: \n\t" @@ -4778,7 +4852,9 @@ void J400ToARGBRow_MMI(const uint8_t* src_y, uint8_t* dst_argb, int width) { : "memory"); } -void I400ToARGBRow_MMI(const uint8_t* src_y, uint8_t* rgb_buf, int width) { +// TODO - respect YuvConstants +void I400ToARGBRow_MMI(const uint8_t* src_y, uint8_t* rgb_buf, + const struct YuvConstants*, int width) { uint64_t src, src_lo, src_hi, dest, dest_lo, 
dest_hi; const uint64_t mask0 = 0x0; const uint64_t mask1 = 0x55; @@ -4912,10 +4988,10 @@ void MirrorRow_MMI(const uint8_t* src, uint8_t* dst, int width) { : "memory"); } -void MirrorUVRow_MMI(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +void MirrorSplitUVRow_MMI(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { uint64_t src0, src1, dest0, dest1; const uint64_t mask0 = 0x00ff00ff00ff00ffULL; const uint64_t mask1 = 0x1b; @@ -6040,90 +6116,93 @@ void I444ToARGBRow_MMI(const uint8_t* src_y, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) { - uint64_t y, u, v; - uint64_t b_vec[2], g_vec[2], r_vec[2]; + uint64_t y,u,v; + uint64_t b_vec[2],g_vec[2],r_vec[2]; uint64_t mask = 0xff00ff00ff00ff00ULL; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; - __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" // yg - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" // bb - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" // ub - "or %[ub], %[ub], %[mask] \n\t" // must - // sign - // extension - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" // bg - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" // ug - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" // vg - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" // br - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" // vr - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask] \n\t" // sign - // extension + uint64_t ub,ug,vg,vr,bb,bg,br,yg; + __asm__ volatile ( + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t"//yg + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t"//bb + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t"//ub + "or %[ub], %[ub], %[mask] \n\t"//must sign extension + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t"//bg + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t"//ug + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] 
\n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t"//vg + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t"//br + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t"//vr + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask] \n\t"//sign extension - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" // y*0x0101 - "pmulhuh %[y], %[y], %[yg] \n\t" // y1 + "punpcklbh %[y], %[y], %[y] \n\t"//y*0x0101 + "pmulhuh %[y], %[y], %[yg] \n\t"//y1 - "punpcklbh %[u], %[u], %[zero] \n\t" // u - "paddsh %[b_vec0], %[y], %[bb] \n\t" - "pmullh %[b_vec1], %[u], %[ub] \n\t" - "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" - "psrah %[b_vec0], %[b_vec0], %[six] \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t"//u + "paddsh %[b_vec0], %[y], %[bb] \n\t" + "pmullh %[b_vec1], %[u], %[ub] \n\t" + "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" + "psrah %[b_vec0], %[b_vec0], %[six] \n\t" - "punpcklbh %[v], %[v], %[zero] \n\t" // v - "paddsh %[g_vec0], %[y], %[bg] \n\t" - "pmullh %[g_vec1], %[u], %[ug] \n\t" // u*ug - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "pmullh %[g_vec1], %[v], %[vg] \n\t" // v*vg - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "psrah %[g_vec0], %[g_vec0], %[six] \n\t" + "punpcklbh %[v], %[v], %[zero] \n\t"//v + "paddsh %[g_vec0], %[y], %[bg] \n\t" + "pmullh %[g_vec1], %[u], %[ug] \n\t"//u*ug + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "pmullh %[g_vec1], %[v], %[vg] \n\t"//v*vg + "psubsh %[g_vec0], %[g_vec0], 
%[g_vec1] \n\t" + "psrah %[g_vec0], %[g_vec0], %[six] \n\t" - "paddsh %[r_vec0], %[y], %[br] \n\t" - "pmullh %[r_vec1], %[v], %[vr] \n\t" // v*vr - "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" - "psrah %[r_vec0], %[r_vec0], %[six] \n\t" + "paddsh %[r_vec0], %[y], %[br] \n\t" + "pmullh %[r_vec1], %[v], %[vr] \n\t"//v*vr + "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" + "psrah %[r_vec0], %[r_vec0], %[six] \n\t" - "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t" // rrrrbbbb - "packushb %[g_vec0], %[g_vec0], %[alpha] \n\t" // ffffgggg - "punpcklwd %[g_vec0], %[g_vec0], %[alpha] \n\t" - "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" // gbgbgbgb - "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" // frfrfrfr - "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" // frgbfrgb - "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" // frgbfrgb - "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" + "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t"//rrrrbbbb + "packushb %[g_vec0], %[g_vec0], %[alpha] \n\t"//ffffgggg + "punpcklwd %[g_vec0], %[g_vec0], %[alpha] \n\t" + "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t"//gbgbgbgb + "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t"//frfrfrfr + "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t"//frgbfrgb + "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t"//frgbfrgb + "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x04 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x04 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec0] "=&f"(b_vec[0]), - [b_vec1] "=&f"(b_vec[1]), [g_vec0] 
"=&f"(g_vec[0]), - [g_vec1] "=&f"(g_vec[1]), [r_vec0] "=&f"(r_vec[0]), - [r_vec1] "=&f"(r_vec[1]), [ub] "=&f"(ub), [ug] "=&f"(ug), - [vg] "=&f"(vg), [vr] "=&f"(vr), [bb] "=&f"(bb), [bg] "=&f"(bg), - [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [rgbbuf_ptr] "r"(rgb_buf), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [alpha] "f"(-1), [six] "f"(0x6), - [five] "f"(0x55), [mask] "f"(mask) - : "memory"); + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x04 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x04 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" + : [y]"=&f"(y), + [u]"=&f"(u), [v]"=&f"(v), + [b_vec0]"=&f"(b_vec[0]), [b_vec1]"=&f"(b_vec[1]), + [g_vec0]"=&f"(g_vec[0]), [g_vec1]"=&f"(g_vec[1]), + [r_vec0]"=&f"(r_vec[0]), [r_vec1]"=&f"(r_vec[1]), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [alpha]"f"(-1), + [six]"f"(0x6), [five]"f"(0x55), + [mask]"f"(mask) + : "memory" + ); } // Also used for 420 @@ -6133,96 +6212,99 @@ void I422ToARGBRow_MMI(const uint8_t* src_y, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) { - uint64_t y, u, v; - uint64_t b_vec[2], g_vec[2], r_vec[2]; + uint64_t y,u,v; + uint64_t b_vec[2],g_vec[2],r_vec[2]; uint64_t mask = 0xff00ff00ff00ff00ULL; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" // yg - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" // bb - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" // ub - "or %[ub], %[ub], %[mask] \n\t" // must - // sign - // extension - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" // bg - "ldc1 %[ug], 
0x20(%[yuvcons_ptr]) \n\t" // ug - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" // vg - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" // br - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" // vr - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask] \n\t" // sign - // extension + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t"//yg + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t"//bb + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t"//ub + "or %[ub], %[ub], %[mask] \n\t"//must sign extension + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t"//bg + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t"//ug + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t"//vg + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t"//br + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t"//vr + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask] \n\t"//sign extension - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" // y*0x0101 - "pmulhuh %[y], %[y], %[yg] \n\t" // y1 + "punpcklbh %[y], %[y], %[y] \n\t"//y*0x0101 + "pmulhuh %[y], %[y], %[yg] \n\t"//y1 - // u3|u2|u1|u0 --> u1|u1|u0|u0 - "punpcklbh %[u], %[u], %[u] \n\t" // u - "punpcklbh %[u], %[u], %[zero] \n\t" - "paddsh %[b_vec0], %[y], %[bb] \n\t" - "pmullh 
%[b_vec1], %[u], %[ub] \n\t" - "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" - "psrah %[b_vec0], %[b_vec0], %[six] \n\t" + //u3|u2|u1|u0 --> u1|u1|u0|u0 + "punpcklbh %[u], %[u], %[u] \n\t"//u + "punpcklbh %[u], %[u], %[zero] \n\t" + "paddsh %[b_vec0], %[y], %[bb] \n\t" + "pmullh %[b_vec1], %[u], %[ub] \n\t" + "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" + "psrah %[b_vec0], %[b_vec0], %[six] \n\t" - // v3|v2|v1|v0 --> v1|v1|v0|v0 - "punpcklbh %[v], %[v], %[v] \n\t" // v - "punpcklbh %[v], %[v], %[zero] \n\t" - "paddsh %[g_vec0], %[y], %[bg] \n\t" - "pmullh %[g_vec1], %[u], %[ug] \n\t" // u*ug - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "pmullh %[g_vec1], %[v], %[vg] \n\t" // v*vg - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "psrah %[g_vec0], %[g_vec0], %[six] \n\t" + //v3|v2|v1|v0 --> v1|v1|v0|v0 + "punpcklbh %[v], %[v], %[v] \n\t"//v + "punpcklbh %[v], %[v], %[zero] \n\t" + "paddsh %[g_vec0], %[y], %[bg] \n\t" + "pmullh %[g_vec1], %[u], %[ug] \n\t"//u*ug + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "pmullh %[g_vec1], %[v], %[vg] \n\t"//v*vg + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "psrah %[g_vec0], %[g_vec0], %[six] \n\t" - "paddsh %[r_vec0], %[y], %[br] \n\t" - "pmullh %[r_vec1], %[v], %[vr] \n\t" // v*vr - "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" - "psrah %[r_vec0], %[r_vec0], %[six] \n\t" + "paddsh %[r_vec0], %[y], %[br] \n\t" + "pmullh %[r_vec1], %[v], %[vr] \n\t"//v*vr + "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" + "psrah %[r_vec0], %[r_vec0], %[six] \n\t" - "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t" // rrrrbbbb - "packushb %[g_vec0], %[g_vec0], %[alpha] \n\t" // ffffgggg - "punpcklwd %[g_vec0], %[g_vec0], %[alpha] \n\t" - "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" // gbgbgbgb - "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" // frfrfrfr - "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" // frgbfrgb - "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" // frgbfrgb - "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" - 
"gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" + "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t"//rrrrbbbb + "packushb %[g_vec0], %[g_vec0], %[alpha] \n\t"//ffffgggg + "punpcklwd %[g_vec0], %[g_vec0], %[alpha] \n\t" + "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t"//gbgbgbgb + "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t"//frfrfrfr + "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t"//frgbfrgb + "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t"//frgbfrgb + "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec0] "=&f"(b_vec[0]), - [b_vec1] "=&f"(b_vec[1]), [g_vec0] "=&f"(g_vec[0]), - [g_vec1] "=&f"(g_vec[1]), [r_vec0] "=&f"(r_vec[0]), - [r_vec1] "=&f"(r_vec[1]), [ub] "=&f"(ub), [ug] "=&f"(ug), - [vg] "=&f"(vg), [vr] "=&f"(vr), [bb] "=&f"(bb), [bg] "=&f"(bg), - [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [rgbbuf_ptr] "r"(rgb_buf), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [alpha] "f"(-1), [six] "f"(0x6), - [five] "f"(0x55), [mask] "f"(mask) - : "memory"); + : [y]"=&f"(y), + [u]"=&f"(u), [v]"=&f"(v), + [b_vec0]"=&f"(b_vec[0]), [b_vec1]"=&f"(b_vec[1]), + [g_vec0]"=&f"(g_vec[0]), [g_vec1]"=&f"(g_vec[1]), + [r_vec0]"=&f"(r_vec[0]), 
[r_vec1]"=&f"(r_vec[1]), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [alpha]"f"(-1), + [six]"f"(0x6), [five]"f"(0x55), + [mask]"f"(mask) + : "memory" + ); } // 10 bit YUV to ARGB @@ -6232,96 +6314,102 @@ void I210ToARGBRow_MMI(const uint16_t* src_y, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) { - uint64_t y, u, v; - uint64_t b_vec[2], g_vec[2], r_vec[2]; + uint64_t y,u,v; + uint64_t b_vec[2],g_vec[2],r_vec[2]; uint64_t mask = 0xff00ff00ff00ff00ULL; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + 
"punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask] \n\t" - "1: \n\t" - "gsldlc1 %[y], 0x07(%[y_ptr]) \n\t" - "gsldrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gsldlc1 %[y], 0x07(%[y_ptr]) \n\t" + "gsldrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "psllh %[y], %[y], %[six] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "psllh %[y], %[y], %[six] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "punpcklhw %[u], %[u], %[u] \n\t" - "psrah %[u], %[u], %[two] \n\t" - "punpcklhw %[v], %[v], %[v] \n\t" - "psrah %[v], %[v], %[two] \n\t" - "pminsh %[u], %[u], %[mask1] \n\t" - "pminsh %[v], %[v], %[mask1] \n\t" + "punpcklhw %[u], %[u], %[u] \n\t" + "psrah %[u], %[u], %[two] \n\t" + "punpcklhw %[v], %[v], %[v] \n\t" + "psrah %[v], %[v], %[two] \n\t" + "pminsh %[u], %[u], %[mask1] \n\t" + "pminsh %[v], %[v], %[mask1] \n\t" - "paddsh %[b_vec0], %[y], %[bb] \n\t" - "pmullh %[b_vec1], %[u], %[ub] \n\t" - "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" + "paddsh %[b_vec0], %[y], %[bb] \n\t" + "pmullh %[b_vec1], %[u], %[ub] \n\t" + "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" - "paddsh %[g_vec0], %[y], %[bg] \n\t" - "pmullh %[g_vec1], %[u], %[ug] \n\t" - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "pmullh %[g_vec1], %[v], %[vg] \n\t" - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "paddsh %[g_vec0], %[y], %[bg] \n\t" + "pmullh %[g_vec1], %[u], %[ug] \n\t" + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "pmullh %[g_vec1], %[v], %[vg] \n\t" + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "paddsh %[r_vec0], %[y], %[br] \n\t" - "pmullh %[r_vec1], %[v], %[vr] \n\t" - "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" + "paddsh %[r_vec0], %[y], %[br] \n\t" 
+ "pmullh %[r_vec1], %[v], %[vr] \n\t" + "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" - "psrah %[b_vec0], %[b_vec0], %[six] \n\t" - "psrah %[g_vec0], %[g_vec0], %[six] \n\t" - "psrah %[r_vec0], %[r_vec0], %[six] \n\t" + "psrah %[b_vec0], %[b_vec0], %[six] \n\t" + "psrah %[g_vec0], %[g_vec0], %[six] \n\t" + "psrah %[r_vec0], %[r_vec0], %[six] \n\t" - "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t" - "packushb %[g_vec0], %[g_vec0], %[alpha] \n\t" - "punpcklwd %[g_vec0], %[g_vec0], %[alpha] \n\t" - "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" - "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" - "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" - "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" - "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" + "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t" + "packushb %[g_vec0], %[g_vec0], %[alpha] \n\t" + "punpcklwd %[g_vec0], %[g_vec0], %[alpha] \n\t" + "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" + "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" + "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" + "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" + "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x08 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x04 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x04 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x08 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x04 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x04 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec0] "=&f"(b_vec[0]), - 
[b_vec1] "=&f"(b_vec[1]), [g_vec0] "=&f"(g_vec[0]), - [g_vec1] "=&f"(g_vec[1]), [r_vec0] "=&f"(r_vec[0]), - [r_vec1] "=&f"(r_vec[1]), [ub] "=&f"(ub), [ug] "=&f"(ug), - [vg] "=&f"(vg), [vr] "=&f"(vr), [bb] "=&f"(bb), [bg] "=&f"(bg), - [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [rgbbuf_ptr] "r"(rgb_buf), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [alpha] "f"(-1), [six] "f"(0x6), - [five] "f"(0x55), [mask] "f"(mask), [two] "f"(0x02), - [mask1] "f"(0x00ff00ff00ff00ff) - : "memory"); + : [y]"=&f"(y), + [u]"=&f"(u), [v]"=&f"(v), + [b_vec0]"=&f"(b_vec[0]), [b_vec1]"=&f"(b_vec[1]), + [g_vec0]"=&f"(g_vec[0]), [g_vec1]"=&f"(g_vec[1]), + [r_vec0]"=&f"(r_vec[0]), [r_vec1]"=&f"(r_vec[1]), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [alpha]"f"(-1), + [six]"f"(0x6), [five]"f"(0x55), + [mask]"f"(mask), [two]"f"(0x02), + [mask1]"f"(0x00ff00ff00ff00ff) + : "memory" + ); } void I422AlphaToARGBRow_MMI(const uint8_t* src_y, @@ -6331,96 +6419,102 @@ void I422AlphaToARGBRow_MMI(const uint8_t* src_y, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) { - uint64_t y, u, v, a; - uint64_t b_vec[2], g_vec[2], r_vec[2]; + uint64_t y,u,v,a; + uint64_t b_vec[2],g_vec[2],r_vec[2]; uint64_t mask = 0xff00ff00ff00ff00ULL; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 
0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "gslwlc1 %[a], 0x03(%[a_ptr]) \n\t" - "gslwrc1 %[a], 0x00(%[a_ptr]) \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "gslwlc1 %[a], 0x03(%[a_ptr]) \n\t" + "gslwrc1 %[a], 0x00(%[a_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" // y*0x0101 - "pmulhuh %[y], %[y], %[yg] \n\t" // y1 + "punpcklbh %[y], %[y], %[y] \n\t"//y*0x0101 + "pmulhuh %[y], %[y], %[yg] \n\t"//y1 - // u3|u2|u1|u0 --> u1|u1|u0|u0 - "punpcklbh %[u], %[u], %[u] \n\t" // u - "punpcklbh %[u], %[u], %[zero] \n\t" - "paddsh %[b_vec0], %[y], %[bb] \n\t" - "pmullh %[b_vec1], %[u], %[ub] \n\t" - "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" - "psrah 
%[b_vec0], %[b_vec0], %[six] \n\t" + //u3|u2|u1|u0 --> u1|u1|u0|u0 + "punpcklbh %[u], %[u], %[u] \n\t"//u + "punpcklbh %[u], %[u], %[zero] \n\t" + "paddsh %[b_vec0], %[y], %[bb] \n\t" + "pmullh %[b_vec1], %[u], %[ub] \n\t" + "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" + "psrah %[b_vec0], %[b_vec0], %[six] \n\t" - // v3|v2|v1|v0 --> v1|v1|v0|v0 - "punpcklbh %[v], %[v], %[v] \n\t" - "punpcklbh %[v], %[v], %[zero] \n\t" - "paddsh %[g_vec0], %[y], %[bg] \n\t" - "pmullh %[g_vec1], %[u], %[ug] \n\t" - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "pmullh %[g_vec1], %[v], %[vg] \n\t" - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "psrah %[g_vec0], %[g_vec0], %[six] \n\t" + //v3|v2|v1|v0 --> v1|v1|v0|v0 + "punpcklbh %[v], %[v], %[v] \n\t" + "punpcklbh %[v], %[v], %[zero] \n\t" + "paddsh %[g_vec0], %[y], %[bg] \n\t" + "pmullh %[g_vec1], %[u], %[ug] \n\t" + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "pmullh %[g_vec1], %[v], %[vg] \n\t" + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "psrah %[g_vec0], %[g_vec0], %[six] \n\t" - "paddsh %[r_vec0], %[y], %[br] \n\t" - "pmullh %[r_vec1], %[v], %[vr] \n\t" - "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" - "psrah %[r_vec0], %[r_vec0], %[six] \n\t" + "paddsh %[r_vec0], %[y], %[br] \n\t" + "pmullh %[r_vec1], %[v], %[vr] \n\t" + "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" + "psrah %[r_vec0], %[r_vec0], %[six] \n\t" - "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t" // rrrrbbbb - "packushb %[g_vec0], %[g_vec0], %[a] \n\t" - "punpcklwd %[g_vec0], %[g_vec0], %[a] \n\t" // aaaagggg - "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" - "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" - "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" - "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" - "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" + "packushb %[r_vec0], %[b_vec0], %[r_vec0] 
\n\t"//rrrrbbbb + "packushb %[g_vec0], %[g_vec0], %[a] \n\t" + "punpcklwd %[g_vec0], %[g_vec0], %[a] \n\t"//aaaagggg + "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" + "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" + "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" + "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" + "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec1], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[a_ptr], %[a_ptr], 0x04 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[a_ptr], %[a_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [a] "=&f"(a), - [b_vec0] "=&f"(b_vec[0]), [b_vec1] "=&f"(b_vec[1]), - [g_vec0] "=&f"(g_vec[0]), [g_vec1] "=&f"(g_vec[1]), - [r_vec0] "=&f"(r_vec[0]), [r_vec1] "=&f"(r_vec[1]), [ub] "=&f"(ub), - [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), [bb] "=&f"(bb), - [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [rgbbuf_ptr] "r"(rgb_buf), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [a_ptr] "r"(src_a), [zero] "f"(0x00), - [six] "f"(0x6), [five] "f"(0x55), [mask] "f"(mask) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), [a]"=&f"(a), + [b_vec0]"=&f"(b_vec[0]), [b_vec1]"=&f"(b_vec[1]), + [g_vec0]"=&f"(g_vec[0]), [g_vec1]"=&f"(g_vec[1]), + [r_vec0]"=&f"(r_vec[0]), [r_vec1]"=&f"(r_vec[1]), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + 
[br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [a_ptr]"r"(src_a), [zero]"f"(0x00), + [six]"f"(0x6), [five]"f"(0x55), + [mask]"f"(mask) + : "memory" + ); } void I422ToRGB24Row_MMI(const uint8_t* src_y, @@ -6429,105 +6523,113 @@ void I422ToRGB24Row_MMI(const uint8_t* src_y, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) { - uint64_t y, u, v; - uint64_t b_vec[2], g_vec[2], r_vec[2]; + uint64_t y,u,v; + uint64_t b_vec[2],g_vec[2],r_vec[2]; uint64_t mask = 0xff00ff00ff00ff00ULL; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], 
%[mask] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" // y*0x0101 - "pmulhuh %[y], %[y], %[yg] \n\t" // y1 + "punpcklbh %[y], %[y], %[y] \n\t"//y*0x0101 + "pmulhuh %[y], %[y], %[yg] \n\t"//y1 - // u3|u2|u1|u0 --> u1|u1|u0|u0 - "punpcklbh %[u], %[u], %[u] \n\t" // u - "punpcklbh %[u], %[u], %[zero] \n\t" - "paddsh %[b_vec0], %[y], %[bb] \n\t" - "pmullh %[b_vec1], %[u], %[ub] \n\t" - "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" - "psrah %[b_vec0], %[b_vec0], %[six] \n\t" + //u3|u2|u1|u0 --> u1|u1|u0|u0 + "punpcklbh %[u], %[u], %[u] \n\t"//u + "punpcklbh %[u], %[u], %[zero] \n\t" + "paddsh %[b_vec0], %[y], %[bb] \n\t" + "pmullh %[b_vec1], %[u], %[ub] \n\t" + "psubsh %[b_vec0], %[b_vec0], %[b_vec1] \n\t" + "psrah %[b_vec0], %[b_vec0], %[six] \n\t" - // v3|v2|v1|v0 --> v1|v1|v0|v0 - "punpcklbh %[v], %[v], %[v] \n\t" - "punpcklbh %[v], %[v], %[zero] \n\t" - "paddsh %[g_vec0], %[y], %[bg] \n\t" - "pmullh %[g_vec1], %[u], %[ug] \n\t" - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "pmullh %[g_vec1], %[v], %[vg] \n\t" - "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" - "psrah %[g_vec0], %[g_vec0], %[six] \n\t" + //v3|v2|v1|v0 --> v1|v1|v0|v0 + "punpcklbh %[v], %[v], %[v] \n\t" + "punpcklbh %[v], %[v], %[zero] \n\t" + "paddsh %[g_vec0], %[y], %[bg] \n\t" + "pmullh %[g_vec1], %[u], %[ug] \n\t" + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "pmullh %[g_vec1], %[v], %[vg] \n\t" + "psubsh %[g_vec0], %[g_vec0], %[g_vec1] \n\t" + "psrah %[g_vec0], %[g_vec0], %[six] \n\t" - "paddsh %[r_vec0], %[y], %[br] \n\t" 
- "pmullh %[r_vec1], %[v], %[vr] \n\t" - "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" - "psrah %[r_vec0], %[r_vec0], %[six] \n\t" + "paddsh %[r_vec0], %[y], %[br] \n\t" + "pmullh %[r_vec1], %[v], %[vr] \n\t" + "psubsh %[r_vec0], %[r_vec0], %[r_vec1] \n\t" + "psrah %[r_vec0], %[r_vec0], %[six] \n\t" - "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t" - "packushb %[g_vec0], %[g_vec0], %[zero] \n\t" - "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" - "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" - "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" - "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" + "packushb %[r_vec0], %[b_vec0], %[r_vec0] \n\t" + "packushb %[g_vec0], %[g_vec0], %[zero] \n\t" + "punpcklbh %[b_vec0], %[r_vec0], %[g_vec0] \n\t" + "punpckhbh %[r_vec0], %[r_vec0], %[g_vec0] \n\t" + "punpcklhw %[g_vec0], %[b_vec0], %[r_vec0] \n\t" + "punpckhhw %[g_vec1], %[b_vec0], %[r_vec0] \n\t" - "punpckhwd %[r_vec0], %[g_vec0], %[g_vec0] \n\t" - "psllw %[r_vec1], %[r_vec0], %[lmove1] \n\t" - "or %[g_vec0], %[g_vec0], %[r_vec1] \n\t" - "psrlw %[r_vec1], %[r_vec0], %[rmove1] \n\t" - "pextrh %[r_vec1], %[r_vec1], %[zero] \n\t" - "pinsrh_2 %[g_vec0], %[g_vec0], %[r_vec1] \n\t" - "pextrh %[r_vec1], %[g_vec1], %[zero] \n\t" - "pinsrh_3 %[g_vec0], %[g_vec0], %[r_vec1] \n\t" - "pextrh %[r_vec1], %[g_vec1], %[one] \n\t" - "punpckhwd %[g_vec1], %[g_vec1], %[g_vec1] \n\t" - "psllw %[g_vec1], %[g_vec1], %[rmove1] \n\t" - "or %[g_vec1], %[g_vec1], %[r_vec1] \n\t" - "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" - "gsswlc1 %[g_vec1], 0x0b(%[rgbbuf_ptr]) \n\t" - "gsswrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" + "punpckhwd %[r_vec0], %[g_vec0], %[g_vec0] \n\t" + "psllw %[r_vec1], %[r_vec0], %[lmove1] \n\t" + "or %[g_vec0], %[g_vec0], %[r_vec1] \n\t" + "psrlw %[r_vec1], %[r_vec0], %[rmove1] \n\t" + "pextrh %[r_vec1], %[r_vec1], %[zero] \n\t" + "pinsrh_2 %[g_vec0], %[g_vec0], %[r_vec1] \n\t" + "pextrh %[r_vec1], %[g_vec1], %[zero] \n\t" + 
"pinsrh_3 %[g_vec0], %[g_vec0], %[r_vec1] \n\t" + "pextrh %[r_vec1], %[g_vec1], %[one] \n\t" + "punpckhwd %[g_vec1], %[g_vec1], %[g_vec1] \n\t" + "psllw %[g_vec1], %[g_vec1], %[rmove1] \n\t" + "or %[g_vec1], %[g_vec1], %[r_vec1] \n\t" + "gssdlc1 %[g_vec0], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec0], 0x00(%[rgbbuf_ptr]) \n\t" + "gsswlc1 %[g_vec1], 0x0b(%[rgbbuf_ptr]) \n\t" + "gsswrc1 %[g_vec1], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x0c \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec0] "=&f"(b_vec[0]), - [b_vec1] "=&f"(b_vec[1]), [g_vec0] "=&f"(g_vec[0]), - [g_vec1] "=&f"(g_vec[1]), [r_vec0] "=&f"(r_vec[0]), - [r_vec1] "=&f"(r_vec[1]), [ub] "=&f"(ub), [ug] "=&f"(ug), - [vg] "=&f"(vg), [vr] "=&f"(vr), [bb] "=&f"(bb), [bg] "=&f"(bg), - [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [rgbbuf_ptr] "r"(rgb_buf), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [five] "f"(0x55), [six] "f"(0x6), - [mask] "f"(mask), [lmove1] "f"(0x18), [rmove1] "f"(0x8), [one] "f"(0x1) - : "memory"); + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x0c \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" + + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec0]"=&f"(b_vec[0]), [b_vec1]"=&f"(b_vec[1]), + [g_vec0]"=&f"(g_vec[0]), [g_vec1]"=&f"(g_vec[1]), + [r_vec0]"=&f"(r_vec[0]), [r_vec1]"=&f"(r_vec[1]), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), 
[width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask]"f"(mask), + [lmove1]"f"(0x18), [rmove1]"f"(0x8), + [one]"f"(0x1) + : "memory" + ); } void I422ToARGB4444Row_MMI(const uint8_t* src_y, @@ -6538,103 +6640,110 @@ void I422ToARGB4444Row_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gslwlc1 
%[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" // y*0x0101 - "pmulhuh %[y], %[y], %[yg] \n\t" // y1 + "punpcklbh %[y], %[y], %[y] \n\t"//y*0x0101 + "pmulhuh %[y], %[y], %[yg] \n\t"//y1 - // u3|u2|u1|u0 --> u1|u1|u0|u0 - "punpcklbh %[u], %[u], %[u] \n\t" // u - "punpcklbh %[u], %[u], %[zero] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + //u3|u2|u1|u0 --> u1|u1|u0|u0 + "punpcklbh %[u], %[u], %[u] \n\t"//u + "punpcklbh %[u], %[u], %[zero] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - // v3|v2|v1|v0 --> v1|v1|v0|v0 - "punpcklbh %[v], %[v], %[v] \n\t" - "punpcklbh %[v], %[v], %[zero] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + //v3|v2|v1|v0 --> v1|v1|v0|v0 + "punpcklbh %[v], %[v], %[v] \n\t" + "punpcklbh %[v], %[v], %[zero] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] 
\n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "and %[g_vec], %[g_vec], %[mask1] \n\t" - "psrlw %[g_vec], %[g_vec], %[four] \n\t" - "psrlw %[r_vec], %[g_vec], %[four] \n\t" - "or %[g_vec], %[g_vec], %[r_vec] \n\t" - "punpcklbh %[r_vec], %[alpha], %[zero] \n\t" - "and %[g_vec], %[g_vec], %[r_vec] \n\t" + "and %[g_vec], %[g_vec], %[mask1] \n\t" + "psrlw %[g_vec], %[g_vec], %[four] \n\t" + "psrlw %[r_vec], %[g_vec], %[four] \n\t" + "or %[g_vec], %[g_vec], %[r_vec] \n\t" + "punpcklbh %[r_vec], %[alpha], %[zero] \n\t" + "and %[g_vec], %[g_vec], %[r_vec] \n\t" - "and %[b_vec], %[b_vec], %[mask1] \n\t" - "psrlw %[b_vec], %[b_vec], %[four] \n\t" - "psrlw %[r_vec], %[b_vec], %[four] \n\t" - "or %[b_vec], %[b_vec], %[r_vec] \n\t" - "punpcklbh %[r_vec], %[alpha], %[zero] \n\t" - "and %[b_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[b_vec] \n\t" + "and %[b_vec], %[b_vec], %[mask1] \n\t" + "psrlw %[b_vec], %[b_vec], %[four] \n\t" + "psrlw %[r_vec], %[b_vec], %[four] \n\t" + "or %[b_vec], %[b_vec], %[r_vec] \n\t" + "punpcklbh %[r_vec], %[alpha], %[zero] \n\t" + "and %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[b_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[dst_argb4444]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[dst_argb4444]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[dst_argb4444]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[dst_argb4444]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu 
%[u_ptr], %[u_ptr], 0x02 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" - "daddiu %[dst_argb4444], %[dst_argb4444], 0x08 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" + "daddiu %[dst_argb4444], %[dst_argb4444], 0x08 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [dst_argb4444] "r"(dst_argb4444), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [five] "f"(0x55), [six] "f"(0x6), - [mask] "f"(0xff00ff00ff00ff00), [four] "f"(0x4), - [mask1] "f"(0xf0f0f0f0f0f0f0f0), [alpha] "f"(-1) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [dst_argb4444]"r"(dst_argb4444), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask]"f"(0xff00ff00ff00ff00), + [four]"f"(0x4), [mask1]"f"(0xf0f0f0f0f0f0f0f0), + [alpha]"f"(-1) + : "memory" + ); } void I422ToARGB1555Row_MMI(const uint8_t* src_y, @@ -6645,118 +6754,125 @@ void I422ToARGB1555Row_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 
0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - // u3|u2|u1|u0 --> u1|u1|u0|u0 - "punpcklbh %[u], %[u], %[u] \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - 
"psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + //u3|u2|u1|u0 --> u1|u1|u0|u0 + "punpcklbh %[u], %[u], %[u] \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - // v3|v2|v1|v0 --> v1|v1|v0|v0 - "punpcklbh %[v], %[v], %[v] \n\t" - "punpcklbh %[v], %[v], %[zero] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + //v3|v2|v1|v0 --> v1|v1|v0|v0 + "punpcklbh %[v], %[v], %[v] \n\t" + "punpcklbh %[v], %[v], %[zero] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "psrlw %[temp], %[g_vec], %[three] \n\t" - "and 
%[g_vec], %[temp], %[mask2] \n\t" - "psrlw %[temp], %[temp], %[eight] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "or %[g_vec], %[g_vec], %[r_vec] \n\t" - "psrlw %[temp], %[temp], %[eight] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "or %[g_vec], %[g_vec], %[r_vec] \n\t" - "or %[g_vec], %[g_vec], %[mask3] \n\t" + "psrlw %[temp], %[g_vec], %[three] \n\t" + "and %[g_vec], %[temp], %[mask2] \n\t" + "psrlw %[temp], %[temp], %[eight] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "or %[g_vec], %[g_vec], %[r_vec] \n\t" + "psrlw %[temp], %[temp], %[eight] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "or %[g_vec], %[g_vec], %[r_vec] \n\t" + "or %[g_vec], %[g_vec], %[mask3] \n\t" - "psrlw %[temp], %[b_vec], %[three] \n\t" - "and %[b_vec], %[temp], %[mask2] \n\t" - "psrlw %[temp], %[temp], %[eight] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "or %[b_vec], %[b_vec], %[r_vec] \n\t" - "psrlw %[temp], %[temp], %[eight] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "or %[b_vec], %[b_vec], %[r_vec] \n\t" - "or %[b_vec], %[b_vec], %[mask3] \n\t" + "psrlw %[temp], %[b_vec], %[three] \n\t" + "and %[b_vec], %[temp], %[mask2] \n\t" + "psrlw %[temp], %[temp], %[eight] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "or %[b_vec], %[b_vec], %[r_vec] \n\t" + "psrlw %[temp], %[temp], %[eight] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "or %[b_vec], %[b_vec], %[r_vec] \n\t" + "or %[b_vec], %[b_vec], %[mask3] \n\t" - "punpcklhw 
%[r_vec], %[g_vec], %[b_vec] \n\t" - "punpckhhw %[b_vec], %[g_vec], %[b_vec] \n\t" - "punpcklhw %[g_vec], %[r_vec], %[b_vec] \n\t" + "punpcklhw %[r_vec], %[g_vec], %[b_vec] \n\t" + "punpckhhw %[b_vec], %[g_vec], %[b_vec] \n\t" + "punpcklhw %[g_vec], %[r_vec], %[b_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[dst_argb1555]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[dst_argb1555]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[dst_argb1555]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[dst_argb1555]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" - "daddiu %[dst_argb1555], %[dst_argb1555], 0x08 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" + "daddiu %[dst_argb1555], %[dst_argb1555], 0x08 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [dst_argb1555] "r"(dst_argb1555), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [five] "f"(0x55), [six] "f"(0x6), - [mask1] "f"(0xff00ff00ff00ff00), [three] "f"(0x3), - [mask2] "f"(0x1f0000001f), [eight] "f"(0x8), - [mask3] "f"(0x800000008000), [lmove5] "f"(0x5) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [dst_argb1555]"r"(dst_argb1555), + [yuvcons_ptr]"r"(yuvconstants), 
[width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [three]"f"(0x3), [mask2]"f"(0x1f0000001f), + [eight]"f"(0x8), [mask3]"f"(0x800000008000), + [lmove5]"f"(0x5) + : "memory" + ); } void I422ToRGB565Row_MMI(const uint8_t* src_y, @@ -6767,120 +6883,127 @@ void I422ToRGB565Row_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 
0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - // u3|u2|u1|u0 --> u1|u1|u0|u0 - "punpcklbh %[u], %[u], %[u] \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + //u3|u2|u1|u0 --> u1|u1|u0|u0 + "punpcklbh %[u], %[u], %[u] \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - // v3|v2|v1|v0 --> v1|v1|v0|v0 - "punpcklbh %[v], %[v], %[v] \n\t" - "punpcklbh %[v], %[v], %[zero] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + //v3|v2|v1|v0 --> v1|v1|v0|v0 + "punpcklbh %[v], %[v], %[v] \n\t" + "punpcklbh %[v], %[v], %[zero] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" 
- "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "psrlh %[temp], %[g_vec], %[three] \n\t" - "and %[g_vec], %[temp], %[mask2] \n\t" - "psrlw %[temp], %[temp], %[seven] \n\t" - "psrlw %[r_vec], %[mask1], %[eight] \n\t" - "and %[r_vec], %[temp], %[r_vec] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "or %[g_vec], %[g_vec], %[r_vec] \n\t" - "paddb %[r_vec], %[three], %[six] \n\t" - "psrlw %[temp], %[temp], %[r_vec] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "paddb %[temp], %[three], %[eight] \n\t" - "psllw %[r_vec], %[r_vec], %[temp] \n\t" - "or %[g_vec], %[g_vec], %[r_vec] \n\t" + "psrlh %[temp], %[g_vec], %[three] \n\t" + "and %[g_vec], %[temp], %[mask2] \n\t" + "psrlw %[temp], %[temp], %[seven] \n\t" + "psrlw %[r_vec], %[mask1], %[eight] \n\t" + "and %[r_vec], %[temp], %[r_vec] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "or %[g_vec], %[g_vec], %[r_vec] \n\t" + "paddb %[r_vec], %[three], %[six] \n\t" + "psrlw %[temp], %[temp], %[r_vec] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "paddb %[temp], %[three], %[eight] \n\t" + "psllw %[r_vec], %[r_vec], %[temp] \n\t" + "or %[g_vec], %[g_vec], %[r_vec] \n\t" - "psrlh %[temp], %[b_vec], %[three] \n\t" - "and %[b_vec], %[temp], %[mask2] \n\t" - "psrlw %[temp], %[temp], %[seven] \n\t" - "psrlw %[r_vec], %[mask1], %[eight] \n\t" - "and %[r_vec], %[temp], %[r_vec] \n\t" - "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" - "or %[b_vec], %[b_vec], %[r_vec] \n\t" - "paddb %[r_vec], %[three], 
%[six] \n\t" - "psrlw %[temp], %[temp], %[r_vec] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "paddb %[temp], %[three], %[eight] \n\t" - "psllw %[r_vec], %[r_vec], %[temp] \n\t" - "or %[b_vec], %[b_vec], %[r_vec] \n\t" + "psrlh %[temp], %[b_vec], %[three] \n\t" + "and %[b_vec], %[temp], %[mask2] \n\t" + "psrlw %[temp], %[temp], %[seven] \n\t" + "psrlw %[r_vec], %[mask1], %[eight] \n\t" + "and %[r_vec], %[temp], %[r_vec] \n\t" + "psllw %[r_vec], %[r_vec], %[lmove5] \n\t" + "or %[b_vec], %[b_vec], %[r_vec] \n\t" + "paddb %[r_vec], %[three], %[six] \n\t" + "psrlw %[temp], %[temp], %[r_vec] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "paddb %[temp], %[three], %[eight] \n\t" + "psllw %[r_vec], %[r_vec], %[temp] \n\t" + "or %[b_vec], %[b_vec], %[r_vec] \n\t" - "punpcklhw %[r_vec], %[g_vec], %[b_vec] \n\t" - "punpckhhw %[b_vec], %[g_vec], %[b_vec] \n\t" - "punpcklhw %[g_vec], %[r_vec], %[b_vec] \n\t" + "punpcklhw %[r_vec], %[g_vec], %[b_vec] \n\t" + "punpckhhw %[b_vec], %[g_vec], %[b_vec] \n\t" + "punpcklhw %[g_vec], %[r_vec], %[b_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[dst_rgb565]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[dst_rgb565]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[dst_rgb565]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[dst_rgb565]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" - "daddiu %[dst_rgb565], %[dst_rgb565], 0x08 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" + "daddiu %[dst_rgb565], %[dst_rgb565], 0x08 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - 
: [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [dst_rgb565] "r"(dst_rgb565), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [five] "f"(0x55), [six] "f"(0x6), - [mask1] "f"(0xff00ff00ff00ff00), [three] "f"(0x3), - [mask2] "f"(0x1f0000001f), [eight] "f"(0x8), [seven] "f"(0x7), - [lmove5] "f"(0x5) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [dst_rgb565]"r"(dst_rgb565), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [three]"f"(0x3), [mask2]"f"(0x1f0000001f), + [eight]"f"(0x8), [seven]"f"(0x7), + [lmove5]"f"(0x5) + : "memory" + ); } void NV12ToARGBRow_MMI(const uint8_t* src_y, @@ -6890,83 +7013,91 @@ void NV12ToARGBRow_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 
0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[uv_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[uv_ptr]) \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "pshufh %[v], %[u], %[vshu] \n\t" - "pshufh %[u], %[u], %[ushu] \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[uv_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[uv_ptr]) \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "pshufh %[v], %[u], %[vshu] \n\t" + "pshufh %[u], %[u], %[ushu] \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] 
\n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[uv_ptr], %[uv_ptr], 0x04 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[uv_ptr], %[uv_ptr], 0x04 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] 
"=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [uv_ptr] "r"(src_uv), [rgbbuf_ptr] "r"(rgb_buf), - [yuvcons_ptr] "r"(yuvconstants), [width] "r"(width), [zero] "f"(0x00), - [five] "f"(0x55), [six] "f"(0x6), [mask1] "f"(0xff00ff00ff00ff00), - [ushu] "f"(0xA0), [vshu] "f"(0xf5), [alpha] "f"(-1) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [uv_ptr]"r"(src_uv), + [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [ushu]"f"(0xA0), [vshu]"f"(0xf5), + [alpha]"f"(-1) + : "memory" + ); } void NV21ToARGBRow_MMI(const uint8_t* src_y, @@ -6976,83 +7107,91 @@ void NV21ToARGBRow_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" 
+ "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[vu_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[vu_ptr]) \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "pshufh %[v], %[u], %[ushu] \n\t" - "pshufh %[u], %[u], %[vshu] \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[vu_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[vu_ptr]) \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "pshufh %[v], %[u], %[ushu] \n\t" + "pshufh %[u], %[u], %[vshu] \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh 
%[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[vu_ptr], %[vu_ptr], 0x04 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[vu_ptr], %[vu_ptr], 0x04 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] 
"=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [vu_ptr] "r"(src_vu), [rgbbuf_ptr] "r"(rgb_buf), - [yuvcons_ptr] "r"(yuvconstants), [width] "r"(width), [zero] "f"(0x00), - [five] "f"(0x55), [six] "f"(0x6), [mask1] "f"(0xff00ff00ff00ff00), - [ushu] "f"(0xA0), [vshu] "f"(0xf5), [alpha] "f"(-1) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [vu_ptr]"r"(src_vu), + [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [ushu]"f"(0xA0), [vshu]"f"(0xf5), + [alpha]"f"(-1) + : "memory" + ); } void NV12ToRGB24Row_MMI(const uint8_t* src_y, @@ -7062,95 +7201,103 @@ void NV12ToRGB24Row_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 
0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[uv_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[uv_ptr]) \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "pshufh %[v], %[u], %[vshu] \n\t" - "pshufh %[u], %[u], %[ushu] \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[uv_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[uv_ptr]) \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "pshufh %[v], %[u], %[vshu] \n\t" + "pshufh %[u], %[u], %[ushu] \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] 
\n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "punpckhwd %[r_vec], %[g_vec], %[g_vec] \n\t" - "psllw %[temp], %[r_vec], %[lmove1] \n\t" - "or %[g_vec], %[g_vec], %[temp] \n\t" - "psrlw %[temp], %[r_vec], %[rmove1] \n\t" - "pextrh %[temp], %[temp], %[zero] \n\t" - "pinsrh_2 %[g_vec], %[g_vec], %[temp] \n\t" - "pextrh %[temp], %[b_vec], %[zero] \n\t" - "pinsrh_3 %[g_vec], %[g_vec], %[temp] \n\t" - "pextrh %[temp], %[b_vec], %[one] \n\t" - "punpckhwd %[b_vec], %[b_vec], %[b_vec] \n\t" - "psllw %[b_vec], %[b_vec], %[rmove1] \n\t" - "or %[b_vec], %[b_vec], %[temp] \n\t" - "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" - "gsswlc1 %[b_vec], 0x0b(%[rgbbuf_ptr]) \n\t" - "gsswrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" + "punpckhwd %[r_vec], %[g_vec], %[g_vec] \n\t" + "psllw %[temp], %[r_vec], %[lmove1] \n\t" + "or %[g_vec], %[g_vec], %[temp] \n\t" + "psrlw %[temp], %[r_vec], %[rmove1] \n\t" + "pextrh %[temp], %[temp], %[zero] \n\t" + "pinsrh_2 %[g_vec], %[g_vec], %[temp] \n\t" + "pextrh %[temp], %[b_vec], %[zero] \n\t" + "pinsrh_3 %[g_vec], %[g_vec], %[temp] \n\t" + "pextrh %[temp], %[b_vec], %[one] \n\t" + "punpckhwd %[b_vec], %[b_vec], %[b_vec] \n\t" + "psllw 
%[b_vec], %[b_vec], %[rmove1] \n\t" + "or %[b_vec], %[b_vec], %[temp] \n\t" + "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" + "gsswlc1 %[b_vec], 0x0b(%[rgbbuf_ptr]) \n\t" + "gsswrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[uv_ptr], %[uv_ptr], 0x04 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x0C \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[uv_ptr], %[uv_ptr], 0x04 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x0C \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [uv_ptr] "r"(src_uv), [rgbbuf_ptr] "r"(rgb_buf), - [yuvcons_ptr] "r"(yuvconstants), [width] "r"(width), [zero] "f"(0x00), - [five] "f"(0x55), [six] "f"(0x6), [mask1] "f"(0xff00ff00ff00ff00), - [ushu] "f"(0xA0), [vshu] "f"(0xf5), [alpha] "f"(-1), [lmove1] "f"(0x18), - [one] "f"(0x1), [rmove1] "f"(0x8) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [uv_ptr]"r"(src_uv), + [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [ushu]"f"(0xA0), [vshu]"f"(0xf5), + [alpha]"f"(-1), [lmove1]"f"(0x18), + [one]"f"(0x1), [rmove1]"f"(0x8) + : "memory" + ); } void NV21ToRGB24Row_MMI(const uint8_t* src_y, @@ -7160,95 +7307,103 @@ void NV21ToRGB24Row_MMI(const uint8_t* src_y, int width) 
{ uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[vu_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[vu_ptr]) \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "pshufh %[v], %[u], %[ushu] \n\t" - "pshufh %[u], %[u], %[vshu] \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[vu_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[vu_ptr]) \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "pshufh %[v], %[u], %[ushu] \n\t" + "pshufh %[u], %[u], %[vshu] \n\t" - 
"punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "punpckhwd %[r_vec], %[g_vec], %[g_vec] \n\t" - "psllw %[temp], %[r_vec], %[lmove1] \n\t" - "or %[g_vec], %[g_vec], %[temp] \n\t" - "psrlw %[temp], %[r_vec], 
%[rmove1] \n\t" - "pextrh %[temp], %[temp], %[zero] \n\t" - "pinsrh_2 %[g_vec], %[g_vec], %[temp] \n\t" - "pextrh %[temp], %[b_vec], %[zero] \n\t" - "pinsrh_3 %[g_vec], %[g_vec], %[temp] \n\t" - "pextrh %[temp], %[b_vec], %[one] \n\t" - "punpckhwd %[b_vec], %[b_vec], %[b_vec] \n\t" - "psllw %[b_vec], %[b_vec], %[rmove1] \n\t" - "or %[b_vec], %[b_vec], %[temp] \n\t" - "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" - "gsswlc1 %[b_vec], 0x0b(%[rgbbuf_ptr]) \n\t" - "gsswrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" + "punpckhwd %[r_vec], %[g_vec], %[g_vec] \n\t" + "psllw %[temp], %[r_vec], %[lmove1] \n\t" + "or %[g_vec], %[g_vec], %[temp] \n\t" + "psrlw %[temp], %[r_vec], %[rmove1] \n\t" + "pextrh %[temp], %[temp], %[zero] \n\t" + "pinsrh_2 %[g_vec], %[g_vec], %[temp] \n\t" + "pextrh %[temp], %[b_vec], %[zero] \n\t" + "pinsrh_3 %[g_vec], %[g_vec], %[temp] \n\t" + "pextrh %[temp], %[b_vec], %[one] \n\t" + "punpckhwd %[b_vec], %[b_vec], %[b_vec] \n\t" + "psllw %[b_vec], %[b_vec], %[rmove1] \n\t" + "or %[b_vec], %[b_vec], %[temp] \n\t" + "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" + "gsswlc1 %[b_vec], 0x0b(%[rgbbuf_ptr]) \n\t" + "gsswrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[vu_ptr], %[vu_ptr], 0x04 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x0C \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[vu_ptr], %[vu_ptr], 0x04 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x0C \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [vu_ptr] "r"(src_vu), 
[rgbbuf_ptr] "r"(rgb_buf), - [yuvcons_ptr] "r"(yuvconstants), [width] "r"(width), [zero] "f"(0x00), - [five] "f"(0x55), [six] "f"(0x6), [mask1] "f"(0xff00ff00ff00ff00), - [ushu] "f"(0xA0), [vshu] "f"(0xf5), [lmove1] "f"(0x18), - [rmove1] "f"(0x8), [one] "f"(0x1) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [vu_ptr]"r"(src_vu), + [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [ushu]"f"(0xA0), [vshu]"f"(0xf5), + [lmove1]"f"(0x18), [rmove1]"f"(0x8), + [one]"f"(0x1) + : "memory" + ); } void NV12ToRGB565Row_MMI(const uint8_t* src_y, @@ -7258,115 +7413,123 @@ void NV12ToRGB565Row_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 
%[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[uv_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[uv_ptr]) \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "pshufh %[v], %[u], %[vshu] \n\t" - "pshufh %[u], %[u], %[ushu] \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[uv_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[uv_ptr]) \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "pshufh %[v], %[u], %[vshu] \n\t" + "pshufh %[u], %[u], %[ushu] \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], 
%[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "psrlh %[temp], %[g_vec], %[three] \n\t" - "and %[g_vec], %[temp], %[mask2] \n\t" - "psrlw %[temp], %[temp], %[seven] \n\t" - "psrlw %[r_vec], %[mask1], %[eight] \n\t" - "and %[r_vec], %[temp], %[r_vec] \n\t" - "psubb %[y], %[eight], %[three] \n\t" // 5 - "psllw %[r_vec], %[r_vec], %[y] \n\t" - "or %[g_vec], %[g_vec], %[r_vec] \n\t" - "paddb %[r_vec], %[three], %[six] \n\t" - "psrlw %[temp], %[temp], %[r_vec] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "paddb %[temp], %[three], %[eight] \n\t" - "psllw %[r_vec], %[r_vec], %[temp] \n\t" - "or %[g_vec], %[g_vec], %[r_vec] \n\t" + "psrlh %[temp], %[g_vec], %[three] \n\t" + "and %[g_vec], %[temp], %[mask2] \n\t" + "psrlw %[temp], %[temp], %[seven] \n\t" + "psrlw %[r_vec], %[mask1], %[eight] \n\t" + "and %[r_vec], %[temp], %[r_vec] \n\t" + "psubb %[y], %[eight], %[three] \n\t"//5 + "psllw %[r_vec], %[r_vec], %[y] \n\t" + "or %[g_vec], %[g_vec], %[r_vec] \n\t" + "paddb %[r_vec], %[three], %[six] \n\t" + "psrlw %[temp], %[temp], %[r_vec] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "paddb %[temp], %[three], %[eight] \n\t" + "psllw %[r_vec], %[r_vec], %[temp] \n\t" + "or %[g_vec], %[g_vec], %[r_vec] 
\n\t" - "psrlh %[temp], %[b_vec], %[three] \n\t" - "and %[b_vec], %[temp], %[mask2] \n\t" - "psrlw %[temp], %[temp], %[seven] \n\t" - "psrlw %[r_vec], %[mask1], %[eight] \n\t" - "and %[r_vec], %[temp], %[r_vec] \n\t" - "psubb %[y], %[eight], %[three] \n\t" // 5 - "psllw %[r_vec], %[r_vec], %[y] \n\t" - "or %[b_vec], %[b_vec], %[r_vec] \n\t" - "paddb %[r_vec], %[three], %[six] \n\t" - "psrlw %[temp], %[temp], %[r_vec] \n\t" - "and %[r_vec], %[temp], %[mask2] \n\t" - "paddb %[temp], %[three], %[eight] \n\t" - "psllw %[r_vec], %[r_vec], %[temp] \n\t" - "or %[b_vec], %[b_vec], %[r_vec] \n\t" + "psrlh %[temp], %[b_vec], %[three] \n\t" + "and %[b_vec], %[temp], %[mask2] \n\t" + "psrlw %[temp], %[temp], %[seven] \n\t" + "psrlw %[r_vec], %[mask1], %[eight] \n\t" + "and %[r_vec], %[temp], %[r_vec] \n\t" + "psubb %[y], %[eight], %[three] \n\t"//5 + "psllw %[r_vec], %[r_vec], %[y] \n\t" + "or %[b_vec], %[b_vec], %[r_vec] \n\t" + "paddb %[r_vec], %[three], %[six] \n\t" + "psrlw %[temp], %[temp], %[r_vec] \n\t" + "and %[r_vec], %[temp], %[mask2] \n\t" + "paddb %[temp], %[three], %[eight] \n\t" + "psllw %[r_vec], %[r_vec], %[temp] \n\t" + "or %[b_vec], %[b_vec], %[r_vec] \n\t" - "punpcklhw %[r_vec], %[g_vec], %[b_vec] \n\t" - "punpckhhw %[b_vec], %[g_vec], %[b_vec] \n\t" - "punpcklhw %[g_vec], %[r_vec], %[b_vec] \n\t" + "punpcklhw %[r_vec], %[g_vec], %[b_vec] \n\t" + "punpckhhw %[b_vec], %[g_vec], %[b_vec] \n\t" + "punpcklhw %[g_vec], %[r_vec], %[b_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[dst_rgb565]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[dst_rgb565]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[dst_rgb565]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[dst_rgb565]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[uv_ptr], %[uv_ptr], 0x04 \n\t" - "daddiu %[dst_rgb565], %[dst_rgb565], 0x08 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[uv_ptr], %[uv_ptr], 0x04 \n\t" + "daddiu %[dst_rgb565], %[dst_rgb565], 0x08 \n\t" + 
"daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [uv_ptr] "r"(src_uv), [dst_rgb565] "r"(dst_rgb565), - [yuvcons_ptr] "r"(yuvconstants), [width] "r"(width), [zero] "f"(0x00), - [five] "f"(0x55), [six] "f"(0x6), [mask1] "f"(0xff00ff00ff00ff00), - [ushu] "f"(0xA0), [vshu] "f"(0xf5), [three] "f"(0x3), - [mask2] "f"(0x1f0000001f), [eight] "f"(0x8), [seven] "f"(0x7) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [uv_ptr]"r"(src_uv), + [dst_rgb565]"r"(dst_rgb565), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [ushu]"f"(0xA0), [vshu]"f"(0xf5), + [three]"f"(0x3), [mask2]"f"(0x1f0000001f), + [eight]"f"(0x8), [seven]"f"(0x7) + : "memory" + ); } void YUY2ToARGBRow_MMI(const uint8_t* src_yuy2, @@ -7375,83 +7538,90 @@ void YUY2ToARGBRow_MMI(const uint8_t* src_yuy2, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], 
%[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gsldlc1 %[y], 0x07(%[yuy2_ptr]) \n\t" - "gsldrc1 %[y], 0x00(%[yuy2_ptr]) \n\t" - "psrlh %[temp], %[y], %[eight] \n\t" - "pshufh %[u], %[temp], %[ushu] \n\t" - "pshufh %[v], %[temp], %[vshu] \n\t" + "1: \n\t" + "gsldlc1 %[y], 0x07(%[yuy2_ptr]) \n\t" + "gsldrc1 %[y], 0x00(%[yuy2_ptr]) \n\t" + "psrlh %[temp], %[y], %[eight] \n\t" + "pshufh %[u], %[temp], %[ushu] \n\t" + "pshufh %[v], %[temp], %[vshu] \n\t" - "psrlh %[temp], %[mask1], %[eight] \n\t" - "and %[y], %[y], %[temp] \n\t" - "psllh %[temp], %[y], %[eight] \n\t" - "or %[y], %[y], %[temp] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "psrlh %[temp], %[mask1], %[eight] \n\t" + "and %[y], %[y], %[temp] \n\t" + "psllh %[temp], %[y], %[eight] \n\t" + "or %[y], %[y], %[temp] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "paddsh %[g_vec], %[y], 
%[bg] \n\t" - "pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[yuy2_ptr], %[yuy2_ptr], 0x08 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu 
%[yuy2_ptr], %[yuy2_ptr], 0x08 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [yuy2_ptr] "r"(src_yuy2), [rgbbuf_ptr] "r"(rgb_buf), - [yuvcons_ptr] "r"(yuvconstants), [width] "r"(width), [zero] "f"(0x00), - [five] "f"(0x55), [six] "f"(0x6), [mask1] "f"(0xff00ff00ff00ff00), - [ushu] "f"(0xA0), [vshu] "f"(0xf5), [alpha] "f"(-1), [eight] "f"(0x8) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [yuy2_ptr]"r"(src_yuy2), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [ushu]"f"(0xA0), [vshu]"f"(0xf5), + [alpha]"f"(-1), [eight]"f"(0x8) + : "memory" + ); } void UYVYToARGBRow_MMI(const uint8_t* src_uyvy, @@ -7460,83 +7630,90 @@ void UYVYToARGBRow_MMI(const uint8_t* src_uyvy, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 
0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gsldlc1 %[y], 0x07(%[uyvy_ptr]) \n\t" - "gsldrc1 %[y], 0x00(%[uyvy_ptr]) \n\t" - "psrlh %[temp], %[mask1], %[eight] \n\t" - "and %[temp], %[y], %[temp] \n\t" - "pshufh %[u], %[temp], %[ushu] \n\t" - "pshufh %[v], %[temp], %[vshu] \n\t" + "1: \n\t" + "gsldlc1 %[y], 0x07(%[uyvy_ptr]) \n\t" + "gsldrc1 %[y], 0x00(%[uyvy_ptr]) \n\t" + "psrlh %[temp], %[mask1], %[eight] \n\t" + "and %[temp], %[y], %[temp] \n\t" + "pshufh %[u], %[temp], %[ushu] \n\t" + "pshufh %[v], %[temp], %[vshu] \n\t" - "psrlh %[y], %[y], %[eight] \n\t" - "psllh %[temp], %[y], %[eight] \n\t" - "or %[y], %[y], %[temp] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "psrlh %[y], %[y], %[eight] \n\t" + "psllh %[temp], %[y], %[eight] \n\t" + "or %[y], %[y], %[temp] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - "pmullh %[temp], %[u], 
%[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" - "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" - "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklwd %[g_vec], %[g_vec], %[alpha] \n\t" + "punpcklbh %[b_vec], %[r_vec], %[g_vec] \n\t" + "punpckhbh %[r_vec], %[r_vec], %[g_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[uyvy_ptr], %[uyvy_ptr], 0x08 \n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[uyvy_ptr], %[uyvy_ptr], 0x08 \n\t" + 
"daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [uyvy_ptr] "r"(src_uyvy), [rgbbuf_ptr] "r"(rgb_buf), - [yuvcons_ptr] "r"(yuvconstants), [width] "r"(width), [zero] "f"(0x00), - [five] "f"(0x55), [six] "f"(0x6), [mask1] "f"(0xff00ff00ff00ff00), - [ushu] "f"(0xA0), [vshu] "f"(0xf5), [alpha] "f"(-1), [eight] "f"(0x8) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [uyvy_ptr]"r"(src_uyvy), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [ushu]"f"(0xA0), [vshu]"f"(0xf5), + [alpha]"f"(-1), [eight]"f"(0x8) + : "memory" + ); } void I422ToRGBARow_MMI(const uint8_t* src_y, @@ -7547,105 +7724,114 @@ void I422ToRGBARow_MMI(const uint8_t* src_y, int width) { uint64_t y, u, v; uint64_t b_vec, g_vec, r_vec, temp; - uint64_t ub, ug, vg, vr, bb, bg, br, yg; + uint64_t ub,ug,vg,vr,bb,bg,br,yg; __asm__ volatile( - "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" - "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" - "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" - "or %[ub], %[ub], %[mask1] \n\t" - "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" - "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[ug], %[ug], %[zero] \n\t" - "pshufh %[ug], %[ug], %[zero] \n\t" - "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vg], %[vg], %[zero] \n\t" - "pshufh %[vg], %[vg], %[five] \n\t" - "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" - "ldc1 %[vr], 
0x40(%[yuvcons_ptr]) \n\t" - "punpcklbh %[vr], %[vr], %[zero] \n\t" - "pshufh %[vr], %[vr], %[five] \n\t" - "or %[vr], %[vr], %[mask1] \n\t" + "ldc1 %[yg], 0xc0(%[yuvcons_ptr]) \n\t" + "ldc1 %[bb], 0x60(%[yuvcons_ptr]) \n\t" + "ldc1 %[ub], 0x00(%[yuvcons_ptr]) \n\t" + "or %[ub], %[ub], %[mask1] \n\t" + "ldc1 %[bg], 0x80(%[yuvcons_ptr]) \n\t" + "ldc1 %[ug], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[ug], %[ug], %[zero] \n\t" + "pshufh %[ug], %[ug], %[zero] \n\t" + "ldc1 %[vg], 0x20(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vg], %[vg], %[zero] \n\t" + "pshufh %[vg], %[vg], %[five] \n\t" + "ldc1 %[br], 0xa0(%[yuvcons_ptr]) \n\t" + "ldc1 %[vr], 0x40(%[yuvcons_ptr]) \n\t" + "punpcklbh %[vr], %[vr], %[zero] \n\t" + "pshufh %[vr], %[vr], %[five] \n\t" + "or %[vr], %[vr], %[mask1] \n\t" - "1: \n\t" - "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" - "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" - "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" - "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" - "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" - "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" + "1: \n\t" + "gslwlc1 %[y], 0x03(%[y_ptr]) \n\t" + "gslwrc1 %[y], 0x00(%[y_ptr]) \n\t" + "gslwlc1 %[u], 0x03(%[u_ptr]) \n\t" + "gslwrc1 %[u], 0x00(%[u_ptr]) \n\t" + "gslwlc1 %[v], 0x03(%[v_ptr]) \n\t" + "gslwrc1 %[v], 0x00(%[v_ptr]) \n\t" - "punpcklbh %[y], %[y], %[y] \n\t" - "pmulhuh %[y], %[y], %[yg] \n\t" + "punpcklbh %[y], %[y], %[y] \n\t" + "pmulhuh %[y], %[y], %[yg] \n\t" - "punpcklbh %[u], %[u], %[u] \n\t" - "punpcklbh %[u], %[u], %[zero] \n\t" - "paddsh %[b_vec], %[y], %[bb] \n\t" - "pmullh %[temp], %[u], %[ub] \n\t" - "psubsh %[b_vec], %[b_vec], %[temp] \n\t" - "psrah %[b_vec], %[b_vec], %[six] \n\t" + "punpcklbh %[u], %[u], %[u] \n\t" + "punpcklbh %[u], %[u], %[zero] \n\t" + "paddsh %[b_vec], %[y], %[bb] \n\t" + "pmullh %[temp], %[u], %[ub] \n\t" + "psubsh %[b_vec], %[b_vec], %[temp] \n\t" + "psrah %[b_vec], %[b_vec], %[six] \n\t" - "punpcklbh %[v], %[v], %[v] \n\t" - "punpcklbh %[v], %[v], %[zero] \n\t" - "paddsh %[g_vec], %[y], %[bg] \n\t" - 
"pmullh %[temp], %[u], %[ug] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "pmullh %[temp], %[v], %[vg] \n\t" - "psubsh %[g_vec], %[g_vec], %[temp] \n\t" - "psrah %[g_vec], %[g_vec], %[six] \n\t" + "punpcklbh %[v], %[v], %[v] \n\t" + "punpcklbh %[v], %[v], %[zero] \n\t" + "paddsh %[g_vec], %[y], %[bg] \n\t" + "pmullh %[temp], %[u], %[ug] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "pmullh %[temp], %[v], %[vg] \n\t" + "psubsh %[g_vec], %[g_vec], %[temp] \n\t" + "psrah %[g_vec], %[g_vec], %[six] \n\t" - "paddsh %[r_vec], %[y], %[br] \n\t" - "pmullh %[temp], %[v], %[vr] \n\t" - "psubsh %[r_vec], %[r_vec], %[temp] \n\t" - "psrah %[r_vec], %[r_vec], %[six] \n\t" + "paddsh %[r_vec], %[y], %[br] \n\t" + "pmullh %[temp], %[v], %[vr] \n\t" + "psubsh %[r_vec], %[r_vec], %[temp] \n\t" + "psrah %[r_vec], %[r_vec], %[six] \n\t" - "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" - "packushb %[g_vec], %[g_vec], %[zero] \n\t" - "punpcklwd %[g_vec], %[alpha], %[g_vec] \n\t" - "punpcklbh %[b_vec], %[g_vec], %[r_vec] \n\t" - "punpckhbh %[r_vec], %[g_vec], %[r_vec] \n\t" - "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" - "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[r_vec], %[b_vec], %[r_vec] \n\t" + "packushb %[g_vec], %[g_vec], %[zero] \n\t" + "punpcklwd %[g_vec], %[alpha], %[g_vec] \n\t" + "punpcklbh %[b_vec], %[g_vec], %[r_vec] \n\t" + "punpckhbh %[r_vec], %[g_vec], %[r_vec] \n\t" + "punpcklhw %[g_vec], %[b_vec], %[r_vec] \n\t" + "punpckhhw %[b_vec], %[b_vec], %[r_vec] \n\t" - "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" - "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" - "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[g_vec], 0x07(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[g_vec], 0x00(%[rgbbuf_ptr]) \n\t" + "gssdlc1 %[b_vec], 0x0f(%[rgbbuf_ptr]) \n\t" + "gssdrc1 %[b_vec], 0x08(%[rgbbuf_ptr]) \n\t" - "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" - "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" - "daddiu %[v_ptr], %[v_ptr], 0x02 
\n\t" - "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" - "daddi %[width], %[width], -0x04 \n\t" - "bnez %[width], 1b \n\t" + "daddiu %[y_ptr], %[y_ptr], 0x04 \n\t" + "daddiu %[u_ptr], %[u_ptr], 0x02 \n\t" + "daddiu %[v_ptr], %[v_ptr], 0x02 \n\t" + "daddiu %[rgbbuf_ptr], %[rgbbuf_ptr], 0x10 \n\t" + "daddi %[width], %[width], -0x04 \n\t" + "bnez %[width], 1b \n\t" - : [y] "=&f"(y), [u] "=&f"(u), [v] "=&f"(v), [b_vec] "=&f"(b_vec), - [g_vec] "=&f"(g_vec), [r_vec] "=&f"(r_vec), [temp] "=&f"(temp), - [ub] "=&f"(ub), [ug] "=&f"(ug), [vg] "=&f"(vg), [vr] "=&f"(vr), - [bb] "=&f"(bb), [bg] "=&f"(bg), [br] "=&f"(br), [yg] "=&f"(yg) - : [y_ptr] "r"(src_y), [u_ptr] "r"(src_u), [v_ptr] "r"(src_v), - [rgbbuf_ptr] "r"(rgb_buf), [yuvcons_ptr] "r"(yuvconstants), - [width] "r"(width), [zero] "f"(0x00), [five] "f"(0x55), [six] "f"(0x6), - [mask1] "f"(0xff00ff00ff00ff00), [alpha] "f"(-1) - : "memory"); + : [y]"=&f"(y), [u]"=&f"(u), + [v]"=&f"(v), + [b_vec]"=&f"(b_vec), [g_vec]"=&f"(g_vec), + [r_vec]"=&f"(r_vec), [temp]"=&f"(temp), + [ub]"=&f"(ub), [ug]"=&f"(ug), + [vg]"=&f"(vg), [vr]"=&f"(vr), + [bb]"=&f"(bb), [bg]"=&f"(bg), + [br]"=&f"(br), [yg]"=&f"(yg) + : [y_ptr]"r"(src_y), [u_ptr]"r"(src_u), + [v_ptr]"r"(src_v), [rgbbuf_ptr]"r"(rgb_buf), + [yuvcons_ptr]"r"(yuvconstants), [width]"r"(width), + [zero]"f"(0x00), [five]"f"(0x55), + [six]"f"(0x6), [mask1]"f"(0xff00ff00ff00ff00), + [alpha]"f"(-1) + : "memory" + ); } void ARGBSetRow_MMI(uint8_t* dst_argb, uint32_t v32, int width) { - __asm__ volatile( - "punpcklwd %[v32], %[v32], %[v32] \n\t" - "1: \n\t" - "gssdlc1 %[v32], 0x07(%[dst_ptr]) \n\t" - "gssdrc1 %[v32], 0x00(%[dst_ptr]) \n\t" - "gssdlc1 %[v32], 0x0f(%[dst_ptr]) \n\t" - "gssdrc1 %[v32], 0x08(%[dst_ptr]) \n\t" + __asm__ volatile ( + "punpcklwd %[v32], %[v32], %[v32] \n\t" + "1: \n\t" + "gssdlc1 %[v32], 0x07(%[dst_ptr]) \n\t" + "gssdrc1 %[v32], 0x00(%[dst_ptr]) \n\t" + "gssdlc1 %[v32], 0x0f(%[dst_ptr]) \n\t" + "gssdrc1 %[v32], 0x08(%[dst_ptr]) \n\t" - "daddi %[width], %[width], 
-0x04 \n\t" - "daddiu %[dst_ptr], %[dst_ptr], 0x10 \n\t" - "bnez %[width], 1b \n\t" - : [v32] "+&f"(v32) - : [dst_ptr] "r"(dst_argb), [width] "r"(width) - : "memory"); + "daddi %[width], %[width], -0x04 \n\t" + "daddiu %[dst_ptr], %[dst_ptr], 0x10 \n\t" + "bnez %[width], 1b \n\t" + : [v32]"+&f"(v32) + : [dst_ptr]"r"(dst_argb), [width]"r"(width) + : "memory" + ); } +// clang-format on // 10 bit YUV to ARGB #endif // !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc b/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc index 5c0239a37..fe6df93a6 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc @@ -155,11 +155,10 @@ extern "C" { } // Loads current and next row of ARGB input and averages it to calculate U and V -#define READ_ARGB(s_ptr, t_ptr, argb0, argb1, argb2, argb3) \ +#define READ_ARGB(s_ptr, t_ptr, argb0, argb1, argb2, argb3, const_0x0101) \ { \ v16u8 src0_m, src1_m, src2_m, src3_m, src4_m, src5_m, src6_m, src7_m; \ v16u8 vec0_m, vec1_m, vec2_m, vec3_m, vec4_m, vec5_m, vec6_m, vec7_m; \ - v16u8 vec8_m, vec9_m; \ v8u16 reg0_m, reg1_m, reg2_m, reg3_m, reg4_m, reg5_m, reg6_m, reg7_m; \ v8u16 reg8_m, reg9_m; \ \ @@ -195,81 +194,81 @@ extern "C" { reg1_m = (v8u16)__msa_pckev_d((v2i64)reg7_m, (v2i64)reg3_m); \ reg0_m += (v8u16)__msa_pckod_d((v2i64)reg6_m, (v2i64)reg2_m); \ reg1_m += (v8u16)__msa_pckod_d((v2i64)reg7_m, (v2i64)reg3_m); \ - reg8_m = (v8u16)__msa_srai_h((v8i16)reg8_m, 2); \ - reg9_m = (v8u16)__msa_srai_h((v8i16)reg9_m, 2); \ - reg0_m = (v8u16)__msa_srai_h((v8i16)reg0_m, 2); \ - reg1_m = (v8u16)__msa_srai_h((v8i16)reg1_m, 2); \ - argb0 = (v16u8)__msa_pckev_b((v16i8)reg9_m, (v16i8)reg8_m); \ - argb1 = (v16u8)__msa_pckev_b((v16i8)reg1_m, (v16i8)reg0_m); \ - src0_m = (v16u8)__msa_ld_b((void*)s, 64); \ - src1_m = (v16u8)__msa_ld_b((void*)s, 80); \ - src2_m = (v16u8)__msa_ld_b((void*)s, 96); \ - 
src3_m = (v16u8)__msa_ld_b((void*)s, 112); \ - src4_m = (v16u8)__msa_ld_b((void*)t, 64); \ - src5_m = (v16u8)__msa_ld_b((void*)t, 80); \ - src6_m = (v16u8)__msa_ld_b((void*)t, 96); \ - src7_m = (v16u8)__msa_ld_b((void*)t, 112); \ - vec2_m = (v16u8)__msa_ilvr_b((v16i8)src0_m, (v16i8)src4_m); \ - vec3_m = (v16u8)__msa_ilvr_b((v16i8)src1_m, (v16i8)src5_m); \ - vec4_m = (v16u8)__msa_ilvr_b((v16i8)src2_m, (v16i8)src6_m); \ - vec5_m = (v16u8)__msa_ilvr_b((v16i8)src3_m, (v16i8)src7_m); \ - vec6_m = (v16u8)__msa_ilvl_b((v16i8)src0_m, (v16i8)src4_m); \ - vec7_m = (v16u8)__msa_ilvl_b((v16i8)src1_m, (v16i8)src5_m); \ - vec8_m = (v16u8)__msa_ilvl_b((v16i8)src2_m, (v16i8)src6_m); \ - vec9_m = (v16u8)__msa_ilvl_b((v16i8)src3_m, (v16i8)src7_m); \ - reg0_m = __msa_hadd_u_h(vec2_m, vec2_m); \ - reg1_m = __msa_hadd_u_h(vec3_m, vec3_m); \ - reg2_m = __msa_hadd_u_h(vec4_m, vec4_m); \ - reg3_m = __msa_hadd_u_h(vec5_m, vec5_m); \ - reg4_m = __msa_hadd_u_h(vec6_m, vec6_m); \ - reg5_m = __msa_hadd_u_h(vec7_m, vec7_m); \ - reg6_m = __msa_hadd_u_h(vec8_m, vec8_m); \ - reg7_m = __msa_hadd_u_h(vec9_m, vec9_m); \ - reg8_m = (v8u16)__msa_pckev_d((v2i64)reg4_m, (v2i64)reg0_m); \ - reg9_m = (v8u16)__msa_pckev_d((v2i64)reg5_m, (v2i64)reg1_m); \ - reg8_m += (v8u16)__msa_pckod_d((v2i64)reg4_m, (v2i64)reg0_m); \ - reg9_m += (v8u16)__msa_pckod_d((v2i64)reg5_m, (v2i64)reg1_m); \ - reg0_m = (v8u16)__msa_pckev_d((v2i64)reg6_m, (v2i64)reg2_m); \ - reg1_m = (v8u16)__msa_pckev_d((v2i64)reg7_m, (v2i64)reg3_m); \ - reg0_m += (v8u16)__msa_pckod_d((v2i64)reg6_m, (v2i64)reg2_m); \ - reg1_m += (v8u16)__msa_pckod_d((v2i64)reg7_m, (v2i64)reg3_m); \ - reg8_m = (v8u16)__msa_srai_h((v8i16)reg8_m, 2); \ - reg9_m = (v8u16)__msa_srai_h((v8i16)reg9_m, 2); \ - reg0_m = (v8u16)__msa_srai_h((v8i16)reg0_m, 2); \ - reg1_m = (v8u16)__msa_srai_h((v8i16)reg1_m, 2); \ - argb2 = (v16u8)__msa_pckev_b((v16i8)reg9_m, (v16i8)reg8_m); \ - argb3 = (v16u8)__msa_pckev_b((v16i8)reg1_m, (v16i8)reg0_m); \ + reg8_m += const_0x0101; \ + reg9_m 
+= const_0x0101; \ + reg0_m += const_0x0101; \ + reg1_m += const_0x0101; \ + argb0 = (v8u16)__msa_srai_h((v8i16)reg8_m, 1); \ + argb1 = (v8u16)__msa_srai_h((v8i16)reg9_m, 1); \ + argb2 = (v8u16)__msa_srai_h((v8i16)reg0_m, 1); \ + argb3 = (v8u16)__msa_srai_h((v8i16)reg1_m, 1); \ + } + +#define ARGBTOUV(argb0, argb1, argb2, argb3, const0, const1, const2, const3, \ + shf0, shf1, shf2, shf3, shift, u_out, v_out) \ + { \ + v8u16 vec0_m, vec1_m, vec2_m, vec3_m, vec4_m, vec5_m, vec6_m, vec7_m; \ + v4u32 reg0_m, reg1_m, reg2_m, reg3_m; \ + \ + vec0_m = (v8u16)__msa_vshf_h(shf0, (v16i8)argb1, (v16i8)argb0); \ + vec1_m = (v8u16)__msa_vshf_h(shf0, (v16i8)argb3, (v16i8)argb2); \ + vec2_m = (v8u16)__msa_vshf_h(shf1, (v16i8)argb1, (v16i8)argb0); \ + vec3_m = (v8u16)__msa_vshf_h(shf1, (v16i8)argb3, (v16i8)argb2); \ + vec4_m = (v8u16)__msa_vshf_h(shf2, (v16i8)argb1, (v16i8)argb0); \ + vec5_m = (v8u16)__msa_vshf_h(shf2, (v16i8)argb3, (v16i8)argb2); \ + vec6_m = (v8u16)__msa_vshf_h(shf3, (v16i8)argb1, (v16i8)argb0); \ + vec7_m = (v8u16)__msa_vshf_h(shf3, (v16i8)argb3, (v16i8)argb2); \ + reg0_m = __msa_dotp_u_w(vec0_m, const0); \ + reg1_m = __msa_dotp_u_w(vec1_m, const0); \ + reg2_m = __msa_dotp_u_w(vec4_m, const0); \ + reg3_m = __msa_dotp_u_w(vec5_m, const0); \ + reg0_m += const1; \ + reg1_m += const1; \ + reg2_m += const1; \ + reg3_m += const1; \ + reg0_m -= (v4u32)__msa_dotp_u_w(vec2_m, const2); \ + reg1_m -= (v4u32)__msa_dotp_u_w(vec3_m, const2); \ + reg2_m -= (v4u32)__msa_dotp_u_w(vec6_m, const3); \ + reg3_m -= (v4u32)__msa_dotp_u_w(vec7_m, const3); \ + reg0_m = __msa_srl_w(reg0_m, shift); \ + reg1_m = __msa_srl_w(reg1_m, shift); \ + reg2_m = __msa_srl_w(reg2_m, shift); \ + reg3_m = __msa_srl_w(reg3_m, shift); \ + u_out = (v8u16)__msa_pckev_h((v8i16)reg1_m, (v8i16)reg0_m); \ + v_out = (v8u16)__msa_pckev_h((v8i16)reg3_m, (v8i16)reg2_m); \ } // Takes ARGB input and calculates U and V. 
-#define ARGBTOUV(argb0, argb1, argb2, argb3, const0, const1, const2, const3, \ - shf0, shf1, shf2, shf3, v_out, u_out) \ - { \ - v16u8 vec0_m, vec1_m, vec2_m, vec3_m, vec4_m, vec5_m, vec6_m, vec7_m; \ - v8u16 reg0_m, reg1_m, reg2_m, reg3_m; \ - \ - vec0_m = (v16u8)__msa_vshf_b(shf0, (v16i8)argb1, (v16i8)argb0); \ - vec1_m = (v16u8)__msa_vshf_b(shf0, (v16i8)argb3, (v16i8)argb2); \ - vec2_m = (v16u8)__msa_vshf_b(shf1, (v16i8)argb1, (v16i8)argb0); \ - vec3_m = (v16u8)__msa_vshf_b(shf1, (v16i8)argb3, (v16i8)argb2); \ - vec4_m = (v16u8)__msa_vshf_b(shf2, (v16i8)argb1, (v16i8)argb0); \ - vec5_m = (v16u8)__msa_vshf_b(shf2, (v16i8)argb3, (v16i8)argb2); \ - vec6_m = (v16u8)__msa_vshf_b(shf3, (v16i8)argb1, (v16i8)argb0); \ - vec7_m = (v16u8)__msa_vshf_b(shf3, (v16i8)argb3, (v16i8)argb2); \ - reg0_m = __msa_dotp_u_h(vec0_m, const1); \ - reg1_m = __msa_dotp_u_h(vec1_m, const1); \ - reg2_m = __msa_dotp_u_h(vec4_m, const1); \ - reg3_m = __msa_dotp_u_h(vec5_m, const1); \ - reg0_m += const3; \ - reg1_m += const3; \ - reg2_m += const3; \ - reg3_m += const3; \ - reg0_m -= __msa_dotp_u_h(vec2_m, const0); \ - reg1_m -= __msa_dotp_u_h(vec3_m, const0); \ - reg2_m -= __msa_dotp_u_h(vec6_m, const2); \ - reg3_m -= __msa_dotp_u_h(vec7_m, const2); \ - v_out = (v16u8)__msa_pckod_b((v16i8)reg1_m, (v16i8)reg0_m); \ - u_out = (v16u8)__msa_pckod_b((v16i8)reg3_m, (v16i8)reg2_m); \ +#define ARGBTOUV_H(argb0, argb1, argb2, argb3, const0, const1, const2, const3, \ + shf0, shf1, shf2, shf3, v_out, u_out) \ + { \ + v8u16 vec0_m, vec1_m, vec2_m, vec3_m, vec4_m, vec5_m, vec6_m, vec7_m; \ + v4u32 reg0_m, reg1_m, reg2_m, reg3_m; \ + \ + vec0_m = __msa_vshf_h(shf0, (v16i8)argb1, (v16i8)argb0); \ + vec1_m = __msa_vshf_h(shf0, (v16i8)argb3, (v16i8)argb2); \ + vec2_m = __msa_vshf_h(shf1, (v16i8)argb1, (v16i8)argb0); \ + vec3_m = __msa_vshf_h(shf1, (v16i8)argb3, (v16i8)argb2); \ + vec4_m = __msa_vshf_h(shf2, (v16i8)argb1, (v16i8)argb0); \ + vec5_m = __msa_vshf_h(shf2, (v16i8)argb3, (v16i8)argb2); \ + vec6_m = 
__msa_vshf_h(shf3, (v16i8)argb1, (v16i8)argb0); \ + vec7_m = __msa_vshf_h(shf3, (v16i8)argb3, (v16i8)argb2); \ + reg0_m = __msa_dotp_u_w(vec0_m, const1); \ + reg1_m = __msa_dotp_u_w(vec1_m, const1); \ + reg2_m = __msa_dotp_u_w(vec4_m, const1); \ + reg3_m = __msa_dotp_u_w(vec5_m, const1); \ + reg0_m += (v4u32)const3; \ + reg1_m += (v4u32)const3; \ + reg2_m += (v4u32)const3; \ + reg3_m += (v4u32)const3; \ + reg0_m -= __msa_dotp_u_w(vec2_m, const0); \ + reg1_m -= __msa_dotp_u_w(vec3_m, const0); \ + reg2_m -= __msa_dotp_u_w(vec6_m, const2); \ + reg3_m -= __msa_dotp_u_w(vec7_m, const2); \ + u_out = (v16u8)__msa_pckev_h((v8i16)reg3_m, (v8i16)reg2_m); \ + v_out = (v16u8)__msa_pckev_h((v8i16)reg1_m, (v8i16)reg0_m); \ + u_out = (v16u8)__msa_pckod_b((v16i8)u_out, (v16i8)u_out); \ + v_out = (v16u8)__msa_pckod_b((v16i8)v_out, (v16i8)v_out); \ } // Load I444 pixel data @@ -302,6 +301,20 @@ void MirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width) { } } +void MirrorUVRow_MSA(const uint8_t* src_uv, uint8_t* dst_uv, int width) { + int x; + v8u16 src, dst; + v8u16 shuffler = {7, 6, 5, 4, 3, 2, 1, 0}; + src_uv += (width - 8) << 1; + for (x = 0; x < width; x += 8) { + src = LD_UH(src_uv); + dst = __msa_vshf_h(shuffler, src, src); + ST_UH(dst, dst_uv); + src_uv -= 16; + dst_uv += 16; + } +} + void ARGBMirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width) { int x; v16u8 src0, src1, src2, src3; @@ -825,12 +838,13 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0, v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, vec8, vec9; v8u16 reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7, reg8, reg9; v16u8 dst0, dst1; - v8u16 const_0x70 = (v8u16)__msa_ldi_h(0x70); - v8u16 const_0x4A = (v8u16)__msa_ldi_h(0x4A); - v8u16 const_0x26 = (v8u16)__msa_ldi_h(0x26); - v8u16 const_0x5E = (v8u16)__msa_ldi_h(0x5E); - v8u16 const_0x12 = (v8u16)__msa_ldi_h(0x12); + v8u16 const_0x70 = (v8u16)__msa_ldi_h(0x38); + v8u16 const_0x4A = (v8u16)__msa_ldi_h(0x25); + v8u16 const_0x26 = 
(v8u16)__msa_ldi_h(0x13); + v8u16 const_0x5E = (v8u16)__msa_ldi_h(0x2f); + v8u16 const_0x12 = (v8u16)__msa_ldi_h(0x09); v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); + v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001); for (x = 0; x < width; x += 32) { src0 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 0); @@ -889,12 +903,18 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0, reg3 += __msa_hadd_u_h(vec5, vec5); reg4 += __msa_hadd_u_h(vec0, vec0); reg5 += __msa_hadd_u_h(vec1, vec1); - reg0 = (v8u16)__msa_srai_h((v8i16)reg0, 2); - reg1 = (v8u16)__msa_srai_h((v8i16)reg1, 2); - reg2 = (v8u16)__msa_srai_h((v8i16)reg2, 2); - reg3 = (v8u16)__msa_srai_h((v8i16)reg3, 2); - reg4 = (v8u16)__msa_srai_h((v8i16)reg4, 2); - reg5 = (v8u16)__msa_srai_h((v8i16)reg5, 2); + reg0 += const_0x0001; + reg1 += const_0x0001; + reg2 += const_0x0001; + reg3 += const_0x0001; + reg4 += const_0x0001; + reg5 += const_0x0001; + reg0 = (v8u16)__msa_srai_h((v8i16)reg0, 1); + reg1 = (v8u16)__msa_srai_h((v8i16)reg1, 1); + reg2 = (v8u16)__msa_srai_h((v8i16)reg2, 1); + reg3 = (v8u16)__msa_srai_h((v8i16)reg3, 1); + reg4 = (v8u16)__msa_srai_h((v8i16)reg4, 1); + reg5 = (v8u16)__msa_srai_h((v8i16)reg5, 1); reg6 = reg0 * const_0x70; reg7 = reg1 * const_0x70; reg8 = reg2 * const_0x4A; @@ -1412,17 +1432,17 @@ void ARGBGrayRow_MSA(const uint8_t* src_argb, uint8_t* dst_argb, int width) { int x; v16u8 src0, src1, vec0, vec1, dst0, dst1; v8u16 reg0; - v16u8 const_0x26 = (v16u8)__msa_ldi_h(0x26); - v16u8 const_0x4B0F = (v16u8)__msa_fill_h(0x4B0F); + v16u8 const_0x4D = (v16u8)__msa_ldi_h(0x4D); + v16u8 const_0x961D = (v16u8)__msa_fill_h(0x961D); for (x = 0; x < width; x += 8) { src0 = (v16u8)__msa_ld_b((v16u8*)src_argb, 0); src1 = (v16u8)__msa_ld_b((v16u8*)src_argb, 16); vec0 = (v16u8)__msa_pckev_h((v8i16)src1, (v8i16)src0); vec1 = (v16u8)__msa_pckod_h((v8i16)src1, (v8i16)src0); - reg0 = __msa_dotp_u_h(vec0, const_0x4B0F); - reg0 = __msa_dpadd_u_h(reg0, vec1, const_0x26); - reg0 = (v8u16)__msa_srari_h((v8i16)reg0, 7); + 
reg0 = __msa_dotp_u_h(vec0, const_0x961D); + reg0 = __msa_dpadd_u_h(reg0, vec1, const_0x4D); + reg0 = (v8u16)__msa_srari_h((v8i16)reg0, 8); vec0 = (v16u8)__msa_ilvev_b((v16i8)reg0, (v16i8)reg0); vec1 = (v16u8)__msa_ilvod_b((v16i8)vec1, (v16i8)vec0); dst0 = (v16u8)__msa_ilvr_b((v16i8)vec1, (v16i8)vec0); @@ -2031,12 +2051,13 @@ void RGB24ToUVRow_MSA(const uint8_t* src_rgb0, v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; v8i16 reg0, reg1, reg2, reg3; v16u8 dst0; - v8u16 const_0x70 = (v8u16)__msa_fill_h(0x70); - v8u16 const_0x4A = (v8u16)__msa_fill_h(0x4A); - v8u16 const_0x26 = (v8u16)__msa_fill_h(0x26); - v8u16 const_0x5E = (v8u16)__msa_fill_h(0x5E); - v8u16 const_0x12 = (v8u16)__msa_fill_h(0x12); + v8u16 const_0x70 = (v8u16)__msa_fill_h(0x38); + v8u16 const_0x4A = (v8u16)__msa_fill_h(0x25); + v8u16 const_0x26 = (v8u16)__msa_fill_h(0x13); + v8u16 const_0x5E = (v8u16)__msa_fill_h(0x2f); + v8u16 const_0x12 = (v8u16)__msa_fill_h(0x09); v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); + v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001); v16i8 mask = {0, 1, 2, 16, 3, 4, 5, 17, 6, 7, 8, 18, 9, 10, 11, 19}; v16i8 zero = {0}; @@ -2085,10 +2106,14 @@ void RGB24ToUVRow_MSA(const uint8_t* src_rgb0, reg1 += (v8i16)__msa_pckod_d((v2i64)vec3, (v2i64)vec2); reg2 += (v8i16)__msa_pckod_d((v2i64)vec5, (v2i64)vec4); reg3 += (v8i16)__msa_pckod_d((v2i64)vec7, (v2i64)vec6); - reg0 = __msa_srai_h((v8i16)reg0, 2); - reg1 = __msa_srai_h((v8i16)reg1, 2); - reg2 = __msa_srai_h((v8i16)reg2, 2); - reg3 = __msa_srai_h((v8i16)reg3, 2); + reg0 += const_0x0001; + reg1 += const_0x0001; + reg2 += const_0x0001; + reg3 += const_0x0001; + reg0 = __msa_srai_h((v8i16)reg0, 1); + reg1 = __msa_srai_h((v8i16)reg1, 1); + reg2 = __msa_srai_h((v8i16)reg2, 1); + reg3 = __msa_srai_h((v8i16)reg3, 1); vec4 = (v8u16)__msa_pckev_h(reg1, reg0); vec5 = (v8u16)__msa_pckev_h(reg3, reg2); vec6 = (v8u16)__msa_pckod_h(reg1, reg0); @@ -2136,12 +2161,13 @@ void RAWToUVRow_MSA(const uint8_t* src_rgb0, v8u16 vec0, vec1, 
vec2, vec3, vec4, vec5, vec6, vec7; v8i16 reg0, reg1, reg2, reg3; v16u8 dst0; - v8u16 const_0x70 = (v8u16)__msa_fill_h(0x70); - v8u16 const_0x4A = (v8u16)__msa_fill_h(0x4A); - v8u16 const_0x26 = (v8u16)__msa_fill_h(0x26); - v8u16 const_0x5E = (v8u16)__msa_fill_h(0x5E); - v8u16 const_0x12 = (v8u16)__msa_fill_h(0x12); + v8u16 const_0x70 = (v8u16)__msa_fill_h(0x38); + v8u16 const_0x4A = (v8u16)__msa_fill_h(0x25); + v8u16 const_0x26 = (v8u16)__msa_fill_h(0x13); + v8u16 const_0x5E = (v8u16)__msa_fill_h(0x2f); + v8u16 const_0x12 = (v8u16)__msa_fill_h(0x09); v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); + v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001); v16i8 mask = {0, 1, 2, 16, 3, 4, 5, 17, 6, 7, 8, 18, 9, 10, 11, 19}; v16i8 zero = {0}; @@ -2190,10 +2216,14 @@ void RAWToUVRow_MSA(const uint8_t* src_rgb0, reg1 += (v8i16)__msa_pckod_d((v2i64)vec3, (v2i64)vec2); reg2 += (v8i16)__msa_pckod_d((v2i64)vec5, (v2i64)vec4); reg3 += (v8i16)__msa_pckod_d((v2i64)vec7, (v2i64)vec6); - reg0 = __msa_srai_h(reg0, 2); - reg1 = __msa_srai_h(reg1, 2); - reg2 = __msa_srai_h(reg2, 2); - reg3 = __msa_srai_h(reg3, 2); + reg0 += const_0x0001; + reg1 += const_0x0001; + reg2 += const_0x0001; + reg3 += const_0x0001; + reg0 = __msa_srai_h(reg0, 1); + reg1 = __msa_srai_h(reg1, 1); + reg2 = __msa_srai_h(reg2, 1); + reg3 = __msa_srai_h(reg3, 1); vec4 = (v8u16)__msa_pckev_h((v8i16)reg1, (v8i16)reg0); vec5 = (v8u16)__msa_pckev_h((v8i16)reg3, (v8i16)reg2); vec6 = (v8u16)__msa_pckod_h((v8i16)reg1, (v8i16)reg0); @@ -2419,16 +2449,16 @@ void SobelXYRow_MSA(const uint8_t* src_sobelx, void ARGBToYJRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, src3, dst0; - v16u8 const_0x4B0F = (v16u8)__msa_fill_h(0x4B0F); - v16u8 const_0x26 = (v16u8)__msa_fill_h(0x26); - v8u16 const_0x40 = (v8u16)__msa_fill_h(0x40); + v16u8 const_0x961D = (v16u8)__msa_fill_h(0x961D); + v16u8 const_0x4D = (v16u8)__msa_fill_h(0x4D); + v8u16 const_0x80 = (v8u16)__msa_fill_h(0x80); for (x = 0; 
x < width; x += 16) { src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32); src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48); - ARGBTOY(src0, src1, src2, src3, const_0x4B0F, const_0x26, const_0x40, 7, + ARGBTOY(src0, src1, src2, src3, const_0x961D, const_0x4D, const_0x80, 8, dst0); ST_UB(dst0, dst_y); src_argb0 += 64; @@ -2504,61 +2534,123 @@ void ARGBToUVJRow_MSA(const uint8_t* src_rgb0, int x; const uint8_t* s = src_rgb0; const uint8_t* t = src_rgb0 + src_stride_rgb; - v16u8 src0, src1, src2, src3, src4, src5, src6, src7; - v16u8 vec0, vec1, vec2, vec3; - v16u8 dst0, dst1; - v16i8 shuffler0 = {0, 1, 4, 5, 8, 9, 12, 13, 16, 17, 20, 21, 24, 25, 28, 29}; - v16i8 shuffler1 = {2, 3, 6, 7, 10, 11, 14, 15, - 18, 19, 22, 23, 26, 27, 30, 31}; - v16i8 shuffler2 = {0, 3, 4, 7, 8, 11, 12, 15, 16, 19, 20, 23, 24, 27, 28, 31}; - v16i8 shuffler3 = {1, 2, 5, 6, 9, 10, 13, 14, 17, 18, 21, 22, 25, 26, 29, 30}; - v16u8 const_0x7F = (v16u8)__msa_fill_h(0x7F); - v16u8 const_0x6B14 = (v16u8)__msa_fill_h(0x6B14); - v16u8 const_0x2B54 = (v16u8)__msa_fill_h(0x2B54); - v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); + v8u16 src0, src1, src2, src3, src4, src5, src6, src7; + v8u16 vec0, vec1, vec2, vec3; + v8u16 dst0, dst1, dst2, dst3; + v16u8 zero = {0}; + v8i16 shuffler0 = {0, 3, 4, 7, 8, 11, 12, 15}; + v8i16 shuffler1 = {1, 2, 5, 6, 9, 10, 13, 14}; + v8i16 shuffler2 = {2, 3, 6, 7, 10, 11, 14, 15}; + v8i16 shuffler3 = {0, 1, 4, 5, 8, 9, 12, 13}; + v8u16 const_0x0000003f = (v8u16)__msa_fill_w(0x0000003f); + v4u32 const_0x00008080 = (v8u16)__msa_fill_w(0x00008080); + v8u16 const_0x0015002a = (v8u16)__msa_fill_w(0x0015002a); + v8u16 const_0x0035000a = (v8u16)__msa_fill_w(0x0035000a); + v4i32 shift = __msa_fill_w(0x00000008); for (x = 0; x < width; x += 32) { - src0 = (v16u8)__msa_ld_b((void*)s, 0); - src1 = (v16u8)__msa_ld_b((void*)s, 16); - src2 = (v16u8)__msa_ld_b((void*)s, 32); - src3 = 
(v16u8)__msa_ld_b((void*)s, 48); - src4 = (v16u8)__msa_ld_b((void*)t, 0); - src5 = (v16u8)__msa_ld_b((void*)t, 16); - src6 = (v16u8)__msa_ld_b((void*)t, 32); - src7 = (v16u8)__msa_ld_b((void*)t, 48); - src0 = __msa_aver_u_b(src0, src4); - src1 = __msa_aver_u_b(src1, src5); - src2 = __msa_aver_u_b(src2, src6); - src3 = __msa_aver_u_b(src3, src7); - src4 = (v16u8)__msa_pckev_w((v4i32)src1, (v4i32)src0); - src5 = (v16u8)__msa_pckev_w((v4i32)src3, (v4i32)src2); - src6 = (v16u8)__msa_pckod_w((v4i32)src1, (v4i32)src0); - src7 = (v16u8)__msa_pckod_w((v4i32)src3, (v4i32)src2); - vec0 = __msa_aver_u_b(src4, src6); - vec1 = __msa_aver_u_b(src5, src7); - src0 = (v16u8)__msa_ld_b((void*)s, 64); - src1 = (v16u8)__msa_ld_b((void*)s, 80); - src2 = (v16u8)__msa_ld_b((void*)s, 96); - src3 = (v16u8)__msa_ld_b((void*)s, 112); - src4 = (v16u8)__msa_ld_b((void*)t, 64); - src5 = (v16u8)__msa_ld_b((void*)t, 80); - src6 = (v16u8)__msa_ld_b((void*)t, 96); - src7 = (v16u8)__msa_ld_b((void*)t, 112); - src0 = __msa_aver_u_b(src0, src4); - src1 = __msa_aver_u_b(src1, src5); - src2 = __msa_aver_u_b(src2, src6); - src3 = __msa_aver_u_b(src3, src7); - src4 = (v16u8)__msa_pckev_w((v4i32)src1, (v4i32)src0); - src5 = (v16u8)__msa_pckev_w((v4i32)src3, (v4i32)src2); - src6 = (v16u8)__msa_pckod_w((v4i32)src1, (v4i32)src0); - src7 = (v16u8)__msa_pckod_w((v4i32)src3, (v4i32)src2); - vec2 = __msa_aver_u_b(src4, src6); - vec3 = __msa_aver_u_b(src5, src7); - ARGBTOUV(vec0, vec1, vec2, vec3, const_0x6B14, const_0x7F, const_0x2B54, - const_0x8080, shuffler1, shuffler0, shuffler2, shuffler3, dst0, - dst1); - ST_UB(dst0, dst_v); - ST_UB(dst1, dst_u); + src1 = __msa_ld_b((void*)s, 0); + src3 = __msa_ld_b((void*)s, 16); + src5 = __msa_ld_b((void*)t, 0); + src7 = __msa_ld_b((void*)t, 16); + src0 = __msa_ilvr_b(zero, src1); + src1 = __msa_ilvl_b(zero, src1); + src2 = __msa_ilvr_b(zero, src3); + src3 = __msa_ilvl_b(zero, src3); + src4 = __msa_ilvr_b(zero, src5); + src5 = __msa_ilvl_b(zero, src5); + src6 = 
__msa_ilvr_b(zero, src7); + src7 = __msa_ilvl_b(zero, src7); + src0 += src4; + src1 += src5; + src2 += src6; + src3 += src7; + src4 = __msa_ilvev_d(src1, src0); + src5 = __msa_ilvod_d(src1, src0); + src6 = __msa_ilvev_d(src3, src2); + src7 = __msa_ilvod_d(src3, src2); + vec0 = __msa_aver_u_h(src4, src5); + vec1 = __msa_aver_u_h(src6, src7); + + src1 = __msa_ld_b((void*)s, 32); + src3 = __msa_ld_b((void*)s, 48); + src5 = __msa_ld_b((void*)t, 32); + src7 = __msa_ld_b((void*)t, 48); + src0 = __msa_ilvr_b(zero, src1); + src1 = __msa_ilvl_b(zero, src1); + src2 = __msa_ilvr_b(zero, src3); + src3 = __msa_ilvl_b(zero, src3); + src4 = __msa_ilvr_b(zero, src5); + src5 = __msa_ilvl_b(zero, src5); + src6 = __msa_ilvr_b(zero, src7); + src7 = __msa_ilvl_b(zero, src7); + src0 += src4; + src1 += src5; + src2 += src6; + src3 += src7; + src4 = __msa_ilvev_d(src1, src0); + src5 = __msa_ilvod_d(src1, src0); + src6 = __msa_ilvev_d(src3, src2); + src7 = __msa_ilvod_d(src3, src2); + vec2 = __msa_aver_u_h(src4, src5); + vec3 = __msa_aver_u_h(src6, src7); + ARGBTOUV(vec0, vec1, vec2, vec3, const_0x0000003f, const_0x00008080, + const_0x0015002a, const_0x0035000a, shuffler0, shuffler1, + shuffler2, shuffler3, shift, dst0, dst1); + + src1 = __msa_ld_b((void*)s, 64); + src3 = __msa_ld_b((void*)s, 80); + src5 = __msa_ld_b((void*)t, 64); + src7 = __msa_ld_b((void*)t, 80); + src0 = __msa_ilvr_b(zero, src1); + src1 = __msa_ilvl_b(zero, src1); + src2 = __msa_ilvr_b(zero, src3); + src3 = __msa_ilvl_b(zero, src3); + src4 = __msa_ilvr_b(zero, src5); + src5 = __msa_ilvl_b(zero, src5); + src6 = __msa_ilvr_b(zero, src7); + src7 = __msa_ilvl_b(zero, src7); + src0 += src4; + src1 += src5; + src2 += src6; + src3 += src7; + src4 = __msa_ilvev_d(src1, src0); + src5 = __msa_ilvod_d(src1, src0); + src6 = __msa_ilvev_d(src3, src2); + src7 = __msa_ilvod_d(src3, src2); + vec0 = __msa_aver_u_h(src4, src5); + vec1 = __msa_aver_u_h(src6, src7); + + src1 = __msa_ld_b((void*)s, 96); + src3 = __msa_ld_b((void*)s, 112); 
+ src5 = __msa_ld_b((void*)t, 96); + src7 = __msa_ld_b((void*)t, 112); + src0 = __msa_ilvr_b(zero, src1); + src1 = __msa_ilvl_b(zero, src1); + src2 = __msa_ilvr_b(zero, src3); + src3 = __msa_ilvl_b(zero, src3); + src4 = __msa_ilvr_b(zero, src5); + src5 = __msa_ilvl_b(zero, src5); + src6 = __msa_ilvr_b(zero, src7); + src7 = __msa_ilvl_b(zero, src7); + src0 += src4; + src1 += src5; + src2 += src6; + src3 += src7; + src4 = __msa_ilvev_d(src1, src0); + src5 = __msa_ilvod_d(src1, src0); + src6 = __msa_ilvev_d(src3, src2); + src7 = __msa_ilvod_d(src3, src2); + vec2 = __msa_aver_u_h(src4, src5); + vec3 = __msa_aver_u_h(src6, src7); + ARGBTOUV(vec0, vec1, vec2, vec3, const_0x0000003f, const_0x00008080, + const_0x0015002a, const_0x0035000a, shuffler0, shuffler1, + shuffler2, shuffler3, shift, dst2, dst3); + + dst0 = (v8u16)__msa_pckev_b(dst2, dst0); + dst1 = (v8u16)__msa_pckev_b(dst3, dst1); + ST_UB(dst0, dst_u); + ST_UB(dst1, dst_v); s += 128; t += 128; dst_v += 16; @@ -2574,28 +2666,30 @@ void BGRAToUVRow_MSA(const uint8_t* src_rgb0, int x; const uint8_t* s = src_rgb0; const uint8_t* t = src_rgb0 + src_stride_rgb; - v16u8 dst0, dst1, vec0, vec1, vec2, vec3; - v16i8 shuffler0 = {0, 1, 4, 5, 8, 9, 12, 13, 16, 17, 20, 21, 24, 25, 28, 29}; - v16i8 shuffler1 = {2, 3, 6, 7, 10, 11, 14, 15, - 18, 19, 22, 23, 26, 27, 30, 31}; - v16i8 shuffler2 = {0, 3, 4, 7, 8, 11, 12, 15, 16, 19, 20, 23, 24, 27, 28, 31}; - v16i8 shuffler3 = {2, 1, 6, 5, 10, 9, 14, 13, 18, 17, 22, 21, 26, 25, 30, 29}; - v16u8 const_0x125E = (v16u8)__msa_fill_h(0x125E); - v16u8 const_0x7000 = (v16u8)__msa_fill_h(0x7000); - v16u8 const_0x264A = (v16u8)__msa_fill_h(0x264A); - v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); + const uint8_t unused = 0xf; + v8u16 src0, src1, src2, src3; + v16u8 dst0, dst1; + v8i16 shuffler0 = {1, unused, 5, unused, 9, unused, 13, unused}; + v8i16 shuffler1 = {2, 3, 6, 7, 10, 11, 14, 15}; + v8i16 shuffler2 = {3, unused, 7, unused, 11, unused, 15, unused}; + v8i16 shuffler3 = {1, 2, 5, 
6, 9, 10, 13, 14}; + v8u16 const_0x09002f = (v8u16)__msa_fill_w(0x09002f); + v8u16 const_0x000038 = (v8u16)__msa_fill_w(0x0038); + v8u16 const_0x250013 = (v8u16)__msa_fill_w(0x250013); + v4u32 const_0x008080 = (v4u32)__msa_fill_w(0x8080); + v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001); - for (x = 0; x < width; x += 32) { - READ_ARGB(s, t, vec0, vec1, vec2, vec3); - ARGBTOUV(vec0, vec1, vec2, vec3, const_0x125E, const_0x7000, const_0x264A, - const_0x8080, shuffler0, shuffler1, shuffler2, shuffler3, dst0, - dst1); - ST_UB(dst0, dst_v); - ST_UB(dst1, dst_u); - s += 128; - t += 128; - dst_v += 16; - dst_u += 16; + for (x = 0; x < width; x += 16) { + READ_ARGB(s, t, src0, src1, src2, src3, const_0x0001); + ARGBTOUV_H(src0, src1, src2, src3, const_0x09002f, const_0x000038, + const_0x250013, const_0x008080, shuffler0, shuffler1, shuffler2, + shuffler3, dst0, dst1); + *((uint64_t*)dst_v) = __msa_copy_u_d((v2i64)dst0, 0); + *((uint64_t*)dst_u) = __msa_copy_u_d((v2i64)dst1, 0); + s += 64; + t += 64; + dst_u += 8; + dst_v += 8; } } @@ -2607,29 +2701,30 @@ void ABGRToUVRow_MSA(const uint8_t* src_rgb0, int x; const uint8_t* s = src_rgb0; const uint8_t* t = src_rgb0 + src_stride_rgb; - v16u8 src0, src1, src2, src3; + const uint8_t unused = 0xf; + v8u16 src0, src1, src2, src3; v16u8 dst0, dst1; - v16i8 shuffler0 = {0, 1, 4, 5, 8, 9, 12, 13, 16, 17, 20, 21, 24, 25, 28, 29}; - v16i8 shuffler1 = {2, 3, 6, 7, 10, 11, 14, 15, - 18, 19, 22, 23, 26, 27, 30, 31}; - v16i8 shuffler2 = {0, 3, 4, 7, 8, 11, 12, 15, 16, 19, 20, 23, 24, 27, 28, 31}; - v16i8 shuffler3 = {1, 2, 5, 6, 9, 10, 13, 14, 17, 18, 21, 22, 25, 26, 29, 30}; - v16u8 const_0x4A26 = (v16u8)__msa_fill_h(0x4A26); - v16u8 const_0x0070 = (v16u8)__msa_fill_h(0x0070); - v16u8 const_0x125E = (v16u8)__msa_fill_h(0x125E); - v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); + v8i16 shuffler0 = {0, unused, 4, unused, 8, unused, 12, unused}; + v8i16 shuffler1 = {1, 2, 5, 6, 9, 10, 13, 14}; + v8i16 shuffler2 = {2, unused, 6, unused, 
10, unused, 14, unused}; + v8i16 shuffler3 = {0, 1, 4, 5, 8, 9, 12, 13}; + v8u16 const_0x09002f = (v8u16)__msa_fill_w(0x09002f); + v8u16 const_0x000038 = (v8u16)__msa_fill_w(0x0038); + v8u16 const_0x250013 = (v8u16)__msa_fill_w(0x250013); + v4u32 const_0x008080 = (v4u32)__msa_fill_w(0x8080); + v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001); - for (x = 0; x < width; x += 32) { - READ_ARGB(s, t, src0, src1, src2, src3); - ARGBTOUV(src0, src1, src2, src3, const_0x4A26, const_0x0070, const_0x125E, - const_0x8080, shuffler1, shuffler0, shuffler2, shuffler3, dst0, - dst1); - ST_UB(dst0, dst_u); - ST_UB(dst1, dst_v); - s += 128; - t += 128; - dst_u += 16; - dst_v += 16; + for (x = 0; x < width; x += 16) { + READ_ARGB(s, t, src0, src1, src2, src3, const_0x0001); + ARGBTOUV_H(src0, src1, src2, src3, const_0x09002f, const_0x000038, + const_0x250013, const_0x008080, shuffler0, shuffler1, shuffler2, + shuffler3, dst0, dst1); + *((uint64_t*)dst_v) = __msa_copy_u_d((v2i64)dst0, 0); + *((uint64_t*)dst_u) = __msa_copy_u_d((v2i64)dst1, 0); + s += 64; + t += 64; + dst_u += 8; + dst_v += 8; } } @@ -2641,28 +2736,30 @@ void RGBAToUVRow_MSA(const uint8_t* src_rgb0, int x; const uint8_t* s = src_rgb0; const uint8_t* t = src_rgb0 + src_stride_rgb; - v16u8 dst0, dst1, vec0, vec1, vec2, vec3; - v16i8 shuffler0 = {0, 1, 4, 5, 8, 9, 12, 13, 16, 17, 20, 21, 24, 25, 28, 29}; - v16i8 shuffler1 = {2, 3, 6, 7, 10, 11, 14, 15, - 18, 19, 22, 23, 26, 27, 30, 31}; - v16i8 shuffler2 = {0, 3, 4, 7, 8, 11, 12, 15, 16, 19, 20, 23, 24, 27, 28, 31}; - v16i8 shuffler3 = {2, 1, 6, 5, 10, 9, 14, 13, 18, 17, 22, 21, 26, 25, 30, 29}; - v16u8 const_0x125E = (v16u8)__msa_fill_h(0x264A); - v16u8 const_0x7000 = (v16u8)__msa_fill_h(0x7000); - v16u8 const_0x264A = (v16u8)__msa_fill_h(0x125E); - v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); + const uint8_t unused = 0xf; + v8u16 src0, src1, src2, src3; + v16u8 dst0, dst1; + v8i16 shuffler0 = {3, unused, 7, unused, 11, unused, 15, unused}; + v8i16 shuffler1 = {2, 
1, 6, 5, 10, 9, 14, 13}; + v8i16 shuffler2 = {1, unused, 5, unused, 9, unused, 13, unused}; + v8i16 shuffler3 = {3, 2, 7, 6, 11, 10, 15, 14}; + v8u16 const_0x09002f = (v8u16)__msa_fill_w(0x09002f); + v8u16 const_0x000038 = (v8u16)__msa_fill_w(0x0038); + v8u16 const_0x250013 = (v8u16)__msa_fill_w(0x250013); + v4u32 const_0x008080 = (v4u32)__msa_fill_w(0x8080); + v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001); - for (x = 0; x < width; x += 32) { - READ_ARGB(s, t, vec0, vec1, vec2, vec3); - ARGBTOUV(vec0, vec1, vec2, vec3, const_0x125E, const_0x7000, const_0x264A, - const_0x8080, shuffler0, shuffler1, shuffler2, shuffler3, dst0, - dst1); - ST_UB(dst0, dst_u); - ST_UB(dst1, dst_v); - s += 128; - t += 128; - dst_u += 16; - dst_v += 16; + for (x = 0; x < width; x += 16) { + READ_ARGB(s, t, src0, src1, src2, src3, const_0x0001); + ARGBTOUV_H(src0, src1, src2, src3, const_0x09002f, const_0x000038, + const_0x250013, const_0x008080, shuffler0, shuffler1, shuffler2, + shuffler3, dst0, dst1); + *((uint64_t*)dst_v) = __msa_copy_u_d((v2i64)dst0, 0); + *((uint64_t*)dst_u) = __msa_copy_u_d((v2i64)dst1, 0); + s += 64; + t += 64; + dst_u += 8; + dst_v += 8; } } @@ -2734,13 +2831,24 @@ void I444ToARGBRow_MSA(const uint8_t* src_y, } } -void I400ToARGBRow_MSA(const uint8_t* src_y, uint8_t* dst_argb, int width) { +// TODO - respect YuvConstants +void I400ToARGBRow_MSA(const uint8_t* src_y, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { int x; +#if defined(__aarch64__) || defined(__arm__) + int ygb = yuvconstants->kUVBiasBGR[3]; + int yg = yuvconstants->kYToRgb[1]; +#else + int ygb = yuvconstants->kYBiasToRgb[0]; + int yg = yuvconstants->kYToRgb[0]; +#endif v16u8 src0, res0, res1, res2, res3, res4, dst0, dst1, dst2, dst3; v8i16 vec0, vec1; v4i32 reg0, reg1, reg2, reg3; - v4i32 vec_yg = __msa_fill_w(0x4A35); - v8i16 vec_ygb = __msa_fill_h(0xFB78); + v4i32 vec_yg = __msa_fill_w(yg); + v8i16 vec_ygb = __msa_fill_h(ygb); v16u8 alpha = 
(v16u8)__msa_ldi_b(ALPHA_VAL); v8i16 max = __msa_ldi_h(0xFF); v8i16 zero = {0}; @@ -3006,7 +3114,7 @@ void ARGBBlendRow_MSA(const uint8_t* src_argb0, uint8_t* dst_argb, int width) { int x; - v16u8 src0, src1, src2, src3, dst0, dst1; + v16u8 src0, src1, src2, src3, dst0, dst1, dst2, dst3; v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; v8u16 vec8, vec9, vec10, vec11, vec12, vec13; v8u16 const_256 = (v8u16)__msa_ldi_h(256); @@ -3051,12 +3159,12 @@ void ARGBBlendRow_MSA(const uint8_t* src_argb0, vec9 = (v8u16)__msa_srai_h((v8i16)vec9, 8); vec10 = (v8u16)__msa_srai_h((v8i16)vec10, 8); vec11 = (v8u16)__msa_srai_h((v8i16)vec11, 8); - vec0 += vec8; - vec1 += vec9; - vec2 += vec10; - vec3 += vec11; dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); dst1 = (v16u8)__msa_pckev_b((v16i8)vec3, (v16i8)vec2); + dst2 = (v16u8)__msa_pckev_b((v16i8)vec9, (v16i8)vec8); + dst3 = (v16u8)__msa_pckev_b((v16i8)vec11, (v16i8)vec10); + dst0 = (v16u8)__msa_adds_u_b(dst0, dst2); + dst1 = (v16u8)__msa_adds_u_b(dst1, dst3); dst0 = __msa_bmnz_v(dst0, const_255, mask); dst1 = __msa_bmnz_v(dst1, const_255, mask); ST_UB2(dst0, dst1, dst_argb, 16); @@ -3082,7 +3190,7 @@ void ARGBQuantizeRow_MSA(uint8_t* dst_argb, v16i8 mask = {0, 1, 2, 19, 4, 5, 6, 23, 8, 9, 10, 27, 12, 13, 14, 31}; v16i8 zero = {0}; - for (x = 0; x < width; x += 8) { + for (x = 0; x < width; x += 16) { src0 = (v16u8)__msa_ld_b((void*)dst_argb, 0); src1 = (v16u8)__msa_ld_b((void*)dst_argb, 16); src2 = (v16u8)__msa_ld_b((void*)dst_argb, 32); @@ -3315,10 +3423,10 @@ void SetRow_MSA(uint8_t* dst, uint8_t v8, int width) { } } -void MirrorUVRow_MSA(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +void MirrorSplitUVRow_MSA(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { int x; v16u8 src0, src1, src2, src3; v16u8 dst0, dst1, dst2, dst3; diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc b/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc index 
1cf8eefea..a5aeaabfb 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc @@ -114,11 +114,11 @@ void I444ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" READYUV444 YUVTORGB - "subs %4, %4, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%3]! \n" - "bgt 1b \n" + "subs %4, %4, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%3]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -140,11 +140,11 @@ void I422ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%3]! \n" - "bgt 1b \n" + "subs %4, %4, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%3]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -168,10 +168,10 @@ void I422AlphaToARGBRow_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READYUV422 YUVTORGB - "subs %5, %5, #8 \n" - "vld1.8 {d23}, [%3]! \n" - "vst4.8 {d20, d21, d22, d23}, [%4]! \n" - "bgt 1b \n" + "subs %5, %5, #8 \n" + "vld1.8 {d23}, [%3]! \n" + "vst4.8 {d20, d21, d22, d23}, [%4]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -195,10 +195,10 @@ void I422ToRGBARow_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vmov.u8 d19, #255 \n" // YUVTORGB modified d19 - "vst4.8 {d19, d20, d21, d22}, [%3]! \n" - "bgt 1b \n" + "subs %4, %4, #8 \n" + "vmov.u8 d19, #255 \n" // YUVTORGB modified d19 + "vst4.8 {d19, d20, d21, d22}, [%3]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -221,9 +221,9 @@ void I422ToRGB24Row_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vst3.8 {d20, d21, d22}, [%3]! 
\n" - "bgt 1b \n" + "subs %4, %4, #8 \n" + "vst3.8 {d20, d21, d22}, [%3]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -253,9 +253,9 @@ void I422ToRGB565Row_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" ARGBTORGB565 - "vst1.8 {q0}, [%3]! \n" // store 8 pixels RGB565. - "bgt 1b \n" + "subs %4, %4, #8 \n" ARGBTORGB565 + "vst1.8 {q0}, [%3]! \n" // store 8 pixels RGB565. + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -287,10 +287,10 @@ void I422ToARGB1555Row_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vmov.u8 d23, #255 \n" ARGBTOARGB1555 - "vst1.8 {q0}, [%3]! \n" // store 8 pixels - "bgt 1b \n" + "subs %4, %4, #8 \n" + "vmov.u8 d23, #255 \n" ARGBTOARGB1555 + "vst1.8 {q0}, [%3]! \n" // store 8 pixels + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -321,14 +321,14 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d4, #0x0f \n" // vbic bits to clear + "vmov.u8 d4, #0x0f \n" // vbic bits to clear "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vmov.u8 d23, #255 \n" ARGBTOARGB4444 - "vst1.8 {q0}, [%3]! \n" // store 8 pixels - "bgt 1b \n" + "subs %4, %4, #8 \n" + "vmov.u8 d23, #255 \n" ARGBTOARGB4444 + "vst1.8 {q0}, [%3]! \n" // store 8 pixels + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -342,35 +342,38 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y, "q12", "q13", "q14", "q15"); } -void I400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { +void I400ToARGBRow_NEON(const uint8_t* src_y, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" READYUV400 YUVTORGB - "subs %2, %2, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%1]! 
\n" - "bgt 1b \n" + "subs %2, %2, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%1]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 - : [kUVToRB] "r"(&kYuvI601Constants.kUVToRB), - [kUVToG] "r"(&kYuvI601Constants.kUVToG), - [kUVBiasBGR] "r"(&kYuvI601Constants.kUVBiasBGR), - [kYToRgb] "r"(&kYuvI601Constants.kYToRgb) + : [kUVToRB] "r"(&yuvconstants->kUVToRB), + [kUVToG] "r"(&yuvconstants->kUVToG), + [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), + [kYToRgb] "r"(&yuvconstants->kYToRgb) : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15"); } void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" - "vld1.8 {d20}, [%0]! \n" - "vmov d21, d20 \n" - "vmov d22, d20 \n" - "subs %2, %2, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%1]! \n" - "bgt 1b \n" + "vld1.8 {d20}, [%0]! \n" + "vmov d21, d20 \n" + "vmov d22, d20 \n" + "subs %2, %2, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%1]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -384,11 +387,11 @@ void NV12ToARGBRow_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" READNV12 YUVTORGB - "subs %3, %3, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%2]! \n" - "bgt 1b \n" + "subs %3, %3, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%2]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_uv), // %1 "+r"(dst_argb), // %2 @@ -407,11 +410,11 @@ void NV21ToARGBRow_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" READNV21 YUVTORGB - "subs %3, %3, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%2]! \n" - "bgt 1b \n" + "subs %3, %3, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%2]! 
\n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_vu), // %1 "+r"(dst_argb), // %2 @@ -436,9 +439,9 @@ void NV12ToRGB24Row_NEON(const uint8_t* src_y, "1: \n" READNV12 YUVTORGB - "subs %3, %3, #8 \n" - "vst3.8 {d20, d21, d22}, [%2]! \n" - "bgt 1b \n" + "subs %3, %3, #8 \n" + "vst3.8 {d20, d21, d22}, [%2]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_uv), // %1 "+r"(dst_rgb24), // %2 @@ -463,9 +466,9 @@ void NV21ToRGB24Row_NEON(const uint8_t* src_y, "1: \n" READNV21 YUVTORGB - "subs %3, %3, #8 \n" - "vst3.8 {d20, d21, d22}, [%2]! \n" - "bgt 1b \n" + "subs %3, %3, #8 \n" + "vst3.8 {d20, d21, d22}, [%2]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_vu), // %1 "+r"(dst_rgb24), // %2 @@ -486,9 +489,9 @@ void NV12ToRGB565Row_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READNV12 YUVTORGB - "subs %3, %3, #8 \n" ARGBTORGB565 - "vst1.8 {q0}, [%2]! \n" // store 8 pixels RGB565. - "bgt 1b \n" + "subs %3, %3, #8 \n" ARGBTORGB565 + "vst1.8 {q0}, [%2]! \n" // store 8 pixels RGB565. + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_uv), // %1 "+r"(dst_rgb565), // %2 @@ -506,11 +509,11 @@ void YUY2ToARGBRow_NEON(const uint8_t* src_yuy2, const struct YuvConstants* yuvconstants, int width) { asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" READYUY2 YUVTORGB - "subs %2, %2, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%1]! \n" - "bgt 1b \n" + "subs %2, %2, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%1]! \n" + "bgt 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -527,11 +530,11 @@ void UYVYToARGBRow_NEON(const uint8_t* src_uyvy, const struct YuvConstants* yuvconstants, int width) { asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d23, #255 \n" "1: \n" READUYVY YUVTORGB - "subs %2, %2, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%1]! \n" - "bgt 1b \n" + "subs %2, %2, #8 \n" + "vst4.8 {d20, d21, d22, d23}, [%1]! 
\n" + "bgt 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -550,11 +553,11 @@ void SplitUVRow_NEON(const uint8_t* src_uv, int width) { asm volatile( "1: \n" - "vld2.8 {q0, q1}, [%0]! \n" // load 16 pairs of UV - "subs %3, %3, #16 \n" // 16 processed per loop - "vst1.8 {q0}, [%1]! \n" // store U - "vst1.8 {q1}, [%2]! \n" // store V - "bgt 1b \n" + "vld2.8 {q0, q1}, [%0]! \n" // load 16 pairs of UV + "subs %3, %3, #16 \n" // 16 processed per loop + "vst1.8 {q0}, [%1]! \n" // store U + "vst1.8 {q1}, [%2]! \n" // store V + "bgt 1b \n" : "+r"(src_uv), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -571,11 +574,11 @@ void MergeUVRow_NEON(const uint8_t* src_u, int width) { asm volatile( "1: \n" - "vld1.8 {q0}, [%0]! \n" // load U - "vld1.8 {q1}, [%1]! \n" // load V - "subs %3, %3, #16 \n" // 16 processed per loop - "vst2.8 {q0, q1}, [%2]! \n" // store 16 pairs of UV - "bgt 1b \n" + "vld1.8 {q0}, [%0]! \n" // load U + "vld1.8 {q1}, [%1]! \n" // load V + "subs %3, %3, #16 \n" // 16 processed per loop + "vst2.8 {q0, q1}, [%2]! \n" // store 16 pairs of UV + "bgt 1b \n" : "+r"(src_u), // %0 "+r"(src_v), // %1 "+r"(dst_uv), // %2 @@ -593,13 +596,13 @@ void SplitRGBRow_NEON(const uint8_t* src_rgb, int width) { asm volatile( "1: \n" - "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RGB - "vld3.8 {d1, d3, d5}, [%0]! \n" // next 8 RGB - "subs %4, %4, #16 \n" // 16 processed per loop - "vst1.8 {q0}, [%1]! \n" // store R - "vst1.8 {q1}, [%2]! \n" // store G - "vst1.8 {q2}, [%3]! \n" // store B - "bgt 1b \n" + "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RGB + "vld3.8 {d1, d3, d5}, [%0]! \n" // next 8 RGB + "subs %4, %4, #16 \n" // 16 processed per loop + "vst1.8 {q0}, [%1]! \n" // store R + "vst1.8 {q1}, [%2]! \n" // store G + "vst1.8 {q2}, [%3]! \n" // store B + "bgt 1b \n" : "+r"(src_rgb), // %0 "+r"(dst_r), // %1 "+r"(dst_g), // %2 @@ -618,13 +621,13 @@ void MergeRGBRow_NEON(const uint8_t* src_r, int width) { asm volatile( "1: \n" - "vld1.8 {q0}, [%0]! 
\n" // load R - "vld1.8 {q1}, [%1]! \n" // load G - "vld1.8 {q2}, [%2]! \n" // load B - "subs %4, %4, #16 \n" // 16 processed per loop - "vst3.8 {d0, d2, d4}, [%3]! \n" // store 8 RGB - "vst3.8 {d1, d3, d5}, [%3]! \n" // next 8 RGB - "bgt 1b \n" + "vld1.8 {q0}, [%0]! \n" // load R + "vld1.8 {q1}, [%1]! \n" // load G + "vld1.8 {q2}, [%2]! \n" // load B + "subs %4, %4, #16 \n" // 16 processed per loop + "vst3.8 {d0, d2, d4}, [%3]! \n" // store 8 RGB + "vst3.8 {d1, d3, d5}, [%3]! \n" // next 8 RGB + "bgt 1b \n" : "+r"(src_r), // %0 "+r"(src_g), // %1 "+r"(src_b), // %2 @@ -639,10 +642,10 @@ void MergeRGBRow_NEON(const uint8_t* src_r, void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width) { asm volatile( "1: \n" - "vld1.8 {d0, d1, d2, d3}, [%0]! \n" // load 32 - "subs %2, %2, #32 \n" // 32 processed per loop - "vst1.8 {d0, d1, d2, d3}, [%1]! \n" // store 32 - "bgt 1b \n" + "vld1.8 {d0, d1, d2, d3}, [%0]! \n" // load 32 + "subs %2, %2, #32 \n" // 32 processed per loop + "vst1.8 {d0, d1, d2, d3}, [%1]! \n" // store 32 + "bgt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 // Output registers @@ -654,11 +657,11 @@ void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width) { // SetRow writes 'width' bytes using an 8 bit value repeated. void SetRow_NEON(uint8_t* dst, uint8_t v8, int width) { asm volatile( - "vdup.8 q0, %2 \n" // duplicate 16 bytes + "vdup.8 q0, %2 \n" // duplicate 16 bytes "1: \n" - "subs %1, %1, #16 \n" // 16 bytes per loop - "vst1.8 {q0}, [%0]! \n" // store - "bgt 1b \n" + "subs %1, %1, #16 \n" // 16 bytes per loop + "vst1.8 {q0}, [%0]! \n" // store + "bgt 1b \n" : "+r"(dst), // %0 "+r"(width) // %1 : "r"(v8) // %2 @@ -668,11 +671,11 @@ void SetRow_NEON(uint8_t* dst, uint8_t v8, int width) { // ARGBSetRow writes 'width' pixels using an 32 bit value repeated. 
void ARGBSetRow_NEON(uint8_t* dst, uint32_t v32, int width) { asm volatile( - "vdup.u32 q0, %2 \n" // duplicate 4 ints + "vdup.u32 q0, %2 \n" // duplicate 4 ints "1: \n" - "subs %1, %1, #4 \n" // 4 pixels per loop - "vst1.8 {q0}, [%0]! \n" // store - "bgt 1b \n" + "subs %1, %1, #4 \n" // 4 pixels per loop + "vst1.8 {q0}, [%0]! \n" // store + "bgt 1b \n" : "+r"(dst), // %0 "+r"(width) // %1 : "r"(v32) // %2 @@ -682,41 +685,62 @@ void ARGBSetRow_NEON(uint8_t* dst, uint32_t v32, int width) { void MirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width) { asm volatile( // Start at end of source row. - "mov r3, #-16 \n" - "add %0, %0, %2 \n" - "sub %0, #16 \n" + "add %0, %0, %2 \n" + "sub %0, %0, #32 \n" // 32 bytes per loop "1: \n" - "vld1.8 {q0}, [%0], r3 \n" // src -= 16 - "subs %2, #16 \n" // 16 pixels per loop. - "vrev64.8 q0, q0 \n" - "vst1.8 {d1}, [%1]! \n" // dst += 16 - "vst1.8 {d0}, [%1]! \n" - "bgt 1b \n" + "vld1.8 {q1, q2}, [%0], %3 \n" // src -= 32 + "subs %2, #32 \n" // 32 pixels per loop. + "vrev64.8 q0, q2 \n" + "vrev64.8 q1, q1 \n" + "vswp d0, d1 \n" + "vswp d2, d3 \n" + "vst1.8 {q0, q1}, [%1]! \n" // dst += 32 + "bgt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 - : - : "cc", "memory", "r3", "q0"); + : "r"(-32) // %3 + : "cc", "memory", "q0", "q1", "q2"); } -void MirrorUVRow_NEON(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +void MirrorUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_uv, int width) { asm volatile( // Start at end of source row. - "mov r12, #-16 \n" - "add %0, %0, %3, lsl #1 \n" - "sub %0, #16 \n" + "mov r12, #-16 \n" + "add %0, %0, %2, lsl #1 \n" + "sub %0, #16 \n" "1: \n" - "vld2.8 {d0, d1}, [%0], r12 \n" // src -= 16 - "subs %3, #8 \n" // 8 pixels per loop. - "vrev64.8 q0, q0 \n" - "vst1.8 {d0}, [%1]! \n" // dst += 8 - "vst1.8 {d1}, [%2]! \n" - "bgt 1b \n" + "vld2.8 {d0, d1}, [%0], r12 \n" // src -= 16 + "subs %2, #8 \n" // 8 pixels per loop. 
+ "vrev64.8 q0, q0 \n" + "vst2.8 {d0, d1}, [%1]! \n" // dst += 16 + "bgt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_uv), // %1 + "+r"(width) // %2 + : + : "cc", "memory", "r12", "q0"); +} + +void MirrorSplitUVRow_NEON(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile( + // Start at end of source row. + "mov r12, #-16 \n" + "add %0, %0, %3, lsl #1 \n" + "sub %0, #16 \n" + + "1: \n" + "vld2.8 {d0, d1}, [%0], r12 \n" // src -= 16 + "subs %3, #8 \n" // 8 pixels per loop. + "vrev64.8 q0, q0 \n" + "vst1.8 {d0}, [%1]! \n" // dst += 8 + "vst1.8 {d1}, [%2]! \n" + "bgt 1b \n" : "+r"(src_uv), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -725,37 +749,57 @@ void MirrorUVRow_NEON(const uint8_t* src_uv, : "cc", "memory", "r12", "q0"); } -void ARGBMirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width) { +void ARGBMirrorRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) { asm volatile( - // Start at end of source row. - "mov r3, #-16 \n" - "add %0, %0, %2, lsl #2 \n" - "sub %0, #16 \n" + "add %0, %0, %2, lsl #2 \n" + "sub %0, #32 \n" "1: \n" - "vld1.8 {q0}, [%0], r3 \n" // src -= 16 - "subs %2, #4 \n" // 4 pixels per loop. - "vrev64.32 q0, q0 \n" - "vst1.8 {d1}, [%1]! \n" // dst += 16 - "vst1.8 {d0}, [%1]! \n" - "bgt 1b \n" - : "+r"(src), // %0 - "+r"(dst), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "r3", "q0"); + "vld4.8 {d0, d1, d2, d3}, [%0], %3 \n" // src -= 32 + "subs %2, #8 \n" // 8 pixels per loop. + "vrev64.8 d0, d0 \n" + "vrev64.8 d1, d1 \n" + "vrev64.8 d2, d2 \n" + "vrev64.8 d3, d3 \n" + "vst4.8 {d0, d1, d2, d3}, [%1]! 
\n" // dst += 32 + "bgt 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : "r"(-32) // %3 + : "cc", "memory", "d0", "d1", "d2", "d3"); +} + +void RGB24MirrorRow_NEON(const uint8_t* src_rgb24, + uint8_t* dst_rgb24, + int width) { + src_rgb24 += width * 3 - 24; + asm volatile( + "1: \n" + "vld3.8 {d0, d1, d2}, [%0], %3 \n" // src -= 24 + "subs %2, #8 \n" // 8 pixels per loop. + "vrev64.8 d0, d0 \n" + "vrev64.8 d1, d1 \n" + "vrev64.8 d2, d2 \n" + "vst3.8 {d0, d1, d2}, [%1]! \n" // dst += 24 + "bgt 1b \n" + : "+r"(src_rgb24), // %0 + "+r"(dst_rgb24), // %1 + "+r"(width) // %2 + : "r"(-24) // %3 + : "cc", "memory", "d0", "d1", "d2"); } void RGB24ToARGBRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d4, #255 \n" // Alpha + "vmov.u8 d4, #255 \n" // Alpha "1: \n" - "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RGB24. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vst4.8 {d1, d2, d3, d4}, [%1]! \n" // store 8 pixels of ARGB. - "bgt 1b \n" + "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RGB24. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vst4.8 {d1, d2, d3, d4}, [%1]! \n" // store 8 pixels of ARGB. + "bgt 1b \n" : "+r"(src_rgb24), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -766,13 +810,13 @@ void RGB24ToARGBRow_NEON(const uint8_t* src_rgb24, void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d4, #255 \n" // Alpha + "vmov.u8 d4, #255 \n" // Alpha "1: \n" - "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RAW. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vswp.u8 d1, d3 \n" // swap R, B - "vst4.8 {d1, d2, d3, d4}, [%1]! \n" // store 8 pixels of ARGB. - "bgt 1b \n" + "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RAW. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vswp.u8 d1, d3 \n" // swap R, B + "vst4.8 {d1, d2, d3, d4}, [%1]! \n" // store 8 pixels of ARGB. 
+ "bgt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -783,13 +827,13 @@ void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width) { void RAWToRGBARow_NEON(const uint8_t* src_raw, uint8_t* dst_rgba, int width) { asm volatile( - "vmov.u8 d0, #255 \n" // Alpha + "vmov.u8 d0, #255 \n" // Alpha "1: \n" - "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RAW. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vswp.u8 d1, d3 \n" // swap R, B - "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of RGBA. - "bgt 1b \n" + "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RAW. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vswp.u8 d1, d3 \n" // swap R, B + "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of RGBA. + "bgt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_rgba), // %1 "+r"(width) // %2 @@ -800,12 +844,12 @@ void RAWToRGBARow_NEON(const uint8_t* src_raw, uint8_t* dst_rgba, int width) { void RAWToRGB24Row_NEON(const uint8_t* src_raw, uint8_t* dst_rgb24, int width) { asm volatile( "1: \n" - "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RAW. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vswp.u8 d1, d3 \n" // swap R, B - "vst3.8 {d1, d2, d3}, [%1]! \n" // store 8 pixels of + "vld3.8 {d1, d2, d3}, [%0]! \n" // load 8 pixels of RAW. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vswp.u8 d1, d3 \n" // swap R, B + "vst3.8 {d1, d2, d3}, [%1]! \n" // store 8 pixels of // RGB24. - "bgt 1b \n" + "bgt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_rgb24), // %1 "+r"(width) // %2 @@ -830,13 +874,13 @@ void RGB565ToARGBRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d3, #255 \n" // Alpha + "vmov.u8 d3, #255 \n" // Alpha "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 RGB565 pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld1.8 {q0}, [%0]! \n" // load 8 RGB565 pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. RGB565TOARGB - "vst4.8 {d0, d1, d2, d3}, [%1]! 
\n" // store 8 pixels of ARGB. - "bgt 1b \n" + "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of ARGB. + "bgt 1b \n" : "+r"(src_rgb565), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -876,13 +920,13 @@ void ARGB1555ToARGBRow_NEON(const uint8_t* src_argb1555, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d3, #255 \n" // Alpha + "vmov.u8 d3, #255 \n" // Alpha "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 ARGB1555 pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld1.8 {q0}, [%0]! \n" // load 8 ARGB1555 pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. ARGB1555TOARGB - "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of ARGB. - "bgt 1b \n" + "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of ARGB. + "bgt 1b \n" : "+r"(src_argb1555), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -905,13 +949,13 @@ void ARGB4444ToARGBRow_NEON(const uint8_t* src_argb4444, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d3, #255 \n" // Alpha + "vmov.u8 d3, #255 \n" // Alpha "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 ARGB4444 pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld1.8 {q0}, [%0]! \n" // load 8 ARGB4444 pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. ARGB4444TOARGB - "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of ARGB. - "bgt 1b \n" + "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of ARGB. + "bgt 1b \n" : "+r"(src_argb4444), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -925,11 +969,11 @@ void ARGBToRGB24Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "vld4.8 {d1, d2, d3, d4}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vst3.8 {d1, d2, d3}, [%1]! \n" // store 8 pixels of + "vld4.8 {d1, d2, d3, d4}, [%0]! \n" // load 8 pixels of ARGB. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vst3.8 {d1, d2, d3}, [%1]! \n" // store 8 pixels of // RGB24. 
- "bgt 1b \n" + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb24), // %1 "+r"(width) // %2 @@ -941,11 +985,11 @@ void ARGBToRGB24Row_NEON(const uint8_t* src_argb, void ARGBToRAWRow_NEON(const uint8_t* src_argb, uint8_t* dst_raw, int width) { asm volatile( "1: \n" - "vld4.8 {d1, d2, d3, d4}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vswp.u8 d1, d3 \n" // swap R, B - "vst3.8 {d1, d2, d3}, [%1]! \n" // store 8 pixels of RAW. - "bgt 1b \n" + "vld4.8 {d1, d2, d3, d4}, [%0]! \n" // load 8 pixels of ARGB. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vswp.u8 d1, d3 \n" // swap R, B + "vst3.8 {d1, d2, d3}, [%1]! \n" // store 8 pixels of RAW. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_raw), // %1 "+r"(width) // %2 @@ -957,10 +1001,10 @@ void ARGBToRAWRow_NEON(const uint8_t* src_argb, uint8_t* dst_raw, int width) { void YUY2ToYRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { asm volatile( "1: \n" - "vld2.8 {q0, q1}, [%0]! \n" // load 16 pixels of YUY2. - "subs %2, %2, #16 \n" // 16 processed per loop. - "vst1.8 {q0}, [%1]! \n" // store 16 pixels of Y. - "bgt 1b \n" + "vld2.8 {q0, q1}, [%0]! \n" // load 16 pixels of YUY2. + "subs %2, %2, #16 \n" // 16 processed per loop. + "vst1.8 {q0}, [%1]! \n" // store 16 pixels of Y. + "bgt 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -972,10 +1016,10 @@ void YUY2ToYRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { void UYVYToYRow_NEON(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { asm volatile( "1: \n" - "vld2.8 {q0, q1}, [%0]! \n" // load 16 pixels of UYVY. - "subs %2, %2, #16 \n" // 16 processed per loop. - "vst1.8 {q1}, [%1]! \n" // store 16 pixels of Y. - "bgt 1b \n" + "vld2.8 {q0, q1}, [%0]! \n" // load 16 pixels of UYVY. + "subs %2, %2, #16 \n" // 16 processed per loop. + "vst1.8 {q1}, [%1]! \n" // store 16 pixels of Y. 
+ "bgt 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -990,11 +1034,11 @@ void YUY2ToUV422Row_NEON(const uint8_t* src_yuy2, int width) { asm volatile( "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of YUY2. - "subs %3, %3, #16 \n" // 16 pixels = 8 UVs. - "vst1.8 {d1}, [%1]! \n" // store 8 U. - "vst1.8 {d3}, [%2]! \n" // store 8 V. - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of YUY2. + "subs %3, %3, #16 \n" // 16 pixels = 8 UVs. + "vst1.8 {d1}, [%1]! \n" // store 8 U. + "vst1.8 {d3}, [%2]! \n" // store 8 V. + "bgt 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1010,11 +1054,11 @@ void UYVYToUV422Row_NEON(const uint8_t* src_uyvy, int width) { asm volatile( "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of UYVY. - "subs %3, %3, #16 \n" // 16 pixels = 8 UVs. - "vst1.8 {d0}, [%1]! \n" // store 8 U. - "vst1.8 {d2}, [%2]! \n" // store 8 V. - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of UYVY. + "subs %3, %3, #16 \n" // 16 pixels = 8 UVs. + "vst1.8 {d0}, [%1]! \n" // store 8 U. + "vst1.8 {d2}, [%2]! \n" // store 8 V. + "bgt 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1030,16 +1074,16 @@ void YUY2ToUVRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_v, int width) { asm volatile( - "add %1, %0, %1 \n" // stride + src_yuy2 + "add %1, %0, %1 \n" // stride + src_yuy2 "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of YUY2. - "subs %4, %4, #16 \n" // 16 pixels = 8 UVs. - "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load next row YUY2. - "vrhadd.u8 d1, d1, d5 \n" // average rows of U - "vrhadd.u8 d3, d3, d7 \n" // average rows of V - "vst1.8 {d1}, [%2]! \n" // store 8 U. - "vst1.8 {d3}, [%3]! \n" // store 8 V. - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of YUY2. + "subs %4, %4, #16 \n" // 16 pixels = 8 UVs. + "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load next row YUY2. 
+ "vrhadd.u8 d1, d1, d5 \n" // average rows of U + "vrhadd.u8 d3, d3, d7 \n" // average rows of V + "vst1.8 {d1}, [%2]! \n" // store 8 U. + "vst1.8 {d3}, [%3]! \n" // store 8 V. + "bgt 1b \n" : "+r"(src_yuy2), // %0 "+r"(stride_yuy2), // %1 "+r"(dst_u), // %2 @@ -1057,16 +1101,16 @@ void UYVYToUVRow_NEON(const uint8_t* src_uyvy, uint8_t* dst_v, int width) { asm volatile( - "add %1, %0, %1 \n" // stride + src_uyvy + "add %1, %0, %1 \n" // stride + src_uyvy "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of UYVY. - "subs %4, %4, #16 \n" // 16 pixels = 8 UVs. - "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load next row UYVY. - "vrhadd.u8 d0, d0, d4 \n" // average rows of U - "vrhadd.u8 d2, d2, d6 \n" // average rows of V - "vst1.8 {d0}, [%2]! \n" // store 8 U. - "vst1.8 {d2}, [%3]! \n" // store 8 V. - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 16 pixels of UYVY. + "subs %4, %4, #16 \n" // 16 pixels = 8 UVs. + "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load next row UYVY. + "vrhadd.u8 d0, d0, d4 \n" // average rows of U + "vrhadd.u8 d2, d2, d6 \n" // average rows of V + "vst1.8 {d0}, [%2]! \n" // store 8 U. + "vst1.8 {d2}, [%3]! \n" // store 8 V. + "bgt 1b \n" : "+r"(src_uyvy), // %0 "+r"(stride_uyvy), // %1 "+r"(dst_u), // %2 @@ -1084,14 +1128,14 @@ void ARGBShuffleRow_NEON(const uint8_t* src_argb, const uint8_t* shuffler, int width) { asm volatile( - "vld1.8 {q2}, [%3] \n" // shuffler + "vld1.8 {q2}, [%3] \n" // shuffler "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 4 pixels. - "subs %2, %2, #4 \n" // 4 processed per loop - "vtbl.8 d2, {d0, d1}, d4 \n" // look up 2 first pixels - "vtbl.8 d3, {d0, d1}, d5 \n" // look up 2 next pixels - "vst1.8 {q1}, [%1]! \n" // store 4. - "bgt 1b \n" + "vld1.8 {q0}, [%0]! \n" // load 4 pixels. + "subs %2, %2, #4 \n" // 4 processed per loop + "vtbl.8 d2, {d0, d1}, d4 \n" // look up 2 first pixels + "vtbl.8 d3, {d0, d1}, d5 \n" // look up 2 next pixels + "vst1.8 {q1}, [%1]! \n" // store 4. 
+ "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -1107,12 +1151,12 @@ void I422ToYUY2Row_NEON(const uint8_t* src_y, int width) { asm volatile( "1: \n" - "vld2.8 {d0, d2}, [%0]! \n" // load 16 Ys - "vld1.8 {d1}, [%1]! \n" // load 8 Us - "vld1.8 {d3}, [%2]! \n" // load 8 Vs - "subs %4, %4, #16 \n" // 16 pixels - "vst4.8 {d0, d1, d2, d3}, [%3]! \n" // Store 8 YUY2/16 pixels. - "bgt 1b \n" + "vld2.8 {d0, d2}, [%0]! \n" // load 16 Ys + "vld1.8 {d1}, [%1]! \n" // load 8 Us + "vld1.8 {d3}, [%2]! \n" // load 8 Vs + "subs %4, %4, #16 \n" // 16 pixels + "vst4.8 {d0, d1, d2, d3}, [%3]! \n" // Store 8 YUY2/16 pixels. + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -1129,12 +1173,12 @@ void I422ToUYVYRow_NEON(const uint8_t* src_y, int width) { asm volatile( "1: \n" - "vld2.8 {d1, d3}, [%0]! \n" // load 16 Ys - "vld1.8 {d0}, [%1]! \n" // load 8 Us - "vld1.8 {d2}, [%2]! \n" // load 8 Vs - "subs %4, %4, #16 \n" // 16 pixels - "vst4.8 {d0, d1, d2, d3}, [%3]! \n" // Store 8 UYVY/16 pixels. - "bgt 1b \n" + "vld2.8 {d1, d3}, [%0]! \n" // load 16 Ys + "vld1.8 {d0}, [%1]! \n" // load 8 Us + "vld1.8 {d2}, [%2]! \n" // load 8 Vs + "subs %4, %4, #16 \n" // 16 pixels + "vst4.8 {d0, d1, d2, d3}, [%3]! \n" // Store 8 UYVY/16 pixels. + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -1149,11 +1193,11 @@ void ARGBToRGB565Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. + "subs %2, %2, #8 \n" // 8 processed per loop. ARGBTORGB565 - "vst1.8 {q0}, [%1]! \n" // store 8 pixels RGB565. - "bgt 1b \n" + "vst1.8 {q0}, [%1]! \n" // store 8 pixels RGB565. 
+ "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb565), // %1 "+r"(width) // %2 @@ -1166,16 +1210,16 @@ void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb, const uint32_t dither4, int width) { asm volatile( - "vdup.32 d2, %2 \n" // dither4 + "vdup.32 d2, %2 \n" // dither4 "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%1]! \n" // load 8 pixels of ARGB. - "subs %3, %3, #8 \n" // 8 processed per loop. - "vqadd.u8 d20, d20, d2 \n" - "vqadd.u8 d21, d21, d2 \n" - "vqadd.u8 d22, d22, d2 \n" // add for dither + "vld4.8 {d20, d21, d22, d23}, [%1]! \n" // load 8 pixels of ARGB. + "subs %3, %3, #8 \n" // 8 processed per loop. + "vqadd.u8 d20, d20, d2 \n" + "vqadd.u8 d21, d21, d2 \n" + "vqadd.u8 d22, d22, d2 \n" // add for dither ARGBTORGB565 - "vst1.8 {q0}, [%0]! \n" // store 8 RGB565. - "bgt 1b \n" + "vst1.8 {q0}, [%0]! \n" // store 8 RGB565. + "bgt 1b \n" : "+r"(dst_rgb) // %0 : "r"(src_argb), // %1 "r"(dither4), // %2 @@ -1188,11 +1232,11 @@ void ARGBToARGB1555Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. + "subs %2, %2, #8 \n" // 8 processed per loop. ARGBTOARGB1555 - "vst1.8 {q0}, [%1]! \n" // store 8 ARGB1555. - "bgt 1b \n" + "vst1.8 {q0}, [%1]! \n" // store 8 ARGB1555. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb1555), // %1 "+r"(width) // %2 @@ -1204,14 +1248,14 @@ void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, uint8_t* dst_argb4444, int width) { asm volatile( - "vmov.u8 d4, #0x0f \n" // bits to clear with + "vmov.u8 d4, #0x0f \n" // bits to clear with // vbic. "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. + "subs %2, %2, #8 \n" // 8 processed per loop. ARGBTOARGB4444 - "vst1.8 {q0}, [%1]! 
\n" // store 8 ARGB4444. - "bgt 1b \n" + "vst1.8 {q0}, [%1]! \n" // store 8 ARGB4444. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb4444), // %1 "+r"(width) // %2 @@ -1221,20 +1265,20 @@ void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d27, #16 \n" // Add 16 constant + "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d27, #16 \n" // Add 16 constant "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q2, d0, d24 \n" // B + "vmlal.u8 q2, d1, d25 \n" // G + "vmlal.u8 q2, d2, d26 \n" // R "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d27 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d27 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1247,11 +1291,11 @@ void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels - "subs %2, %2, #16 \n" // 16 processed per loop - "vst1.8 {q3}, [%1]! \n" // store 16 A's. - "bgt 1b \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels + "subs %2, %2, #16 \n" // 16 processed per loop + "vst1.8 {q3}, [%1]! \n" // store 16 A's. 
+ "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_a), // %1 "+r"(width) // %2 @@ -1262,18 +1306,18 @@ void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient - "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient - "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient + "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient + "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient + "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q2, d0, d24 \n" // B + "vmlal.u8 q2, d1, d25 \n" // G + "vmlal.u8 q2, d2, d26 \n" // R "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1283,18 +1327,18 @@ void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { void RGBAToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient - "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient - "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient + "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient + "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient + "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 RGBA pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d1, d24 \n" // B - "vmlal.u8 q2, d2, d25 \n" // G - "vmlal.u8 q2, d3, d26 \n" // R + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 RGBA pixels. 
+ "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q2, d1, d24 \n" // B + "vmlal.u8 q2, d2, d25 \n" // G + "vmlal.u8 q2, d3, d26 \n" // R "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1308,32 +1352,32 @@ void ARGBToUV444Row_NEON(const uint8_t* src_argb, uint8_t* dst_v, int width) { asm volatile( - "vmov.u8 d24, #112 \n" // UB / VR 0.875 + "vmov.u8 d24, #112 \n" // UB / VR 0.875 // coefficient - "vmov.u8 d25, #74 \n" // UG -0.5781 coefficient - "vmov.u8 d26, #38 \n" // UR -0.2969 coefficient - "vmov.u8 d27, #18 \n" // VB -0.1406 coefficient - "vmov.u8 d28, #94 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 + "vmov.u8 d25, #74 \n" // UG -0.5781 coefficient + "vmov.u8 d26, #38 \n" // UR -0.2969 coefficient + "vmov.u8 d27, #18 \n" // VB -0.1406 coefficient + "vmov.u8 d28, #94 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "subs %3, %3, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d0, d24 \n" // B - "vmlsl.u8 q2, d1, d25 \n" // G - "vmlsl.u8 q2, d2, d26 \n" // R - "vadd.u16 q2, q2, q15 \n" // +128 -> unsigned + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. + "subs %3, %3, #8 \n" // 8 processed per loop. 
+ "vmull.u8 q2, d0, d24 \n" // B + "vmlsl.u8 q2, d1, d25 \n" // G + "vmlsl.u8 q2, d2, d26 \n" // R + "vadd.u16 q2, q2, q15 \n" // +128 -> unsigned - "vmull.u8 q3, d2, d24 \n" // R - "vmlsl.u8 q3, d1, d28 \n" // G - "vmlsl.u8 q3, d0, d27 \n" // B - "vadd.u16 q3, q3, q15 \n" // +128 -> unsigned + "vmull.u8 q3, d2, d24 \n" // R + "vmlsl.u8 q3, d1, d28 \n" // G + "vmlsl.u8 q3, d0, d27 \n" // B + "vadd.u16 q3, q3, q15 \n" // +128 -> unsigned "vqshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit U "vqshrn.u16 d1, q3, #8 \n" // 16 bit to 8 bit V - "vst1.8 {d0}, [%1]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%2]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%2]! \n" // store 8 pixels V. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1365,34 +1409,34 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb, uint8_t* dst_v, int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_argb - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 - "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. - "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. - "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. 
+ "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. + "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. + "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. - "vrshr.u16 q0, q0, #1 \n" // 2x average - "vrshr.u16 q1, q1, #1 \n" - "vrshr.u16 q2, q2, #1 \n" + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" - "subs %4, %4, #16 \n" // 32 processed per loop. + "subs %4, %4, #16 \n" // 16 processed per loop. RGBTOUV(q0, q1, q2) - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(src_stride_argb), // %1 "+r"(dst_u), // %2 @@ -1411,34 +1455,34 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb, uint8_t* dst_v, int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_argb - "vmov.s16 q10, #127 / 2 \n" // UB / VR 0.500 coefficient - "vmov.s16 q11, #84 / 2 \n" // UG -0.33126 coefficient - "vmov.s16 q12, #43 / 2 \n" // UR -0.16874 coefficient - "vmov.s16 q13, #20 / 2 \n" // VB -0.08131 coefficient - "vmov.s16 q14, #107 / 2 \n" // VG -0.41869 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 - "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. - "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. - "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. + "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #127 / 2 \n" // UB / VR 0.500 coefficient + "vmov.s16 q11, #84 / 2 \n" // UG -0.33126 coefficient + "vmov.s16 q12, #43 / 2 \n" // UR -0.16874 coefficient + "vmov.s16 q13, #20 / 2 \n" // VB -0.08131 coefficient + "vmov.s16 q14, #107 / 2 \n" // VG -0.41869 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. + "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. + "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. 
+ "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. - "vrshr.u16 q0, q0, #1 \n" // 2x average - "vrshr.u16 q1, q1, #1 \n" - "vrshr.u16 q2, q2, #1 \n" + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" - "subs %4, %4, #16 \n" // 32 processed per loop. + "subs %4, %4, #16 \n" // 16 processed per loop. RGBTOUV(q0, q1, q2) - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(src_stride_argb), // %1 "+r"(dst_u), // %2 @@ -1456,34 +1500,34 @@ void BGRAToUVRow_NEON(const uint8_t* src_bgra, uint8_t* dst_v, int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_bgra - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 - "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 BGRA pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 BGRA pixels. - "vpaddl.u8 q3, q3 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q2, q2 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // R 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more BGRA pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 BGRA pixels. - "vpadal.u8 q3, q7 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q2, q6 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // R 16 bytes -> 8 shorts. 
+ "add %1, %0, %1 \n" // src_stride + src_bgra + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 BGRA pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 BGRA pixels. + "vpaddl.u8 q3, q3 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q2 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // R 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more BGRA pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 BGRA pixels. + "vpadal.u8 q3, q7 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q2, q6 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q1, q5 \n" // R 16 bytes -> 8 shorts. - "vrshr.u16 q1, q1, #1 \n" // 2x average - "vrshr.u16 q2, q2, #1 \n" - "vrshr.u16 q3, q3, #1 \n" + "vrshr.u16 q1, q1, #1 \n" // 2x average + "vrshr.u16 q2, q2, #1 \n" + "vrshr.u16 q3, q3, #1 \n" - "subs %4, %4, #16 \n" // 32 processed per loop. + "subs %4, %4, #16 \n" // 16 processed per loop. RGBTOUV(q3, q2, q1) - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" : "+r"(src_bgra), // %0 "+r"(src_stride_bgra), // %1 "+r"(dst_u), // %2 @@ -1501,34 +1545,34 @@ void ABGRToUVRow_NEON(const uint8_t* src_abgr, uint8_t* dst_v, int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_abgr - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 - "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ABGR pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ABGR pixels. - "vpaddl.u8 q2, q2 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q0, q0 \n" // R 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ABGR pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ABGR pixels. - "vpadal.u8 q2, q6 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q0, q4 \n" // R 16 bytes -> 8 shorts. + "add %1, %0, %1 \n" // src_stride + src_abgr + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ABGR pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ABGR pixels. + "vpaddl.u8 q2, q2 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q0, q0 \n" // R 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ABGR pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ABGR pixels. + "vpadal.u8 q2, q6 \n" // B 16 bytes -> 8 shorts. 
+ "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q0, q4 \n" // R 16 bytes -> 8 shorts. - "vrshr.u16 q0, q0, #1 \n" // 2x average - "vrshr.u16 q1, q1, #1 \n" - "vrshr.u16 q2, q2, #1 \n" + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" - "subs %4, %4, #16 \n" // 32 processed per loop. + "subs %4, %4, #16 \n" // 16 processed per loop. RGBTOUV(q2, q1, q0) - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. + "bgt 1b \n" : "+r"(src_abgr), // %0 "+r"(src_stride_abgr), // %1 "+r"(dst_u), // %2 @@ -1546,34 +1590,34 @@ void RGBAToUVRow_NEON(const uint8_t* src_rgba, uint8_t* dst_v, int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_rgba - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 - "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 RGBA pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 RGBA pixels. - "vpaddl.u8 q0, q1 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q2 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q2, q3 \n" // R 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more RGBA pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 RGBA pixels. - "vpadal.u8 q0, q5 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q6 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q2, q7 \n" // R 16 bytes -> 8 shorts. 
+ "add %1, %0, %1 \n" // src_stride + src_rgba + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 RGBA pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 RGBA pixels. + "vpaddl.u8 q0, q1 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q2 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q3 \n" // R 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more RGBA pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 RGBA pixels. + "vpadal.u8 q0, q5 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q1, q6 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q7 \n" // R 16 bytes -> 8 shorts. - "vrshr.u16 q0, q0, #1 \n" // 2x average - "vrshr.u16 q1, q1, #1 \n" - "vrshr.u16 q2, q2, #1 \n" + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" - "subs %4, %4, #16 \n" // 32 processed per loop. + "subs %4, %4, #16 \n" // 16 processed per loop. RGBTOUV(q0, q1, q2) - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" : "+r"(src_rgba), // %0 "+r"(src_stride_rgba), // %1 "+r"(dst_u), // %2 @@ -1591,34 +1635,34 @@ void RGB24ToUVRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_v, int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_rgb24 - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 - "1: \n" - "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RGB24 pixels. - "vld3.8 {d1, d3, d5}, [%0]! \n" // load next 8 RGB24 pixels. - "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. - "vld3.8 {d8, d10, d12}, [%1]! \n" // load 8 more RGB24 pixels. - "vld3.8 {d9, d11, d13}, [%1]! \n" // load last 8 RGB24 pixels. - "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. + "add %1, %0, %1 \n" // src_stride + src_rgb24 + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RGB24 pixels. + "vld3.8 {d1, d3, d5}, [%0]! \n" // load next 8 RGB24 pixels. + "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. + "vld3.8 {d8, d10, d12}, [%1]! \n" // load 8 more RGB24 pixels. + "vld3.8 {d9, d11, d13}, [%1]! \n" // load last 8 RGB24 pixels. + "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. 
+ "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. - "vrshr.u16 q0, q0, #1 \n" // 2x average - "vrshr.u16 q1, q1, #1 \n" - "vrshr.u16 q2, q2, #1 \n" + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" - "subs %4, %4, #16 \n" // 32 processed per loop. + "subs %4, %4, #16 \n" // 16 processed per loop. RGBTOUV(q0, q1, q2) - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. + "bgt 1b \n" : "+r"(src_rgb24), // %0 "+r"(src_stride_rgb24), // %1 "+r"(dst_u), // %2 @@ -1636,34 +1680,34 @@ void RAWToUVRow_NEON(const uint8_t* src_raw, uint8_t* dst_v, int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_raw - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 - "1: \n" - "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RAW pixels. - "vld3.8 {d1, d3, d5}, [%0]! \n" // load next 8 RAW pixels. - "vpaddl.u8 q2, q2 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q0, q0 \n" // R 16 bytes -> 8 shorts. - "vld3.8 {d8, d10, d12}, [%1]! \n" // load 8 more RAW pixels. - "vld3.8 {d9, d11, d13}, [%1]! \n" // load last 8 RAW pixels. - "vpadal.u8 q2, q6 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q0, q4 \n" // R 16 bytes -> 8 shorts. 
+ "add %1, %0, %1 \n" // src_stride + src_raw + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RAW pixels. + "vld3.8 {d1, d3, d5}, [%0]! \n" // load next 8 RAW pixels. + "vpaddl.u8 q2, q2 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q0, q0 \n" // R 16 bytes -> 8 shorts. + "vld3.8 {d8, d10, d12}, [%1]! \n" // load 8 more RAW pixels. + "vld3.8 {d9, d11, d13}, [%1]! \n" // load last 8 RAW pixels. + "vpadal.u8 q2, q6 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q0, q4 \n" // R 16 bytes -> 8 shorts. - "vrshr.u16 q0, q0, #1 \n" // 2x average - "vrshr.u16 q1, q1, #1 \n" - "vrshr.u16 q2, q2, #1 \n" + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" - "subs %4, %4, #16 \n" // 32 processed per loop. + "subs %4, %4, #16 \n" // 16 processed per loop. RGBTOUV(q2, q1, q0) - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" : "+r"(src_raw), // %0 "+r"(src_stride_raw), // %1 "+r"(dst_u), // %2 @@ -1682,55 +1726,55 @@ void RGB565ToUVRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_v, int width) { asm volatile( - "add %1, %0, %1 \n" // src_stride + src_argb - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 + "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 // coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 RGB565 pixels. + "vld1.8 {q0}, [%0]! \n" // load 8 RGB565 pixels. RGB565TOARGB - "vpaddl.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. - "vpaddl.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. - "vpaddl.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%0]! \n" // next 8 RGB565 pixels. + "vpaddl.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. + "vpaddl.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. + "vpaddl.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. + "vld1.8 {q0}, [%0]! \n" // next 8 RGB565 pixels. RGB565TOARGB - "vpaddl.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. - "vpaddl.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. - "vpaddl.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. + "vpaddl.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. + "vpaddl.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. + "vpaddl.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%1]! \n" // load 8 RGB565 pixels. + "vld1.8 {q0}, [%1]! \n" // load 8 RGB565 pixels. RGB565TOARGB - "vpadal.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. - "vpadal.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. 
- "vpadal.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%1]! \n" // next 8 RGB565 pixels. + "vpadal.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. + "vpadal.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. + "vpadal.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. + "vld1.8 {q0}, [%1]! \n" // next 8 RGB565 pixels. RGB565TOARGB - "vpadal.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. - "vpadal.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. - "vpadal.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. + "vpadal.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. + "vpadal.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. + "vpadal.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. - "vrshr.u16 q4, q4, #1 \n" // 2x average - "vrshr.u16 q5, q5, #1 \n" - "vrshr.u16 q6, q6, #1 \n" + "vrshr.u16 q4, q4, #1 \n" // 2x average + "vrshr.u16 q5, q5, #1 \n" + "vrshr.u16 q6, q6, #1 \n" - "subs %4, %4, #16 \n" // 16 processed per loop. - "vmul.s16 q8, q4, q10 \n" // B - "vmls.s16 q8, q5, q11 \n" // G - "vmls.s16 q8, q6, q12 \n" // R - "vadd.u16 q8, q8, q15 \n" // +128 -> unsigned - "vmul.s16 q9, q6, q10 \n" // R - "vmls.s16 q9, q5, q14 \n" // G - "vmls.s16 q9, q4, q13 \n" // B - "vadd.u16 q9, q9, q15 \n" // +128 -> unsigned + "subs %4, %4, #16 \n" // 16 processed per loop. + "vmul.s16 q8, q4, q10 \n" // B + "vmls.s16 q8, q5, q11 \n" // G + "vmls.s16 q8, q6, q12 \n" // R + "vadd.u16 q8, q8, q15 \n" // +128 -> unsigned + "vmul.s16 q9, q6, q10 \n" // R + "vmls.s16 q9, q5, q14 \n" // G + "vmls.s16 q9, q4, q13 \n" // B + "vadd.u16 q9, q9, q15 \n" // +128 -> unsigned "vqshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit U "vqshrn.u16 d1, q9, #8 \n" // 16 bit to 8 bit V - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" : "+r"(src_rgb565), // %0 "+r"(src_stride_rgb565), // %1 "+r"(dst_u), // %2 @@ -1748,55 +1792,55 @@ void ARGB1555ToUVRow_NEON(const uint8_t* src_argb1555, uint8_t* dst_v, int width) { asm volatile( - "add %1, %0, %1 \n" // src_stride + src_argb - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 + "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 // coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 ARGB1555 pixels. + "vld1.8 {q0}, [%0]! \n" // load 8 ARGB1555 pixels. RGB555TOARGB - "vpaddl.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. - "vpaddl.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. - "vpaddl.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%0]! \n" // next 8 ARGB1555 pixels. + "vpaddl.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. + "vpaddl.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. + "vpaddl.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. + "vld1.8 {q0}, [%0]! \n" // next 8 ARGB1555 pixels. RGB555TOARGB - "vpaddl.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. - "vpaddl.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. - "vpaddl.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. + "vpaddl.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. + "vpaddl.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. + "vpaddl.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%1]! \n" // load 8 ARGB1555 pixels. + "vld1.8 {q0}, [%1]! \n" // load 8 ARGB1555 pixels. RGB555TOARGB - "vpadal.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. 
- "vpadal.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. - "vpadal.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%1]! \n" // next 8 ARGB1555 pixels. + "vpadal.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. + "vpadal.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. + "vpadal.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. + "vld1.8 {q0}, [%1]! \n" // next 8 ARGB1555 pixels. RGB555TOARGB - "vpadal.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. - "vpadal.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. - "vpadal.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. + "vpadal.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. + "vpadal.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. + "vpadal.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. - "vrshr.u16 q4, q4, #1 \n" // 2x average - "vrshr.u16 q5, q5, #1 \n" - "vrshr.u16 q6, q6, #1 \n" + "vrshr.u16 q4, q4, #1 \n" // 2x average + "vrshr.u16 q5, q5, #1 \n" + "vrshr.u16 q6, q6, #1 \n" - "subs %4, %4, #16 \n" // 16 processed per loop. - "vmul.s16 q8, q4, q10 \n" // B - "vmls.s16 q8, q5, q11 \n" // G - "vmls.s16 q8, q6, q12 \n" // R - "vadd.u16 q8, q8, q15 \n" // +128 -> unsigned - "vmul.s16 q9, q6, q10 \n" // R - "vmls.s16 q9, q5, q14 \n" // G - "vmls.s16 q9, q4, q13 \n" // B - "vadd.u16 q9, q9, q15 \n" // +128 -> unsigned + "subs %4, %4, #16 \n" // 16 processed per loop. + "vmul.s16 q8, q4, q10 \n" // B + "vmls.s16 q8, q5, q11 \n" // G + "vmls.s16 q8, q6, q12 \n" // R + "vadd.u16 q8, q8, q15 \n" // +128 -> unsigned + "vmul.s16 q9, q6, q10 \n" // R + "vmls.s16 q9, q5, q14 \n" // G + "vmls.s16 q9, q4, q13 \n" // B + "vadd.u16 q9, q9, q15 \n" // +128 -> unsigned "vqshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit U "vqshrn.u16 d1, q9, #8 \n" // 16 bit to 8 bit V - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" : "+r"(src_argb1555), // %0 "+r"(src_stride_argb1555), // %1 "+r"(dst_u), // %2 @@ -1814,55 +1858,46 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444, uint8_t* dst_v, int width) { asm volatile( - "add %1, %0, %1 \n" // src_stride + src_argb - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 + "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 // coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient - "vmov.u16 q15, #0x8080 \n" // 128.5 + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 ARGB4444 pixels. + "vld1.8 {q0}, [%0]! \n" // load 8 ARGB4444 pixels. ARGB4444TOARGB - "vpaddl.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. - "vpaddl.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. - "vpaddl.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%0]! \n" // next 8 ARGB4444 pixels. + "vpaddl.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. + "vpaddl.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. + "vpaddl.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. + "vld1.8 {q0}, [%0]! \n" // next 8 ARGB4444 pixels. ARGB4444TOARGB - "vpaddl.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. - "vpaddl.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. - "vpaddl.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. + "vpaddl.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. + "vpaddl.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. + "vpaddl.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%1]! \n" // load 8 ARGB4444 pixels. + "vld1.8 {q0}, [%1]! \n" // load 8 ARGB4444 pixels. ARGB4444TOARGB - "vpadal.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. 
- "vpadal.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. - "vpadal.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. - "vld1.8 {q0}, [%1]! \n" // next 8 ARGB4444 pixels. + "vpadal.u8 d8, d0 \n" // B 8 bytes -> 4 shorts. + "vpadal.u8 d10, d1 \n" // G 8 bytes -> 4 shorts. + "vpadal.u8 d12, d2 \n" // R 8 bytes -> 4 shorts. + "vld1.8 {q0}, [%1]! \n" // next 8 ARGB4444 pixels. ARGB4444TOARGB - "vpadal.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. - "vpadal.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. - "vpadal.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. + "vpadal.u8 d9, d0 \n" // B 8 bytes -> 4 shorts. + "vpadal.u8 d11, d1 \n" // G 8 bytes -> 4 shorts. + "vpadal.u8 d13, d2 \n" // R 8 bytes -> 4 shorts. - "vrshr.u16 q4, q4, #1 \n" // 2x average - "vrshr.u16 q5, q5, #1 \n" - "vrshr.u16 q6, q6, #1 \n" + "vrshr.u16 q0, q4, #1 \n" // 2x average + "vrshr.u16 q1, q5, #1 \n" + "vrshr.u16 q2, q6, #1 \n" - "subs %4, %4, #16 \n" // 16 processed per loop. - "vmul.s16 q8, q4, q10 \n" // B - "vmls.s16 q8, q5, q11 \n" // G - "vmls.s16 q8, q6, q12 \n" // R - "vadd.u16 q8, q8, q15 \n" // +128 -> unsigned - "vmul.s16 q9, q6, q10 \n" // R - "vmls.s16 q9, q5, q14 \n" // G - "vmls.s16 q9, q4, q13 \n" // B - "vadd.u16 q9, q9, q15 \n" // +128 -> unsigned - "vqshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit U - "vqshrn.u16 d1, q9, #8 \n" // 16 bit to 8 bit V - "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. - "bgt 1b \n" + "subs %4, %4, #16 \n" // 16 processed per loop. + RGBTOUV(q0, q1, q2) + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" : "+r"(src_argb4444), // %0 "+r"(src_stride_argb4444), // %1 "+r"(dst_u), // %2 @@ -1875,21 +1910,21 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444, void RGB565ToYRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d27, #16 \n" // Add 16 constant + "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d27, #16 \n" // Add 16 constant "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 RGB565 pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld1.8 {q0}, [%0]! \n" // load 8 RGB565 pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. RGB565TOARGB - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R + "vmull.u8 q2, d0, d24 \n" // B + "vmlal.u8 q2, d1, d25 \n" // G + "vmlal.u8 q2, d2, d26 \n" // R "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d27 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d27 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_rgb565), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1901,21 +1936,21 @@ void ARGB1555ToYRow_NEON(const uint8_t* src_argb1555, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d27, #16 \n" // Add 16 constant + "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d27, #16 \n" // Add 16 constant "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 ARGB1555 pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld1.8 {q0}, [%0]! 
\n" // load 8 ARGB1555 pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. ARGB1555TOARGB - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R + "vmull.u8 q2, d0, d24 \n" // B + "vmlal.u8 q2, d1, d25 \n" // G + "vmlal.u8 q2, d2, d26 \n" // R "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d27 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d27 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_argb1555), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1927,21 +1962,21 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d27, #16 \n" // Add 16 constant + "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d27, #16 \n" // Add 16 constant "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 8 ARGB4444 pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. + "vld1.8 {q0}, [%0]! \n" // load 8 ARGB4444 pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. ARGB4444TOARGB - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R + "vmull.u8 q2, d0, d24 \n" // B + "vmlal.u8 q2, d1, d25 \n" // G + "vmlal.u8 q2, d2, d26 \n" // R "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d27 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d27 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. 
+ "bgt 1b \n" : "+r"(src_argb4444), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1951,20 +1986,20 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d7, #16 \n" // Add 16 constant "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of BGRA. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d1, d4 \n" // R - "vmlal.u8 q8, d2, d5 \n" // G - "vmlal.u8 q8, d3, d6 \n" // B + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of BGRA. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q8, d1, d4 \n" // R + "vmlal.u8 q8, d2, d5 \n" // G + "vmlal.u8 q8, d3, d6 \n" // B "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d7 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_bgra), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1974,20 +2009,20 @@ void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d7, #16 \n" // Add 16 constant "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of ABGR. 
- "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d0, d4 \n" // R - "vmlal.u8 q8, d1, d5 \n" // G - "vmlal.u8 q8, d2, d6 \n" // B + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of ABGR. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q8, d0, d4 \n" // R + "vmlal.u8 q8, d1, d5 \n" // G + "vmlal.u8 q8, d2, d6 \n" // B "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d7 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_abgr), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1997,20 +2032,20 @@ void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) { void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d4, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d6, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vmov.u8 d4, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d6, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d7, #16 \n" // Add 16 constant "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of RGBA. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d1, d4 \n" // B - "vmlal.u8 q8, d2, d5 \n" // G - "vmlal.u8 q8, d3, d6 \n" // R + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of RGBA. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q8, d1, d4 \n" // B + "vmlal.u8 q8, d2, d5 \n" // G + "vmlal.u8 q8, d3, d6 \n" // R "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d7 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. 
+ "bgt 1b \n" : "+r"(src_rgba), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2020,20 +2055,20 @@ void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d4, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d6, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vmov.u8 d4, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d6, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d7, #16 \n" // Add 16 constant "1: \n" - "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RGB24. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d0, d4 \n" // B - "vmlal.u8 q8, d1, d5 \n" // G - "vmlal.u8 q8, d2, d6 \n" // R + "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RGB24. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q8, d0, d4 \n" // B + "vmlal.u8 q8, d1, d5 \n" // G + "vmlal.u8 q8, d2, d6 \n" // R "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d7 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_rgb24), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2043,20 +2078,20 @@ void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { asm volatile( - "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient + "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient + "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient + "vmov.u8 d7, #16 \n" // Add 16 constant "1: \n" - "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RAW. 
- "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d0, d4 \n" // B - "vmlal.u8 q8, d1, d5 \n" // G - "vmlal.u8 q8, d2, d6 \n" // R + "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RAW. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q8, d0, d4 \n" // B + "vmlal.u8 q8, d1, d5 \n" // G + "vmlal.u8 q8, d2, d6 \n" // R "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" + "vqadd.u8 d0, d7 \n" + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2064,6 +2099,48 @@ void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "q8"); } +void RGB24ToYJRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { + asm volatile( + "vmov.u8 d4, #29 \n" // B * 0.1140 coefficient + "vmov.u8 d5, #150 \n" // G * 0.5870 coefficient + "vmov.u8 d6, #77 \n" // R * 0.2990 coefficient + "1: \n" + "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RGB24. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q4, d0, d4 \n" // B + "vmlal.u8 q4, d1, d5 \n" // G + "vmlal.u8 q4, d2, d6 \n" // R + "vqrshrn.u16 d0, q4, #8 \n" // 16 bit to 8 bit Y + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" + : "+r"(src_rgb24), // %0 + "+r"(dst_yj), // %1 + "+r"(width) // %2 + : + : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "q4"); +} + +void RAWToYJRow_NEON(const uint8_t* src_raw, uint8_t* dst_yj, int width) { + asm volatile( + "vmov.u8 d6, #29 \n" // B * 0.1140 coefficient + "vmov.u8 d5, #150 \n" // G * 0.5870 coefficient + "vmov.u8 d4, #77 \n" // R * 0.2990 coefficient + "1: \n" + "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RAW. + "subs %2, %2, #8 \n" // 8 processed per loop. 
+ "vmull.u8 q4, d0, d4 \n" // B + "vmlal.u8 q4, d1, d5 \n" // G + "vmlal.u8 q4, d2, d6 \n" // R + "vqrshrn.u16 d0, q4, #8 \n" // 16 bit to 8 bit Y + "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "bgt 1b \n" + : "+r"(src_raw), // %0 + "+r"(dst_yj), // %1 + "+r"(width) // %2 + : + : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "q4"); +} + // Bilinear filter 16x2 -> 16x1 void InterpolateRow_NEON(uint8_t* dst_ptr, const uint8_t* src_ptr, @@ -2072,46 +2149,46 @@ void InterpolateRow_NEON(uint8_t* dst_ptr, int source_y_fraction) { int y1_fraction = source_y_fraction; asm volatile( - "cmp %4, #0 \n" - "beq 100f \n" - "add %2, %1 \n" - "cmp %4, #128 \n" - "beq 50f \n" + "cmp %4, #0 \n" + "beq 100f \n" + "add %2, %1 \n" + "cmp %4, #128 \n" + "beq 50f \n" - "vdup.8 d5, %4 \n" - "rsb %4, #256 \n" - "vdup.8 d4, %4 \n" + "vdup.8 d5, %4 \n" + "rsb %4, #256 \n" + "vdup.8 d4, %4 \n" // General purpose row blend. "1: \n" - "vld1.8 {q0}, [%1]! \n" - "vld1.8 {q1}, [%2]! \n" - "subs %3, %3, #16 \n" - "vmull.u8 q13, d0, d4 \n" - "vmull.u8 q14, d1, d4 \n" - "vmlal.u8 q13, d2, d5 \n" - "vmlal.u8 q14, d3, d5 \n" - "vrshrn.u16 d0, q13, #8 \n" - "vrshrn.u16 d1, q14, #8 \n" - "vst1.8 {q0}, [%0]! \n" - "bgt 1b \n" - "b 99f \n" + "vld1.8 {q0}, [%1]! \n" + "vld1.8 {q1}, [%2]! \n" + "subs %3, %3, #16 \n" + "vmull.u8 q13, d0, d4 \n" + "vmull.u8 q14, d1, d4 \n" + "vmlal.u8 q13, d2, d5 \n" + "vmlal.u8 q14, d3, d5 \n" + "vrshrn.u16 d0, q13, #8 \n" + "vrshrn.u16 d1, q14, #8 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 1b \n" + "b 99f \n" // Blend 50 / 50. "50: \n" - "vld1.8 {q0}, [%1]! \n" - "vld1.8 {q1}, [%2]! \n" - "subs %3, %3, #16 \n" - "vrhadd.u8 q0, q1 \n" - "vst1.8 {q0}, [%0]! \n" - "bgt 50b \n" - "b 99f \n" + "vld1.8 {q0}, [%1]! \n" + "vld1.8 {q1}, [%2]! \n" + "subs %3, %3, #16 \n" + "vrhadd.u8 q0, q1 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 50b \n" + "b 99f \n" // Blend 100 / 0 - Copy row unchanged. "100: \n" - "vld1.8 {q0}, [%1]! \n" - "subs %3, %3, #16 \n" - "vst1.8 {q0}, [%0]! 
\n" - "bgt 100b \n" + "vld1.8 {q0}, [%1]! \n" + "subs %3, %3, #16 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 100b \n" "99: \n" : "+r"(dst_ptr), // %0 @@ -2129,51 +2206,51 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0, uint8_t* dst_argb, int width) { asm volatile( - "subs %3, #8 \n" - "blt 89f \n" + "subs %3, #8 \n" + "blt 89f \n" // Blend 8 pixels. "8: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of ARGB0. - "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load 8 pixels of ARGB1. - "subs %3, %3, #8 \n" // 8 processed per loop. - "vmull.u8 q10, d4, d3 \n" // db * a - "vmull.u8 q11, d5, d3 \n" // dg * a - "vmull.u8 q12, d6, d3 \n" // dr * a + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of ARGB0. + "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load 8 pixels of ARGB1. + "subs %3, %3, #8 \n" // 8 processed per loop. + "vmull.u8 q10, d4, d3 \n" // db * a + "vmull.u8 q11, d5, d3 \n" // dg * a + "vmull.u8 q12, d6, d3 \n" // dr * a "vqrshrn.u16 d20, q10, #8 \n" // db >>= 8 "vqrshrn.u16 d21, q11, #8 \n" // dg >>= 8 "vqrshrn.u16 d22, q12, #8 \n" // dr >>= 8 - "vqsub.u8 q2, q2, q10 \n" // dbg - dbg * a / 256 - "vqsub.u8 d6, d6, d22 \n" // dr - dr * a / 256 - "vqadd.u8 q0, q0, q2 \n" // + sbg - "vqadd.u8 d2, d2, d6 \n" // + sr - "vmov.u8 d3, #255 \n" // a = 255 - "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 pixels of ARGB. - "bge 8b \n" + "vqsub.u8 q2, q2, q10 \n" // dbg - dbg * a / 256 + "vqsub.u8 d6, d6, d22 \n" // dr - dr * a / 256 + "vqadd.u8 q0, q0, q2 \n" // + sbg + "vqadd.u8 d2, d2, d6 \n" // + sr + "vmov.u8 d3, #255 \n" // a = 255 + "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 pixels of ARGB. + "bge 8b \n" "89: \n" - "adds %3, #8-1 \n" - "blt 99f \n" + "adds %3, #8-1 \n" + "blt 99f \n" // Blend 1 pixels. "1: \n" - "vld4.8 {d0[0],d1[0],d2[0],d3[0]}, [%0]! \n" // load 1 pixel ARGB0. - "vld4.8 {d4[0],d5[0],d6[0],d7[0]}, [%1]! \n" // load 1 pixel ARGB1. - "subs %3, %3, #1 \n" // 1 processed per loop. 
- "vmull.u8 q10, d4, d3 \n" // db * a - "vmull.u8 q11, d5, d3 \n" // dg * a - "vmull.u8 q12, d6, d3 \n" // dr * a - "vqrshrn.u16 d20, q10, #8 \n" // db >>= 8 - "vqrshrn.u16 d21, q11, #8 \n" // dg >>= 8 - "vqrshrn.u16 d22, q12, #8 \n" // dr >>= 8 - "vqsub.u8 q2, q2, q10 \n" // dbg - dbg * a / 256 - "vqsub.u8 d6, d6, d22 \n" // dr - dr * a / 256 - "vqadd.u8 q0, q0, q2 \n" // + sbg - "vqadd.u8 d2, d2, d6 \n" // + sr - "vmov.u8 d3, #255 \n" // a = 255 - "vst4.8 {d0[0],d1[0],d2[0],d3[0]}, [%2]! \n" // store 1 pixel. - "bge 1b \n" + "vld4.8 {d0[0],d1[0],d2[0],d3[0]}, [%0]! \n" // load 1 pixel ARGB0. + "vld4.8 {d4[0],d5[0],d6[0],d7[0]}, [%1]! \n" // load 1 pixel ARGB1. + "subs %3, %3, #1 \n" // 1 processed per loop. + "vmull.u8 q10, d4, d3 \n" // db * a + "vmull.u8 q11, d5, d3 \n" // dg * a + "vmull.u8 q12, d6, d3 \n" // dr * a + "vqrshrn.u16 d20, q10, #8 \n" // db >>= 8 + "vqrshrn.u16 d21, q11, #8 \n" // dg >>= 8 + "vqrshrn.u16 d22, q12, #8 \n" // dr >>= 8 + "vqsub.u8 q2, q2, q10 \n" // dbg - dbg * a / 256 + "vqsub.u8 d6, d6, d22 \n" // dr - dr * a / 256 + "vqadd.u8 q0, q0, q2 \n" // + sbg + "vqadd.u8 d2, d2, d6 \n" // + sr + "vmov.u8 d3, #255 \n" // a = 255 + "vst4.8 {d0[0],d1[0],d2[0],d3[0]}, [%2]! \n" // store 1 pixel. + "bge 1b \n" - "99: \n" + "99: \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 @@ -2190,16 +2267,16 @@ void ARGBAttenuateRow_NEON(const uint8_t* src_argb, asm volatile( // Attenuate 8 pixels. "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q10, d0, d3 \n" // b * a - "vmull.u8 q11, d1, d3 \n" // g * a - "vmull.u8 q12, d2, d3 \n" // r * a + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of ARGB. + "subs %2, %2, #8 \n" // 8 processed per loop. 
+ "vmull.u8 q10, d0, d3 \n" // b * a + "vmull.u8 q11, d1, d3 \n" // g * a + "vmull.u8 q12, d2, d3 \n" // r * a "vqrshrn.u16 d0, q10, #8 \n" // b >>= 8 "vqrshrn.u16 d1, q11, #8 \n" // g >>= 8 "vqrshrn.u16 d2, q12, #8 \n" // r >>= 8 - "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of ARGB. - "bgt 1b \n" + "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 pixels of ARGB. + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2215,32 +2292,32 @@ void ARGBQuantizeRow_NEON(uint8_t* dst_argb, int interval_offset, int width) { asm volatile( - "vdup.u16 q8, %2 \n" - "vshr.u16 q8, q8, #1 \n" // scale >>= 1 - "vdup.u16 q9, %3 \n" // interval multiply. - "vdup.u16 q10, %4 \n" // interval add + "vdup.u16 q8, %2 \n" + "vshr.u16 q8, q8, #1 \n" // scale >>= 1 + "vdup.u16 q9, %3 \n" // interval multiply. + "vdup.u16 q10, %4 \n" // interval add // 8 pixel loop. "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0] \n" // load 8 pixels of ARGB. - "subs %1, %1, #8 \n" // 8 processed per loop. - "vmovl.u8 q0, d0 \n" // b (0 .. 255) - "vmovl.u8 q1, d2 \n" - "vmovl.u8 q2, d4 \n" + "vld4.8 {d0, d2, d4, d6}, [%0] \n" // load 8 pixels of ARGB. + "subs %1, %1, #8 \n" // 8 processed per loop. + "vmovl.u8 q0, d0 \n" // b (0 .. 255) + "vmovl.u8 q1, d2 \n" + "vmovl.u8 q2, d4 \n" "vqdmulh.s16 q0, q0, q8 \n" // b * scale "vqdmulh.s16 q1, q1, q8 \n" // g "vqdmulh.s16 q2, q2, q8 \n" // r - "vmul.u16 q0, q0, q9 \n" // b * interval_size - "vmul.u16 q1, q1, q9 \n" // g - "vmul.u16 q2, q2, q9 \n" // r - "vadd.u16 q0, q0, q10 \n" // b + interval_offset - "vadd.u16 q1, q1, q10 \n" // g - "vadd.u16 q2, q2, q10 \n" // r - "vqmovn.u16 d0, q0 \n" - "vqmovn.u16 d2, q1 \n" - "vqmovn.u16 d4, q2 \n" - "vst4.8 {d0, d2, d4, d6}, [%0]! \n" // store 8 pixels of ARGB. 
- "bgt 1b \n" + "vmul.u16 q0, q0, q9 \n" // b * interval_size + "vmul.u16 q1, q1, q9 \n" // g + "vmul.u16 q2, q2, q9 \n" // r + "vadd.u16 q0, q0, q10 \n" // b + interval_offset + "vadd.u16 q1, q1, q10 \n" // g + "vadd.u16 q2, q2, q10 \n" // r + "vqmovn.u16 d0, q0 \n" + "vqmovn.u16 d2, q1 \n" + "vqmovn.u16 d4, q2 \n" + "vst4.8 {d0, d2, d4, d6}, [%0]! \n" // store 8 pixels of ARGB. + "bgt 1b \n" : "+r"(dst_argb), // %0 "+r"(width) // %1 : "r"(scale), // %2 @@ -2257,28 +2334,28 @@ void ARGBShadeRow_NEON(const uint8_t* src_argb, int width, uint32_t value) { asm volatile( - "vdup.u32 q0, %3 \n" // duplicate scale value. - "vzip.u8 d0, d1 \n" // d0 aarrggbb. - "vshr.u16 q0, q0, #1 \n" // scale / 2. + "vdup.u32 q0, %3 \n" // duplicate scale value. + "vzip.u8 d0, d1 \n" // d0 aarrggbb. + "vshr.u16 q0, q0, #1 \n" // scale / 2. // 8 pixel loop. "1: \n" - "vld4.8 {d20, d22, d24, d26}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmovl.u8 q10, d20 \n" // b (0 .. 255) - "vmovl.u8 q11, d22 \n" - "vmovl.u8 q12, d24 \n" - "vmovl.u8 q13, d26 \n" + "vld4.8 {d20, d22, d24, d26}, [%0]! \n" // load 8 pixels of ARGB. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmovl.u8 q10, d20 \n" // b (0 .. 255) + "vmovl.u8 q11, d22 \n" + "vmovl.u8 q12, d24 \n" + "vmovl.u8 q13, d26 \n" "vqrdmulh.s16 q10, q10, d0[0] \n" // b * scale * 2 "vqrdmulh.s16 q11, q11, d0[1] \n" // g "vqrdmulh.s16 q12, q12, d0[2] \n" // r "vqrdmulh.s16 q13, q13, d0[3] \n" // a - "vqmovn.u16 d20, q10 \n" - "vqmovn.u16 d22, q11 \n" - "vqmovn.u16 d24, q12 \n" - "vqmovn.u16 d26, q13 \n" - "vst4.8 {d20, d22, d24, d26}, [%1]! \n" // store 8 pixels of ARGB. - "bgt 1b \n" + "vqmovn.u16 d20, q10 \n" + "vqmovn.u16 d22, q11 \n" + "vqmovn.u16 d24, q12 \n" + "vqmovn.u16 d26, q13 \n" + "vst4.8 {d20, d22, d24, d26}, [%1]! \n" // store 8 pixels of ARGB. 
+ "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2291,20 +2368,20 @@ void ARGBShadeRow_NEON(const uint8_t* src_argb, // C code is (29 * b + 150 * g + 77 * r + 128) >> 8; void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient - "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient - "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient + "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient + "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient + "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmull.u8 q2, d0, d24 \n" // B + "vmlal.u8 q2, d1, d25 \n" // G + "vmlal.u8 q2, d2, d26 \n" // R "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit B - "vmov d1, d0 \n" // G - "vmov d2, d0 \n" // R - "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vmov d1, d0 \n" // G + "vmov d2, d0 \n" // R + "vst4.8 {d0, d1, d2, d3}, [%1]! \n" // store 8 ARGB pixels. 
+ "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2318,32 +2395,32 @@ void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) { // r = (r * 50 + g * 98 + b * 24) >> 7 void ARGBSepiaRow_NEON(uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d20, #17 \n" // BB coefficient - "vmov.u8 d21, #68 \n" // BG coefficient - "vmov.u8 d22, #35 \n" // BR coefficient - "vmov.u8 d24, #22 \n" // GB coefficient - "vmov.u8 d25, #88 \n" // GG coefficient - "vmov.u8 d26, #45 \n" // GR coefficient - "vmov.u8 d28, #24 \n" // BB coefficient - "vmov.u8 d29, #98 \n" // BG coefficient - "vmov.u8 d30, #50 \n" // BR coefficient + "vmov.u8 d20, #17 \n" // BB coefficient + "vmov.u8 d21, #68 \n" // BG coefficient + "vmov.u8 d22, #35 \n" // BR coefficient + "vmov.u8 d24, #22 \n" // GB coefficient + "vmov.u8 d25, #88 \n" // GG coefficient + "vmov.u8 d26, #45 \n" // GR coefficient + "vmov.u8 d28, #24 \n" // BB coefficient + "vmov.u8 d29, #98 \n" // BG coefficient + "vmov.u8 d30, #50 \n" // BR coefficient "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0] \n" // load 8 ARGB pixels. - "subs %1, %1, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d0, d20 \n" // B to Sepia B - "vmlal.u8 q2, d1, d21 \n" // G - "vmlal.u8 q2, d2, d22 \n" // R - "vmull.u8 q3, d0, d24 \n" // B to Sepia G - "vmlal.u8 q3, d1, d25 \n" // G - "vmlal.u8 q3, d2, d26 \n" // R - "vmull.u8 q8, d0, d28 \n" // B to Sepia R - "vmlal.u8 q8, d1, d29 \n" // G - "vmlal.u8 q8, d2, d30 \n" // R - "vqshrn.u16 d0, q2, #7 \n" // 16 bit to 8 bit B - "vqshrn.u16 d1, q3, #7 \n" // 16 bit to 8 bit G - "vqshrn.u16 d2, q8, #7 \n" // 16 bit to 8 bit R - "vst4.8 {d0, d1, d2, d3}, [%0]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0] \n" // load 8 ARGB pixels. + "subs %1, %1, #8 \n" // 8 processed per loop. 
+ "vmull.u8 q2, d0, d20 \n" // B to Sepia B + "vmlal.u8 q2, d1, d21 \n" // G + "vmlal.u8 q2, d2, d22 \n" // R + "vmull.u8 q3, d0, d24 \n" // B to Sepia G + "vmlal.u8 q3, d1, d25 \n" // G + "vmlal.u8 q3, d2, d26 \n" // R + "vmull.u8 q8, d0, d28 \n" // B to Sepia R + "vmlal.u8 q8, d1, d29 \n" // G + "vmlal.u8 q8, d2, d30 \n" // R + "vqshrn.u16 d0, q2, #7 \n" // 16 bit to 8 bit B + "vqshrn.u16 d1, q3, #7 \n" // 16 bit to 8 bit G + "vqshrn.u16 d2, q8, #7 \n" // 16 bit to 8 bit R + "vst4.8 {d0, d1, d2, d3}, [%0]! \n" // store 8 ARGB pixels. + "bgt 1b \n" : "+r"(dst_argb), // %0 "+r"(width) // %1 : @@ -2359,51 +2436,51 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb, const int8_t* matrix_argb, int width) { asm volatile( - "vld1.8 {q2}, [%3] \n" // load 3 ARGB vectors. - "vmovl.s8 q0, d4 \n" // B,G coefficients s16. - "vmovl.s8 q1, d5 \n" // R,A coefficients s16. + "vld1.8 {q2}, [%3] \n" // load 3 ARGB vectors. + "vmovl.s8 q0, d4 \n" // B,G coefficients s16. + "vmovl.s8 q1, d5 \n" // R,A coefficients s16. "1: \n" - "vld4.8 {d16, d18, d20, d22}, [%0]! \n" // load 8 ARGB pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmovl.u8 q8, d16 \n" // b (0 .. 
255) 16 bit - "vmovl.u8 q9, d18 \n" // g - "vmovl.u8 q10, d20 \n" // r - "vmovl.u8 q11, d22 \n" // a - "vmul.s16 q12, q8, d0[0] \n" // B = B * Matrix B - "vmul.s16 q13, q8, d1[0] \n" // G = B * Matrix G - "vmul.s16 q14, q8, d2[0] \n" // R = B * Matrix R - "vmul.s16 q15, q8, d3[0] \n" // A = B * Matrix A - "vmul.s16 q4, q9, d0[1] \n" // B += G * Matrix B - "vmul.s16 q5, q9, d1[1] \n" // G += G * Matrix G - "vmul.s16 q6, q9, d2[1] \n" // R += G * Matrix R - "vmul.s16 q7, q9, d3[1] \n" // A += G * Matrix A - "vqadd.s16 q12, q12, q4 \n" // Accumulate B - "vqadd.s16 q13, q13, q5 \n" // Accumulate G - "vqadd.s16 q14, q14, q6 \n" // Accumulate R - "vqadd.s16 q15, q15, q7 \n" // Accumulate A - "vmul.s16 q4, q10, d0[2] \n" // B += R * Matrix B - "vmul.s16 q5, q10, d1[2] \n" // G += R * Matrix G - "vmul.s16 q6, q10, d2[2] \n" // R += R * Matrix R - "vmul.s16 q7, q10, d3[2] \n" // A += R * Matrix A - "vqadd.s16 q12, q12, q4 \n" // Accumulate B - "vqadd.s16 q13, q13, q5 \n" // Accumulate G - "vqadd.s16 q14, q14, q6 \n" // Accumulate R - "vqadd.s16 q15, q15, q7 \n" // Accumulate A - "vmul.s16 q4, q11, d0[3] \n" // B += A * Matrix B - "vmul.s16 q5, q11, d1[3] \n" // G += A * Matrix G - "vmul.s16 q6, q11, d2[3] \n" // R += A * Matrix R - "vmul.s16 q7, q11, d3[3] \n" // A += A * Matrix A - "vqadd.s16 q12, q12, q4 \n" // Accumulate B - "vqadd.s16 q13, q13, q5 \n" // Accumulate G - "vqadd.s16 q14, q14, q6 \n" // Accumulate R - "vqadd.s16 q15, q15, q7 \n" // Accumulate A + "vld4.8 {d16, d18, d20, d22}, [%0]! \n" // load 8 ARGB pixels. + "subs %2, %2, #8 \n" // 8 processed per loop. + "vmovl.u8 q8, d16 \n" // b (0 .. 
255) 16 bit + "vmovl.u8 q9, d18 \n" // g + "vmovl.u8 q10, d20 \n" // r + "vmovl.u8 q11, d22 \n" // a + "vmul.s16 q12, q8, d0[0] \n" // B = B * Matrix B + "vmul.s16 q13, q8, d1[0] \n" // G = B * Matrix G + "vmul.s16 q14, q8, d2[0] \n" // R = B * Matrix R + "vmul.s16 q15, q8, d3[0] \n" // A = B * Matrix A + "vmul.s16 q4, q9, d0[1] \n" // B += G * Matrix B + "vmul.s16 q5, q9, d1[1] \n" // G += G * Matrix G + "vmul.s16 q6, q9, d2[1] \n" // R += G * Matrix R + "vmul.s16 q7, q9, d3[1] \n" // A += G * Matrix A + "vqadd.s16 q12, q12, q4 \n" // Accumulate B + "vqadd.s16 q13, q13, q5 \n" // Accumulate G + "vqadd.s16 q14, q14, q6 \n" // Accumulate R + "vqadd.s16 q15, q15, q7 \n" // Accumulate A + "vmul.s16 q4, q10, d0[2] \n" // B += R * Matrix B + "vmul.s16 q5, q10, d1[2] \n" // G += R * Matrix G + "vmul.s16 q6, q10, d2[2] \n" // R += R * Matrix R + "vmul.s16 q7, q10, d3[2] \n" // A += R * Matrix A + "vqadd.s16 q12, q12, q4 \n" // Accumulate B + "vqadd.s16 q13, q13, q5 \n" // Accumulate G + "vqadd.s16 q14, q14, q6 \n" // Accumulate R + "vqadd.s16 q15, q15, q7 \n" // Accumulate A + "vmul.s16 q4, q11, d0[3] \n" // B += A * Matrix B + "vmul.s16 q5, q11, d1[3] \n" // G += A * Matrix G + "vmul.s16 q6, q11, d2[3] \n" // R += A * Matrix R + "vmul.s16 q7, q11, d3[3] \n" // A += A * Matrix A + "vqadd.s16 q12, q12, q4 \n" // Accumulate B + "vqadd.s16 q13, q13, q5 \n" // Accumulate G + "vqadd.s16 q14, q14, q6 \n" // Accumulate R + "vqadd.s16 q15, q15, q7 \n" // Accumulate A "vqshrun.s16 d16, q12, #6 \n" // 16 bit to 8 bit B "vqshrun.s16 d18, q13, #6 \n" // 16 bit to 8 bit G "vqshrun.s16 d20, q14, #6 \n" // 16 bit to 8 bit R "vqshrun.s16 d22, q15, #6 \n" // 16 bit to 8 bit A - "vst4.8 {d16, d18, d20, d22}, [%1]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vst4.8 {d16, d18, d20, d22}, [%1]! \n" // store 8 ARGB pixels. 
+ "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2420,19 +2497,19 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, asm volatile( // 8 pixel loop. "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d1, d3, d5, d7}, [%1]! \n" // load 8 more ARGB - "subs %3, %3, #8 \n" // 8 processed per loop. - "vmull.u8 q0, d0, d1 \n" // multiply B - "vmull.u8 q1, d2, d3 \n" // multiply G - "vmull.u8 q2, d4, d5 \n" // multiply R - "vmull.u8 q3, d6, d7 \n" // multiply A - "vrshrn.u16 d0, q0, #8 \n" // 16 bit to 8 bit B - "vrshrn.u16 d1, q1, #8 \n" // 16 bit to 8 bit G - "vrshrn.u16 d2, q2, #8 \n" // 16 bit to 8 bit R - "vrshrn.u16 d3, q3, #8 \n" // 16 bit to 8 bit A - "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.8 {d1, d3, d5, d7}, [%1]! \n" // load 8 more ARGB + "subs %3, %3, #8 \n" // 8 processed per loop. + "vmull.u8 q0, d0, d1 \n" // multiply B + "vmull.u8 q1, d2, d3 \n" // multiply G + "vmull.u8 q2, d4, d5 \n" // multiply R + "vmull.u8 q3, d6, d7 \n" // multiply A + "vrshrn.u16 d0, q0, #8 \n" // 16 bit to 8 bit B + "vrshrn.u16 d1, q1, #8 \n" // 16 bit to 8 bit G + "vrshrn.u16 d2, q2, #8 \n" // 16 bit to 8 bit R + "vrshrn.u16 d3, q3, #8 \n" // 16 bit to 8 bit A + "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. + "bgt 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -2449,13 +2526,13 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0, asm volatile( // 8 pixel loop. "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load 8 more ARGB - "subs %3, %3, #8 \n" // 8 processed per loop. - "vqadd.u8 q0, q0, q2 \n" // add B, G - "vqadd.u8 q1, q1, q3 \n" // add R, A - "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. 
+ "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load 8 more ARGB + "subs %3, %3, #8 \n" // 8 processed per loop. + "vqadd.u8 q0, q0, q2 \n" // add B, G + "vqadd.u8 q1, q1, q3 \n" // add R, A + "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. + "bgt 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -2472,13 +2549,13 @@ void ARGBSubtractRow_NEON(const uint8_t* src_argb0, asm volatile( // 8 pixel loop. "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load 8 more ARGB - "subs %3, %3, #8 \n" // 8 processed per loop. - "vqsub.u8 q0, q0, q2 \n" // subtract B, G - "vqsub.u8 q1, q1, q3 \n" // subtract R, A - "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. + "vld4.8 {d4, d5, d6, d7}, [%1]! \n" // load 8 more ARGB + "subs %3, %3, #8 \n" // 8 processed per loop. + "vqsub.u8 q0, q0, q2 \n" // subtract B, G + "vqsub.u8 q1, q1, q3 \n" // subtract R, A + "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. + "bgt 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -2497,17 +2574,17 @@ void SobelRow_NEON(const uint8_t* src_sobelx, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d3, #255 \n" // alpha + "vmov.u8 d3, #255 \n" // alpha // 8 pixel loop. "1: \n" - "vld1.8 {d0}, [%0]! \n" // load 8 sobelx. - "vld1.8 {d1}, [%1]! \n" // load 8 sobely. - "subs %3, %3, #8 \n" // 8 processed per loop. - "vqadd.u8 d0, d0, d1 \n" // add - "vmov.u8 d1, d0 \n" - "vmov.u8 d2, d0 \n" - "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vld1.8 {d0}, [%0]! \n" // load 8 sobelx. + "vld1.8 {d1}, [%1]! \n" // load 8 sobely. + "subs %3, %3, #8 \n" // 8 processed per loop. + "vqadd.u8 d0, d0, d1 \n" // add + "vmov.u8 d1, d0 \n" + "vmov.u8 d2, d0 \n" + "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. 
+ "bgt 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_argb), // %2 @@ -2524,12 +2601,12 @@ void SobelToPlaneRow_NEON(const uint8_t* src_sobelx, asm volatile( // 16 pixel loop. "1: \n" - "vld1.8 {q0}, [%0]! \n" // load 16 sobelx. - "vld1.8 {q1}, [%1]! \n" // load 16 sobely. - "subs %3, %3, #16 \n" // 16 processed per loop. - "vqadd.u8 q0, q0, q1 \n" // add - "vst1.8 {q0}, [%2]! \n" // store 16 pixels. - "bgt 1b \n" + "vld1.8 {q0}, [%0]! \n" // load 16 sobelx. + "vld1.8 {q1}, [%1]! \n" // load 16 sobely. + "subs %3, %3, #16 \n" // 16 processed per loop. + "vqadd.u8 q0, q0, q1 \n" // add + "vst1.8 {q0}, [%2]! \n" // store 16 pixels. + "bgt 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_y), // %2 @@ -2548,15 +2625,15 @@ void SobelXYRow_NEON(const uint8_t* src_sobelx, uint8_t* dst_argb, int width) { asm volatile( - "vmov.u8 d3, #255 \n" // alpha + "vmov.u8 d3, #255 \n" // alpha // 8 pixel loop. "1: \n" - "vld1.8 {d2}, [%0]! \n" // load 8 sobelx. - "vld1.8 {d0}, [%1]! \n" // load 8 sobely. - "subs %3, %3, #8 \n" // 8 processed per loop. - "vqadd.u8 d1, d0, d2 \n" // add - "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. - "bgt 1b \n" + "vld1.8 {d2}, [%0]! \n" // load 8 sobelx. + "vld1.8 {d0}, [%1]! \n" // load 8 sobely. + "subs %3, %3, #8 \n" // 8 processed per loop. + "vqadd.u8 d1, d0, d2 \n" // add + "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. 
+ "bgt 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_argb), // %2 @@ -2576,23 +2653,23 @@ void SobelXRow_NEON(const uint8_t* src_y0, int width) { asm volatile( "1: \n" - "vld1.8 {d0}, [%0],%5 \n" // top - "vld1.8 {d1}, [%0],%6 \n" - "vsubl.u8 q0, d0, d1 \n" - "vld1.8 {d2}, [%1],%5 \n" // center * 2 - "vld1.8 {d3}, [%1],%6 \n" - "vsubl.u8 q1, d2, d3 \n" - "vadd.s16 q0, q0, q1 \n" - "vadd.s16 q0, q0, q1 \n" - "vld1.8 {d2}, [%2],%5 \n" // bottom - "vld1.8 {d3}, [%2],%6 \n" - "subs %4, %4, #8 \n" // 8 pixels - "vsubl.u8 q1, d2, d3 \n" - "vadd.s16 q0, q0, q1 \n" - "vabs.s16 q0, q0 \n" - "vqmovn.u16 d0, q0 \n" - "vst1.8 {d0}, [%3]! \n" // store 8 sobelx - "bgt 1b \n" + "vld1.8 {d0}, [%0],%5 \n" // top + "vld1.8 {d1}, [%0],%6 \n" + "vsubl.u8 q0, d0, d1 \n" + "vld1.8 {d2}, [%1],%5 \n" // center * 2 + "vld1.8 {d3}, [%1],%6 \n" + "vsubl.u8 q1, d2, d3 \n" + "vadd.s16 q0, q0, q1 \n" + "vadd.s16 q0, q0, q1 \n" + "vld1.8 {d2}, [%2],%5 \n" // bottom + "vld1.8 {d3}, [%2],%6 \n" + "subs %4, %4, #8 \n" // 8 pixels + "vsubl.u8 q1, d2, d3 \n" + "vadd.s16 q0, q0, q1 \n" + "vabs.s16 q0, q0 \n" + "vqmovn.u16 d0, q0 \n" + "vst1.8 {d0}, [%3]! \n" // store 8 sobelx + "bgt 1b \n" : "+r"(src_y0), // %0 "+r"(src_y1), // %1 "+r"(src_y2), // %2 @@ -2614,23 +2691,23 @@ void SobelYRow_NEON(const uint8_t* src_y0, int width) { asm volatile( "1: \n" - "vld1.8 {d0}, [%0],%4 \n" // left - "vld1.8 {d1}, [%1],%4 \n" - "vsubl.u8 q0, d0, d1 \n" - "vld1.8 {d2}, [%0],%4 \n" // center * 2 - "vld1.8 {d3}, [%1],%4 \n" - "vsubl.u8 q1, d2, d3 \n" - "vadd.s16 q0, q0, q1 \n" - "vadd.s16 q0, q0, q1 \n" - "vld1.8 {d2}, [%0],%5 \n" // right - "vld1.8 {d3}, [%1],%5 \n" - "subs %3, %3, #8 \n" // 8 pixels - "vsubl.u8 q1, d2, d3 \n" - "vadd.s16 q0, q0, q1 \n" - "vabs.s16 q0, q0 \n" - "vqmovn.u16 d0, q0 \n" - "vst1.8 {d0}, [%2]! 
\n" // store 8 sobely - "bgt 1b \n" + "vld1.8 {d0}, [%0],%4 \n" // left + "vld1.8 {d1}, [%1],%4 \n" + "vsubl.u8 q0, d0, d1 \n" + "vld1.8 {d2}, [%0],%4 \n" // center * 2 + "vld1.8 {d3}, [%1],%4 \n" + "vsubl.u8 q1, d2, d3 \n" + "vadd.s16 q0, q0, q1 \n" + "vadd.s16 q0, q0, q1 \n" + "vld1.8 {d2}, [%0],%5 \n" // right + "vld1.8 {d3}, [%1],%5 \n" + "subs %3, %3, #8 \n" // 8 pixels + "vsubl.u8 q1, d2, d3 \n" + "vadd.s16 q0, q0, q1 \n" + "vabs.s16 q0, q0 \n" + "vqmovn.u16 d0, q0 \n" + "vst1.8 {d0}, [%2]! \n" // store 8 sobely + "bgt 1b \n" : "+r"(src_y0), // %0 "+r"(src_y1), // %1 "+r"(dst_sobely), // %2 @@ -2652,18 +2729,18 @@ void HalfFloat1Row_NEON(const uint16_t* src, asm volatile( "1: \n" - "vld1.8 {q1}, [%0]! \n" // load 8 shorts - "subs %2, %2, #8 \n" // 8 pixels per loop - "vmovl.u16 q2, d2 \n" // 8 int's - "vmovl.u16 q3, d3 \n" - "vcvt.f32.u32 q2, q2 \n" // 8 floats - "vcvt.f32.u32 q3, q3 \n" - "vmul.f32 q2, q2, %y3 \n" // adjust exponent - "vmul.f32 q3, q3, %y3 \n" - "vqshrn.u32 d2, q2, #13 \n" // isolate halffloat - "vqshrn.u32 d3, q3, #13 \n" - "vst1.8 {q1}, [%1]! \n" - "bgt 1b \n" + "vld1.8 {q1}, [%0]! \n" // load 8 shorts + "subs %2, %2, #8 \n" // 8 pixels per loop + "vmovl.u16 q2, d2 \n" // 8 int's + "vmovl.u16 q3, d3 \n" + "vcvt.f32.u32 q2, q2 \n" // 8 floats + "vcvt.f32.u32 q3, q3 \n" + "vmul.f32 q2, q2, %y3 \n" // adjust exponent + "vmul.f32 q3, q3, %y3 \n" + "vqshrn.u32 d2, q2, #13 \n" // isolate halffloat + "vqshrn.u32 d3, q3, #13 \n" + "vst1.8 {q1}, [%1]! \n" + "bgt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -2678,18 +2755,18 @@ void HalfFloatRow_NEON(const uint16_t* src, asm volatile( "1: \n" - "vld1.8 {q1}, [%0]! 
\n" // load 8 shorts - "subs %2, %2, #8 \n" // 8 pixels per loop - "vmovl.u16 q2, d2 \n" // 8 int's - "vmovl.u16 q3, d3 \n" - "vcvt.f32.u32 q2, q2 \n" // 8 floats - "vcvt.f32.u32 q3, q3 \n" - "vmul.f32 q2, q2, %y3 \n" // adjust exponent - "vmul.f32 q3, q3, %y3 \n" - "vqshrn.u32 d2, q2, #13 \n" // isolate halffloat - "vqshrn.u32 d3, q3, #13 \n" - "vst1.8 {q1}, [%1]! \n" - "bgt 1b \n" + "vld1.8 {q1}, [%0]! \n" // load 8 shorts + "subs %2, %2, #8 \n" // 8 pixels per loop + "vmovl.u16 q2, d2 \n" // 8 int's + "vmovl.u16 q3, d3 \n" + "vcvt.f32.u32 q2, q2 \n" // 8 floats + "vcvt.f32.u32 q3, q3 \n" + "vmul.f32 q2, q2, %y3 \n" // adjust exponent + "vmul.f32 q3, q3, %y3 \n" + "vqshrn.u32 d2, q2, #13 \n" // isolate halffloat + "vqshrn.u32 d3, q3, #13 \n" + "vst1.8 {q1}, [%1]! \n" + "bgt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -2704,17 +2781,17 @@ void ByteToFloatRow_NEON(const uint8_t* src, asm volatile( "1: \n" - "vld1.8 {d2}, [%0]! \n" // load 8 bytes - "subs %2, %2, #8 \n" // 8 pixels per loop - "vmovl.u8 q1, d2 \n" // 8 shorts - "vmovl.u16 q2, d2 \n" // 8 ints - "vmovl.u16 q3, d3 \n" - "vcvt.f32.u32 q2, q2 \n" // 8 floats - "vcvt.f32.u32 q3, q3 \n" - "vmul.f32 q2, q2, %y3 \n" // scale - "vmul.f32 q3, q3, %y3 \n" - "vst1.8 {q2, q3}, [%1]! \n" // store 8 floats - "bgt 1b \n" + "vld1.8 {d2}, [%0]! \n" // load 8 bytes + "subs %2, %2, #8 \n" // 8 pixels per loop + "vmovl.u8 q1, d2 \n" // 8 shorts + "vmovl.u16 q2, d2 \n" // 8 ints + "vmovl.u16 q3, d3 \n" + "vcvt.f32.u32 q2, q2 \n" // 8 floats + "vcvt.f32.u32 q3, q3 \n" + "vmul.f32 q2, q2, %y3 \n" // scale + "vmul.f32 q3, q3, %y3 \n" + "vst1.8 {q2, q3}, [%1]! 
\n" // store 8 floats + "bgt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -2731,26 +2808,26 @@ void GaussCol_NEON(const uint16_t* src0, uint32_t* dst, int width) { asm volatile( - "vmov.u16 d6, #4 \n" // constant 4 - "vmov.u16 d7, #6 \n" // constant 6 + "vmov.u16 d6, #4 \n" // constant 4 + "vmov.u16 d7, #6 \n" // constant 6 "1: \n" - "vld1.16 {q1}, [%0]! \n" // load 8 samples, 5 rows - "vld1.16 {q2}, [%4]! \n" - "vaddl.u16 q0, d2, d4 \n" // * 1 - "vaddl.u16 q1, d3, d5 \n" // * 1 - "vld1.16 {q2}, [%1]! \n" - "vmlal.u16 q0, d4, d6 \n" // * 4 - "vmlal.u16 q1, d5, d6 \n" // * 4 - "vld1.16 {q2}, [%2]! \n" - "vmlal.u16 q0, d4, d7 \n" // * 6 - "vmlal.u16 q1, d5, d7 \n" // * 6 - "vld1.16 {q2}, [%3]! \n" - "vmlal.u16 q0, d4, d6 \n" // * 4 - "vmlal.u16 q1, d5, d6 \n" // * 4 - "subs %6, %6, #8 \n" // 8 processed per loop - "vst1.32 {q0, q1}, [%5]! \n" // store 8 samples - "bgt 1b \n" + "vld1.16 {q1}, [%0]! \n" // load 8 samples, 5 rows + "vld1.16 {q2}, [%4]! \n" + "vaddl.u16 q0, d2, d4 \n" // * 1 + "vaddl.u16 q1, d3, d5 \n" // * 1 + "vld1.16 {q2}, [%1]! \n" + "vmlal.u16 q0, d4, d6 \n" // * 4 + "vmlal.u16 q1, d5, d6 \n" // * 4 + "vld1.16 {q2}, [%2]! \n" + "vmlal.u16 q0, d4, d7 \n" // * 6 + "vmlal.u16 q1, d5, d7 \n" // * 6 + "vld1.16 {q2}, [%3]! \n" + "vmlal.u16 q0, d4, d6 \n" // * 4 + "vmlal.u16 q1, d5, d6 \n" // * 4 + "subs %6, %6, #8 \n" // 8 processed per loop + "vst1.32 {q0, q1}, [%5]! \n" // store 8 samples + "bgt 1b \n" : "+r"(src0), // %0 "+r"(src1), // %1 "+r"(src2), // %2 @@ -2768,8 +2845,8 @@ void GaussRow_NEON(const uint32_t* src, uint16_t* dst, int width) { const uint32_t* src2 = src + 2; const uint32_t* src3 = src + 3; asm volatile( - "vmov.u32 q10, #4 \n" // constant 4 - "vmov.u32 q11, #6 \n" // constant 6 + "vmov.u32 q10, #4 \n" // constant 4 + "vmov.u32 q11, #6 \n" // constant 6 "1: \n" "vld1.32 {q0, q1}, [%0]! 
\n" // load 12 source samples @@ -2807,16 +2884,16 @@ void NV21ToYUV24Row_NEON(const uint8_t* src_y, int width) { asm volatile( "1: \n" - "vld1.8 {q2}, [%0]! \n" // load 16 Y values - "vld2.8 {d0, d2}, [%1]! \n" // load 8 VU values - "vmov d1, d0 \n" - "vzip.u8 d0, d1 \n" // VV - "vmov d3, d2 \n" - "vzip.u8 d2, d3 \n" // UU - "subs %3, %3, #16 \n" // 16 pixels per loop - "vst3.8 {d0, d2, d4}, [%2]! \n" // store 16 YUV pixels - "vst3.8 {d1, d3, d5}, [%2]! \n" - "bgt 1b \n" + "vld1.8 {q2}, [%0]! \n" // load 16 Y values + "vld2.8 {d0, d2}, [%1]! \n" // load 8 VU values + "vmov d1, d0 \n" + "vzip.u8 d0, d1 \n" // VV + "vmov d3, d2 \n" + "vzip.u8 d2, d3 \n" // UU + "subs %3, %3, #16 \n" // 16 pixels per loop + "vst3.8 {d0, d2, d4}, [%2]! \n" // store 16 YUV pixels + "vst3.8 {d1, d3, d5}, [%2]! \n" + "bgt 1b \n" : "+r"(src_y), // %0 "+r"(src_vu), // %1 "+r"(dst_yuv24), // %2 @@ -2830,24 +2907,24 @@ void AYUVToUVRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_uv, int width) { asm volatile( - "add %1, %0, %1 \n" // src_stride + src_AYUV + "add %1, %0, %1 \n" // src_stride + src_AYUV "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 AYUV pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 AYUV + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 AYUV pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 AYUV // pixels. - "vpaddl.u8 q0, q0 \n" // V 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // U 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more AYUV + "vpaddl.u8 q0, q0 \n" // V 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // U 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more AYUV // pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 AYUV + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 AYUV // pixels. - "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. 
+ "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. "vqrshrun.s16 d1, q0, #2 \n" // 2x2 average "vqrshrun.s16 d0, q1, #2 \n" - "subs %3, %3, #16 \n" // 16 processed per loop. - "vst2.8 {d0, d1}, [%2]! \n" // store 8 pixels UV. - "bgt 1b \n" + "subs %3, %3, #16 \n" // 16 processed per loop. + "vst2.8 {d0, d1}, [%2]! \n" // store 8 pixels UV. + "bgt 1b \n" : "+r"(src_ayuv), // %0 "+r"(src_stride_ayuv), // %1 "+r"(dst_uv), // %2 @@ -2861,24 +2938,24 @@ void AYUVToVURow_NEON(const uint8_t* src_ayuv, uint8_t* dst_vu, int width) { asm volatile( - "add %1, %0, %1 \n" // src_stride + src_AYUV + "add %1, %0, %1 \n" // src_stride + src_AYUV "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 AYUV pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 AYUV + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 AYUV pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 AYUV // pixels. - "vpaddl.u8 q0, q0 \n" // V 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // U 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more AYUV + "vpaddl.u8 q0, q0 \n" // V 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // U 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more AYUV // pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 AYUV + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 AYUV // pixels. - "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. "vqrshrun.s16 d0, q0, #2 \n" // 2x2 average "vqrshrun.s16 d1, q1, #2 \n" - "subs %3, %3, #16 \n" // 16 processed per loop. - "vst2.8 {d0, d1}, [%2]! \n" // store 8 pixels VU. - "bgt 1b \n" + "subs %3, %3, #16 \n" // 16 processed per loop. + "vst2.8 {d0, d1}, [%2]! \n" // store 8 pixels VU. 
+ "bgt 1b \n" : "+r"(src_ayuv), // %0 "+r"(src_stride_ayuv), // %1 "+r"(dst_vu), // %2 @@ -2892,11 +2969,11 @@ void AYUVToVURow_NEON(const uint8_t* src_ayuv, void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width) { asm volatile( "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 AYUV pixels - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 AYUV pixels - "subs %2, %2, #16 \n" // 16 processed per loop - "vst1.8 {q2}, [%1]! \n" // store 16 Y's. - "bgt 1b \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 AYUV pixels + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 AYUV pixels + "subs %2, %2, #16 \n" // 16 processed per loop + "vst1.8 {q2}, [%1]! \n" // store 16 Y's. + "bgt 1b \n" : "+r"(src_ayuv), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2908,12 +2985,12 @@ void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width) { void SwapUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_vu, int width) { asm volatile( "1: \n" - "vld2.8 {d0, d2}, [%0]! \n" // load 16 UV values - "vld2.8 {d1, d3}, [%0]! \n" - "vorr.u8 q2, q0, q0 \n" // move U after V - "subs %2, %2, #16 \n" // 16 pixels per loop - "vst2.8 {q1, q2}, [%1]! \n" // store 16 VU pixels - "bgt 1b \n" + "vld2.8 {d0, d2}, [%0]! \n" // load 16 UV values + "vld2.8 {d1, d3}, [%0]! \n" + "vorr.u8 q2, q0, q0 \n" // move U after V + "subs %2, %2, #16 \n" // 16 pixels per loop + "vst2.8 {q1, q2}, [%1]! \n" // store 16 VU pixels + "bgt 1b \n" : "+r"(src_uv), // %0 "+r"(dst_vu), // %1 "+r"(width) // %2 @@ -2921,6 +2998,39 @@ void SwapUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_vu, int width) { : "cc", "memory", "q0", "q1", "q2"); } +void HalfMergeUVRow_NEON(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width) { + const uint8_t* src_u_1 = src_u + src_stride_u; + const uint8_t* src_v_1 = src_v + src_stride_v; + asm volatile( + "1: \n" + "vld1.8 {q0}, [%0]! \n" // load 16 U values + "vld1.8 {q1}, [%2]! 
\n" // load 16 V values + "vld1.8 {q2}, [%1]! \n" + "vld1.8 {q3}, [%3]! \n" + "vpaddl.u8 q0, q0 \n" // half size + "vpaddl.u8 q1, q1 \n" + "vpadal.u8 q0, q2 \n" + "vpadal.u8 q1, q3 \n" + "vqrshrn.u16 d0, q0, #2 \n" + "vqrshrn.u16 d1, q1, #2 \n" + "subs %5, %5, #16 \n" // 16 src pixels per loop + "vst2.8 {d0, d1}, [%4]! \n" // store 8 UV pixels + "bgt 1b \n" + : "+r"(src_u), // %0 + "+r"(src_u_1), // %1 + "+r"(src_v), // %2 + "+r"(src_v_1), // %3 + "+r"(dst_uv), // %4 + "+r"(width) // %5 + : + : "cc", "memory", "q0", "q1", "q2", "q3"); +} + #endif // !defined(LIBYUV_DISABLE_NEON) && defined(__ARM_NEON__).. #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc b/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc index 866e7bfc6..d5258a3ae 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc @@ -68,13 +68,13 @@ extern "C" { "uzp2 v3.8b, v2.8b, v2.8b \n" \ "ins v1.s[1], v3.s[0] \n" -#define YUVTORGB_SETUP \ - "ld1r {v24.8h}, [%[kUVBiasBGR]], #2 \n" \ - "ld1r {v25.8h}, [%[kUVBiasBGR]], #2 \n" \ - "ld1r {v26.8h}, [%[kUVBiasBGR]] \n" \ - "ld1r {v31.4s}, [%[kYToRgb]] \n" \ - "ld2 {v27.8h, v28.8h}, [%[kUVToRB]] \n" \ - "ld2 {v29.8h, v30.8h}, [%[kUVToG]] \n" +#define YUVTORGB_SETUP \ + "ld3r {v24.8h, v25.8h, v26.8h}, [%[kUVBiasBGR]] \n" \ + "ld1r {v31.4s}, [%[kYToRgb]] \n" \ + "ld2 {v27.8h, v28.8h}, [%[kUVToRB]] \n" \ + "ld2 {v29.8h, v30.8h}, [%[kUVToG]] \n" + +// clang-format off #define YUVTORGB(vR, vG, vB) \ "uxtl v0.8h, v0.8b \n" /* Extract Y */ \ @@ -89,29 +89,23 @@ extern "C" { "mov v2.d[0], v1.d[1] \n" /* Extract V */ \ "uxtl v2.8h, v2.8b \n" \ "uxtl v1.8h, v1.8b \n" /* Extract U */ \ - "mul v3.8h, v1.8h, v27.8h \n" \ - "mul v5.8h, v1.8h, v29.8h \n" \ - "mul v6.8h, v2.8h, v30.8h \n" \ - "mul v7.8h, v2.8h, v28.8h \n" \ + "mul v3.8h, v27.8h, v1.8h \n" \ + "mul v5.8h, v29.8h, v1.8h \n" \ + "mul v6.8h, v30.8h, v2.8h \n" \ + "mul v7.8h, v28.8h, v2.8h 
\n" \ "sqadd v6.8h, v6.8h, v5.8h \n" \ - "sqadd " #vB \ - ".8h, v24.8h, v0.8h \n" /* B */ \ - "sqadd " #vG \ - ".8h, v25.8h, v0.8h \n" /* G */ \ - "sqadd " #vR \ - ".8h, v26.8h, v0.8h \n" /* R */ \ - "sqadd " #vB ".8h, " #vB \ - ".8h, v3.8h \n" /* B */ \ - "sqsub " #vG ".8h, " #vG \ - ".8h, v6.8h \n" /* G */ \ - "sqadd " #vR ".8h, " #vR \ - ".8h, v7.8h \n" /* R */ \ - "sqshrun " #vB ".8b, " #vB \ - ".8h, #6 \n" /* B */ \ - "sqshrun " #vG ".8b, " #vG \ - ".8h, #6 \n" /* G */ \ + "sqadd " #vB ".8h, v24.8h, v0.8h \n" /* B */ \ + "sqadd " #vG ".8h, v25.8h, v0.8h \n" /* G */ \ + "sqadd " #vR ".8h, v26.8h, v0.8h \n" /* R */ \ + "sqadd " #vB ".8h, " #vB ".8h, v3.8h \n" /* B */ \ + "sqsub " #vG ".8h, " #vG ".8h, v6.8h \n" /* G */ \ + "sqadd " #vR ".8h, " #vR ".8h, v7.8h \n" /* R */ \ + "sqshrun " #vB ".8b, " #vB ".8h, #6 \n" /* B */ \ + "sqshrun " #vG ".8b, " #vG ".8h, #6 \n" /* G */ \ "sqshrun " #vR ".8b, " #vR ".8h, #6 \n" /* R */ +// clang-format on + void I444ToARGBRow_NEON(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -120,13 +114,16 @@ void I444ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "movi v23.8b, #255 \n" /* A */ - "1: \n" + "movi v23.8b, #255 \n" /* A */ + "1: \n" READYUV444 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w4, %w4, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%1, 448] \n" + "prfm pldl1keep, [%2, 448] \n" + "subs %w4, %w4, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -149,13 +146,17 @@ void I422ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "movi v23.8b, #255 \n" /* A */ - "1: \n" + "movi v23.8b, #255 \n" /* A */ + + "1: \n" READYUV422 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w4, %w4, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" - "b.gt 1b \n" + "prfm 
pldl1keep, [%1, 128] \n" + "prfm pldl1keep, [%2, 128] \n" + "subs %w4, %w4, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -179,13 +180,17 @@ void I422AlphaToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "1: \n" + "1: \n" READYUV422 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "ld1 {v23.8b}, [%3], #8 \n" - "subs %w5, %w5, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%4], #32 \n" - "b.gt 1b \n" + "ld1 {v23.8b}, [%3], #8 \n" + "prfm pldl1keep, [%1, 128] \n" + "prfm pldl1keep, [%2, 128] \n" + "prfm pldl1keep, [%3, 448] \n" + "subs %w5, %w5, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%4], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -209,13 +214,16 @@ void I422ToRGBARow_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "movi v20.8b, #255 \n" /* A */ - "1: \n" + "movi v20.8b, #255 \n" /* A */ + "1: \n" READYUV422 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v23, v22, v21) - "subs %w4, %w4, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%1, 128] \n" + "prfm pldl1keep, [%2, 128] \n" + "subs %w4, %w4, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -238,12 +246,15 @@ void I422ToRGB24Row_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "1: \n" + "1: \n" READYUV422 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w4, %w4, #8 \n" - "st3 {v20.8b,v21.8b,v22.8b}, [%3], #24 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%1, 128] \n" + "prfm pldl1keep, [%2, 128] \n" + "subs %w4, %w4, #8 \n" + "st3 {v20.8b,v21.8b,v22.8b}, [%3], #24 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -265,6 +276,8 @@ void I422ToRGB24Row_NEON(const uint8_t* src_y, "sri v0.8h, v21.8h, #5 \n" /* RG */ \ "sri 
v0.8h, v20.8h, #11 \n" /* RGB */ +// clang-format off + void I422ToRGB565Row_NEON(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -272,13 +285,17 @@ void I422ToRGB565Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP - "1: \n" READYUV422 YUVTORGB( - v22, v21, - v20) "subs %w4, %w4, #8 \n" ARGBTORGB565 - "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels - // RGB565. - "b.gt 1b \n" + YUVTORGB_SETUP + "1: \n" + READYUV422 + YUVTORGB(v22, v21, v20) + "prfm pldl1keep, [%0, 448] \n" + "subs %w4, %w4, #8 \n" + ARGBTORGB565 + "prfm pldl1keep, [%1, 128] \n" + "prfm pldl1keep, [%2, 128] \n" + "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels RGB565. + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -308,14 +325,18 @@ void I422ToARGB1555Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" READYUV422 YUVTORGB( - v22, v21, - v20) "subs %w4, %w4, #8 \n" ARGBTOARGB1555 - "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels - // RGB565. - "b.gt 1b \n" + YUVTORGB_SETUP + "movi v23.8b, #255 \n" + "1: \n" + READYUV422 + YUVTORGB(v22, v21, v20) + "prfm pldl1keep, [%0, 448] \n" + "subs %w4, %w4, #8 \n" + ARGBTOARGB1555 + "prfm pldl1keep, [%1, 128] \n" + "prfm pldl1keep, [%2, 128] \n" + "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels RGB565. 
+ "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -328,6 +349,7 @@ void I422ToARGB1555Row_NEON(const uint8_t* src_y, : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"); } +// clang-format on #define ARGBTOARGB4444 \ /* Input v20.8b<=B, v21.8b<=G, v22.8b<=R, v23.8b<=A, v4.8b<=0x0f */ \ @@ -347,15 +369,18 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "movi v4.16b, #0x0f \n" // bits to clear with vbic. - "1: \n" + "movi v4.16b, #0x0f \n" // bits to clear with vbic. + "1: \n" READYUV422 YUVTORGB(v22, v21, v20) - "subs %w4, %w4, #8 \n" - "movi v23.8b, #255 \n" + "prfm pldl1keep, [%0, 448] \n" + "subs %w4, %w4, #8 \n" + "movi v23.8b, #255 \n" ARGBTOARGB4444 - "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels ARGB4444. - "b.gt 1b \n" + "prfm pldl1keep, [%1, 128] \n" + "prfm pldl1keep, [%2, 128] \n" + "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels ARGB4444. 
+ "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -370,23 +395,27 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y, ); } -void I400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { +void I400ToARGBRow_NEON(const uint8_t* src_y, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { asm volatile ( YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" + "movi v23.8b, #255 \n" + "1: \n" READYUV400 YUVTORGB(v22, v21, v20) - "subs %w2, %w2, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 - : [kUVToRB]"r"(&kYuvI601Constants.kUVToRB), - [kUVToG]"r"(&kYuvI601Constants.kUVToG), - [kUVBiasBGR]"r"(&kYuvI601Constants.kUVBiasBGR), - [kYToRgb]"r"(&kYuvI601Constants.kYToRgb) + : [kUVToRB]"r"(&yuvconstants->kUVToRB), + [kUVToG]"r"(&yuvconstants->kUVToG), + [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), + [kYToRgb]"r"(&yuvconstants->kYToRgb) : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" ); @@ -394,14 +423,15 @@ void I400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { asm volatile( - "movi v23.8b, #255 \n" + "movi v23.8b, #255 \n" "1: \n" - "ld1 {v20.8b}, [%0], #8 \n" - "orr v21.8b, v20.8b, v20.8b \n" - "orr v22.8b, v20.8b, v20.8b \n" - "subs %w2, %w2, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" - "b.gt 1b \n" + "ld1 {v20.8b}, [%0], #8 \n" + "prfm pldl1keep, [%0, 448] \n" + "orr v21.8b, v20.8b, v20.8b \n" + "orr v22.8b, v20.8b, v20.8b \n" + "subs %w2, %w2, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -416,13 
+446,15 @@ void NV12ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" + "movi v23.8b, #255 \n" + "1: \n" READNV12 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w3, %w3, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%1, 256] \n" + "subs %w3, %w3, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_uv), // %1 "+r"(dst_argb), // %2 @@ -443,13 +475,15 @@ void NV21ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" + "movi v23.8b, #255 \n" + "1: \n" READNV21 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w3, %w3, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%1, 256] \n" + "subs %w3, %w3, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_vu), // %1 "+r"(dst_argb), // %2 @@ -470,12 +504,14 @@ void NV12ToRGB24Row_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "1: \n" + "1: \n" READNV12 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w3, %w3, #8 \n" - "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%1, 256] \n" + "subs %w3, %w3, #8 \n" + "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_uv), // %1 "+r"(dst_rgb24), // %2 @@ -496,12 +532,14 @@ void NV21ToRGB24Row_NEON(const uint8_t* src_y, int width) { asm volatile ( YUVTORGB_SETUP - "1: \n" + "1: \n" READNV21 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w3, %w3, #8 \n" - "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%1, 256] \n" + "subs %w3, %w3, #8 \n" + "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n" + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_vu), // %1 "+r"(dst_rgb24), // %2 @@ -521,13 +559,13 @@ void 
NV12ToRGB565Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP - "1: \n" READNV12 YUVTORGB( - v22, v21, - v20) "subs %w3, %w3, #8 \n" ARGBTORGB565 - "st1 {v0.8h}, [%2], 16 \n" // store 8 pixels - // RGB565. - "b.gt 1b \n" + YUVTORGB_SETUP "1: \n" READNV12 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB( + v22, v21, v20) ARGBTORGB565 + "prfm pldl1keep, [%1, 256] \n" + "subs %w3, %w3, #8 \n" + "st1 {v0.8h}, [%2], 16 \n" // store 8 pixels + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_uv), // %1 "+r"(dst_rgb565), // %2 @@ -546,13 +584,14 @@ void YUY2ToARGBRow_NEON(const uint8_t* src_yuy2, int width) { asm volatile ( YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" + "movi v23.8b, #255 \n" + "1: \n" READYUY2 + "prfm pldl1keep, [%0, 448] \n" YUVTORGB(v22, v21, v20) - "subs %w2, %w2, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" - "b.gt 1b \n" + "subs %w2, %w2, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" + "b.gt 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -571,13 +610,14 @@ void UYVYToARGBRow_NEON(const uint8_t* src_uyvy, int width) { asm volatile ( YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" + "movi v23.8b, #255 \n" + "1: \n" READUYVY YUVTORGB(v22, v21, v20) - "subs %w2, %w2, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], 32 \n" - "b.gt 1b \n" + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" + "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], 32 \n" + "b.gt 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -597,11 +637,12 @@ void SplitUVRow_NEON(const uint8_t* src_uv, int width) { asm volatile( "1: \n" - "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pairs of UV - "subs %w3, %w3, #16 \n" // 16 processed per loop - "st1 {v0.16b}, [%1], #16 \n" // store U - "st1 {v1.16b}, [%2], #16 \n" // store V - "b.gt 1b \n" + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pairs of UV + "prfm pldl1keep, [%0, 448] \n" + "subs %w3, %w3, 
#16 \n" // 16 processed per loop + "st1 {v0.16b}, [%1], #16 \n" // store U + "st1 {v1.16b}, [%2], #16 \n" // store V + "b.gt 1b \n" : "+r"(src_uv), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -618,11 +659,13 @@ void MergeUVRow_NEON(const uint8_t* src_u, int width) { asm volatile( "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load U - "ld1 {v1.16b}, [%1], #16 \n" // load V - "subs %w3, %w3, #16 \n" // 16 processed per loop - "st2 {v0.16b,v1.16b}, [%2], #32 \n" // store 16 pairs of UV - "b.gt 1b \n" + "ld1 {v0.16b}, [%0], #16 \n" // load U + "ld1 {v1.16b}, [%1], #16 \n" // load V + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #16 \n" // 16 processed per loop + "st2 {v0.16b,v1.16b}, [%2], #32 \n" // store 16 pairs of UV + "b.gt 1b \n" : "+r"(src_u), // %0 "+r"(src_v), // %1 "+r"(dst_uv), // %2 @@ -640,12 +683,13 @@ void SplitRGBRow_NEON(const uint8_t* src_rgb, int width) { asm volatile( "1: \n" - "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 RGB - "subs %w4, %w4, #16 \n" // 16 processed per loop - "st1 {v0.16b}, [%1], #16 \n" // store R - "st1 {v1.16b}, [%2], #16 \n" // store G - "st1 {v2.16b}, [%3], #16 \n" // store B - "b.gt 1b \n" + "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 RGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w4, %w4, #16 \n" // 16 processed per loop + "st1 {v0.16b}, [%1], #16 \n" // store R + "st1 {v1.16b}, [%2], #16 \n" // store G + "st1 {v2.16b}, [%3], #16 \n" // store B + "b.gt 1b \n" : "+r"(src_rgb), // %0 "+r"(dst_r), // %1 "+r"(dst_g), // %2 @@ -664,12 +708,16 @@ void MergeRGBRow_NEON(const uint8_t* src_r, int width) { asm volatile( "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load R - "ld1 {v1.16b}, [%1], #16 \n" // load G - "ld1 {v2.16b}, [%2], #16 \n" // load B - "subs %w4, %w4, #16 \n" // 16 processed per loop - "st3 {v0.16b,v1.16b,v2.16b}, [%3], #48 \n" // store 16 RGB - "b.gt 1b \n" + "ld1 {v0.16b}, [%0], #16 \n" // load R + "ld1 {v1.16b}, [%1], #16 \n" // load G + "ld1 {v2.16b}, [%2], #16 
\n" // load B + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "prfm pldl1keep, [%2, 448] \n" + "subs %w4, %w4, #16 \n" // 16 processed per loop + "st3 {v0.16b,v1.16b,v2.16b}, [%3], #48 \n" // store 16 RGB + "prfm pldl1keep, [%0, 448] \n" + "b.gt 1b \n" : "+r"(src_r), // %0 "+r"(src_g), // %1 "+r"(src_b), // %2 @@ -684,10 +732,11 @@ void MergeRGBRow_NEON(const uint8_t* src_r, void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width) { asm volatile( "1: \n" - "ldp q0, q1, [%0], #32 \n" - "subs %w2, %w2, #32 \n" // 32 processed per loop - "stp q0, q1, [%1], #32 \n" - "b.gt 1b \n" + "ldp q0, q1, [%0], #32 \n" + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #32 \n" // 32 processed per loop + "stp q0, q1, [%1], #32 \n" + "b.gt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 // Output registers @@ -699,11 +748,11 @@ void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width) { // SetRow writes 'width' bytes using an 8 bit value repeated. void SetRow_NEON(uint8_t* dst, uint8_t v8, int width) { asm volatile( - "dup v0.16b, %w2 \n" // duplicate 16 bytes + "dup v0.16b, %w2 \n" // duplicate 16 bytes "1: \n" - "subs %w1, %w1, #16 \n" // 16 bytes per loop - "st1 {v0.16b}, [%0], #16 \n" // store - "b.gt 1b \n" + "subs %w1, %w1, #16 \n" // 16 bytes per loop + "st1 {v0.16b}, [%0], #16 \n" // store + "b.gt 1b \n" : "+r"(dst), // %0 "+r"(width) // %1 : "r"(v8) // %2 @@ -712,89 +761,157 @@ void SetRow_NEON(uint8_t* dst, uint8_t v8, int width) { void ARGBSetRow_NEON(uint8_t* dst, uint32_t v32, int width) { asm volatile( - "dup v0.4s, %w2 \n" // duplicate 4 ints + "dup v0.4s, %w2 \n" // duplicate 4 ints "1: \n" - "subs %w1, %w1, #4 \n" // 4 ints per loop - "st1 {v0.16b}, [%0], #16 \n" // store - "b.gt 1b \n" + "subs %w1, %w1, #4 \n" // 4 ints per loop + "st1 {v0.16b}, [%0], #16 \n" // store + "b.gt 1b \n" : "+r"(dst), // %0 "+r"(width) // %1 : "r"(v32) // %2 : "cc", "memory", "v0"); } +// Shuffle table for reversing the bytes. 
+static const uvec8 kShuffleMirror = {15u, 14u, 13u, 12u, 11u, 10u, 9u, 8u, + 7u, 6u, 5u, 4u, 3u, 2u, 1u, 0u}; + void MirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width) { asm volatile( // Start at end of source row. - "add %0, %0, %w2, sxtw \n" - "sub %0, %0, #16 \n" + "ld1 {v3.16b}, [%3] \n" // shuffler + "add %0, %0, %w2, sxtw \n" + "sub %0, %0, #32 \n" "1: \n" - "ld1 {v0.16b}, [%0], %3 \n" // src -= 16 - "subs %w2, %w2, #16 \n" // 16 pixels per loop. - "rev64 v0.16b, v0.16b \n" - "st1 {v0.D}[1], [%1], #8 \n" // dst += 16 - "st1 {v0.D}[0], [%1], #8 \n" - "b.gt 1b \n" - : "+r"(src), // %0 - "+r"(dst), // %1 - "+r"(width) // %2 - : "r"((ptrdiff_t)-16) // %3 - : "cc", "memory", "v0"); + "ldr q2, [%0, 16] \n" + "ldr q1, [%0], -32 \n" // src -= 32 + "subs %w2, %w2, #32 \n" // 32 pixels per loop. + "tbl v0.16b, {v2.16b}, v3.16b \n" + "tbl v1.16b, {v1.16b}, v3.16b \n" + "st1 {v0.16b, v1.16b}, [%1], #32 \n" // store 32 pixels + "b.gt 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(&kShuffleMirror) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3"); } -void MirrorUVRow_NEON(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +// Shuffle table for reversing the UV. +static const uvec8 kShuffleMirrorUV = {14u, 15u, 12u, 13u, 10u, 11u, 8u, 9u, + 6u, 7u, 4u, 5u, 2u, 3u, 0u, 1u}; + +void MirrorUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_uv, int width) { asm volatile( // Start at end of source row. - "add %0, %0, %w3, sxtw #1 \n" - "sub %0, %0, #16 \n" + "ld1 {v4.16b}, [%3] \n" // shuffler + "add %0, %0, %w2, sxtw #1 \n" + "sub %0, %0, #32 \n" "1: \n" - "ld2 {v0.8b, v1.8b}, [%0], %4 \n" // src -= 16 - "subs %w3, %w3, #8 \n" // 8 pixels per loop. 
- "rev64 v0.8b, v0.8b \n" - "rev64 v1.8b, v1.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // dst += 8 - "st1 {v1.8b}, [%2], #8 \n" - "b.gt 1b \n" - : "+r"(src_uv), // %0 - "+r"(dst_u), // %1 - "+r"(dst_v), // %2 - "+r"(width) // %3 - : "r"((ptrdiff_t)-16) // %4 - : "cc", "memory", "v0", "v1"); + "ldr q1, [%0, 16] \n" + "ldr q0, [%0], -32 \n" // src -= 32 + "subs %w2, %w2, #16 \n" // 16 pixels per loop. + "tbl v2.16b, {v1.16b}, v4.16b \n" + "tbl v3.16b, {v0.16b}, v4.16b \n" + "st1 {v2.16b, v3.16b}, [%1], #32 \n" // dst += 32 + "b.gt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_uv), // %1 + "+r"(width) // %2 + : "r"(&kShuffleMirrorUV) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); } -void ARGBMirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width) { +void MirrorSplitUVRow_NEON(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { asm volatile( // Start at end of source row. - "add %0, %0, %w2, sxtw #2 \n" - "sub %0, %0, #16 \n" + "ld1 {v4.16b}, [%4] \n" // shuffler + "add %0, %0, %w3, sxtw #1 \n" + "sub %0, %0, #32 \n" "1: \n" - "ld1 {v0.16b}, [%0], %3 \n" // src -= 16 - "subs %w2, %w2, #4 \n" // 4 pixels per loop. - "rev64 v0.4s, v0.4s \n" - "st1 {v0.D}[1], [%1], #8 \n" // dst += 16 - "st1 {v0.D}[0], [%1], #8 \n" - "b.gt 1b \n" - : "+r"(src), // %0 - "+r"(dst), // %1 - "+r"(width) // %2 - : "r"((ptrdiff_t)-16) // %3 - : "cc", "memory", "v0"); + "ldr q1, [%0, 16] \n" + "ldr q0, [%0], -32 \n" // src -= 32 + "subs %w3, %w3, #16 \n" // 16 pixels per loop. + "tbl v2.16b, {v1.16b}, v4.16b \n" + "tbl v3.16b, {v0.16b}, v4.16b \n" + "uzp1 v0.16b, v2.16b, v3.16b \n" // U + "uzp2 v1.16b, v2.16b, v3.16b \n" // V + "st1 {v0.16b}, [%1], #16 \n" // dst += 16 + "st1 {v1.16b}, [%2], #16 \n" + "b.gt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : "r"(&kShuffleMirrorUV) // %4 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); +} + +// Shuffle table for reversing the ARGB. 
+static const uvec8 kShuffleMirrorARGB = {12u, 13u, 14u, 15u, 8u, 9u, 10u, 11u, + 4u, 5u, 6u, 7u, 0u, 1u, 2u, 3u}; + +void ARGBMirrorRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) { + asm volatile( + // Start at end of source row. + "ld1 {v4.16b}, [%3] \n" // shuffler + "add %0, %0, %w2, sxtw #2 \n" + "sub %0, %0, #32 \n" + "1: \n" + "ldr q1, [%0, 16] \n" + "ldr q0, [%0], -32 \n" // src -= 32 + "subs %w2, %w2, #8 \n" // 8 pixels per loop. + "tbl v2.16b, {v1.16b}, v4.16b \n" + "tbl v3.16b, {v0.16b}, v4.16b \n" + "st1 {v2.16b, v3.16b}, [%1], #32 \n" // dst += 32 + "b.gt 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : "r"(&kShuffleMirrorARGB) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); +} + +void RGB24MirrorRow_NEON(const uint8_t* src_rgb24, + uint8_t* dst_rgb24, + int width) { + asm volatile( + "ld1 {v3.16b}, [%4] \n" // shuffler + "add %0, %0, %w2, sxtw #1 \n" // Start at end of row. + "add %0, %0, %w2, sxtw \n" + "sub %0, %0, #48 \n" + + "1: \n" + "ld3 {v0.16b, v1.16b, v2.16b}, [%0], %3 \n" // src -= 48 + "subs %w2, %w2, #16 \n" // 16 pixels per loop. + "tbl v0.16b, {v0.16b}, v3.16b \n" + "tbl v1.16b, {v1.16b}, v3.16b \n" + "tbl v2.16b, {v2.16b}, v3.16b \n" + "st3 {v0.16b, v1.16b, v2.16b}, [%1], #48 \n" // dst += 48 + "b.gt 1b \n" + : "+r"(src_rgb24), // %0 + "+r"(dst_rgb24), // %1 + "+r"(width) // %2 + : "r"((ptrdiff_t)-48), // %3 + "r"(&kShuffleMirror) // %4 + : "cc", "memory", "v0", "v1", "v2", "v3"); } void RGB24ToARGBRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_argb, int width) { asm volatile( - "movi v4.8b, #255 \n" // Alpha + "movi v4.8b, #255 \n" // Alpha "1: \n" - "ld3 {v1.8b,v2.8b,v3.8b}, [%0], #24 \n" // load 8 pixels of RGB24. - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "st4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%1], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld3 {v1.8b,v2.8b,v3.8b}, [%0], #24 \n" // load 8 pixels of + // RGB24. 
+ "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "st4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%1], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_rgb24), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -805,14 +922,15 @@ void RGB24ToARGBRow_NEON(const uint8_t* src_rgb24, void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width) { asm volatile( - "movi v5.8b, #255 \n" // Alpha + "movi v5.8b, #255 \n" // Alpha "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "orr v3.8b, v1.8b, v1.8b \n" // move g - "orr v4.8b, v0.8b, v0.8b \n" // move r - "st4 {v2.8b,v3.8b,v4.8b,v5.8b}, [%1], #32 \n" // store b g r a - "b.gt 1b \n" + "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "orr v3.8b, v1.8b, v1.8b \n" // move g + "orr v4.8b, v0.8b, v0.8b \n" // move r + "st4 {v2.8b,v3.8b,v4.8b,v5.8b}, [%1], #32 \n" // store b g r a + "b.gt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -823,14 +941,15 @@ void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width) { void RAWToRGBARow_NEON(const uint8_t* src_raw, uint8_t* dst_rgba, int width) { asm volatile( - "movi v0.8b, #255 \n" // Alpha + "movi v0.8b, #255 \n" // Alpha "1: \n" - "ld3 {v3.8b,v4.8b,v5.8b}, [%0], #24 \n" // read r g b - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "orr v2.8b, v4.8b, v4.8b \n" // move g - "orr v1.8b, v5.8b, v5.8b \n" // move r - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store a b g r - "b.gt 1b \n" + "ld3 {v3.8b,v4.8b,v5.8b}, [%0], #24 \n" // read r g b + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. 
+ "orr v2.8b, v4.8b, v4.8b \n" // move g + "orr v1.8b, v5.8b, v5.8b \n" // move r + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store a b g r + "b.gt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_rgba), // %1 "+r"(width) // %2 @@ -842,12 +961,13 @@ void RAWToRGBARow_NEON(const uint8_t* src_raw, uint8_t* dst_rgba, int width) { void RAWToRGB24Row_NEON(const uint8_t* src_raw, uint8_t* dst_rgb24, int width) { asm volatile( "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "orr v3.8b, v1.8b, v1.8b \n" // move g - "orr v4.8b, v0.8b, v0.8b \n" // move r - "st3 {v2.8b,v3.8b,v4.8b}, [%1], #24 \n" // store b g r - "b.gt 1b \n" + "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "orr v3.8b, v1.8b, v1.8b \n" // move g + "orr v4.8b, v0.8b, v0.8b \n" // move r + "st3 {v2.8b,v3.8b,v4.8b}, [%1], #24 \n" // store b g r + "b.gt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_rgb24), // %1 "+r"(width) // %2 @@ -873,13 +993,14 @@ void RGB565ToARGBRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_argb, int width) { asm volatile( - "movi v3.8b, #255 \n" // Alpha + "movi v3.8b, #255 \n" // Alpha "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. RGB565TOARGB - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_rgb565), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -929,14 +1050,14 @@ void ARGB1555ToARGBRow_NEON(const uint8_t* src_argb1555, uint8_t* dst_argb, int width) { asm volatile( - "movi v3.8b, #255 \n" // Alpha + "movi v3.8b, #255 \n" // Alpha "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. 
- "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGB1555TOARGB - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB - // pixels - "b.gt 1b \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb1555), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -945,6 +1066,8 @@ void ARGB1555ToARGBRow_NEON(const uint8_t* src_argb1555, ); } +// Convert v0.8h to b = v0.8b g = v1.8b r = v2.8b +// clobbers v3 #define ARGB4444TOARGB \ "shrn v1.8b, v0.8h, #8 \n" /* v1(l) AR */ \ "xtn2 v1.16b, v0.8h \n" /* v1(h) GB */ \ @@ -962,12 +1085,12 @@ void ARGB4444ToARGBRow_NEON(const uint8_t* src_argb4444, int width) { asm volatile( "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGB4444TOARGB - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB - // pixels - "b.gt 1b \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb4444), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -981,11 +1104,12 @@ void ARGBToRGB24Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load 8 ARGB - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "st3 {v1.8b,v2.8b,v3.8b}, [%1], #24 \n" // store 8 pixels of - // RGB24. - "b.gt 1b \n" + "ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. 
+ "st3 {v1.8b,v2.8b,v3.8b}, [%1], #24 \n" // store 8 pixels of + // RGB24 + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb24), // %1 "+r"(width) // %2 @@ -997,12 +1121,13 @@ void ARGBToRGB24Row_NEON(const uint8_t* src_argb, void ARGBToRAWRow_NEON(const uint8_t* src_argb, uint8_t* dst_raw, int width) { asm volatile( "1: \n" - "ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load b g r a - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "orr v4.8b, v2.8b, v2.8b \n" // mov g - "orr v5.8b, v1.8b, v1.8b \n" // mov b - "st3 {v3.8b,v4.8b,v5.8b}, [%1], #24 \n" // store r g b - "b.gt 1b \n" + "ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load b g r a + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "orr v4.8b, v2.8b, v2.8b \n" // mov g + "orr v5.8b, v1.8b, v1.8b \n" // mov b + "st3 {v3.8b,v4.8b,v5.8b}, [%1], #24 \n" // store r g b + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_raw), // %1 "+r"(width) // %2 @@ -1014,10 +1139,11 @@ void ARGBToRAWRow_NEON(const uint8_t* src_argb, uint8_t* dst_raw, int width) { void YUY2ToYRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { asm volatile( "1: \n" - "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of YUY2. - "subs %w2, %w2, #16 \n" // 16 processed per loop. - "st1 {v0.16b}, [%1], #16 \n" // store 16 pixels of Y. - "b.gt 1b \n" + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of YUY2. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #16 \n" // 16 processed per loop. + "st1 {v0.16b}, [%1], #16 \n" // store 16 pixels of Y. + "b.gt 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1029,10 +1155,11 @@ void YUY2ToYRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { void UYVYToYRow_NEON(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { asm volatile( "1: \n" - "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of UYVY. - "subs %w2, %w2, #16 \n" // 16 processed per loop. - "st1 {v1.16b}, [%1], #16 \n" // store 16 pixels of Y. 
- "b.gt 1b \n" + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of UYVY. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #16 \n" // 16 processed per loop. + "st1 {v1.16b}, [%1], #16 \n" // store 16 pixels of Y. + "b.gt 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1047,11 +1174,12 @@ void YUY2ToUV422Row_NEON(const uint8_t* src_yuy2, int width) { asm volatile( "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 YUY2 - "subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs. - "st1 {v1.8b}, [%1], #8 \n" // store 8 U. - "st1 {v3.8b}, [%2], #8 \n" // store 8 V. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 YUY2 + "prfm pldl1keep, [%0, 448] \n" + "subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs. + "st1 {v1.8b}, [%1], #8 \n" // store 8 U. + "st1 {v3.8b}, [%2], #8 \n" // store 8 V. + "b.gt 1b \n" : "+r"(src_yuy2), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1067,11 +1195,12 @@ void UYVYToUV422Row_NEON(const uint8_t* src_uyvy, int width) { asm volatile( "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 UYVY - "subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs. - "st1 {v0.8b}, [%1], #8 \n" // store 8 U. - "st1 {v2.8b}, [%2], #8 \n" // store 8 V. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 UYVY + "prfm pldl1keep, [%0, 448] \n" + "subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs. + "st1 {v0.8b}, [%1], #8 \n" // store 8 U. + "st1 {v2.8b}, [%2], #8 \n" // store 8 V. + "b.gt 1b \n" : "+r"(src_uyvy), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1089,14 +1218,15 @@ void YUY2ToUVRow_NEON(const uint8_t* src_yuy2, const uint8_t* src_yuy2b = src_yuy2 + stride_yuy2; asm volatile( "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels - "subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs. 
- "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row - "urhadd v1.8b, v1.8b, v5.8b \n" // average rows of U - "urhadd v3.8b, v3.8b, v7.8b \n" // average rows of V - "st1 {v1.8b}, [%2], #8 \n" // store 8 U. - "st1 {v3.8b}, [%3], #8 \n" // store 8 V. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels + "prfm pldl1keep, [%0, 448] \n" + "subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs. + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row + "urhadd v1.8b, v1.8b, v5.8b \n" // average rows of U + "urhadd v3.8b, v3.8b, v7.8b \n" // average rows of V + "st1 {v1.8b}, [%2], #8 \n" // store 8 U. + "st1 {v3.8b}, [%3], #8 \n" // store 8 V. + "b.gt 1b \n" : "+r"(src_yuy2), // %0 "+r"(src_yuy2b), // %1 "+r"(dst_u), // %2 @@ -1116,14 +1246,15 @@ void UYVYToUVRow_NEON(const uint8_t* src_uyvy, const uint8_t* src_uyvyb = src_uyvy + stride_uyvy; asm volatile( "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels - "subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs. - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row - "urhadd v0.8b, v0.8b, v4.8b \n" // average rows of U - "urhadd v2.8b, v2.8b, v6.8b \n" // average rows of V - "st1 {v0.8b}, [%2], #8 \n" // store 8 U. - "st1 {v2.8b}, [%3], #8 \n" // store 8 V. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels + "prfm pldl1keep, [%0, 448] \n" + "subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs. + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row + "urhadd v0.8b, v0.8b, v4.8b \n" // average rows of U + "urhadd v2.8b, v2.8b, v6.8b \n" // average rows of V + "st1 {v0.8b}, [%2], #8 \n" // store 8 U. + "st1 {v2.8b}, [%3], #8 \n" // store 8 V. 
+ "b.gt 1b \n" : "+r"(src_uyvy), // %0 "+r"(src_uyvyb), // %1 "+r"(dst_u), // %2 @@ -1141,13 +1272,14 @@ void ARGBShuffleRow_NEON(const uint8_t* src_argb, const uint8_t* shuffler, int width) { asm volatile( - "ld1 {v2.16b}, [%3] \n" // shuffler + "ld1 {v2.16b}, [%3] \n" // shuffler "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 4 pixels. - "subs %w2, %w2, #4 \n" // 4 processed per loop - "tbl v1.16b, {v0.16b}, v2.16b \n" // look up 4 pixels - "st1 {v1.16b}, [%1], #16 \n" // store 4. - "b.gt 1b \n" + "ld1 {v0.16b}, [%0], #16 \n" // load 4 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #4 \n" // 4 processed per loop + "tbl v1.16b, {v0.16b}, v2.16b \n" // look up 4 pixels + "st1 {v1.16b}, [%1], #16 \n" // store 4. + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -1163,13 +1295,14 @@ void I422ToYUY2Row_NEON(const uint8_t* src_y, int width) { asm volatile( "1: \n" - "ld2 {v0.8b, v1.8b}, [%0], #16 \n" // load 16 Ys - "orr v2.8b, v1.8b, v1.8b \n" - "ld1 {v1.8b}, [%1], #8 \n" // load 8 Us - "ld1 {v3.8b}, [%2], #8 \n" // load 8 Vs - "subs %w4, %w4, #16 \n" // 16 pixels - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%3], #32 \n" // Store 16 pixels. - "b.gt 1b \n" + "ld2 {v0.8b, v1.8b}, [%0], #16 \n" // load 16 Ys + "prfm pldl1keep, [%0, 448] \n" + "orr v2.8b, v1.8b, v1.8b \n" + "ld1 {v1.8b}, [%1], #8 \n" // load 8 Us + "ld1 {v3.8b}, [%2], #8 \n" // load 8 Vs + "subs %w4, %w4, #16 \n" // 16 pixels + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%3], #32 \n" // Store 16 pixels. + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -1186,13 +1319,14 @@ void I422ToUYVYRow_NEON(const uint8_t* src_y, int width) { asm volatile( "1: \n" - "ld2 {v1.8b,v2.8b}, [%0], #16 \n" // load 16 Ys - "orr v3.8b, v2.8b, v2.8b \n" - "ld1 {v0.8b}, [%1], #8 \n" // load 8 Us - "ld1 {v2.8b}, [%2], #8 \n" // load 8 Vs - "subs %w4, %w4, #16 \n" // 16 pixels - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%3], #32 \n" // Store 16 pixels. 
- "b.gt 1b \n" + "ld2 {v1.8b,v2.8b}, [%0], #16 \n" // load 16 Ys + "prfm pldl1keep, [%0, 448] \n" + "orr v3.8b, v2.8b, v2.8b \n" + "ld1 {v0.8b}, [%1], #8 \n" // load 8 Us + "ld1 {v2.8b}, [%2], #8 \n" // load 8 Vs + "subs %w4, %w4, #16 \n" // 16 pixels + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%3], #32 \n" // Store 16 pixels. + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_u), // %1 "+r"(src_v), // %2 @@ -1207,11 +1341,13 @@ void ARGBToRGB565Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 pixels - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 + // pixels + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGBTORGB565 - "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels RGB565. - "b.gt 1b \n" + "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels RGB565. + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb565), // %1 "+r"(width) // %2 @@ -1224,15 +1360,17 @@ void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb, const uint32_t dither4, int width) { asm volatile( - "dup v1.4s, %w2 \n" // dither4 + "dup v1.4s, %w2 \n" // dither4 "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" // load 8 pixels - "subs %w3, %w3, #8 \n" // 8 processed per loop. - "uqadd v20.8b, v20.8b, v1.8b \n" - "uqadd v21.8b, v21.8b, v1.8b \n" - "uqadd v22.8b, v22.8b, v1.8b \n" ARGBTORGB565 - "st1 {v0.16b}, [%0], #16 \n" // store 8 pixels RGB565. - "b.gt 1b \n" + "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" // load 8 + // pixels + "prfm pldl1keep, [%0, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "uqadd v20.8b, v20.8b, v1.8b \n" + "uqadd v21.8b, v21.8b, v1.8b \n" + "uqadd v22.8b, v22.8b, v1.8b \n" ARGBTORGB565 + "st1 {v0.16b}, [%0], #16 \n" // store 8 pixels RGB565. 
+ "b.gt 1b \n" : "+r"(dst_rgb) // %0 : "r"(src_argb), // %1 "r"(dither4), // %2 @@ -1245,12 +1383,13 @@ void ARGBToARGB1555Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 pixels - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 + // pixels + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGBTOARGB1555 - "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels - // ARGB1555. - "b.gt 1b \n" + "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb1555), // %1 "+r"(width) // %2 @@ -1262,15 +1401,16 @@ void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, uint8_t* dst_argb4444, int width) { asm volatile( - "movi v4.16b, #0x0f \n" // bits to clear with + "movi v4.16b, #0x0f \n" // bits to clear with // vbic. "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 pixels - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 + // pixels + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGBTOARGB4444 - "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels - // ARGB4444. 
- "b.gt 1b \n" + "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb4444), // %1 "+r"(width) // %2 @@ -1280,20 +1420,21 @@ void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "movi v4.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "movi v4.8b, #25 \n" // B * 0.1016 coefficient + "movi v5.8b, #129 \n" // G * 0.5078 coefficient + "movi v6.8b, #66 \n" // R * 0.2578 coefficient + "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v3.8h, v0.8b, v4.8b \n" // B - "umlal v3.8h, v1.8b, v5.8b \n" // G - "umlal v3.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v3.8h, v0.8b, v4.8b \n" // B + "umlal v3.8h, v1.8b, v5.8b \n" // G + "umlal v3.8h, v2.8b, v6.8b \n" // R + "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v7.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1306,11 +1447,11 @@ void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 - // pixels - "subs %w2, %w2, #16 \n" // 16 processed per loop - "st1 {v3.16b}, [%1], #16 \n" // store 16 A's. 
- "b.gt 1b \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #16 \n" // 16 processed per loop + "st1 {v3.16b}, [%1], #16 \n" // store 16 A's. + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_a), // %1 "+r"(width) // %2 @@ -1321,18 +1462,19 @@ void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "movi v4.8b, #29 \n" // B * 0.1140 coefficient - "movi v5.8b, #150 \n" // G * 0.5870 coefficient - "movi v6.8b, #77 \n" // R * 0.2990 coefficient + "movi v4.8b, #29 \n" // B * 0.1140 coefficient + "movi v5.8b, #150 \n" // G * 0.5870 coefficient + "movi v6.8b, #77 \n" // R * 0.2990 coefficient "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v3.8h, v0.8b, v4.8b \n" // B - "umlal v3.8h, v1.8b, v5.8b \n" // G - "umlal v3.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v3.8h, v0.8b, v4.8b \n" // B + "umlal v3.8h, v1.8b, v5.8b \n" // G + "umlal v3.8h, v2.8b, v6.8b \n" // R + "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1342,18 +1484,19 @@ void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { void RGBAToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( - "movi v4.8b, #29 \n" // B * 0.1140 coefficient - "movi v5.8b, #150 \n" // G * 0.5870 coefficient - "movi v6.8b, #77 \n" // R * 0.2990 coefficient + "movi v4.8b, #29 \n" // B * 0.1140 coefficient + "movi v5.8b, #150 \n" // G * 0.5870 coefficient + "movi v6.8b, #77 \n" // R * 0.2990 coefficient "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 RGBA - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v0.8h, v1.8b, v4.8b \n" // B - "umlal v0.8h, v2.8b, v5.8b \n" // G - "umlal v0.8h, v3.8b, v6.8b \n" // R - "uqrshrn v3.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y - "st1 {v3.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 RGBA + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v0.8h, v1.8b, v4.8b \n" // B + "umlal v0.8h, v2.8b, v5.8b \n" // G + "umlal v0.8h, v3.8b, v6.8b \n" // R + "uqrshrn v3.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y + "st1 {v3.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1367,33 +1510,33 @@ void ARGBToUV444Row_NEON(const uint8_t* src_argb, uint8_t* dst_v, int width) { asm volatile( - "movi v24.8b, #112 \n" // UB / VR 0.875 + "movi v24.8b, #112 \n" // UB / VR 0.875 // coefficient - "movi v25.8b, #74 \n" // UG -0.5781 coefficient - "movi v26.8b, #38 \n" // UR -0.2969 coefficient - "movi v27.8b, #18 \n" // VB -0.1406 coefficient - "movi v28.8b, #94 \n" // VG -0.7344 coefficient - "movi v29.16b,#0x80 \n" // 128.5 + "movi v25.8b, #74 \n" // UG -0.5781 coefficient + "movi v26.8b, #38 \n" // UR -0.2969 coefficient + "movi v27.8b, #18 \n" // VB -0.1406 coefficient + "movi v28.8b, #94 \n" // VG -0.7344 coefficient + "movi v29.16b,#0x80 \n" // 128.5 "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - // pixels. - "subs %w3, %w3, #8 \n" // 8 processed per loop. - "umull v4.8h, v0.8b, v24.8b \n" // B - "umlsl v4.8h, v1.8b, v25.8b \n" // G - "umlsl v4.8h, v2.8b, v26.8b \n" // R - "add v4.8h, v4.8h, v29.8h \n" // +128 -> unsigned + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "umull v4.8h, v0.8b, v24.8b \n" // B + "umlsl v4.8h, v1.8b, v25.8b \n" // G + "umlsl v4.8h, v2.8b, v26.8b \n" // R + "add v4.8h, v4.8h, v29.8h \n" // +128 -> unsigned - "umull v3.8h, v2.8b, v24.8b \n" // R - "umlsl v3.8h, v1.8b, v28.8b \n" // G - "umlsl v3.8h, v0.8b, v27.8b \n" // B - "add v3.8h, v3.8h, v29.8h \n" // +128 -> unsigned + "umull v3.8h, v2.8b, v24.8b \n" // R + "umlsl v3.8h, v1.8b, v28.8b \n" // G + "umlsl v3.8h, v0.8b, v27.8b \n" // B + "add v3.8h, v3.8h, v29.8h \n" // +128 -> unsigned - "uqshrn v0.8b, v4.8h, #8 \n" // 16 bit to 8 bit U - "uqshrn v1.8b, v3.8h, #8 \n" // 16 bit to 8 bit V + "uqshrn v0.8b, v4.8h, #8 \n" // 16 bit to 8 bit U + "uqshrn v1.8b, v3.8h, #8 \n" // 16 bit to 8 bit V - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels U. 
- "st1 {v1.8b}, [%2], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%2], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 @@ -1437,26 +1580,28 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb_1 = src_argb + src_stride_argb; asm volatile ( RGBTOUV_SETUP_REG - "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. - "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. + "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. - "urshr v0.8h, v0.8h, #1 \n" // 2x average - "urshr v1.8h, v1.8h, #1 \n" - "urshr v2.8h, v2.8h, #1 \n" + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 16 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. 
+ "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(src_argb_1), // %1 "+r"(dst_u), // %2 @@ -1468,7 +1613,6 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb, ); } -// TODO(fbarchard): Subsample match C code. void ARGBToUVJRow_NEON(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, @@ -1476,31 +1620,33 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb, int width) { const uint8_t* src_argb_1 = src_argb + src_stride_argb; asm volatile ( - "movi v20.8h, #63, lsl #0 \n" // UB/VR coeff (0.500) / 2 - "movi v21.8h, #42, lsl #0 \n" // UG coeff (-0.33126) / 2 - "movi v22.8h, #21, lsl #0 \n" // UR coeff (-0.16874) / 2 - "movi v23.8h, #10, lsl #0 \n" // VB coeff (-0.08131) / 2 - "movi v24.8h, #53, lsl #0 \n" // VG coeff (-0.41869) / 2 - "movi v25.16b, #0x80 \n" // 128.5 (0x8080 in 16-bit) - "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. - "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. + "movi v20.8h, #63, lsl #0 \n" // UB/VR coeff (0.500) / 2 + "movi v21.8h, #42, lsl #0 \n" // UG coeff (-0.33126) / 2 + "movi v22.8h, #21, lsl #0 \n" // UR coeff (-0.16874) / 2 + "movi v23.8h, #10, lsl #0 \n" // VB coeff (-0.08131) / 2 + "movi v24.8h, #53, lsl #0 \n" // VG coeff (-0.41869) / 2 + "movi v25.16b, #0x80 \n" // 128.5 (0x8080 in 16-bit) + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. 
+ "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. - "urshr v0.8h, v0.8h, #1 \n" // 2x average - "urshr v1.8h, v1.8h, #1 \n" - "urshr v2.8h, v2.8h, #1 \n" + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 32 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(src_argb_1), // %1 "+r"(dst_u), // %2 @@ -1520,25 +1666,27 @@ void BGRAToUVRow_NEON(const uint8_t* src_bgra, const uint8_t* src_bgra_1 = src_bgra + src_stride_bgra; asm volatile ( RGBTOUV_SETUP_REG - "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "uaddlp v0.8h, v3.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v3.8h, v2.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v2.8h, v1.16b \n" // R 16 bytes -> 8 shorts. - "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more - "uadalp v0.8h, v7.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v3.8h, v6.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v2.8h, v5.16b \n" // R 16 bytes -> 8 shorts. + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v0.8h, v3.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v3.8h, v2.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v1.16b \n" // R 16 bytes -> 8 shorts. + "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v7.16b \n" // B 16 bytes -> 8 shorts. 
+ "uadalp v3.8h, v6.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v5.16b \n" // R 16 bytes -> 8 shorts. - "urshr v0.8h, v0.8h, #1 \n" // 2x average - "urshr v1.8h, v3.8h, #1 \n" - "urshr v2.8h, v2.8h, #1 \n" + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v3.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 32 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_bgra), // %0 "+r"(src_bgra_1), // %1 "+r"(dst_u), // %2 @@ -1558,25 +1706,27 @@ void ABGRToUVRow_NEON(const uint8_t* src_abgr, const uint8_t* src_abgr_1 = src_abgr + src_stride_abgr; asm volatile ( RGBTOUV_SETUP_REG - "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "uaddlp v3.8h, v2.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v2.8h, v1.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v1.8h, v0.16b \n" // R 16 bytes -> 8 shorts. - "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more. - "uadalp v3.8h, v6.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v2.8h, v5.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v1.8h, v4.16b \n" // R 16 bytes -> 8 shorts. + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v3.8h, v2.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v2.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v1.8h, v0.16b \n" // R 16 bytes -> 8 shorts. + "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more. + "prfm pldl1keep, [%1, 448] \n" + "uadalp v3.8h, v6.16b \n" // B 16 bytes -> 8 shorts. + "uadalp v2.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v1.8h, v4.16b \n" // R 16 bytes -> 8 shorts. 
- "urshr v0.8h, v3.8h, #1 \n" // 2x average - "urshr v2.8h, v2.8h, #1 \n" - "urshr v1.8h, v1.8h, #1 \n" + "urshr v0.8h, v3.8h, #1 \n" // 2x average + "urshr v2.8h, v2.8h, #1 \n" + "urshr v1.8h, v1.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 32 processed per loop. RGBTOUV(v0.8h, v2.8h, v1.8h) - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_abgr), // %0 "+r"(src_abgr_1), // %1 "+r"(dst_u), // %2 @@ -1596,25 +1746,27 @@ void RGBAToUVRow_NEON(const uint8_t* src_rgba, const uint8_t* src_rgba_1 = src_rgba + src_stride_rgba; asm volatile ( RGBTOUV_SETUP_REG - "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "uaddlp v0.8h, v1.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v1.8h, v2.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v2.8h, v3.16b \n" // R 16 bytes -> 8 shorts. - "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more. - "uadalp v0.8h, v5.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v1.8h, v6.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v2.8h, v7.16b \n" // R 16 bytes -> 8 shorts. + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v0.8h, v1.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v1.8h, v2.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v3.16b \n" // R 16 bytes -> 8 shorts. + "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more. + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v5.16b \n" // B 16 bytes -> 8 shorts. + "uadalp v1.8h, v6.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v7.16b \n" // R 16 bytes -> 8 shorts. 
- "urshr v0.8h, v0.8h, #1 \n" // 2x average - "urshr v1.8h, v1.8h, #1 \n" - "urshr v2.8h, v2.8h, #1 \n" + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 32 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_rgba), // %0 "+r"(src_rgba_1), // %1 "+r"(dst_u), // %2 @@ -1634,25 +1786,27 @@ void RGB24ToUVRow_NEON(const uint8_t* src_rgb24, const uint8_t* src_rgb24_1 = src_rgb24 + src_stride_rgb24; asm volatile ( RGBTOUV_SETUP_REG - "1: \n" - "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 pixels. - "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. - "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 16 more. - "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. + "1: \n" + "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 pixels. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. + "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 16 more. + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. 
- "urshr v0.8h, v0.8h, #1 \n" // 2x average - "urshr v1.8h, v1.8h, #1 \n" - "urshr v2.8h, v2.8h, #1 \n" + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 32 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_rgb24), // %0 "+r"(src_rgb24_1), // %1 "+r"(dst_u), // %2 @@ -1672,25 +1826,27 @@ void RAWToUVRow_NEON(const uint8_t* src_raw, const uint8_t* src_raw_1 = src_raw + src_stride_raw; asm volatile ( RGBTOUV_SETUP_REG - "1: \n" - "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 8 RAW pixels. - "uaddlp v2.8h, v2.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v0.8h, v0.16b \n" // R 16 bytes -> 8 shorts. - "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 8 more RAW pixels - "uadalp v2.8h, v6.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v0.8h, v4.16b \n" // R 16 bytes -> 8 shorts. + "1: \n" + "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 8 RAW pixels. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v2.8h, v2.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v0.8h, v0.16b \n" // R 16 bytes -> 8 shorts. + "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 8 more RAW pixels + "prfm pldl1keep, [%1, 448] \n" + "uadalp v2.8h, v6.16b \n" // B 16 bytes -> 8 shorts. + "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v0.8h, v4.16b \n" // R 16 bytes -> 8 shorts. 
- "urshr v2.8h, v2.8h, #1 \n" // 2x average - "urshr v1.8h, v1.8h, #1 \n" - "urshr v0.8h, v0.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v0.8h, v0.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 32 processed per loop. RGBTOUV(v2.8h, v1.8h, v0.8h) - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_raw), // %0 "+r"(src_raw_1), // %1 "+r"(dst_u), // %2 @@ -1702,7 +1858,7 @@ void RAWToUVRow_NEON(const uint8_t* src_raw, ); } -// 16x2 pixels -> 8x1. width is number of argb pixels. e.g. 16. +// 16x2 pixels -> 8x1. width is number of rgb pixels. e.g. 16. void RGB565ToUVRow_NEON(const uint8_t* src_rgb565, int src_stride_rgb565, uint8_t* dst_u, @@ -1710,67 +1866,54 @@ void RGB565ToUVRow_NEON(const uint8_t* src_rgb565, int width) { const uint8_t* src_rgb565_1 = src_rgb565 + src_stride_rgb565; asm volatile( - "movi v22.8h, #56, lsl #0 \n" // UB / VR coeff (0.875) / - // 2 - "movi v23.8h, #37, lsl #0 \n" // UG coeff (-0.5781) / 2 - "movi v24.8h, #19, lsl #0 \n" // UR coeff (-0.2969) / 2 - "movi v25.8h, #9 , lsl #0 \n" // VB coeff (-0.1406) / 2 - "movi v26.8h, #47, lsl #0 \n" // VG coeff (-0.7344) / 2 - "movi v27.16b, #0x80 \n" // 128.5 0x8080 in 16bit + RGBTOUV_SETUP_REG "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. + "prfm pldl1keep, [%0, 448] \n" RGB565TOARGB - "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uaddlp v18.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uaddlp v20.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%0], #16 \n" // next 8 RGB565 pixels. + "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. 
+ "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "ld1 {v0.16b}, [%0], #16 \n" // next 8 RGB565 pixels. RGB565TOARGB - "uaddlp v17.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uaddlp v19.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uaddlp v21.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "uaddlp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uaddlp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%1], #16 \n" // load 8 RGB565 pixels. + "ld1 {v0.16b}, [%1], #16 \n" // load 8 RGB565 pixels. + "prfm pldl1keep, [%1, 448] \n" RGB565TOARGB - "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uadalp v18.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uadalp v20.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%1], #16 \n" // next 8 RGB565 pixels. + "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "ld1 {v0.16b}, [%1], #16 \n" // next 8 RGB565 pixels. RGB565TOARGB - "uadalp v17.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uadalp v19.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uadalp v21.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "uadalp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uadalp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uadalp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ins v16.D[1], v17.D[0] \n" - "ins v18.D[1], v19.D[0] \n" - "ins v20.D[1], v21.D[0] \n" + "ins v16.D[1], v26.D[0] \n" + "ins v17.D[1], v27.D[0] \n" + "ins v18.D[1], v28.D[0] \n" - "urshr v4.8h, v16.8h, #1 \n" // 2x average - "urshr v5.8h, v18.8h, #1 \n" - "urshr v6.8h, v20.8h, #1 \n" + "urshr v0.8h, v16.8h, #1 \n" // 2x average + "urshr v1.8h, v17.8h, #1 \n" + "urshr v2.8h, v18.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 16 processed per loop. 
- "mul v16.8h, v4.8h, v22.8h \n" // B - "mls v16.8h, v5.8h, v23.8h \n" // G - "mls v16.8h, v6.8h, v24.8h \n" // R - "add v16.8h, v16.8h, v27.8h \n" // +128 -> unsigned - "mul v17.8h, v6.8h, v22.8h \n" // R - "mls v17.8h, v5.8h, v26.8h \n" // G - "mls v17.8h, v4.8h, v25.8h \n" // B - "add v17.8h, v17.8h, v27.8h \n" // +128 -> unsigned - "uqshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit U - "uqshrn v1.8b, v17.8h, #8 \n" // 16 bit to 8 bit V - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "subs %w4, %w4, #16 \n" // 16 processed per loop. + RGBTOUV(v0.8h, v1.8h, v2.8h) + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_rgb565), // %0 "+r"(src_rgb565_1), // %1 - "+r"(dst_u), // %2 - "+r"(dst_v), // %3 - "+r"(width) // %4 + "+r"(dst_u), // %2 + "+r"(dst_v), // %3 + "+r"(width) // %4 : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16", - "v17", "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", "v26", - "v27"); + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v16", "v17", + "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", + "v28"); } // 16x2 pixels -> 8x1. width is number of argb pixels. e.g. 16. @@ -1783,50 +1926,43 @@ void ARGB1555ToUVRow_NEON(const uint8_t* src_argb1555, asm volatile( RGBTOUV_SETUP_REG "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. + "prfm pldl1keep, [%0, 448] \n" RGB555TOARGB - "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB1555 pixels. + "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. 
+ "ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB1555 pixels. RGB555TOARGB - "uaddlp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uaddlp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "uaddlp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uaddlp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB1555 pixels. + "ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB1555 pixels. + "prfm pldl1keep, [%1, 448] \n" RGB555TOARGB - "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB1555 pixels. + "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB1555 pixels. RGB555TOARGB - "uadalp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uadalp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uadalp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "uadalp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uadalp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uadalp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ins v16.D[1], v26.D[0] \n" - "ins v17.D[1], v27.D[0] \n" - "ins v18.D[1], v28.D[0] \n" + "ins v16.D[1], v26.D[0] \n" + "ins v17.D[1], v27.D[0] \n" + "ins v18.D[1], v28.D[0] \n" - "urshr v4.8h, v16.8h, #1 \n" // 2x average - "urshr v5.8h, v17.8h, #1 \n" - "urshr v6.8h, v18.8h, #1 \n" + "urshr v0.8h, v16.8h, #1 \n" // 2x average + "urshr v1.8h, v17.8h, #1 \n" + "urshr v2.8h, v18.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 16 processed per loop. 
- "mul v2.8h, v4.8h, v20.8h \n" // B - "mls v2.8h, v5.8h, v21.8h \n" // G - "mls v2.8h, v6.8h, v22.8h \n" // R - "add v2.8h, v2.8h, v25.8h \n" // +128 -> unsigned - "mul v3.8h, v6.8h, v20.8h \n" // R - "mls v3.8h, v5.8h, v24.8h \n" // G - "mls v3.8h, v4.8h, v23.8h \n" // B - "add v3.8h, v3.8h, v25.8h \n" // +128 -> unsigned - "uqshrn v0.8b, v2.8h, #8 \n" // 16 bit to 8 bit U - "uqshrn v1.8b, v3.8h, #8 \n" // 16 bit to 8 bit V - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "subs %w4, %w4, #16 \n" // 16 processed per loop. + RGBTOUV(v0.8h, v1.8h, v2.8h) + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_argb1555), // %0 "+r"(src_argb1555_1), // %1 "+r"(dst_u), // %2 @@ -1846,52 +1982,45 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444, int width) { const uint8_t* src_argb4444_1 = src_argb4444 + src_stride_argb4444; asm volatile( - RGBTOUV_SETUP_REG + RGBTOUV_SETUP_REG // sets v20-v25 "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. + "prfm pldl1keep, [%0, 448] \n" ARGB4444TOARGB - "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB4444 pixels. + "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB4444 pixels. ARGB4444TOARGB - "uaddlp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uaddlp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "uaddlp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uaddlp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. 
- "ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB4444 pixels. + "ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB4444 pixels. + "prfm pldl1keep, [%1, 448] \n" ARGB4444TOARGB - "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB4444 pixels. + "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB4444 pixels. ARGB4444TOARGB - "uadalp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. - "uadalp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. - "uadalp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. + "uadalp v26.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "uadalp v27.4h, v1.8b \n" // G 8 bytes -> 4 shorts. + "uadalp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. - "ins v16.D[1], v26.D[0] \n" - "ins v17.D[1], v27.D[0] \n" - "ins v18.D[1], v28.D[0] \n" + "ins v16.D[1], v26.D[0] \n" + "ins v17.D[1], v27.D[0] \n" + "ins v18.D[1], v28.D[0] \n" - "urshr v4.8h, v16.8h, #1 \n" // 2x average - "urshr v5.8h, v17.8h, #1 \n" - "urshr v6.8h, v18.8h, #1 \n" + "urshr v0.8h, v16.8h, #1 \n" // 2x average + "urshr v1.8h, v17.8h, #1 \n" + "urshr v2.8h, v18.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 16 processed per loop. - "mul v2.8h, v4.8h, v20.8h \n" // B - "mls v2.8h, v5.8h, v21.8h \n" // G - "mls v2.8h, v6.8h, v22.8h \n" // R - "add v2.8h, v2.8h, v25.8h \n" // +128 -> unsigned - "mul v3.8h, v6.8h, v20.8h \n" // R - "mls v3.8h, v5.8h, v24.8h \n" // G - "mls v3.8h, v4.8h, v23.8h \n" // B - "add v3.8h, v3.8h, v25.8h \n" // +128 -> unsigned - "uqshrn v0.8b, v2.8h, #8 \n" // 16 bit to 8 bit U - "uqshrn v1.8b, v3.8h, #8 \n" // 16 bit to 8 bit V - "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. - "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. - "b.gt 1b \n" + "subs %w4, %w4, #16 \n" // 16 processed per loop. 
+ RGBTOUV(v0.8h, v1.8h, v2.8h) + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" : "+r"(src_argb4444), // %0 "+r"(src_argb4444_1), // %1 "+r"(dst_u), // %2 @@ -1907,21 +2036,22 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444, void RGB565ToYRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { asm volatile( - "movi v24.8b, #25 \n" // B * 0.1016 coefficient - "movi v25.8b, #129 \n" // G * 0.5078 coefficient - "movi v26.8b, #66 \n" // R * 0.2578 coefficient - "movi v27.8b, #16 \n" // Add 16 constant + "movi v24.8b, #25 \n" // B * 0.1016 coefficient + "movi v25.8b, #129 \n" // G * 0.5078 coefficient + "movi v26.8b, #66 \n" // R * 0.2578 coefficient + "movi v27.8b, #16 \n" // Add 16 constant "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. RGB565TOARGB - "umull v3.8h, v0.8b, v24.8b \n" // B - "umlal v3.8h, v1.8b, v25.8b \n" // G - "umlal v3.8h, v2.8b, v26.8b \n" // R - "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v27.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "umull v3.8h, v0.8b, v24.8b \n" // B + "umlal v3.8h, v1.8b, v25.8b \n" // G + "umlal v3.8h, v2.8b, v26.8b \n" // R + "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v27.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_rgb565), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1934,21 +2064,22 @@ void ARGB1555ToYRow_NEON(const uint8_t* src_argb1555, uint8_t* dst_y, int width) { asm volatile( - "movi v4.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "movi v4.8b, #25 \n" // B * 0.1016 coefficient + "movi v5.8b, #129 \n" // G * 0.5078 coefficient + "movi v6.8b, #66 \n" // R * 0.2578 coefficient + "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGB1555TOARGB - "umull v3.8h, v0.8b, v4.8b \n" // B - "umlal v3.8h, v1.8b, v5.8b \n" // G - "umlal v3.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "umull v3.8h, v0.8b, v4.8b \n" // B + "umlal v3.8h, v1.8b, v5.8b \n" // G + "umlal v3.8h, v2.8b, v6.8b \n" // R + "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v7.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_argb1555), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1960,21 +2091,22 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, uint8_t* dst_y, int width) { asm volatile( - "movi v24.8b, #25 \n" // B * 0.1016 coefficient - "movi v25.8b, #129 \n" // G * 0.5078 coefficient - "movi v26.8b, #66 \n" // R * 0.2578 coefficient - "movi v27.8b, #16 \n" // Add 16 constant + "movi v24.8b, #25 \n" // B * 0.1016 coefficient + "movi v25.8b, #129 \n" // G * 0.5078 coefficient + "movi v26.8b, #66 \n" // R * 0.2578 coefficient + "movi v27.8b, #16 \n" // Add 16 constant "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. + "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGB4444TOARGB - "umull v3.8h, v0.8b, v24.8b \n" // B - "umlal v3.8h, v1.8b, v25.8b \n" // G - "umlal v3.8h, v2.8b, v26.8b \n" // R - "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v27.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "umull v3.8h, v0.8b, v24.8b \n" // B + "umlal v3.8h, v1.8b, v25.8b \n" // G + "umlal v3.8h, v2.8b, v26.8b \n" // R + "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v27.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_argb4444), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1984,20 +2116,21 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { asm volatile( - "movi v4.8b, #66 \n" // R * 0.2578 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #25 \n" // B * 0.1016 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "movi v4.8b, #66 \n" // R * 0.2578 coefficient + "movi v5.8b, #129 \n" // G * 0.5078 coefficient + "movi v6.8b, #25 \n" // B * 0.1016 coefficient + "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v1.8b, v4.8b \n" // R - "umlal v16.8h, v2.8b, v5.8b \n" // G - "umlal v16.8h, v3.8b, v6.8b \n" // B - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v16.8h, v1.8b, v4.8b \n" // R + "umlal v16.8h, v2.8b, v5.8b \n" // G + "umlal v16.8h, v3.8b, v6.8b \n" // B + "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v7.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_bgra), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2007,20 +2140,21 @@ void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) { asm volatile( - "movi v6.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v4.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "movi v6.8b, #25 \n" // B * 0.1016 coefficient + "movi v5.8b, #129 \n" // G * 0.5078 coefficient + "movi v4.8b, #66 \n" // R * 0.2578 coefficient + "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v0.8b, v4.8b \n" // R - "umlal v16.8h, v1.8b, v5.8b \n" // G - "umlal v16.8h, v2.8b, v6.8b \n" // B - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v16.8h, v0.8b, v4.8b \n" // R + "umlal v16.8h, v1.8b, v5.8b \n" // G + "umlal v16.8h, v2.8b, v6.8b \n" // B + "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v7.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_abgr), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2030,20 +2164,21 @@ void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) { void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { asm volatile( - "movi v4.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "movi v4.8b, #25 \n" // B * 0.1016 coefficient + "movi v5.8b, #129 \n" // G * 0.5078 coefficient + "movi v6.8b, #66 \n" // R * 0.2578 coefficient + "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v1.8b, v4.8b \n" // B - "umlal v16.8h, v2.8b, v5.8b \n" // G - "umlal v16.8h, v3.8b, v6.8b \n" // R - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v16.8h, v1.8b, v4.8b \n" // B + "umlal v16.8h, v2.8b, v5.8b \n" // G + "umlal v16.8h, v3.8b, v6.8b \n" // R + "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v7.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_rgba), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2053,20 +2188,21 @@ void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { asm volatile( - "movi v4.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "movi v4.8b, #25 \n" // B * 0.1016 coefficient + "movi v5.8b, #129 \n" // G * 0.5078 coefficient + "movi v6.8b, #66 \n" // R * 0.2578 coefficient + "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v0.8b, v4.8b \n" // B - "umlal v16.8h, v1.8b, v5.8b \n" // G - "umlal v16.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v16.8h, v0.8b, v4.8b \n" // B + "umlal v16.8h, v1.8b, v5.8b \n" // G + "umlal v16.8h, v2.8b, v6.8b \n" // R + "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v7.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "b.gt 1b \n" : "+r"(src_rgb24), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2076,20 +2212,21 @@ void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { asm volatile( - "movi v6.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v4.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "movi v6.8b, #25 \n" // B * 0.1016 coefficient + "movi v5.8b, #129 \n" // G * 0.5078 coefficient + "movi v4.8b, #66 \n" // R * 0.2578 coefficient + "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v0.8b, v4.8b \n" // B - "umlal v16.8h, v1.8b, v5.8b \n" // G - "umlal v16.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" + "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v16.8h, v0.8b, v4.8b \n" // B + "umlal v16.8h, v1.8b, v5.8b \n" // G + "umlal v16.8h, v2.8b, v6.8b \n" // R + "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y + "uqadd v0.8b, v0.8b, v7.8b \n" + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. + "b.gt 1b \n" : "+r"(src_raw), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -2097,6 +2234,50 @@ void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16"); } +void RGB24ToYJRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { + asm volatile( + "movi v4.8b, #29 \n" // B * 0.1140 coefficient + "movi v5.8b, #150 \n" // G * 0.5870 coefficient + "movi v6.8b, #77 \n" // R * 0.2990 coefficient + "1: \n" + "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. 
+ "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v0.8h, v0.8b, v4.8b \n" // B + "umlal v0.8h, v1.8b, v5.8b \n" // G + "umlal v0.8h, v2.8b, v6.8b \n" // R + "uqrshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. + "b.gt 1b \n" + : "+r"(src_rgb24), // %0 + "+r"(dst_yj), // %1 + "+r"(width) // %2 + : + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6"); +} + +void RAWToYJRow_NEON(const uint8_t* src_raw, uint8_t* dst_yj, int width) { + asm volatile( + "movi v6.8b, #29 \n" // B * 0.1140 coefficient + "movi v5.8b, #150 \n" // G * 0.5870 coefficient + "movi v4.8b, #77 \n" // R * 0.2990 coefficient + "1: \n" + "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v0.8h, v0.8b, v4.8b \n" // B + "umlal v0.8h, v1.8b, v5.8b \n" // G + "umlal v0.8h, v2.8b, v6.8b \n" // R + "uqrshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y + "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. + "b.gt 1b \n" + : "+r"(src_raw), // %0 + "+r"(dst_yj), // %1 + "+r"(width) // %2 + : + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6"); +} + // Bilinear filter 16x2 -> 16x1 void InterpolateRow_NEON(uint8_t* dst_ptr, const uint8_t* src_ptr, @@ -2107,44 +2288,49 @@ void InterpolateRow_NEON(uint8_t* dst_ptr, int y0_fraction = 256 - y1_fraction; const uint8_t* src_ptr1 = src_ptr + src_stride; asm volatile( - "cmp %w4, #0 \n" - "b.eq 100f \n" - "cmp %w4, #128 \n" - "b.eq 50f \n" + "cmp %w4, #0 \n" + "b.eq 100f \n" + "cmp %w4, #128 \n" + "b.eq 50f \n" - "dup v5.16b, %w4 \n" - "dup v4.16b, %w5 \n" + "dup v5.16b, %w4 \n" + "dup v4.16b, %w5 \n" // General purpose row blend. 
"1: \n" - "ld1 {v0.16b}, [%1], #16 \n" - "ld1 {v1.16b}, [%2], #16 \n" - "subs %w3, %w3, #16 \n" - "umull v2.8h, v0.8b, v4.8b \n" - "umull2 v3.8h, v0.16b, v4.16b \n" - "umlal v2.8h, v1.8b, v5.8b \n" - "umlal2 v3.8h, v1.16b, v5.16b \n" - "rshrn v0.8b, v2.8h, #8 \n" - "rshrn2 v0.16b, v3.8h, #8 \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 1b \n" - "b 99f \n" + "ld1 {v0.16b}, [%1], #16 \n" + "ld1 {v1.16b}, [%2], #16 \n" + "prfm pldl1keep, [%1, 448] \n" + "prfm pldl1keep, [%2, 448] \n" + "subs %w3, %w3, #16 \n" + "umull v2.8h, v0.8b, v4.8b \n" + "umull2 v3.8h, v0.16b, v4.16b \n" + "umlal v2.8h, v1.8b, v5.8b \n" + "umlal2 v3.8h, v1.16b, v5.16b \n" + "rshrn v0.8b, v2.8h, #8 \n" + "rshrn2 v0.16b, v3.8h, #8 \n" + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 1b \n" + "b 99f \n" // Blend 50 / 50. "50: \n" - "ld1 {v0.16b}, [%1], #16 \n" - "ld1 {v1.16b}, [%2], #16 \n" - "subs %w3, %w3, #16 \n" - "urhadd v0.16b, v0.16b, v1.16b \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 50b \n" - "b 99f \n" + "ld1 {v0.16b}, [%1], #16 \n" + "ld1 {v1.16b}, [%2], #16 \n" + "prfm pldl1keep, [%1, 448] \n" + "prfm pldl1keep, [%2, 448] \n" + "subs %w3, %w3, #16 \n" + "urhadd v0.16b, v0.16b, v1.16b \n" + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 50b \n" + "b 99f \n" // Blend 100 / 0 - Copy row unchanged. "100: \n" - "ld1 {v0.16b}, [%1], #16 \n" - "subs %w3, %w3, #16 \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 100b \n" + "ld1 {v0.16b}, [%1], #16 \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #16 \n" + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 100b \n" "99: \n" : "+r"(dst_ptr), // %0 @@ -2163,56 +2349,60 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0, uint8_t* dst_argb, int width) { asm volatile( - "subs %w3, %w3, #8 \n" - "b.lt 89f \n" + "subs %w3, %w3, #8 \n" + "b.lt 89f \n" // Blend 8 pixels. "8: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB0 - // pixels - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 ARGB1 - // pixels - "subs %w3, %w3, #8 \n" // 8 processed per loop. 
- "umull v16.8h, v4.8b, v3.8b \n" // db * a - "umull v17.8h, v5.8b, v3.8b \n" // dg * a - "umull v18.8h, v6.8b, v3.8b \n" // dr * a - "uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8 - "uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8 - "uqrshrn v18.8b, v18.8h, #8 \n" // dr >>= 8 - "uqsub v4.8b, v4.8b, v16.8b \n" // db - (db * a / 256) - "uqsub v5.8b, v5.8b, v17.8b \n" // dg - (dg * a / 256) - "uqsub v6.8b, v6.8b, v18.8b \n" // dr - (dr * a / 256) - "uqadd v0.8b, v0.8b, v4.8b \n" // + sb - "uqadd v1.8b, v1.8b, v5.8b \n" // + sg - "uqadd v2.8b, v2.8b, v6.8b \n" // + sr - "movi v3.8b, #255 \n" // a = 255 - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB - // pixels - "b.ge 8b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB0 + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 ARGB1 + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "umull v16.8h, v4.8b, v3.8b \n" // db * a + "umull v17.8h, v5.8b, v3.8b \n" // dg * a + "umull v18.8h, v6.8b, v3.8b \n" // dr * a + "uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8 + "uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8 + "uqrshrn v18.8b, v18.8h, #8 \n" // dr >>= 8 + "uqsub v4.8b, v4.8b, v16.8b \n" // db - (db * a / 256) + "uqsub v5.8b, v5.8b, v17.8b \n" // dg - (dg * a / 256) + "uqsub v6.8b, v6.8b, v18.8b \n" // dr - (dr * a / 256) + "uqadd v0.8b, v0.8b, v4.8b \n" // + sb + "uqadd v1.8b, v1.8b, v5.8b \n" // + sg + "uqadd v2.8b, v2.8b, v6.8b \n" // + sr + "movi v3.8b, #255 \n" // a = 255 + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB + // pixels + "b.ge 8b \n" "89: \n" - "adds %w3, %w3, #8-1 \n" - "b.lt 99f \n" + "adds %w3, %w3, #8-1 \n" + "b.lt 99f \n" // Blend 1 pixels. "1: \n" - "ld4 {v0.b,v1.b,v2.b,v3.b}[0], [%0], #4 \n" // load 1 pixel ARGB0. - "ld4 {v4.b,v5.b,v6.b,v7.b}[0], [%1], #4 \n" // load 1 pixel ARGB1. - "subs %w3, %w3, #1 \n" // 1 processed per loop. 
- "umull v16.8h, v4.8b, v3.8b \n" // db * a - "umull v17.8h, v5.8b, v3.8b \n" // dg * a - "umull v18.8h, v6.8b, v3.8b \n" // dr * a - "uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8 - "uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8 - "uqrshrn v18.8b, v18.8h, #8 \n" // dr >>= 8 - "uqsub v4.8b, v4.8b, v16.8b \n" // db - (db * a / 256) - "uqsub v5.8b, v5.8b, v17.8b \n" // dg - (dg * a / 256) - "uqsub v6.8b, v6.8b, v18.8b \n" // dr - (dr * a / 256) - "uqadd v0.8b, v0.8b, v4.8b \n" // + sb - "uqadd v1.8b, v1.8b, v5.8b \n" // + sg - "uqadd v2.8b, v2.8b, v6.8b \n" // + sr - "movi v3.8b, #255 \n" // a = 255 - "st4 {v0.b,v1.b,v2.b,v3.b}[0], [%2], #4 \n" // store 1 pixel. - "b.ge 1b \n" + "ld4 {v0.b,v1.b,v2.b,v3.b}[0], [%0], #4 \n" // load 1 pixel + // ARGB0. + "ld4 {v4.b,v5.b,v6.b,v7.b}[0], [%1], #4 \n" // load 1 pixel + // ARGB1. + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #1 \n" // 1 processed per loop. + "umull v16.8h, v4.8b, v3.8b \n" // db * a + "umull v17.8h, v5.8b, v3.8b \n" // dg * a + "umull v18.8h, v6.8b, v3.8b \n" // dr * a + "uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8 + "uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8 + "uqrshrn v18.8b, v18.8h, #8 \n" // dr >>= 8 + "uqsub v4.8b, v4.8b, v16.8b \n" // db - (db * a / 256) + "uqsub v5.8b, v5.8b, v17.8b \n" // dg - (dg * a / 256) + "uqsub v6.8b, v6.8b, v18.8b \n" // dr - (dr * a / 256) + "uqadd v0.8b, v0.8b, v4.8b \n" // + sb + "uqadd v1.8b, v1.8b, v5.8b \n" // + sg + "uqadd v2.8b, v2.8b, v6.8b \n" // + sr + "movi v3.8b, #255 \n" // a = 255 + "st4 {v0.b,v1.b,v2.b,v3.b}[0], [%2], #4 \n" // store 1 pixel. + "b.ge 1b \n" "99: \n" @@ -2232,17 +2422,17 @@ void ARGBAttenuateRow_NEON(const uint8_t* src_argb, asm volatile( // Attenuate 8 pixels. "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "subs %w2, %w2, #8 \n" // 8 processed per loop. 
- "umull v4.8h, v0.8b, v3.8b \n" // b * a - "umull v5.8h, v1.8b, v3.8b \n" // g * a - "umull v6.8h, v2.8b, v3.8b \n" // r * a - "uqrshrn v0.8b, v4.8h, #8 \n" // b >>= 8 - "uqrshrn v1.8b, v5.8h, #8 \n" // g >>= 8 - "uqrshrn v2.8b, v6.8h, #8 \n" // r >>= 8 - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB - // pixels - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v4.8h, v0.8b, v3.8b \n" // b * a + "umull v5.8h, v1.8b, v3.8b \n" // g * a + "umull v6.8h, v2.8b, v3.8b \n" // r * a + "uqrshrn v0.8b, v4.8h, #8 \n" // b >>= 8 + "uqrshrn v1.8b, v5.8h, #8 \n" // g >>= 8 + "uqrshrn v2.8b, v6.8h, #8 \n" // r >>= 8 + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2258,32 +2448,33 @@ void ARGBQuantizeRow_NEON(uint8_t* dst_argb, int interval_offset, int width) { asm volatile( - "dup v4.8h, %w2 \n" - "ushr v4.8h, v4.8h, #1 \n" // scale >>= 1 - "dup v5.8h, %w3 \n" // interval multiply. - "dup v6.8h, %w4 \n" // interval add + "dup v4.8h, %w2 \n" + "ushr v4.8h, v4.8h, #1 \n" // scale >>= 1 + "dup v5.8h, %w3 \n" // interval multiply. + "dup v6.8h, %w4 \n" // interval add // 8 pixel loop. "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB. - "subs %w1, %w1, #8 \n" // 8 processed per loop. - "uxtl v0.8h, v0.8b \n" // b (0 .. 
255) - "uxtl v1.8h, v1.8b \n" - "uxtl v2.8h, v2.8b \n" - "sqdmulh v0.8h, v0.8h, v4.8h \n" // b * scale - "sqdmulh v1.8h, v1.8h, v4.8h \n" // g - "sqdmulh v2.8h, v2.8h, v4.8h \n" // r - "mul v0.8h, v0.8h, v5.8h \n" // b * interval_size - "mul v1.8h, v1.8h, v5.8h \n" // g - "mul v2.8h, v2.8h, v5.8h \n" // r - "add v0.8h, v0.8h, v6.8h \n" // b + interval_offset - "add v1.8h, v1.8h, v6.8h \n" // g - "add v2.8h, v2.8h, v6.8h \n" // r - "uqxtn v0.8b, v0.8h \n" - "uqxtn v1.8b, v1.8h \n" - "uqxtn v2.8b, v2.8h \n" - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB. + "prfm pldl1keep, [%0, 448] \n" + "subs %w1, %w1, #8 \n" // 8 processed per loop. + "uxtl v0.8h, v0.8b \n" // b (0 .. 255) + "uxtl v1.8h, v1.8b \n" + "uxtl v2.8h, v2.8b \n" + "sqdmulh v0.8h, v0.8h, v4.8h \n" // b * scale + "sqdmulh v1.8h, v1.8h, v4.8h \n" // g + "sqdmulh v2.8h, v2.8h, v4.8h \n" // r + "mul v0.8h, v0.8h, v5.8h \n" // b * interval_size + "mul v1.8h, v1.8h, v5.8h \n" // g + "mul v2.8h, v2.8h, v5.8h \n" // r + "add v0.8h, v0.8h, v6.8h \n" // b + interval_offset + "add v1.8h, v1.8h, v6.8h \n" // g + "add v2.8h, v2.8h, v6.8h \n" // r + "uqxtn v0.8b, v0.8h \n" + "uqxtn v1.8b, v1.8h \n" + "uqxtn v2.8b, v2.8h \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(dst_argb), // %0 "+r"(width) // %1 : "r"(scale), // %2 @@ -2300,28 +2491,29 @@ void ARGBShadeRow_NEON(const uint8_t* src_argb, int width, uint32_t value) { asm volatile( - "dup v0.4s, %w3 \n" // duplicate scale value. - "zip1 v0.8b, v0.8b, v0.8b \n" // v0.8b aarrggbb. - "ushr v0.8h, v0.8h, #1 \n" // scale / 2. + "dup v0.4s, %w3 \n" // duplicate scale value. + "zip1 v0.8b, v0.8b, v0.8b \n" // v0.8b aarrggbb. + "ushr v0.8h, v0.8h, #1 \n" // scale / 2. // 8 pixel loop. "1: \n" - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%0], #32 \n" // load 8 ARGB - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "uxtl v4.8h, v4.8b \n" // b (0 .. 
255) - "uxtl v5.8h, v5.8b \n" - "uxtl v6.8h, v6.8b \n" - "uxtl v7.8h, v7.8b \n" - "sqrdmulh v4.8h, v4.8h, v0.h[0] \n" // b * scale * 2 - "sqrdmulh v5.8h, v5.8h, v0.h[1] \n" // g - "sqrdmulh v6.8h, v6.8h, v0.h[2] \n" // r - "sqrdmulh v7.8h, v7.8h, v0.h[3] \n" // a - "uqxtn v4.8b, v4.8h \n" - "uqxtn v5.8b, v5.8h \n" - "uqxtn v6.8b, v6.8h \n" - "uqxtn v7.8b, v7.8h \n" - "st4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%0], #32 \n" // load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "uxtl v4.8h, v4.8b \n" // b (0 .. 255) + "uxtl v5.8h, v5.8b \n" + "uxtl v6.8h, v6.8b \n" + "uxtl v7.8h, v7.8b \n" + "sqrdmulh v4.8h, v4.8h, v0.h[0] \n" // b * scale * 2 + "sqrdmulh v5.8h, v5.8h, v0.h[1] \n" // g + "sqrdmulh v6.8h, v6.8h, v0.h[2] \n" // r + "sqrdmulh v7.8h, v7.8h, v0.h[3] \n" // a + "uqxtn v4.8b, v4.8h \n" + "uqxtn v5.8b, v5.8h \n" + "uqxtn v6.8b, v6.8h \n" + "uqxtn v7.8b, v7.8h \n" + "st4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2334,20 +2526,21 @@ void ARGBShadeRow_NEON(const uint8_t* src_argb, // C code is (29 * b + 150 * g + 77 * r + 128) >> 8; void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) { asm volatile( - "movi v24.8b, #29 \n" // B * 0.1140 coefficient - "movi v25.8b, #150 \n" // G * 0.5870 coefficient - "movi v26.8b, #77 \n" // R * 0.2990 coefficient + "movi v24.8b, #29 \n" // B * 0.1140 coefficient + "movi v25.8b, #150 \n" // G * 0.5870 coefficient + "movi v26.8b, #77 \n" // R * 0.2990 coefficient "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "subs %w2, %w2, #8 \n" // 8 processed per loop. 
- "umull v4.8h, v0.8b, v24.8b \n" // B - "umlal v4.8h, v1.8b, v25.8b \n" // G - "umlal v4.8h, v2.8b, v26.8b \n" // R - "uqrshrn v0.8b, v4.8h, #8 \n" // 16 bit to 8 bit B - "orr v1.8b, v0.8b, v0.8b \n" // G - "orr v2.8b, v0.8b, v0.8b \n" // R - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 pixels. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "umull v4.8h, v0.8b, v24.8b \n" // B + "umlal v4.8h, v1.8b, v25.8b \n" // G + "umlal v4.8h, v2.8b, v26.8b \n" // R + "uqrshrn v0.8b, v4.8h, #8 \n" // 16 bit to 8 bit B + "orr v1.8b, v0.8b, v0.8b \n" // G + "orr v2.8b, v0.8b, v0.8b \n" // R + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 pixels. + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2362,32 +2555,33 @@ void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) { void ARGBSepiaRow_NEON(uint8_t* dst_argb, int width) { asm volatile( - "movi v20.8b, #17 \n" // BB coefficient - "movi v21.8b, #68 \n" // BG coefficient - "movi v22.8b, #35 \n" // BR coefficient - "movi v24.8b, #22 \n" // GB coefficient - "movi v25.8b, #88 \n" // GG coefficient - "movi v26.8b, #45 \n" // GR coefficient - "movi v28.8b, #24 \n" // BB coefficient - "movi v29.8b, #98 \n" // BG coefficient - "movi v30.8b, #50 \n" // BR coefficient + "movi v20.8b, #17 \n" // BB coefficient + "movi v21.8b, #68 \n" // BG coefficient + "movi v22.8b, #35 \n" // BR coefficient + "movi v24.8b, #22 \n" // GB coefficient + "movi v25.8b, #88 \n" // GG coefficient + "movi v26.8b, #45 \n" // GR coefficient + "movi v28.8b, #24 \n" // BB coefficient + "movi v29.8b, #98 \n" // BG coefficient + "movi v30.8b, #50 \n" // BR coefficient "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB pixels. - "subs %w1, %w1, #8 \n" // 8 processed per loop. 
- "umull v4.8h, v0.8b, v20.8b \n" // B to Sepia B - "umlal v4.8h, v1.8b, v21.8b \n" // G - "umlal v4.8h, v2.8b, v22.8b \n" // R - "umull v5.8h, v0.8b, v24.8b \n" // B to Sepia G - "umlal v5.8h, v1.8b, v25.8b \n" // G - "umlal v5.8h, v2.8b, v26.8b \n" // R - "umull v6.8h, v0.8b, v28.8b \n" // B to Sepia R - "umlal v6.8h, v1.8b, v29.8b \n" // G - "umlal v6.8h, v2.8b, v30.8b \n" // R - "uqshrn v0.8b, v4.8h, #7 \n" // 16 bit to 8 bit B - "uqshrn v1.8b, v5.8h, #7 \n" // 16 bit to 8 bit G - "uqshrn v2.8b, v6.8h, #7 \n" // 16 bit to 8 bit R - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // store 8 pixels. - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB pixels. + "prfm pldl1keep, [%0, 448] \n" + "subs %w1, %w1, #8 \n" // 8 processed per loop. + "umull v4.8h, v0.8b, v20.8b \n" // B to Sepia B + "umlal v4.8h, v1.8b, v21.8b \n" // G + "umlal v4.8h, v2.8b, v22.8b \n" // R + "umull v5.8h, v0.8b, v24.8b \n" // B to Sepia G + "umlal v5.8h, v1.8b, v25.8b \n" // G + "umlal v5.8h, v2.8b, v26.8b \n" // R + "umull v6.8h, v0.8b, v28.8b \n" // B to Sepia R + "umlal v6.8h, v1.8b, v29.8b \n" // G + "umlal v6.8h, v2.8b, v30.8b \n" // R + "uqshrn v0.8b, v4.8h, #7 \n" // 16 bit to 8 bit B + "uqshrn v1.8b, v5.8h, #7 \n" // 16 bit to 8 bit G + "uqshrn v2.8b, v6.8h, #7 \n" // 16 bit to 8 bit R + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // store 8 pixels. + "b.gt 1b \n" : "+r"(dst_argb), // %0 "+r"(width) // %1 : @@ -2403,51 +2597,52 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb, const int8_t* matrix_argb, int width) { asm volatile( - "ld1 {v2.16b}, [%3] \n" // load 3 ARGB vectors. - "sxtl v0.8h, v2.8b \n" // B,G coefficients s16. - "sxtl2 v1.8h, v2.16b \n" // R,A coefficients s16. + "ld1 {v2.16b}, [%3] \n" // load 3 ARGB vectors. + "sxtl v0.8h, v2.8b \n" // B,G coefficients s16. + "sxtl2 v1.8h, v2.16b \n" // R,A coefficients s16. "1: \n" - "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8 ARGB - "subs %w2, %w2, #8 \n" // 8 processed per loop. 
- "uxtl v16.8h, v16.8b \n" // b (0 .. 255) 16 bit - "uxtl v17.8h, v17.8b \n" // g - "uxtl v18.8h, v18.8b \n" // r - "uxtl v19.8h, v19.8b \n" // a - "mul v22.8h, v16.8h, v0.h[0] \n" // B = B * Matrix B - "mul v23.8h, v16.8h, v0.h[4] \n" // G = B * Matrix G - "mul v24.8h, v16.8h, v1.h[0] \n" // R = B * Matrix R - "mul v25.8h, v16.8h, v1.h[4] \n" // A = B * Matrix A - "mul v4.8h, v17.8h, v0.h[1] \n" // B += G * Matrix B - "mul v5.8h, v17.8h, v0.h[5] \n" // G += G * Matrix G - "mul v6.8h, v17.8h, v1.h[1] \n" // R += G * Matrix R - "mul v7.8h, v17.8h, v1.h[5] \n" // A += G * Matrix A - "sqadd v22.8h, v22.8h, v4.8h \n" // Accumulate B - "sqadd v23.8h, v23.8h, v5.8h \n" // Accumulate G - "sqadd v24.8h, v24.8h, v6.8h \n" // Accumulate R - "sqadd v25.8h, v25.8h, v7.8h \n" // Accumulate A - "mul v4.8h, v18.8h, v0.h[2] \n" // B += R * Matrix B - "mul v5.8h, v18.8h, v0.h[6] \n" // G += R * Matrix G - "mul v6.8h, v18.8h, v1.h[2] \n" // R += R * Matrix R - "mul v7.8h, v18.8h, v1.h[6] \n" // A += R * Matrix A - "sqadd v22.8h, v22.8h, v4.8h \n" // Accumulate B - "sqadd v23.8h, v23.8h, v5.8h \n" // Accumulate G - "sqadd v24.8h, v24.8h, v6.8h \n" // Accumulate R - "sqadd v25.8h, v25.8h, v7.8h \n" // Accumulate A - "mul v4.8h, v19.8h, v0.h[3] \n" // B += A * Matrix B - "mul v5.8h, v19.8h, v0.h[7] \n" // G += A * Matrix G - "mul v6.8h, v19.8h, v1.h[3] \n" // R += A * Matrix R - "mul v7.8h, v19.8h, v1.h[7] \n" // A += A * Matrix A - "sqadd v22.8h, v22.8h, v4.8h \n" // Accumulate B - "sqadd v23.8h, v23.8h, v5.8h \n" // Accumulate G - "sqadd v24.8h, v24.8h, v6.8h \n" // Accumulate R - "sqadd v25.8h, v25.8h, v7.8h \n" // Accumulate A - "sqshrun v16.8b, v22.8h, #6 \n" // 16 bit to 8 bit B - "sqshrun v17.8b, v23.8h, #6 \n" // 16 bit to 8 bit G - "sqshrun v18.8b, v24.8h, #6 \n" // 16 bit to 8 bit R - "sqshrun v19.8b, v25.8h, #6 \n" // 16 bit to 8 bit A - "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%1], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // 
load 8 ARGB + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "uxtl v16.8h, v16.8b \n" // b (0 .. 255) 16 bit + "uxtl v17.8h, v17.8b \n" // g + "uxtl v18.8h, v18.8b \n" // r + "uxtl v19.8h, v19.8b \n" // a + "mul v22.8h, v16.8h, v0.h[0] \n" // B = B * Matrix B + "mul v23.8h, v16.8h, v0.h[4] \n" // G = B * Matrix G + "mul v24.8h, v16.8h, v1.h[0] \n" // R = B * Matrix R + "mul v25.8h, v16.8h, v1.h[4] \n" // A = B * Matrix A + "mul v4.8h, v17.8h, v0.h[1] \n" // B += G * Matrix B + "mul v5.8h, v17.8h, v0.h[5] \n" // G += G * Matrix G + "mul v6.8h, v17.8h, v1.h[1] \n" // R += G * Matrix R + "mul v7.8h, v17.8h, v1.h[5] \n" // A += G * Matrix A + "sqadd v22.8h, v22.8h, v4.8h \n" // Accumulate B + "sqadd v23.8h, v23.8h, v5.8h \n" // Accumulate G + "sqadd v24.8h, v24.8h, v6.8h \n" // Accumulate R + "sqadd v25.8h, v25.8h, v7.8h \n" // Accumulate A + "mul v4.8h, v18.8h, v0.h[2] \n" // B += R * Matrix B + "mul v5.8h, v18.8h, v0.h[6] \n" // G += R * Matrix G + "mul v6.8h, v18.8h, v1.h[2] \n" // R += R * Matrix R + "mul v7.8h, v18.8h, v1.h[6] \n" // A += R * Matrix A + "sqadd v22.8h, v22.8h, v4.8h \n" // Accumulate B + "sqadd v23.8h, v23.8h, v5.8h \n" // Accumulate G + "sqadd v24.8h, v24.8h, v6.8h \n" // Accumulate R + "sqadd v25.8h, v25.8h, v7.8h \n" // Accumulate A + "mul v4.8h, v19.8h, v0.h[3] \n" // B += A * Matrix B + "mul v5.8h, v19.8h, v0.h[7] \n" // G += A * Matrix G + "mul v6.8h, v19.8h, v1.h[3] \n" // R += A * Matrix R + "mul v7.8h, v19.8h, v1.h[7] \n" // A += A * Matrix A + "sqadd v22.8h, v22.8h, v4.8h \n" // Accumulate B + "sqadd v23.8h, v23.8h, v5.8h \n" // Accumulate G + "sqadd v24.8h, v24.8h, v6.8h \n" // Accumulate R + "sqadd v25.8h, v25.8h, v7.8h \n" // Accumulate A + "sqshrun v16.8b, v22.8h, #6 \n" // 16 bit to 8 bit B + "sqshrun v17.8b, v23.8h, #6 \n" // 16 bit to 8 bit G + "sqshrun v18.8b, v24.8h, #6 \n" // 16 bit to 8 bit R + "sqshrun v19.8b, v25.8h, #6 \n" // 16 bit to 8 bit A + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%1], 
#32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(width) // %2 @@ -2465,19 +2660,21 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, asm volatile( // 8 pixel loop. "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more - "subs %w3, %w3, #8 \n" // 8 processed per loop. - "umull v0.8h, v0.8b, v4.8b \n" // multiply B - "umull v1.8h, v1.8b, v5.8b \n" // multiply G - "umull v2.8h, v2.8b, v6.8b \n" // multiply R - "umull v3.8h, v3.8b, v7.8b \n" // multiply A - "rshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit B - "rshrn v1.8b, v1.8h, #8 \n" // 16 bit to 8 bit G - "rshrn v2.8b, v2.8h, #8 \n" // 16 bit to 8 bit R - "rshrn v3.8b, v3.8h, #8 \n" // 16 bit to 8 bit A - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "umull v0.8h, v0.8b, v4.8b \n" // multiply B + "umull v1.8h, v1.8b, v5.8b \n" // multiply G + "umull v2.8h, v2.8b, v6.8b \n" // multiply R + "umull v3.8h, v3.8b, v7.8b \n" // multiply A + "rshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit B + "rshrn v1.8b, v1.8h, #8 \n" // 16 bit to 8 bit G + "rshrn v2.8b, v2.8h, #8 \n" // 16 bit to 8 bit R + "rshrn v3.8b, v3.8h, #8 \n" // 16 bit to 8 bit A + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -2494,15 +2691,17 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0, asm volatile( // 8 pixel loop. "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more - "subs %w3, %w3, #8 \n" // 8 processed per loop. 
- "uqadd v0.8b, v0.8b, v4.8b \n" - "uqadd v1.8b, v1.8b, v5.8b \n" - "uqadd v2.8b, v2.8b, v6.8b \n" - "uqadd v3.8b, v3.8b, v7.8b \n" - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "uqadd v0.8b, v0.8b, v4.8b \n" + "uqadd v1.8b, v1.8b, v5.8b \n" + "uqadd v2.8b, v2.8b, v6.8b \n" + "uqadd v3.8b, v3.8b, v7.8b \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -2519,15 +2718,17 @@ void ARGBSubtractRow_NEON(const uint8_t* src_argb0, asm volatile( // 8 pixel loop. "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more - "subs %w3, %w3, #8 \n" // 8 processed per loop. - "uqsub v0.8b, v0.8b, v4.8b \n" - "uqsub v1.8b, v1.8b, v5.8b \n" - "uqsub v2.8b, v2.8b, v6.8b \n" - "uqsub v3.8b, v3.8b, v7.8b \n" - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "uqsub v0.8b, v0.8b, v4.8b \n" + "uqsub v1.8b, v1.8b, v5.8b \n" + "uqsub v2.8b, v2.8b, v6.8b \n" + "uqsub v3.8b, v3.8b, v7.8b \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_argb0), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 @@ -2546,17 +2747,19 @@ void SobelRow_NEON(const uint8_t* src_sobelx, uint8_t* dst_argb, int width) { asm volatile( - "movi v3.8b, #255 \n" // alpha + "movi v3.8b, #255 \n" // alpha // 8 pixel loop. 
"1: \n" - "ld1 {v0.8b}, [%0], #8 \n" // load 8 sobelx. - "ld1 {v1.8b}, [%1], #8 \n" // load 8 sobely. - "subs %w3, %w3, #8 \n" // 8 processed per loop. - "uqadd v0.8b, v0.8b, v1.8b \n" // add - "orr v1.8b, v0.8b, v0.8b \n" - "orr v2.8b, v0.8b, v0.8b \n" - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld1 {v0.8b}, [%0], #8 \n" // load 8 sobelx. + "ld1 {v1.8b}, [%1], #8 \n" // load 8 sobely. + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "uqadd v0.8b, v0.8b, v1.8b \n" // add + "orr v1.8b, v0.8b, v0.8b \n" + "orr v2.8b, v0.8b, v0.8b \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_argb), // %2 @@ -2573,12 +2776,14 @@ void SobelToPlaneRow_NEON(const uint8_t* src_sobelx, asm volatile( // 16 pixel loop. "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load 16 sobelx. - "ld1 {v1.16b}, [%1], #16 \n" // load 16 sobely. - "subs %w3, %w3, #16 \n" // 16 processed per loop. - "uqadd v0.16b, v0.16b, v1.16b \n" // add - "st1 {v0.16b}, [%2], #16 \n" // store 16 pixels. - "b.gt 1b \n" + "ld1 {v0.16b}, [%0], #16 \n" // load 16 sobelx. + "ld1 {v1.16b}, [%1], #16 \n" // load 16 sobely. + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #16 \n" // 16 processed per loop. + "uqadd v0.16b, v0.16b, v1.16b \n" // add + "st1 {v0.16b}, [%2], #16 \n" // store 16 pixels. + "b.gt 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_y), // %2 @@ -2597,15 +2802,17 @@ void SobelXYRow_NEON(const uint8_t* src_sobelx, uint8_t* dst_argb, int width) { asm volatile( - "movi v3.8b, #255 \n" // alpha + "movi v3.8b, #255 \n" // alpha // 8 pixel loop. "1: \n" - "ld1 {v2.8b}, [%0], #8 \n" // load 8 sobelx. - "ld1 {v0.8b}, [%1], #8 \n" // load 8 sobely. - "subs %w3, %w3, #8 \n" // 8 processed per loop. 
- "uqadd v1.8b, v0.8b, v2.8b \n" // add - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB - "b.gt 1b \n" + "ld1 {v2.8b}, [%0], #8 \n" // load 8 sobelx. + "ld1 {v0.8b}, [%1], #8 \n" // load 8 sobely. + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "uqadd v1.8b, v0.8b, v2.8b \n" // add + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB + "b.gt 1b \n" : "+r"(src_sobelx), // %0 "+r"(src_sobely), // %1 "+r"(dst_argb), // %2 @@ -2625,23 +2832,26 @@ void SobelXRow_NEON(const uint8_t* src_y0, int width) { asm volatile( "1: \n" - "ld1 {v0.8b}, [%0],%5 \n" // top - "ld1 {v1.8b}, [%0],%6 \n" - "usubl v0.8h, v0.8b, v1.8b \n" - "ld1 {v2.8b}, [%1],%5 \n" // center * 2 - "ld1 {v3.8b}, [%1],%6 \n" - "usubl v1.8h, v2.8b, v3.8b \n" - "add v0.8h, v0.8h, v1.8h \n" - "add v0.8h, v0.8h, v1.8h \n" - "ld1 {v2.8b}, [%2],%5 \n" // bottom - "ld1 {v3.8b}, [%2],%6 \n" - "subs %w4, %w4, #8 \n" // 8 pixels - "usubl v1.8h, v2.8b, v3.8b \n" - "add v0.8h, v0.8h, v1.8h \n" - "abs v0.8h, v0.8h \n" - "uqxtn v0.8b, v0.8h \n" - "st1 {v0.8b}, [%3], #8 \n" // store 8 sobelx - "b.gt 1b \n" + "ld1 {v0.8b}, [%0],%5 \n" // top + "ld1 {v1.8b}, [%0],%6 \n" + "prfm pldl1keep, [%0, 448] \n" + "usubl v0.8h, v0.8b, v1.8b \n" + "ld1 {v2.8b}, [%1],%5 \n" // center * 2 + "ld1 {v3.8b}, [%1],%6 \n" + "prfm pldl1keep, [%1, 448] \n" + "usubl v1.8h, v2.8b, v3.8b \n" + "add v0.8h, v0.8h, v1.8h \n" + "add v0.8h, v0.8h, v1.8h \n" + "ld1 {v2.8b}, [%2],%5 \n" // bottom + "ld1 {v3.8b}, [%2],%6 \n" + "prfm pldl1keep, [%2, 448] \n" + "subs %w4, %w4, #8 \n" // 8 pixels + "usubl v1.8h, v2.8b, v3.8b \n" + "add v0.8h, v0.8h, v1.8h \n" + "abs v0.8h, v0.8h \n" + "uqxtn v0.8b, v0.8h \n" + "st1 {v0.8b}, [%3], #8 \n" // store 8 sobelx + "b.gt 1b \n" : "+r"(src_y0), // %0 "+r"(src_y1), // %1 "+r"(src_y2), // %2 @@ -2663,23 +2873,25 @@ void SobelYRow_NEON(const uint8_t* src_y0, int width) { asm volatile( "1: \n" - "ld1 {v0.8b}, [%0],%4 \n" // 
left - "ld1 {v1.8b}, [%1],%4 \n" - "usubl v0.8h, v0.8b, v1.8b \n" - "ld1 {v2.8b}, [%0],%4 \n" // center * 2 - "ld1 {v3.8b}, [%1],%4 \n" - "usubl v1.8h, v2.8b, v3.8b \n" - "add v0.8h, v0.8h, v1.8h \n" - "add v0.8h, v0.8h, v1.8h \n" - "ld1 {v2.8b}, [%0],%5 \n" // right - "ld1 {v3.8b}, [%1],%5 \n" - "subs %w3, %w3, #8 \n" // 8 pixels - "usubl v1.8h, v2.8b, v3.8b \n" - "add v0.8h, v0.8h, v1.8h \n" - "abs v0.8h, v0.8h \n" - "uqxtn v0.8b, v0.8h \n" - "st1 {v0.8b}, [%2], #8 \n" // store 8 sobely - "b.gt 1b \n" + "ld1 {v0.8b}, [%0],%4 \n" // left + "ld1 {v1.8b}, [%1],%4 \n" + "usubl v0.8h, v0.8b, v1.8b \n" + "ld1 {v2.8b}, [%0],%4 \n" // center * 2 + "ld1 {v3.8b}, [%1],%4 \n" + "usubl v1.8h, v2.8b, v3.8b \n" + "add v0.8h, v0.8h, v1.8h \n" + "add v0.8h, v0.8h, v1.8h \n" + "ld1 {v2.8b}, [%0],%5 \n" // right + "ld1 {v3.8b}, [%1],%5 \n" + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "subs %w3, %w3, #8 \n" // 8 pixels + "usubl v1.8h, v2.8b, v3.8b \n" + "add v0.8h, v0.8h, v1.8h \n" + "abs v0.8h, v0.8h \n" + "uqxtn v0.8b, v0.8h \n" + "st1 {v0.8b}, [%2], #8 \n" // store 8 sobely + "b.gt 1b \n" : "+r"(src_y0), // %0 "+r"(src_y1), // %1 "+r"(dst_sobely), // %2 @@ -2697,16 +2909,17 @@ void HalfFloat1Row_NEON(const uint16_t* src, int width) { asm volatile( "1: \n" - "ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts - "subs %w2, %w2, #8 \n" // 8 pixels per loop - "uxtl v2.4s, v1.4h \n" // 8 int's - "uxtl2 v3.4s, v1.8h \n" - "scvtf v2.4s, v2.4s \n" // 8 floats - "scvtf v3.4s, v3.4s \n" - "fcvtn v1.4h, v2.4s \n" // 8 half floats - "fcvtn2 v1.8h, v3.4s \n" - "st1 {v1.16b}, [%1], #16 \n" // store 8 shorts - "b.gt 1b \n" + "ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 pixels per loop + "uxtl v2.4s, v1.4h \n" // 8 int's + "uxtl2 v3.4s, v1.8h \n" + "scvtf v2.4s, v2.4s \n" // 8 floats + "scvtf v3.4s, v3.4s \n" + "fcvtn v1.4h, v2.4s \n" // 8 half floats + "fcvtn2 v1.8h, v3.4s \n" + "st1 {v1.16b}, [%1], #16 \n" // 
store 8 shorts + "b.gt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -2720,18 +2933,19 @@ void HalfFloatRow_NEON(const uint16_t* src, int width) { asm volatile( "1: \n" - "ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts - "subs %w2, %w2, #8 \n" // 8 pixels per loop - "uxtl v2.4s, v1.4h \n" // 8 int's - "uxtl2 v3.4s, v1.8h \n" - "scvtf v2.4s, v2.4s \n" // 8 floats - "scvtf v3.4s, v3.4s \n" - "fmul v2.4s, v2.4s, %3.s[0] \n" // adjust exponent - "fmul v3.4s, v3.4s, %3.s[0] \n" - "uqshrn v1.4h, v2.4s, #13 \n" // isolate halffloat - "uqshrn2 v1.8h, v3.4s, #13 \n" - "st1 {v1.16b}, [%1], #16 \n" // store 8 shorts - "b.gt 1b \n" + "ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 pixels per loop + "uxtl v2.4s, v1.4h \n" // 8 int's + "uxtl2 v3.4s, v1.8h \n" + "scvtf v2.4s, v2.4s \n" // 8 floats + "scvtf v3.4s, v3.4s \n" + "fmul v2.4s, v2.4s, %3.s[0] \n" // adjust exponent + "fmul v3.4s, v3.4s, %3.s[0] \n" + "uqshrn v1.4h, v2.4s, #13 \n" // isolate halffloat + "uqshrn2 v1.8h, v3.4s, #13 \n" + "st1 {v1.16b}, [%1], #16 \n" // store 8 shorts + "b.gt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -2745,17 +2959,18 @@ void ByteToFloatRow_NEON(const uint8_t* src, int width) { asm volatile( "1: \n" - "ld1 {v1.8b}, [%0], #8 \n" // load 8 bytes - "subs %w2, %w2, #8 \n" // 8 pixels per loop - "uxtl v1.8h, v1.8b \n" // 8 shorts - "uxtl v2.4s, v1.4h \n" // 8 ints - "uxtl2 v3.4s, v1.8h \n" - "scvtf v2.4s, v2.4s \n" // 8 floats - "scvtf v3.4s, v3.4s \n" - "fmul v2.4s, v2.4s, %3.s[0] \n" // scale - "fmul v3.4s, v3.4s, %3.s[0] \n" - "st1 {v2.16b, v3.16b}, [%1], #32 \n" // store 8 floats - "b.gt 1b \n" + "ld1 {v1.8b}, [%0], #8 \n" // load 8 bytes + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 pixels per loop + "uxtl v1.8h, v1.8b \n" // 8 shorts + "uxtl v2.4s, v1.4h \n" // 8 ints + "uxtl2 v3.4s, v1.8h \n" + "scvtf v2.4s, v2.4s \n" // 8 floats + "scvtf v3.4s, v3.4s \n" + "fmul v2.4s, 
v2.4s, %3.s[0] \n" // scale + "fmul v3.4s, v3.4s, %3.s[0] \n" + "st1 {v2.16b, v3.16b}, [%1], #32 \n" // store 8 floats + "b.gt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -2769,20 +2984,21 @@ float ScaleMaxSamples_NEON(const float* src, int width) { float fmax; asm volatile( - "movi v5.4s, #0 \n" // max - "movi v6.4s, #0 \n" + "movi v5.4s, #0 \n" // max + "movi v6.4s, #0 \n" "1: \n" - "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples - "subs %w2, %w2, #8 \n" // 8 processed per loop - "fmul v3.4s, v1.4s, %4.s[0] \n" // scale - "fmul v4.4s, v2.4s, %4.s[0] \n" // scale - "fmax v5.4s, v5.4s, v1.4s \n" // max - "fmax v6.4s, v6.4s, v2.4s \n" - "st1 {v3.4s, v4.4s}, [%1], #32 \n" // store 8 samples - "b.gt 1b \n" - "fmax v5.4s, v5.4s, v6.4s \n" // max - "fmaxv %s3, v5.4s \n" // signed max acculator + "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop + "fmul v3.4s, v1.4s, %4.s[0] \n" // scale + "fmul v4.4s, v2.4s, %4.s[0] \n" // scale + "fmax v5.4s, v5.4s, v1.4s \n" // max + "fmax v6.4s, v6.4s, v2.4s \n" + "st1 {v3.4s, v4.4s}, [%1], #32 \n" // store 8 samples + "b.gt 1b \n" + "fmax v5.4s, v5.4s, v6.4s \n" // max + "fmaxv %s3, v5.4s \n" // signed max acculator : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width), // %2 @@ -2798,21 +3014,22 @@ float ScaleSumSamples_NEON(const float* src, int width) { float fsum; asm volatile( - "movi v5.4s, #0 \n" // max - "movi v6.4s, #0 \n" // max + "movi v5.4s, #0 \n" // max + "movi v6.4s, #0 \n" // max "1: \n" - "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples - "subs %w2, %w2, #8 \n" // 8 processed per loop - "fmul v3.4s, v1.4s, %4.s[0] \n" // scale - "fmul v4.4s, v2.4s, %4.s[0] \n" - "fmla v5.4s, v1.4s, v1.4s \n" // sum of squares - "fmla v6.4s, v2.4s, v2.4s \n" - "st1 {v3.4s, v4.4s}, [%1], #32 \n" // store 8 samples - "b.gt 1b \n" - "faddp v5.4s, v5.4s, v6.4s \n" - "faddp v5.4s, v5.4s, v5.4s \n" - "faddp %3.4s, v5.4s, v5.4s \n" // 
sum + "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop + "fmul v3.4s, v1.4s, %4.s[0] \n" // scale + "fmul v4.4s, v2.4s, %4.s[0] \n" + "fmla v5.4s, v1.4s, v1.4s \n" // sum of squares + "fmla v6.4s, v2.4s, v2.4s \n" + "st1 {v3.4s, v4.4s}, [%1], #32 \n" // store 8 samples + "b.gt 1b \n" + "faddp v5.4s, v5.4s, v6.4s \n" + "faddp v5.4s, v5.4s, v5.4s \n" + "faddp %3.4s, v5.4s, v5.4s \n" // sum : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width), // %2 @@ -2825,12 +3042,13 @@ float ScaleSumSamples_NEON(const float* src, void ScaleSamples_NEON(const float* src, float* dst, float scale, int width) { asm volatile( "1: \n" - "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples - "subs %w2, %w2, #8 \n" // 8 processed per loop - "fmul v1.4s, v1.4s, %3.s[0] \n" // scale - "fmul v2.4s, v2.4s, %3.s[0] \n" // scale - "st1 {v1.4s, v2.4s}, [%1], #32 \n" // store 8 samples - "b.gt 1b \n" + "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop + "fmul v1.4s, v1.4s, %3.s[0] \n" // scale + "fmul v2.4s, v2.4s, %3.s[0] \n" // scale + "st1 {v1.4s, v2.4s}, [%1], #32 \n" // store 8 samples + "b.gt 1b \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -2847,26 +3065,31 @@ void GaussCol_NEON(const uint16_t* src0, uint32_t* dst, int width) { asm volatile( - "movi v6.8h, #4 \n" // constant 4 - "movi v7.8h, #6 \n" // constant 6 + "movi v6.8h, #4 \n" // constant 4 + "movi v7.8h, #6 \n" // constant 6 "1: \n" - "ld1 {v1.8h}, [%0], #16 \n" // load 8 samples, 5 rows - "ld1 {v2.8h}, [%4], #16 \n" - "uaddl v0.4s, v1.4h, v2.4h \n" // * 1 - "uaddl2 v1.4s, v1.8h, v2.8h \n" // * 1 - "ld1 {v2.8h}, [%1], #16 \n" - "umlal v0.4s, v2.4h, v6.4h \n" // * 4 - "umlal2 v1.4s, v2.8h, v6.8h \n" // * 4 - "ld1 {v2.8h}, [%2], #16 \n" - "umlal v0.4s, v2.4h, v7.4h \n" // * 6 - "umlal2 v1.4s, v2.8h, v7.8h \n" // * 6 - "ld1 {v2.8h}, [%3], #16 \n" - "umlal 
v0.4s, v2.4h, v6.4h \n" // * 4 - "umlal2 v1.4s, v2.8h, v6.8h \n" // * 4 - "subs %w6, %w6, #8 \n" // 8 processed per loop - "st1 {v0.4s,v1.4s}, [%5], #32 \n" // store 8 samples - "b.gt 1b \n" + "ld1 {v1.8h}, [%0], #16 \n" // load 8 samples, 5 rows + "ld1 {v2.8h}, [%4], #16 \n" + "uaddl v0.4s, v1.4h, v2.4h \n" // * 1 + "prfm pldl1keep, [%0, 448] \n" + "uaddl2 v1.4s, v1.8h, v2.8h \n" // * 1 + "ld1 {v2.8h}, [%1], #16 \n" + "umlal v0.4s, v2.4h, v6.4h \n" // * 4 + "prfm pldl1keep, [%1, 448] \n" + "umlal2 v1.4s, v2.8h, v6.8h \n" // * 4 + "ld1 {v2.8h}, [%2], #16 \n" + "umlal v0.4s, v2.4h, v7.4h \n" // * 6 + "prfm pldl1keep, [%2, 448] \n" + "umlal2 v1.4s, v2.8h, v7.8h \n" // * 6 + "ld1 {v2.8h}, [%3], #16 \n" + "umlal v0.4s, v2.4h, v6.4h \n" // * 4 + "prfm pldl1keep, [%3, 448] \n" + "umlal2 v1.4s, v2.8h, v6.8h \n" // * 4 + "subs %w6, %w6, #8 \n" // 8 processed per loop + "st1 {v0.4s,v1.4s}, [%5], #32 \n" // store 8 samples + "prfm pldl1keep, [%4, 448] \n" + "b.gt 1b \n" : "+r"(src0), // %0 "+r"(src1), // %1 "+r"(src2), // %2 @@ -2884,27 +3107,28 @@ void GaussRow_NEON(const uint32_t* src, uint16_t* dst, int width) { const uint32_t* src2 = src + 2; const uint32_t* src3 = src + 3; asm volatile( - "movi v6.4s, #4 \n" // constant 4 - "movi v7.4s, #6 \n" // constant 6 + "movi v6.4s, #4 \n" // constant 4 + "movi v7.4s, #6 \n" // constant 6 "1: \n" - "ld1 {v0.4s,v1.4s,v2.4s}, [%0], %6 \n" // load 12 source samples - "add v0.4s, v0.4s, v1.4s \n" // * 1 - "add v1.4s, v1.4s, v2.4s \n" // * 1 - "ld1 {v2.4s,v3.4s}, [%2], #32 \n" - "mla v0.4s, v2.4s, v7.4s \n" // * 6 - "mla v1.4s, v3.4s, v7.4s \n" // * 6 - "ld1 {v2.4s,v3.4s}, [%1], #32 \n" - "ld1 {v4.4s,v5.4s}, [%3], #32 \n" - "add v2.4s, v2.4s, v4.4s \n" // add rows for * 4 - "add v3.4s, v3.4s, v5.4s \n" - "mla v0.4s, v2.4s, v6.4s \n" // * 4 - "mla v1.4s, v3.4s, v6.4s \n" // * 4 - "subs %w5, %w5, #8 \n" // 8 processed per loop - "uqrshrn v0.4h, v0.4s, #8 \n" // round and pack - "uqrshrn2 v0.8h, v1.4s, #8 \n" - "st1 {v0.8h}, [%4], #16 \n" 
// store 8 samples - "b.gt 1b \n" + "ld1 {v0.4s,v1.4s,v2.4s}, [%0], %6 \n" // load 12 source samples + "add v0.4s, v0.4s, v1.4s \n" // * 1 + "add v1.4s, v1.4s, v2.4s \n" // * 1 + "ld1 {v2.4s,v3.4s}, [%2], #32 \n" + "mla v0.4s, v2.4s, v7.4s \n" // * 6 + "mla v1.4s, v3.4s, v7.4s \n" // * 6 + "ld1 {v2.4s,v3.4s}, [%1], #32 \n" + "ld1 {v4.4s,v5.4s}, [%3], #32 \n" + "add v2.4s, v2.4s, v4.4s \n" // add rows for * 4 + "add v3.4s, v3.4s, v5.4s \n" + "prfm pldl1keep, [%0, 448] \n" + "mla v0.4s, v2.4s, v6.4s \n" // * 4 + "mla v1.4s, v3.4s, v6.4s \n" // * 4 + "subs %w5, %w5, #8 \n" // 8 processed per loop + "uqrshrn v0.4h, v0.4s, #8 \n" // round and pack + "uqrshrn2 v0.8h, v1.4s, #8 \n" + "st1 {v0.8h}, [%4], #16 \n" // store 8 samples + "b.gt 1b \n" : "+r"(src), // %0 "+r"(src1), // %1 "+r"(src2), // %2 @@ -2915,6 +3139,87 @@ void GaussRow_NEON(const uint32_t* src, uint16_t* dst, int width) { : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7"); } +static const vecf32 kGaussCoefficients = {4.0f, 6.0f, 1.0f / 256.0f, 0.0f}; + +// filter 5 rows with 1, 4, 6, 4, 1 coefficients to produce 1 row. 
+void GaussCol_F32_NEON(const float* src0, + const float* src1, + const float* src2, + const float* src3, + const float* src4, + float* dst, + int width) { + asm volatile( + "ld2r {v6.4s, v7.4s}, [%7] \n" // constants 4 and 6 + + "1: \n" + "ld1 {v0.4s, v1.4s}, [%0], #32 \n" // load 8 samples, 5 rows + "ld1 {v2.4s, v3.4s}, [%1], #32 \n" + "fmla v0.4s, v2.4s, v6.4s \n" // * 4 + "ld1 {v4.4s, v5.4s}, [%2], #32 \n" + "fmla v1.4s, v3.4s, v6.4s \n" + "prfm pldl1keep, [%0, 448] \n" + "fmla v0.4s, v4.4s, v7.4s \n" // * 6 + "ld1 {v2.4s, v3.4s}, [%3], #32 \n" + "fmla v1.4s, v5.4s, v7.4s \n" + "prfm pldl1keep, [%1, 448] \n" + "fmla v0.4s, v2.4s, v6.4s \n" // * 4 + "ld1 {v4.4s, v5.4s}, [%4], #32 \n" + "fmla v1.4s, v3.4s, v6.4s \n" + "prfm pldl1keep, [%2, 448] \n" + "fadd v0.4s, v0.4s, v4.4s \n" // * 1 + "prfm pldl1keep, [%3, 448] \n" + "fadd v1.4s, v1.4s, v5.4s \n" + "prfm pldl1keep, [%4, 448] \n" + "subs %w6, %w6, #8 \n" // 8 processed per loop + "st1 {v0.4s, v1.4s}, [%5], #32 \n" // store 8 samples + "b.gt 1b \n" + : "+r"(src0), // %0 + "+r"(src1), // %1 + "+r"(src2), // %2 + "+r"(src3), // %3 + "+r"(src4), // %4 + "+r"(dst), // %5 + "+r"(width) // %6 + : "r"(&kGaussCoefficients) // %7 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7"); +} + +// filter 5 rows with 1, 4, 6, 4, 1 coefficients to produce 1 row. 
+void GaussRow_F32_NEON(const float* src, float* dst, int width) { + asm volatile( + "ld3r {v6.4s, v7.4s, v8.4s}, [%3] \n" // constants 4, 6, 1/256 + + "1: \n" + "ld1 {v0.4s, v1.4s, v2.4s}, [%0], %4 \n" // load 12 samples, 5 + // rows + "fadd v0.4s, v0.4s, v1.4s \n" // * 1 + "ld1 {v4.4s, v5.4s}, [%0], %5 \n" + "fadd v1.4s, v1.4s, v2.4s \n" + "fmla v0.4s, v4.4s, v7.4s \n" // * 6 + "ld1 {v2.4s, v3.4s}, [%0], %4 \n" + "fmla v1.4s, v5.4s, v7.4s \n" + "ld1 {v4.4s, v5.4s}, [%0], %6 \n" + "fadd v2.4s, v2.4s, v4.4s \n" + "fadd v3.4s, v3.4s, v5.4s \n" + "fmla v0.4s, v2.4s, v6.4s \n" // * 4 + "fmla v1.4s, v3.4s, v6.4s \n" + "prfm pldl1keep, [%0, 448] \n" + "fmul v0.4s, v0.4s, v8.4s \n" // / 256 + "fmul v1.4s, v1.4s, v8.4s \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop + "st1 {v0.4s, v1.4s}, [%1], #32 \n" // store 8 samples + "b.gt 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(&kGaussCoefficients), // %3 + "r"(8LL), // %4 + "r"(-4LL), // %5 + "r"(20LL) // %6 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8"); +} + // Convert biplanar NV21 to packed YUV24 void NV21ToYUV24Row_NEON(const uint8_t* src_y, const uint8_t* src_vu, @@ -2922,13 +3227,15 @@ void NV21ToYUV24Row_NEON(const uint8_t* src_y, int width) { asm volatile( "1: \n" - "ld1 {v2.16b}, [%0], #16 \n" // load 16 Y values - "ld2 {v0.8b, v1.8b}, [%1], #16 \n" // load 8 VU values - "zip1 v0.16b, v0.16b, v0.16b \n" // replicate V values - "zip1 v1.16b, v1.16b, v1.16b \n" // replicate U values - "subs %w3, %w3, #16 \n" // 16 pixels per loop - "st3 {v0.16b,v1.16b,v2.16b}, [%2], #48 \n" // store 16 YUV pixels - "b.gt 1b \n" + "ld1 {v2.16b}, [%0], #16 \n" // load 16 Y values + "ld2 {v0.8b, v1.8b}, [%1], #16 \n" // load 8 VU values + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "zip1 v0.16b, v0.16b, v0.16b \n" // replicate V values + "zip1 v1.16b, v1.16b, v1.16b \n" // replicate U values + "subs %w3, %w3, #16 \n" // 16 pixels per loop + "st3 
{v0.16b,v1.16b,v2.16b}, [%2], #48 \n" // store 16 YUV pixels + "b.gt 1b \n" : "+r"(src_y), // %0 "+r"(src_vu), // %1 "+r"(dst_yuv24), // %2 @@ -2945,17 +3252,19 @@ void AYUVToUVRow_NEON(const uint8_t* src_ayuv, asm volatile( "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ayuv - "uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts. - "uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts. - "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts. - "uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts. - "uqrshrn v3.8b, v0.8h, #2 \n" // 2x2 average - "uqrshrn v2.8b, v1.8h, #2 \n" - "subs %w3, %w3, #16 \n" // 16 processed per loop. - "st2 {v2.8b,v3.8b}, [%2], #16 \n" // store 8 pixels UV. - "b.gt 1b \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ayuv + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts. + "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts. + "uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts. + "uqrshrn v3.8b, v0.8h, #2 \n" // 2x2 average + "uqrshrn v2.8b, v1.8h, #2 \n" + "subs %w3, %w3, #16 \n" // 16 processed per loop. + "st2 {v2.8b,v3.8b}, [%2], #16 \n" // store 8 pixels UV. + "b.gt 1b \n" : "+r"(src_ayuv), // %0 "+r"(src_ayuv_1), // %1 "+r"(dst_uv), // %2 @@ -2972,18 +3281,19 @@ void AYUVToVURow_NEON(const uint8_t* src_ayuv, asm volatile( "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 - // pixels. - "uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts. - "uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts. - "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts. - "uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts. 
- "uqrshrn v0.8b, v0.8h, #2 \n" // 2x2 average - "uqrshrn v1.8b, v1.8h, #2 \n" - "subs %w3, %w3, #16 \n" // 16 processed per loop. - "st2 {v0.8b,v1.8b}, [%2], #16 \n" // store 8 pixels VU. - "b.gt 1b \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ayuv + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts. + "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts. + "uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts. + "uqrshrn v0.8b, v0.8h, #2 \n" // 2x2 average + "uqrshrn v1.8b, v1.8h, #2 \n" + "subs %w3, %w3, #16 \n" // 16 processed per loop. + "st2 {v0.8b,v1.8b}, [%2], #16 \n" // store 8 pixels VU. + "b.gt 1b \n" : "+r"(src_ayuv), // %0 "+r"(src_ayuv_1), // %1 "+r"(dst_vu), // %2 @@ -2996,11 +3306,11 @@ void AYUVToVURow_NEON(const uint8_t* src_ayuv, void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width) { asm volatile( "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 - // pixels - "subs %w2, %w2, #16 \n" // 16 pixels per loop - "st1 {v2.16b}, [%1], #16 \n" // store 16 Y pixels - "b.gt 1b \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #16 \n" // 16 pixels per loop + "st1 {v2.16b}, [%1], #16 \n" // store 16 Y pixels + "b.gt 1b \n" : "+r"(src_ayuv), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -3008,22 +3318,67 @@ void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width) { : "cc", "memory", "v0", "v1", "v2", "v3"); } +// Shuffle table for swapping UV bytes. +static const uvec8 kShuffleSwapUV = {1u, 0u, 3u, 2u, 5u, 4u, 7u, 6u, + 9u, 8u, 11u, 10u, 13u, 12u, 15u, 14u}; + // Convert UV plane of NV12 to VU of NV21. 
void SwapUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_vu, int width) { asm volatile( + "ld1 {v2.16b}, [%3] \n" // shuffler "1: \n" - "ld2 {v0.16b, v1.16b}, [%0], #32 \n" // load 16 UV values - "orr v2.16b, v0.16b, v0.16b \n" // move U after V - "subs %w2, %w2, #16 \n" // 16 pixels per loop - "st2 {v1.16b, v2.16b}, [%1], #32 \n" // store 16 VU pixels - "b.gt 1b \n" - : "+r"(src_uv), // %0 - "+r"(dst_vu), // %1 - "+r"(width) // %2 - : + "ld1 {v0.16b}, [%0], 16 \n" // load 16 UV values + "ld1 {v1.16b}, [%0], 16 \n" + "prfm pldl1keep, [%0, 448] \n" + "subs %w2, %w2, #16 \n" // 16 pixels per loop + "tbl v0.16b, {v0.16b}, v2.16b \n" + "tbl v1.16b, {v1.16b}, v2.16b \n" + "stp q0, q1, [%1], 32 \n" // store 16 VU pixels + "b.gt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_vu), // %1 + "+r"(width) // %2 + : "r"(&kShuffleSwapUV) // %3 : "cc", "memory", "v0", "v1", "v2"); } +void HalfMergeUVRow_NEON(const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_uv, + int width) { + const uint8_t* src_u_1 = src_u + src_stride_u; + const uint8_t* src_v_1 = src_v + src_stride_v; + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%0], #16 \n" // load 16 U values + "ld1 {v1.16b}, [%2], #16 \n" // load 16 V values + "ld1 {v2.16b}, [%1], #16 \n" + "ld1 {v3.16b}, [%3], #16 \n" + "uaddlp v0.8h, v0.16b \n" // half size + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v1.8h, v1.16b \n" + "prfm pldl1keep, [%2, 448] \n" + "uadalp v0.8h, v2.16b \n" + "prfm pldl1keep, [%1, 448] \n" + "uadalp v1.8h, v3.16b \n" + "prfm pldl1keep, [%3, 448] \n" + "uqrshrn v0.8b, v0.8h, #2 \n" + "uqrshrn v1.8b, v1.8h, #2 \n" + "subs %w5, %w5, #16 \n" // 16 src pixels per loop + "st2 {v0.8b, v1.8b}, [%4], #16 \n" // store 8 UV pixels + "b.gt 1b \n" + : "+r"(src_u), // %0 + "+r"(src_u_1), // %1 + "+r"(src_v), // %2 + "+r"(src_v_1), // %3 + "+r"(dst_uv), // %4 + "+r"(width) // %5 + : + : "cc", "memory", "v0", "v1", "v2", "v3"); +} + #endif // !defined(LIBYUV_DISABLE_NEON) && 
defined(__aarch64__) #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_win.cc b/TMessagesProj/jni/third_party/libyuv/source/row_win.cc index f976d4026..9afcf060a 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_win.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_win.cc @@ -2898,10 +2898,12 @@ __declspec(naked) void I422ToRGBARow_SSSE3( } #endif // HAS_I422TOARGBROW_SSSE3 +// I400ToARGBRow_SSE2 is disabled due to new yuvconstant parameter #ifdef HAS_I400TOARGBROW_SSE2 // 8 pixels of Y converted to 8 pixels of ARGB (32 bytes). __declspec(naked) void I400ToARGBRow_SSE2(const uint8_t* y_buf, uint8_t* rgb_buf, + const struct YuvConstants*, int width) { __asm { mov eax, 0x4a354a35 // 4a35 = 18997 = round(1.164 * 64 * 256) @@ -2949,6 +2951,7 @@ __declspec(naked) void I400ToARGBRow_SSE2(const uint8_t* y_buf, // note: vpunpcklbw mutates and vpackuswb unmutates. __declspec(naked) void I400ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* rgb_buf, + const struct YuvConstants*, int width) { __asm { mov eax, 0x4a354a35 // 4a35 = 18997 = round(1.164 * 64 * 256) @@ -3045,15 +3048,15 @@ __declspec(naked) void MirrorRow_AVX2(const uint8_t* src, } #endif // HAS_MIRRORROW_AVX2 -#ifdef HAS_MIRRORUVROW_SSSE3 +#ifdef HAS_MIRRORSPLITUVROW_SSSE3 // Shuffle table for reversing the bytes of UV channels. 
static const uvec8 kShuffleMirrorUV = {14u, 12u, 10u, 8u, 6u, 4u, 2u, 0u, 15u, 13u, 11u, 9u, 7u, 5u, 3u, 1u}; -__declspec(naked) void MirrorUVRow_SSSE3(const uint8_t* src, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +__declspec(naked) void MirrorSplitUVRow_SSSE3(const uint8_t* src, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { __asm { push edi mov eax, [esp + 4 + 4] // src @@ -3078,7 +3081,7 @@ __declspec(naked) void MirrorUVRow_SSSE3(const uint8_t* src, ret } } -#endif // HAS_MIRRORUVROW_SSSE3 +#endif // HAS_MIRRORSPLITUVROW_SSSE3 #ifdef HAS_ARGBMIRRORROW_SSE2 __declspec(naked) void ARGBMirrorRow_SSE2(const uint8_t* src, diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale.cc b/TMessagesProj/jni/third_party/libyuv/source/scale.cc index 5034c5032..cf3c03325 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale.cc @@ -17,6 +17,7 @@ #include "libyuv/planar_functions.h" // For CopyPlane #include "libyuv/row.h" #include "libyuv/scale_row.h" +#include "libyuv/scale_uv.h" // For UVScale #ifdef __cplusplus namespace libyuv { @@ -103,21 +104,6 @@ static void ScalePlaneDown2(int src_width, } } #endif -#if defined(HAS_SCALEROWDOWN2_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ScaleRowDown2 = - filtering == kFilterNone - ? ScaleRowDown2_Any_MSA - : (filtering == kFilterLinear ? ScaleRowDown2Linear_Any_MSA - : ScaleRowDown2Box_Any_MSA); - if (IS_ALIGNED(dst_width, 32)) { - ScaleRowDown2 = filtering == kFilterNone ? ScaleRowDown2_MSA - : (filtering == kFilterLinear - ? ScaleRowDown2Linear_MSA - : ScaleRowDown2Box_MSA); - } - } -#endif #if defined(HAS_SCALEROWDOWN2_MMI) if (TestCpuFlag(kCpuHasMMI)) { ScaleRowDown2 = @@ -133,6 +119,21 @@ static void ScalePlaneDown2(int src_width, } } #endif +#if defined(HAS_SCALEROWDOWN2_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleRowDown2 = + filtering == kFilterNone + ? ScaleRowDown2_Any_MSA + : (filtering == kFilterLinear ? 
ScaleRowDown2Linear_Any_MSA + : ScaleRowDown2Box_Any_MSA); + if (IS_ALIGNED(dst_width, 32)) { + ScaleRowDown2 = filtering == kFilterNone ? ScaleRowDown2_MSA + : (filtering == kFilterLinear + ? ScaleRowDown2Linear_MSA + : ScaleRowDown2Box_MSA); + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -255,15 +256,6 @@ static void ScalePlaneDown4(int src_width, } } #endif -#if defined(HAS_SCALEROWDOWN4_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ScaleRowDown4 = - filtering ? ScaleRowDown4Box_Any_MSA : ScaleRowDown4_Any_MSA; - if (IS_ALIGNED(dst_width, 16)) { - ScaleRowDown4 = filtering ? ScaleRowDown4Box_MSA : ScaleRowDown4_MSA; - } - } -#endif #if defined(HAS_SCALEROWDOWN4_MMI) if (TestCpuFlag(kCpuHasMMI)) { ScaleRowDown4 = @@ -273,6 +265,15 @@ static void ScalePlaneDown4(int src_width, } } #endif +#if defined(HAS_SCALEROWDOWN4_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleRowDown4 = + filtering ? ScaleRowDown4Box_Any_MSA : ScaleRowDown4_Any_MSA; + if (IS_ALIGNED(dst_width, 16)) { + ScaleRowDown4 = filtering ? 
ScaleRowDown4Box_MSA : ScaleRowDown4_MSA; + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -378,6 +379,18 @@ static void ScalePlaneDown34(int src_width, } } #endif +#if defined(HAS_SCALEROWDOWN34_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + if (!filtering) { + ScaleRowDown34_0 = ScaleRowDown34_Any_MMI; + ScaleRowDown34_1 = ScaleRowDown34_Any_MMI; + if (dst_width % 24 == 0) { + ScaleRowDown34_0 = ScaleRowDown34_MMI; + ScaleRowDown34_1 = ScaleRowDown34_MMI; + } + } + } +#endif #if defined(HAS_SCALEROWDOWN34_MSA) if (TestCpuFlag(kCpuHasMSA)) { if (!filtering) { @@ -398,18 +411,6 @@ static void ScalePlaneDown34(int src_width, } } #endif -#if defined(HAS_SCALEROWDOWN34_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - if (!filtering) { - ScaleRowDown34_0 = ScaleRowDown34_Any_MMI; - ScaleRowDown34_1 = ScaleRowDown34_Any_MMI; - if (dst_width % 24 == 0) { - ScaleRowDown34_0 = ScaleRowDown34_MMI; - ScaleRowDown34_1 = ScaleRowDown34_MMI; - } - } - } -#endif #if defined(HAS_SCALEROWDOWN34_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { if (!filtering) { @@ -890,14 +891,6 @@ static void ScalePlaneBox(int src_width, } } #endif -#if defined(HAS_SCALEADDROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ScaleAddRow = ScaleAddRow_Any_MSA; - if (IS_ALIGNED(src_width, 16)) { - ScaleAddRow = ScaleAddRow_MSA; - } - } -#endif #if defined(HAS_SCALEADDROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { ScaleAddRow = ScaleAddRow_Any_MMI; @@ -906,6 +899,14 @@ static void ScalePlaneBox(int src_width, } } #endif +#if defined(HAS_SCALEADDROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleAddRow = ScaleAddRow_Any_MSA; + if (IS_ALIGNED(src_width, 16)) { + ScaleAddRow = ScaleAddRow_MSA; + } + } +#endif for (j = 0; j < dst_height; ++j) { int boxheight; @@ -1042,14 +1043,6 @@ void ScalePlaneBilinearDown(int src_width, } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(src_width, 32)) { - InterpolateRow = InterpolateRow_MSA; - } - } 
-#endif #if defined(HAS_INTERPOLATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { InterpolateRow = InterpolateRow_Any_MMI; @@ -1058,6 +1051,14 @@ void ScalePlaneBilinearDown(int src_width, } } #endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(src_width, 32)) { + InterpolateRow = InterpolateRow_MSA; + } + } +#endif #if defined(HAS_SCALEFILTERCOLS_SSSE3) if (TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { @@ -1670,7 +1671,7 @@ void ScalePlane_16(const uint16_t* src, } if (dst_width == src_width && filtering != kFilterBox) { int dy = FixedDiv(src_height, dst_height); - // Arbitrary scale vertically, but unscaled vertically. + // Arbitrary scale vertically, but unscaled horizontally. ScalePlaneVertical_16(src_height, dst_width, dst_height, src_stride, dst_stride, src, dst, 0, 0, dy, 1, filtering); return; @@ -1869,6 +1870,40 @@ int I444Scale_16(const uint16_t* src_y, return 0; } +// Scale an NV12 image. +// This function in turn calls a scaling function for each plane. 
+ +LIBYUV_API +int NV12Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height, + enum FilterMode filtering) { + int src_halfwidth = SUBSAMPLE(src_width, 1, 1); + int src_halfheight = SUBSAMPLE(src_height, 1, 1); + int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); + int dst_halfheight = SUBSAMPLE(dst_height, 1, 1); + if (!src_y || !src_uv || src_width == 0 || src_height == 0 || + src_width > 32768 || src_height > 32768 || !dst_y || !dst_uv || + dst_width <= 0 || dst_height <= 0) { + return -1; + } + + ScalePlane(src_y, src_stride_y, src_width, src_height, dst_y, dst_stride_y, + dst_width, dst_height, filtering); + UVScale(src_uv, src_stride_uv, src_halfwidth, src_halfheight, dst_uv, + dst_stride_uv, dst_halfwidth, dst_halfheight, filtering); + return 0; +} + // Deprecated api LIBYUV_API int Scale(const uint8_t* src_y, diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc index d780cb1ff..c93d70c5f 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc @@ -20,49 +20,6 @@ namespace libyuv { extern "C" { #endif -// Definition for ScaleFilterCols, ScaleARGBCols and ScaleARGBFilterCols -#define CANY(NAMEANY, TERP_SIMD, TERP_C, BPP, MASK) \ - void NAMEANY(uint8_t* dst_ptr, const uint8_t* src_ptr, int dst_width, int x, \ - int dx) { \ - int r = dst_width & MASK; \ - int n = dst_width & ~MASK; \ - if (n > 0) { \ - TERP_SIMD(dst_ptr, src_ptr, n, x, dx); \ - } \ - TERP_C(dst_ptr + n * BPP, src_ptr, r, x + n * dx, dx); \ - } - -#ifdef HAS_SCALEFILTERCOLS_NEON -CANY(ScaleFilterCols_Any_NEON, ScaleFilterCols_NEON, ScaleFilterCols_C, 1, 7) -#endif -#ifdef HAS_SCALEFILTERCOLS_MSA -CANY(ScaleFilterCols_Any_MSA, ScaleFilterCols_MSA, ScaleFilterCols_C, 
1, 15) -#endif -#ifdef HAS_SCALEARGBCOLS_NEON -CANY(ScaleARGBCols_Any_NEON, ScaleARGBCols_NEON, ScaleARGBCols_C, 4, 7) -#endif -#ifdef HAS_SCALEARGBCOLS_MSA -CANY(ScaleARGBCols_Any_MSA, ScaleARGBCols_MSA, ScaleARGBCols_C, 4, 3) -#endif -#ifdef HAS_SCALEARGBCOLS_MMI -CANY(ScaleARGBCols_Any_MMI, ScaleARGBCols_MMI, ScaleARGBCols_C, 4, 0) -#endif -#ifdef HAS_SCALEARGBFILTERCOLS_NEON -CANY(ScaleARGBFilterCols_Any_NEON, - ScaleARGBFilterCols_NEON, - ScaleARGBFilterCols_C, - 4, - 3) -#endif -#ifdef HAS_SCALEARGBFILTERCOLS_MSA -CANY(ScaleARGBFilterCols_Any_MSA, - ScaleARGBFilterCols_MSA, - ScaleARGBFilterCols_C, - 4, - 7) -#endif -#undef CANY - // Fixed scale down. // Mask may be non-power of 2, so use MOD #define SDANY(NAMEANY, SCALEROWDOWN_SIMD, SCALEROWDOWN_C, FACTOR, BPP, MASK) \ @@ -113,6 +70,22 @@ SDODD(ScaleRowDown2Box_Odd_SSSE3, 1, 15) #endif +#ifdef HAS_SCALEUVROWDOWN2BOX_SSSE3 +SDANY(ScaleUVRowDown2Box_Any_SSSE3, + ScaleUVRowDown2Box_SSSE3, + ScaleUVRowDown2Box_C, + 2, + 2, + 4) +#endif +#ifdef HAS_SCALEUVROWDOWN2BOX_AVX2 +SDANY(ScaleUVRowDown2Box_Any_AVX2, + ScaleUVRowDown2Box_AVX2, + ScaleUVRowDown2Box_C, + 2, + 2, + 8) +#endif #ifdef HAS_SCALEROWDOWN2_AVX2 SDANY(ScaleRowDown2_Any_AVX2, ScaleRowDown2_AVX2, ScaleRowDown2_C, 2, 1, 31) SDANY(ScaleRowDown2Linear_Any_AVX2, @@ -155,6 +128,15 @@ SDODD(ScaleRowDown2Box_Odd_NEON, 1, 15) #endif +#ifdef HAS_SCALEUVROWDOWN2BOX_NEON +SDANY(ScaleUVRowDown2Box_Any_NEON, + ScaleUVRowDown2Box_NEON, + ScaleUVRowDown2Box_C, + 2, + 2, + 8) +#endif + #ifdef HAS_SCALEROWDOWN2_MSA SDANY(ScaleRowDown2_Any_MSA, ScaleRowDown2_MSA, ScaleRowDown2_C, 2, 1, 31) SDANY(ScaleRowDown2Linear_Any_MSA, @@ -508,6 +490,13 @@ SDAANY(ScaleARGBRowDownEvenBox_Any_MMI, 4, 1) #endif +#ifdef HAS_SCALEUVROWDOWNEVEN_NEON +SDAANY(ScaleUVRowDownEven_Any_NEON, + ScaleUVRowDownEven_NEON, + ScaleUVRowDownEven_C, + 2, + 3) +#endif #ifdef SASIMDONLY // This also works and uses memcpy and SIMD instead of C, but is slower on ARM @@ -577,6 +566,49 @@ 
SAANY(ScaleAddRow_Any_MMI, ScaleAddRow_MMI, ScaleAddRow_C, 7) #endif // SASIMDONLY +// Definition for ScaleFilterCols, ScaleARGBCols and ScaleARGBFilterCols +#define CANY(NAMEANY, TERP_SIMD, TERP_C, BPP, MASK) \ + void NAMEANY(uint8_t* dst_ptr, const uint8_t* src_ptr, int dst_width, int x, \ + int dx) { \ + int r = dst_width & MASK; \ + int n = dst_width & ~MASK; \ + if (n > 0) { \ + TERP_SIMD(dst_ptr, src_ptr, n, x, dx); \ + } \ + TERP_C(dst_ptr + n * BPP, src_ptr, r, x + n * dx, dx); \ + } + +#ifdef HAS_SCALEFILTERCOLS_NEON +CANY(ScaleFilterCols_Any_NEON, ScaleFilterCols_NEON, ScaleFilterCols_C, 1, 7) +#endif +#ifdef HAS_SCALEFILTERCOLS_MSA +CANY(ScaleFilterCols_Any_MSA, ScaleFilterCols_MSA, ScaleFilterCols_C, 1, 15) +#endif +#ifdef HAS_SCALEARGBCOLS_NEON +CANY(ScaleARGBCols_Any_NEON, ScaleARGBCols_NEON, ScaleARGBCols_C, 4, 7) +#endif +#ifdef HAS_SCALEARGBCOLS_MSA +CANY(ScaleARGBCols_Any_MSA, ScaleARGBCols_MSA, ScaleARGBCols_C, 4, 3) +#endif +#ifdef HAS_SCALEARGBCOLS_MMI +CANY(ScaleARGBCols_Any_MMI, ScaleARGBCols_MMI, ScaleARGBCols_C, 4, 0) +#endif +#ifdef HAS_SCALEARGBFILTERCOLS_NEON +CANY(ScaleARGBFilterCols_Any_NEON, + ScaleARGBFilterCols_NEON, + ScaleARGBFilterCols_C, + 4, + 3) +#endif +#ifdef HAS_SCALEARGBFILTERCOLS_MSA +CANY(ScaleARGBFilterCols_Any_MSA, + ScaleARGBFilterCols_MSA, + ScaleARGBFilterCols_C, + 4, + 7) +#endif +#undef CANY + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc index 58aa5ebbe..451d4ec4d 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc @@ -95,22 +95,6 @@ static void ScaleARGBDown2(int src_width, } } #endif -#if defined(HAS_SCALEARGBROWDOWN2_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ScaleARGBRowDown2 = - filtering == kFilterNone - ? ScaleARGBRowDown2_Any_MSA - : (filtering == kFilterLinear ? 
ScaleARGBRowDown2Linear_Any_MSA - : ScaleARGBRowDown2Box_Any_MSA); - if (IS_ALIGNED(dst_width, 4)) { - ScaleARGBRowDown2 = - filtering == kFilterNone - ? ScaleARGBRowDown2_MSA - : (filtering == kFilterLinear ? ScaleARGBRowDown2Linear_MSA - : ScaleARGBRowDown2Box_MSA); - } - } -#endif #if defined(HAS_SCALEARGBROWDOWN2_MMI) if (TestCpuFlag(kCpuHasMMI)) { ScaleARGBRowDown2 = @@ -127,6 +111,22 @@ static void ScaleARGBDown2(int src_width, } } #endif +#if defined(HAS_SCALEARGBROWDOWN2_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleARGBRowDown2 = + filtering == kFilterNone + ? ScaleARGBRowDown2_Any_MSA + : (filtering == kFilterLinear ? ScaleARGBRowDown2Linear_Any_MSA + : ScaleARGBRowDown2Box_Any_MSA); + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBRowDown2 = + filtering == kFilterNone + ? ScaleARGBRowDown2_MSA + : (filtering == kFilterLinear ? ScaleARGBRowDown2Linear_MSA + : ScaleARGBRowDown2Box_MSA); + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -243,16 +243,6 @@ static void ScaleARGBDownEven(int src_width, } } #endif -#if defined(HAS_SCALEARGBROWDOWNEVEN_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ScaleARGBRowDownEven = filtering ? ScaleARGBRowDownEvenBox_Any_MSA - : ScaleARGBRowDownEven_Any_MSA; - if (IS_ALIGNED(dst_width, 4)) { - ScaleARGBRowDownEven = - filtering ? ScaleARGBRowDownEvenBox_MSA : ScaleARGBRowDownEven_MSA; - } - } -#endif #if defined(HAS_SCALEARGBROWDOWNEVEN_MMI) if (TestCpuFlag(kCpuHasMMI)) { ScaleARGBRowDownEven = filtering ? ScaleARGBRowDownEvenBox_Any_MMI @@ -263,6 +253,16 @@ static void ScaleARGBDownEven(int src_width, } } #endif +#if defined(HAS_SCALEARGBROWDOWNEVEN_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleARGBRowDownEven = filtering ? ScaleARGBRowDownEvenBox_Any_MSA + : ScaleARGBRowDownEven_Any_MSA; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBRowDownEven = + filtering ? 
ScaleARGBRowDownEvenBox_MSA : ScaleARGBRowDownEven_MSA; + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -436,14 +436,6 @@ static void ScaleARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(dst_width, 8)) { - InterpolateRow = InterpolateRow_MSA; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { InterpolateRow = InterpolateRow_Any_MMI; @@ -451,6 +443,14 @@ static void ScaleARGBBilinearUp(int src_width, InterpolateRow = InterpolateRow_MMI; } } +#endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(dst_width, 8)) { + InterpolateRow = InterpolateRow_MSA; + } + } #endif if (src_width >= 32768) { ScaleARGBFilterCols = @@ -490,14 +490,6 @@ static void ScaleARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_SCALEARGBCOLS_MSA) - if (!filtering && TestCpuFlag(kCpuHasMSA)) { - ScaleARGBFilterCols = ScaleARGBCols_Any_MSA; - if (IS_ALIGNED(dst_width, 4)) { - ScaleARGBFilterCols = ScaleARGBCols_MSA; - } - } -#endif #if defined(HAS_SCALEARGBCOLS_MMI) if (!filtering && TestCpuFlag(kCpuHasMMI)) { ScaleARGBFilterCols = ScaleARGBCols_Any_MMI; @@ -505,6 +497,14 @@ static void ScaleARGBBilinearUp(int src_width, ScaleARGBFilterCols = ScaleARGBCols_MMI; } } +#endif +#if defined(HAS_SCALEARGBCOLS_MSA) + if (!filtering && TestCpuFlag(kCpuHasMSA)) { + ScaleARGBFilterCols = ScaleARGBCols_Any_MSA; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBFilterCols = ScaleARGBCols_MSA; + } + } #endif if (!filtering && src_width * 2 == dst_width && x < 0x8000) { ScaleARGBFilterCols = ScaleARGBColsUp2_C; @@ -619,14 +619,6 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_I422TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGBRow = I422ToARGBRow_Any_MSA; - if (IS_ALIGNED(src_width, 8)) { - I422ToARGBRow = 
I422ToARGBRow_MSA; - } - } -#endif #if defined(HAS_I422TOARGBROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { I422ToARGBRow = I422ToARGBRow_Any_MMI; @@ -635,6 +627,14 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif +#if defined(HAS_I422TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGBRow = I422ToARGBRow_Any_MSA; + if (IS_ALIGNED(src_width, 8)) { + I422ToARGBRow = I422ToARGBRow_MSA; + } + } +#endif void (*InterpolateRow)(uint8_t * dst_argb, const uint8_t* src_argb, ptrdiff_t src_stride, int dst_width, @@ -713,14 +713,6 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_SCALEARGBCOLS_MSA) - if (!filtering && TestCpuFlag(kCpuHasMSA)) { - ScaleARGBFilterCols = ScaleARGBCols_Any_MSA; - if (IS_ALIGNED(dst_width, 4)) { - ScaleARGBFilterCols = ScaleARGBCols_MSA; - } - } -#endif #if defined(HAS_SCALEARGBCOLS_MMI) if (!filtering && TestCpuFlag(kCpuHasMMI)) { ScaleARGBFilterCols = ScaleARGBCols_Any_MMI; @@ -728,6 +720,14 @@ static void ScaleYUVToARGBBilinearUp(int src_width, ScaleARGBFilterCols = ScaleARGBCols_MMI; } } +#endif +#if defined(HAS_SCALEARGBCOLS_MSA) + if (!filtering && TestCpuFlag(kCpuHasMSA)) { + ScaleARGBFilterCols = ScaleARGBCols_Any_MSA; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBFilterCols = ScaleARGBCols_MSA; + } + } #endif if (!filtering && src_width * 2 == dst_width && x < 0x8000) { ScaleARGBFilterCols = ScaleARGBColsUp2_C; @@ -857,14 +857,6 @@ static void ScaleARGBSimple(int src_width, } } #endif -#if defined(HAS_SCALEARGBCOLS_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ScaleARGBCols = ScaleARGBCols_Any_MSA; - if (IS_ALIGNED(dst_width, 4)) { - ScaleARGBCols = ScaleARGBCols_MSA; - } - } -#endif #if defined(HAS_SCALEARGBCOLS_MMI) if (TestCpuFlag(kCpuHasMMI)) { ScaleARGBCols = ScaleARGBCols_Any_MMI; @@ -872,6 +864,14 @@ static void ScaleARGBSimple(int src_width, ScaleARGBCols = ScaleARGBCols_MMI; } } +#endif +#if defined(HAS_SCALEARGBCOLS_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleARGBCols = 
ScaleARGBCols_Any_MSA; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBCols = ScaleARGBCols_MSA; + } + } #endif if (src_width * 2 == dst_width && x < 0x8000) { ScaleARGBCols = ScaleARGBColsUp2_C; @@ -981,7 +981,7 @@ static void ScaleARGB(const uint8_t* src, } } if (dx == 0x10000 && (x & 0xffff) == 0) { - // Arbitrary scale vertically, but unscaled vertically. + // Arbitrary scale vertically, but unscaled horizontally. ScalePlaneVertical(src_height, clip_width, clip_height, src_stride, dst_stride, src, dst, x, y, dy, 4, filtering); return; diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc index 636902717..fd4cbd038 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc @@ -776,6 +776,8 @@ void ScaleAddRow_16_C(const uint16_t* src_ptr, } } +// ARGB scale row functions + void ScaleARGBRowDown2_C(const uint8_t* src_argb, ptrdiff_t src_stride, uint8_t* dst_argb, @@ -1018,6 +1020,235 @@ void ScaleARGBFilterCols64_C(uint8_t* dst_argb, #undef BLENDERC #undef BLENDER +// UV scale row functions +// same as ARGB but 2 channels + +void ScaleUVRowDown2_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width) { + const uint16_t* src = (const uint16_t*)(src_uv); + uint16_t* dst = (uint16_t*)(dst_uv); + int x; + (void)src_stride; + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = src[1]; + dst[1] = src[3]; + src += 2; + dst += 2; + } + if (dst_width & 1) { + dst[0] = src[1]; + } +} + +void ScaleUVRowDown2Linear_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + uint8_t* dst_uv, + int dst_width) { + int x; + (void)src_stride; + for (x = 0; x < dst_width; ++x) { + dst_uv[0] = (src_uv[0] + src_uv[2] + 1) >> 1; + dst_uv[1] = (src_uv[1] + src_uv[3] + 1) >> 1; + src_uv += 4; + dst_uv += 2; + } +} + +void ScaleUVRowDown2Box_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + uint8_t* dst_uv, + 
int dst_width) { + int x; + for (x = 0; x < dst_width; ++x) { + dst_uv[0] = (src_uv[0] + src_uv[2] + src_uv[src_stride] + + src_uv[src_stride + 2] + 2) >> + 2; + dst_uv[1] = (src_uv[1] + src_uv[3] + src_uv[src_stride + 1] + + src_uv[src_stride + 3] + 2) >> + 2; + src_uv += 4; + dst_uv += 2; + } +} + +void ScaleUVRowDownEven_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width) { + const uint16_t* src = (const uint16_t*)(src_uv); + uint16_t* dst = (uint16_t*)(dst_uv); + (void)src_stride; + int x; + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = src[0]; + dst[1] = src[src_stepx]; + src += src_stepx * 2; + dst += 2; + } + if (dst_width & 1) { + dst[0] = src[0]; + } +} + +void ScaleUVRowDownEvenBox_C(const uint8_t* src_uv, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_uv, + int dst_width) { + int x; + for (x = 0; x < dst_width; ++x) { + dst_uv[0] = (src_uv[0] + src_uv[2] + src_uv[src_stride] + + src_uv[src_stride + 2] + 2) >> + 2; + dst_uv[1] = (src_uv[1] + src_uv[3] + src_uv[src_stride + 1] + + src_uv[src_stride + 3] + 2) >> + 2; + src_uv += src_stepx * 2; + dst_uv += 2; + } +} + +// Scales a single row of pixels using point sampling. 
+void ScaleUVCols_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x, + int dx) { + const uint16_t* src = (const uint16_t*)(src_uv); + uint16_t* dst = (uint16_t*)(dst_uv); + int j; + for (j = 0; j < dst_width - 1; j += 2) { + dst[0] = src[x >> 16]; + x += dx; + dst[1] = src[x >> 16]; + x += dx; + dst += 2; + } + if (dst_width & 1) { + dst[0] = src[x >> 16]; + } +} + +void ScaleUVCols64_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x32, + int dx) { + int64_t x = (int64_t)(x32); + const uint16_t* src = (const uint16_t*)(src_uv); + uint16_t* dst = (uint16_t*)(dst_uv); + int j; + for (j = 0; j < dst_width - 1; j += 2) { + dst[0] = src[x >> 16]; + x += dx; + dst[1] = src[x >> 16]; + x += dx; + dst += 2; + } + if (dst_width & 1) { + dst[0] = src[x >> 16]; + } +} + +// Scales a single row of pixels up by 2x using point sampling. +void ScaleUVColsUp2_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x, + int dx) { + const uint16_t* src = (const uint16_t*)(src_uv); + uint16_t* dst = (uint16_t*)(dst_uv); + int j; + (void)x; + (void)dx; + for (j = 0; j < dst_width - 1; j += 2) { + dst[1] = dst[0] = src[0]; + src += 1; + dst += 2; + } + if (dst_width & 1) { + dst[0] = src[0]; + } +} + +// TODO(fbarchard): Replace 0x7f ^ f with 128-f. bug=607. 
+// Mimics SSSE3 blender +#define BLENDER1(a, b, f) ((a) * (0x7f ^ f) + (b)*f) >> 7 +#define BLENDERC(a, b, f, s) \ + (uint16_t)(BLENDER1(((a) >> s) & 255, ((b) >> s) & 255, f) << s) +#define BLENDER(a, b, f) BLENDERC(a, b, f, 8) | BLENDERC(a, b, f, 0) + +void ScaleUVFilterCols_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x, + int dx) { + const uint16_t* src = (const uint16_t*)(src_uv); + uint16_t* dst = (uint16_t*)(dst_uv); + int j; + for (j = 0; j < dst_width - 1; j += 2) { + int xi = x >> 16; + int xf = (x >> 9) & 0x7f; + uint16_t a = src[xi]; + uint16_t b = src[xi + 1]; + dst[0] = BLENDER(a, b, xf); + x += dx; + xi = x >> 16; + xf = (x >> 9) & 0x7f; + a = src[xi]; + b = src[xi + 1]; + dst[1] = BLENDER(a, b, xf); + x += dx; + dst += 2; + } + if (dst_width & 1) { + int xi = x >> 16; + int xf = (x >> 9) & 0x7f; + uint16_t a = src[xi]; + uint16_t b = src[xi + 1]; + dst[0] = BLENDER(a, b, xf); + } +} + +void ScaleUVFilterCols64_C(uint8_t* dst_uv, + const uint8_t* src_uv, + int dst_width, + int x32, + int dx) { + int64_t x = (int64_t)(x32); + const uint16_t* src = (const uint16_t*)(src_uv); + uint16_t* dst = (uint16_t*)(dst_uv); + int j; + for (j = 0; j < dst_width - 1; j += 2) { + int64_t xi = x >> 16; + int xf = (x >> 9) & 0x7f; + uint16_t a = src[xi]; + uint16_t b = src[xi + 1]; + dst[0] = BLENDER(a, b, xf); + x += dx; + xi = x >> 16; + xf = (x >> 9) & 0x7f; + a = src[xi]; + b = src[xi + 1]; + dst[1] = BLENDER(a, b, xf); + x += dx; + dst += 2; + } + if (dst_width & 1) { + int64_t xi = x >> 16; + int xf = (x >> 9) & 0x7f; + uint16_t a = src[xi]; + uint16_t b = src[xi + 1]; + dst[0] = BLENDER(a, b, xf); + } +} +#undef BLENDER1 +#undef BLENDERC +#undef BLENDER + // Scale plane vertically with bilinear interpolation. 
void ScalePlaneVertical(int src_height, int dst_width, @@ -1067,14 +1298,6 @@ void ScalePlaneVertical(int src_height, } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(dst_width_bytes, 32)) { - InterpolateRow = InterpolateRow_MSA; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MMI) if (TestCpuFlag(kCpuHasMMI)) { InterpolateRow = InterpolateRow_Any_MMI; @@ -1082,6 +1305,14 @@ void ScalePlaneVertical(int src_height, InterpolateRow = InterpolateRow_MMI; } } +#endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(dst_width_bytes, 32)) { + InterpolateRow = InterpolateRow_MSA; + } + } #endif for (j = 0; j < dst_height; ++j) { int yi; diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc index 90a49f30d..e575ee18b 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc @@ -102,16 +102,16 @@ void ScaleRowDown2_SSSE3(const uint8_t* src_ptr, // 16 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "psrlw $0x8,%%xmm0 \n" - "psrlw $0x8,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "psrlw $0x8,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -125,25 +125,25 @@ void ScaleRowDown2Linear_SSSE3(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "pcmpeqb %%xmm4,%%xmm4 \n" - "psrlw $0xf,%%xmm4 \n" - "packuswb %%xmm4,%%xmm4 \n" - "pxor %%xmm5,%%xmm5 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psrlw $0xf,%%xmm4 \n" + "packuswb %%xmm4,%%xmm4 \n" + "pxor %%xmm5,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm1 \n" - "pavgw %%xmm5,%%xmm0 \n" - "pavgw %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm1 \n" + "pavgw %%xmm5,%%xmm0 \n" + "pavgw %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -156,33 +156,33 @@ void ScaleRowDown2Box_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "pcmpeqb %%xmm4,%%xmm4 \n" - "psrlw $0xf,%%xmm4 \n" - "packuswb %%xmm4,%%xmm4 \n" - "pxor %%xmm5,%%xmm5 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psrlw $0xf,%%xmm4 \n" + "packuswb %%xmm4,%%xmm4 \n" + "pxor %%xmm5,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" 
- "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x00(%0,%3,1),%%xmm2 \n" - "movdqu 0x10(%0,%3,1),%%xmm3 \n" - "lea 0x20(%0),%0 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm1 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm4,%%xmm3 \n" - "paddw %%xmm2,%%xmm0 \n" - "paddw %%xmm3,%%xmm1 \n" - "psrlw $0x1,%%xmm0 \n" - "psrlw $0x1,%%xmm1 \n" - "pavgw %%xmm5,%%xmm0 \n" - "pavgw %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x00(%0,%3,1),%%xmm2 \n" + "movdqu 0x10(%0,%3,1),%%xmm3 \n" + "lea 0x20(%0),%0 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm1 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm4,%%xmm3 \n" + "paddw %%xmm2,%%xmm0 \n" + "paddw %%xmm3,%%xmm1 \n" + "psrlw $0x1,%%xmm0 \n" + "psrlw $0x1,%%xmm1 \n" + "pavgw %%xmm5,%%xmm0 \n" + "pavgw %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -200,17 +200,17 @@ void ScaleRowDown2_AVX2(const uint8_t* src_ptr, LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 @@ -225,26 +225,26 @@ void ScaleRowDown2Linear_AVX2(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm 
volatile( - "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" - "vpsrlw $0xf,%%ymm4,%%ymm4 \n" - "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" - "vpxor %%ymm5,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $0xf,%%ymm4,%%ymm4 \n" + "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" + "vpxor %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" - "vpmaddubsw %%ymm4,%%ymm1,%%ymm1 \n" - "vpavgw %%ymm5,%%ymm0,%%ymm0 \n" - "vpavgw %%ymm5,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm4,%%ymm1,%%ymm1 \n" + "vpavgw %%ymm5,%%ymm0,%%ymm0 \n" + "vpavgw %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 @@ -258,34 +258,34 @@ void ScaleRowDown2Box_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" - "vpsrlw $0xf,%%ymm4,%%ymm4 \n" - "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" - "vpxor %%ymm5,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $0xf,%%ymm4,%%ymm4 \n" + "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" + "vpxor %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x00(%0,%3,1),%%ymm2 \n" - "vmovdqu 0x20(%0,%3,1),%%ymm3 \n" - "lea 0x40(%0),%0 \n" - "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" - "vpmaddubsw %%ymm4,%%ymm1,%%ymm1 \n" - "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" - "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" - "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" - "vpsrlw $0x1,%%ymm0,%%ymm0 \n" - "vpsrlw 
$0x1,%%ymm1,%%ymm1 \n" - "vpavgw %%ymm5,%%ymm0,%%ymm0 \n" - "vpavgw %%ymm5,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%ymm0,(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x00(%0,%3,1),%%ymm2 \n" + "vmovdqu 0x20(%0,%3,1),%%ymm3 \n" + "lea 0x40(%0),%0 \n" + "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm4,%%ymm1,%%ymm1 \n" + "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" + "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vpsrlw $0x1,%%ymm0,%%ymm0 \n" + "vpsrlw $0x1,%%ymm1,%%ymm1 \n" + "vpavgw %%ymm5,%%ymm0,%%ymm0 \n" + "vpavgw %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 @@ -301,24 +301,24 @@ void ScaleRowDown4_SSSE3(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "pcmpeqb %%xmm5,%%xmm5 \n" - "psrld $0x18,%%xmm5 \n" - "pslld $0x10,%%xmm5 \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrld $0x18,%%xmm5 \n" + "pslld $0x10,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "pand %%xmm5,%%xmm0 \n" - "pand %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm0 \n" - "psrlw $0x8,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movq %%xmm0,(%1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "pand %%xmm5,%%xmm0 \n" + "pand %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "psrlw $0x8,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -332,46 +332,46 @@ void ScaleRowDown4Box_SSSE3(const uint8_t* 
src_ptr, int dst_width) { intptr_t stridex3; asm volatile( - "pcmpeqb %%xmm4,%%xmm4 \n" - "psrlw $0xf,%%xmm4 \n" - "movdqa %%xmm4,%%xmm5 \n" - "packuswb %%xmm4,%%xmm4 \n" - "psllw $0x3,%%xmm5 \n" - "lea 0x00(%4,%4,2),%3 \n" + "pcmpeqb %%xmm4,%%xmm4 \n" + "psrlw $0xf,%%xmm4 \n" + "movdqa %%xmm4,%%xmm5 \n" + "packuswb %%xmm4,%%xmm4 \n" + "psllw $0x3,%%xmm5 \n" + "lea 0x00(%4,%4,2),%3 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x00(%0,%4,1),%%xmm2 \n" - "movdqu 0x10(%0,%4,1),%%xmm3 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm1 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm4,%%xmm3 \n" - "paddw %%xmm2,%%xmm0 \n" - "paddw %%xmm3,%%xmm1 \n" - "movdqu 0x00(%0,%4,2),%%xmm2 \n" - "movdqu 0x10(%0,%4,2),%%xmm3 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm4,%%xmm3 \n" - "paddw %%xmm2,%%xmm0 \n" - "paddw %%xmm3,%%xmm1 \n" - "movdqu 0x00(%0,%3,1),%%xmm2 \n" - "movdqu 0x10(%0,%3,1),%%xmm3 \n" - "lea 0x20(%0),%0 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" - "pmaddubsw %%xmm4,%%xmm3 \n" - "paddw %%xmm2,%%xmm0 \n" - "paddw %%xmm3,%%xmm1 \n" - "phaddw %%xmm1,%%xmm0 \n" - "paddw %%xmm5,%%xmm0 \n" - "psrlw $0x4,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movq %%xmm0,(%1) \n" - "lea 0x8(%1),%1 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x00(%0,%4,1),%%xmm2 \n" + "movdqu 0x10(%0,%4,1),%%xmm3 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm1 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm4,%%xmm3 \n" + "paddw %%xmm2,%%xmm0 \n" + "paddw %%xmm3,%%xmm1 \n" + "movdqu 0x00(%0,%4,2),%%xmm2 \n" + "movdqu 0x10(%0,%4,2),%%xmm3 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm4,%%xmm3 \n" + "paddw %%xmm2,%%xmm0 \n" + "paddw %%xmm3,%%xmm1 \n" + "movdqu 0x00(%0,%3,1),%%xmm2 \n" + "movdqu 0x10(%0,%3,1),%%xmm3 \n" + "lea 0x20(%0),%0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm4,%%xmm3 \n" + "paddw %%xmm2,%%xmm0 \n" + "paddw %%xmm3,%%xmm1 \n" + 
"phaddw %%xmm1,%%xmm0 \n" + "paddw %%xmm5,%%xmm0 \n" + "psrlw $0x4,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -387,26 +387,26 @@ void ScaleRowDown4_AVX2(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - "vpsrld $0x18,%%ymm5,%%ymm5 \n" - "vpslld $0x10,%%ymm5,%%ymm5 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + "vpsrld $0x18,%%ymm5,%%ymm5 \n" + "vpslld $0x10,%%ymm5,%%ymm5 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "lea 0x40(%0),%0 \n" - "vpand %%ymm5,%%ymm0,%%ymm0 \n" - "vpand %%ymm5,%%ymm1,%%ymm1 \n" - "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpsrlw $0x8,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpand %%ymm5,%%ymm0,%%ymm0 \n" + "vpand %%ymm5,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 @@ -420,46 +420,46 @@ void ScaleRowDown4Box_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" - "vpsrlw $0xf,%%ymm4,%%ymm4 \n" - "vpsllw $0x3,%%ymm4,%%ymm5 \n" - "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" + "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $0xf,%%ymm4,%%ymm4 \n" + "vpsllw $0x3,%%ymm4,%%ymm5 \n" + "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm0 \n" - "vmovdqu 0x20(%0),%%ymm1 \n" - "vmovdqu 0x00(%0,%3,1),%%ymm2 \n" - 
"vmovdqu 0x20(%0,%3,1),%%ymm3 \n" - "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" - "vpmaddubsw %%ymm4,%%ymm1,%%ymm1 \n" - "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" - "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" - "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" - "vmovdqu 0x00(%0,%3,2),%%ymm2 \n" - "vmovdqu 0x20(%0,%3,2),%%ymm3 \n" - "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" - "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" - "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" - "vmovdqu 0x00(%0,%4,1),%%ymm2 \n" - "vmovdqu 0x20(%0,%4,1),%%ymm3 \n" - "lea 0x40(%0),%0 \n" - "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" - "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" - "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" - "vphaddw %%ymm1,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vpaddw %%ymm5,%%ymm0,%%ymm0 \n" - "vpsrlw $0x4,%%ymm0,%%ymm0 \n" - "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" - "vpermq $0xd8,%%ymm0,%%ymm0 \n" - "vmovdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x00(%0,%3,1),%%ymm2 \n" + "vmovdqu 0x20(%0,%3,1),%%ymm3 \n" + "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm4,%%ymm1,%%ymm1 \n" + "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" + "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vmovdqu 0x00(%0,%3,2),%%ymm2 \n" + "vmovdqu 0x20(%0,%3,2),%%ymm3 \n" + "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" + "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vmovdqu 0x00(%0,%4,1),%%ymm2 \n" + "vmovdqu 0x20(%0,%4,1),%%ymm3 \n" + "lea 0x40(%0),%0 \n" + "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm4,%%ymm3,%%ymm3 \n" + "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vphaddw %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm5,%%ymm0,%%ymm0 \n" + "vpsrlw $0x4,%%ymm0,%%ymm0 \n" + "vpackuswb 
%%ymm0,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 @@ -476,9 +476,9 @@ void ScaleRowDown34_SSSE3(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "movdqa %0,%%xmm3 \n" - "movdqa %1,%%xmm4 \n" - "movdqa %2,%%xmm5 \n" + "movdqa %0,%%xmm3 \n" + "movdqa %1,%%xmm4 \n" + "movdqa %2,%%xmm5 \n" : : "m"(kShuf0), // %0 "m"(kShuf1), // %1 @@ -488,20 +488,20 @@ void ScaleRowDown34_SSSE3(const uint8_t* src_ptr, LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm2 \n" - "lea 0x20(%0),%0 \n" - "movdqa %%xmm2,%%xmm1 \n" - "palignr $0x8,%%xmm0,%%xmm1 \n" - "pshufb %%xmm3,%%xmm0 \n" - "pshufb %%xmm4,%%xmm1 \n" - "pshufb %%xmm5,%%xmm2 \n" - "movq %%xmm0,(%1) \n" - "movq %%xmm1,0x8(%1) \n" - "movq %%xmm2,0x10(%1) \n" - "lea 0x18(%1),%1 \n" - "sub $0x18,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm2 \n" + "lea 0x20(%0),%0 \n" + "movdqa %%xmm2,%%xmm1 \n" + "palignr $0x8,%%xmm0,%%xmm1 \n" + "pshufb %%xmm3,%%xmm0 \n" + "pshufb %%xmm4,%%xmm1 \n" + "pshufb %%xmm5,%%xmm2 \n" + "movq %%xmm0,(%1) \n" + "movq %%xmm1,0x8(%1) \n" + "movq %%xmm2,0x10(%1) \n" + "lea 0x18(%1),%1 \n" + "sub $0x18,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -514,18 +514,18 @@ void ScaleRowDown34_1_Box_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "movdqa %0,%%xmm2 \n" // kShuf01 - "movdqa %1,%%xmm3 \n" // kShuf11 - "movdqa %2,%%xmm4 \n" // kShuf21 + "movdqa %0,%%xmm2 \n" // kShuf01 + "movdqa %1,%%xmm3 \n" // kShuf11 + "movdqa %2,%%xmm4 \n" // kShuf21 : : "m"(kShuf01), // %0 "m"(kShuf11), // %1 "m"(kShuf21) // %2 ); asm volatile( - "movdqa %0,%%xmm5 \n" // kMadd01 - "movdqa %1,%%xmm0 \n" // kMadd11 - "movdqa %2,%%xmm1 \n" // kRound34 + "movdqa %0,%%xmm5 \n" // kMadd01 + "movdqa %1,%%xmm0 \n" // kMadd11 + "movdqa %2,%%xmm1 
\n" // kRound34 : : "m"(kMadd01), // %0 "m"(kMadd11), // %1 @@ -535,37 +535,37 @@ void ScaleRowDown34_1_Box_SSSE3(const uint8_t* src_ptr, LABELALIGN "1: \n" - "movdqu (%0),%%xmm6 \n" - "movdqu 0x00(%0,%3,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" - "pshufb %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm5,%%xmm6 \n" - "paddsw %%xmm1,%%xmm6 \n" - "psrlw $0x2,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movq %%xmm6,(%1) \n" - "movdqu 0x8(%0),%%xmm6 \n" - "movdqu 0x8(%0,%3,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" - "pshufb %%xmm3,%%xmm6 \n" - "pmaddubsw %%xmm0,%%xmm6 \n" - "paddsw %%xmm1,%%xmm6 \n" - "psrlw $0x2,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movq %%xmm6,0x8(%1) \n" - "movdqu 0x10(%0),%%xmm6 \n" - "movdqu 0x10(%0,%3,1),%%xmm7 \n" - "lea 0x20(%0),%0 \n" - "pavgb %%xmm7,%%xmm6 \n" - "pshufb %%xmm4,%%xmm6 \n" - "pmaddubsw %4,%%xmm6 \n" - "paddsw %%xmm1,%%xmm6 \n" - "psrlw $0x2,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movq %%xmm6,0x10(%1) \n" - "lea 0x18(%1),%1 \n" - "sub $0x18,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm6 \n" + "movdqu 0x00(%0,%3,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" + "pshufb %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm5,%%xmm6 \n" + "paddsw %%xmm1,%%xmm6 \n" + "psrlw $0x2,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movq %%xmm6,(%1) \n" + "movdqu 0x8(%0),%%xmm6 \n" + "movdqu 0x8(%0,%3,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" + "pshufb %%xmm3,%%xmm6 \n" + "pmaddubsw %%xmm0,%%xmm6 \n" + "paddsw %%xmm1,%%xmm6 \n" + "psrlw $0x2,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movq %%xmm6,0x8(%1) \n" + "movdqu 0x10(%0),%%xmm6 \n" + "movdqu 0x10(%0,%3,1),%%xmm7 \n" + "lea 0x20(%0),%0 \n" + "pavgb %%xmm7,%%xmm6 \n" + "pshufb %%xmm4,%%xmm6 \n" + "pmaddubsw %4,%%xmm6 \n" + "paddsw %%xmm1,%%xmm6 \n" + "psrlw $0x2,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movq %%xmm6,0x10(%1) \n" + "lea 0x18(%1),%1 \n" + "sub $0x18,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -580,18 +580,18 @@ void ScaleRowDown34_0_Box_SSSE3(const uint8_t* 
src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "movdqa %0,%%xmm2 \n" // kShuf01 - "movdqa %1,%%xmm3 \n" // kShuf11 - "movdqa %2,%%xmm4 \n" // kShuf21 + "movdqa %0,%%xmm2 \n" // kShuf01 + "movdqa %1,%%xmm3 \n" // kShuf11 + "movdqa %2,%%xmm4 \n" // kShuf21 : : "m"(kShuf01), // %0 "m"(kShuf11), // %1 "m"(kShuf21) // %2 ); asm volatile( - "movdqa %0,%%xmm5 \n" // kMadd01 - "movdqa %1,%%xmm0 \n" // kMadd11 - "movdqa %2,%%xmm1 \n" // kRound34 + "movdqa %0,%%xmm5 \n" // kMadd01 + "movdqa %1,%%xmm0 \n" // kMadd11 + "movdqa %2,%%xmm1 \n" // kRound34 : : "m"(kMadd01), // %0 "m"(kMadd11), // %1 @@ -602,40 +602,40 @@ void ScaleRowDown34_0_Box_SSSE3(const uint8_t* src_ptr, LABELALIGN "1: \n" - "movdqu (%0),%%xmm6 \n" - "movdqu 0x00(%0,%3,1),%%xmm7 \n" - "pavgb %%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" - "pshufb %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm5,%%xmm6 \n" - "paddsw %%xmm1,%%xmm6 \n" - "psrlw $0x2,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movq %%xmm6,(%1) \n" - "movdqu 0x8(%0),%%xmm6 \n" - "movdqu 0x8(%0,%3,1),%%xmm7 \n" - "pavgb %%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" - "pshufb %%xmm3,%%xmm6 \n" - "pmaddubsw %%xmm0,%%xmm6 \n" - "paddsw %%xmm1,%%xmm6 \n" - "psrlw $0x2,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movq %%xmm6,0x8(%1) \n" - "movdqu 0x10(%0),%%xmm6 \n" - "movdqu 0x10(%0,%3,1),%%xmm7 \n" - "lea 0x20(%0),%0 \n" - "pavgb %%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" - "pshufb %%xmm4,%%xmm6 \n" - "pmaddubsw %4,%%xmm6 \n" - "paddsw %%xmm1,%%xmm6 \n" - "psrlw $0x2,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movq %%xmm6,0x10(%1) \n" - "lea 0x18(%1),%1 \n" - "sub $0x18,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm6 \n" + "movdqu 0x00(%0,%3,1),%%xmm7 \n" + "pavgb %%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" + "pshufb %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm5,%%xmm6 \n" + "paddsw %%xmm1,%%xmm6 \n" + "psrlw $0x2,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movq %%xmm6,(%1) \n" + "movdqu 0x8(%0),%%xmm6 \n" + "movdqu 0x8(%0,%3,1),%%xmm7 \n" + "pavgb %%xmm6,%%xmm7 
\n" + "pavgb %%xmm7,%%xmm6 \n" + "pshufb %%xmm3,%%xmm6 \n" + "pmaddubsw %%xmm0,%%xmm6 \n" + "paddsw %%xmm1,%%xmm6 \n" + "psrlw $0x2,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movq %%xmm6,0x8(%1) \n" + "movdqu 0x10(%0),%%xmm6 \n" + "movdqu 0x10(%0,%3,1),%%xmm7 \n" + "lea 0x20(%0),%0 \n" + "pavgb %%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" + "pshufb %%xmm4,%%xmm6 \n" + "pmaddubsw %4,%%xmm6 \n" + "paddsw %%xmm1,%%xmm6 \n" + "psrlw $0x2,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movq %%xmm6,0x10(%1) \n" + "lea 0x18(%1),%1 \n" + "sub $0x18,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -651,23 +651,23 @@ void ScaleRowDown38_SSSE3(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "movdqa %3,%%xmm4 \n" - "movdqa %4,%%xmm5 \n" + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "pshufb %%xmm4,%%xmm0 \n" - "pshufb %%xmm5,%%xmm1 \n" - "paddusb %%xmm1,%%xmm0 \n" - "movq %%xmm0,(%1) \n" - "movhlps %%xmm0,%%xmm1 \n" - "movd %%xmm1,0x8(%1) \n" - "lea 0xc(%1),%1 \n" - "sub $0xc,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "pshufb %%xmm4,%%xmm0 \n" + "pshufb %%xmm5,%%xmm1 \n" + "paddusb %%xmm1,%%xmm0 \n" + "movq %%xmm0,(%1) \n" + "movhlps %%xmm0,%%xmm1 \n" + "movd %%xmm1,0x8(%1) \n" + "lea 0xc(%1),%1 \n" + "sub $0xc,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -681,10 +681,10 @@ void ScaleRowDown38_2_Box_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "movdqa %0,%%xmm2 \n" - "movdqa %1,%%xmm3 \n" - "movdqa %2,%%xmm4 \n" - "movdqa %3,%%xmm5 \n" + "movdqa %0,%%xmm2 \n" + "movdqa %1,%%xmm3 \n" + "movdqa %2,%%xmm4 \n" + "movdqa %3,%%xmm5 \n" : : "m"(kShufAb0), // %0 "m"(kShufAb1), // %1 @@ -695,25 +695,25 @@ void ScaleRowDown38_2_Box_SSSE3(const uint8_t* src_ptr, LABELALIGN "1: \n" - "movdqu 
(%0),%%xmm0 \n" - "movdqu 0x00(%0,%3,1),%%xmm1 \n" - "lea 0x10(%0),%0 \n" - "pavgb %%xmm1,%%xmm0 \n" - "movdqa %%xmm0,%%xmm1 \n" - "pshufb %%xmm2,%%xmm1 \n" - "movdqa %%xmm0,%%xmm6 \n" - "pshufb %%xmm3,%%xmm6 \n" - "paddusw %%xmm6,%%xmm1 \n" - "pshufb %%xmm4,%%xmm0 \n" - "paddusw %%xmm0,%%xmm1 \n" - "pmulhuw %%xmm5,%%xmm1 \n" - "packuswb %%xmm1,%%xmm1 \n" - "movd %%xmm1,(%1) \n" - "psrlq $0x10,%%xmm1 \n" - "movd %%xmm1,0x2(%1) \n" - "lea 0x6(%1),%1 \n" - "sub $0x6,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%3,1),%%xmm1 \n" + "lea 0x10(%0),%0 \n" + "pavgb %%xmm1,%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pshufb %%xmm2,%%xmm1 \n" + "movdqa %%xmm0,%%xmm6 \n" + "pshufb %%xmm3,%%xmm6 \n" + "paddusw %%xmm6,%%xmm1 \n" + "pshufb %%xmm4,%%xmm0 \n" + "paddusw %%xmm0,%%xmm1 \n" + "pmulhuw %%xmm5,%%xmm1 \n" + "packuswb %%xmm1,%%xmm1 \n" + "movd %%xmm1,(%1) \n" + "psrlq $0x10,%%xmm1 \n" + "movd %%xmm1,0x2(%1) \n" + "lea 0x6(%1),%1 \n" + "sub $0x6,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -726,10 +726,10 @@ void ScaleRowDown38_3_Box_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "movdqa %0,%%xmm2 \n" - "movdqa %1,%%xmm3 \n" - "movdqa %2,%%xmm4 \n" - "pxor %%xmm5,%%xmm5 \n" + "movdqa %0,%%xmm2 \n" + "movdqa %1,%%xmm3 \n" + "movdqa %2,%%xmm4 \n" + "pxor %%xmm5,%%xmm5 \n" : : "m"(kShufAc), // %0 "m"(kShufAc3), // %1 @@ -739,44 +739,44 @@ void ScaleRowDown38_3_Box_SSSE3(const uint8_t* src_ptr, LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x00(%0,%3,1),%%xmm6 \n" - "movhlps %%xmm0,%%xmm1 \n" - "movhlps %%xmm6,%%xmm7 \n" - "punpcklbw %%xmm5,%%xmm0 \n" - "punpcklbw %%xmm5,%%xmm1 \n" - "punpcklbw %%xmm5,%%xmm6 \n" - "punpcklbw %%xmm5,%%xmm7 \n" - "paddusw %%xmm6,%%xmm0 \n" - "paddusw %%xmm7,%%xmm1 \n" - "movdqu 0x00(%0,%3,2),%%xmm6 \n" - "lea 0x10(%0),%0 \n" - "movhlps %%xmm6,%%xmm7 \n" - "punpcklbw %%xmm5,%%xmm6 \n" - "punpcklbw %%xmm5,%%xmm7 \n" - "paddusw %%xmm6,%%xmm0 \n" - 
"paddusw %%xmm7,%%xmm1 \n" - "movdqa %%xmm0,%%xmm6 \n" - "psrldq $0x2,%%xmm0 \n" - "paddusw %%xmm0,%%xmm6 \n" - "psrldq $0x2,%%xmm0 \n" - "paddusw %%xmm0,%%xmm6 \n" - "pshufb %%xmm2,%%xmm6 \n" - "movdqa %%xmm1,%%xmm7 \n" - "psrldq $0x2,%%xmm1 \n" - "paddusw %%xmm1,%%xmm7 \n" - "psrldq $0x2,%%xmm1 \n" - "paddusw %%xmm1,%%xmm7 \n" - "pshufb %%xmm3,%%xmm7 \n" - "paddusw %%xmm7,%%xmm6 \n" - "pmulhuw %%xmm4,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movd %%xmm6,(%1) \n" - "psrlq $0x10,%%xmm6 \n" - "movd %%xmm6,0x2(%1) \n" - "lea 0x6(%1),%1 \n" - "sub $0x6,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%3,1),%%xmm6 \n" + "movhlps %%xmm0,%%xmm1 \n" + "movhlps %%xmm6,%%xmm7 \n" + "punpcklbw %%xmm5,%%xmm0 \n" + "punpcklbw %%xmm5,%%xmm1 \n" + "punpcklbw %%xmm5,%%xmm6 \n" + "punpcklbw %%xmm5,%%xmm7 \n" + "paddusw %%xmm6,%%xmm0 \n" + "paddusw %%xmm7,%%xmm1 \n" + "movdqu 0x00(%0,%3,2),%%xmm6 \n" + "lea 0x10(%0),%0 \n" + "movhlps %%xmm6,%%xmm7 \n" + "punpcklbw %%xmm5,%%xmm6 \n" + "punpcklbw %%xmm5,%%xmm7 \n" + "paddusw %%xmm6,%%xmm0 \n" + "paddusw %%xmm7,%%xmm1 \n" + "movdqa %%xmm0,%%xmm6 \n" + "psrldq $0x2,%%xmm0 \n" + "paddusw %%xmm0,%%xmm6 \n" + "psrldq $0x2,%%xmm0 \n" + "paddusw %%xmm0,%%xmm6 \n" + "pshufb %%xmm2,%%xmm6 \n" + "movdqa %%xmm1,%%xmm7 \n" + "psrldq $0x2,%%xmm1 \n" + "paddusw %%xmm1,%%xmm7 \n" + "psrldq $0x2,%%xmm1 \n" + "paddusw %%xmm1,%%xmm7 \n" + "pshufb %%xmm3,%%xmm7 \n" + "paddusw %%xmm7,%%xmm6 \n" + "pmulhuw %%xmm4,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movd %%xmm6,(%1) \n" + "psrlq $0x10,%%xmm6 \n" + "movd %%xmm6,0x2(%1) \n" + "lea 0x6(%1),%1 \n" + "sub $0x6,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -791,25 +791,25 @@ void ScaleAddRow_SSE2(const uint8_t* src_ptr, int src_width) { asm volatile( - "pxor %%xmm5,%%xmm5 \n" + "pxor %%xmm5,%%xmm5 \n" // 16 pixel loop. 
LABELALIGN "1: \n" - "movdqu (%0),%%xmm3 \n" - "lea 0x10(%0),%0 \n" // src_ptr += 16 - "movdqu (%1),%%xmm0 \n" - "movdqu 0x10(%1),%%xmm1 \n" - "movdqa %%xmm3,%%xmm2 \n" - "punpcklbw %%xmm5,%%xmm2 \n" - "punpckhbw %%xmm5,%%xmm3 \n" - "paddusw %%xmm2,%%xmm0 \n" - "paddusw %%xmm3,%%xmm1 \n" - "movdqu %%xmm0,(%1) \n" - "movdqu %%xmm1,0x10(%1) \n" - "lea 0x20(%1),%1 \n" - "sub $0x10,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm3 \n" + "lea 0x10(%0),%0 \n" // src_ptr += 16 + "movdqu (%1),%%xmm0 \n" + "movdqu 0x10(%1),%%xmm1 \n" + "movdqa %%xmm3,%%xmm2 \n" + "punpcklbw %%xmm5,%%xmm2 \n" + "punpckhbw %%xmm5,%%xmm3 \n" + "paddusw %%xmm2,%%xmm0 \n" + "paddusw %%xmm3,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(src_width) // %2 @@ -824,22 +824,22 @@ void ScaleAddRow_AVX2(const uint8_t* src_ptr, int src_width) { asm volatile( - "vpxor %%ymm5,%%ymm5,%%ymm5 \n" + "vpxor %%ymm5,%%ymm5,%%ymm5 \n" LABELALIGN "1: \n" - "vmovdqu (%0),%%ymm3 \n" - "lea 0x20(%0),%0 \n" // src_ptr += 32 - "vpermq $0xd8,%%ymm3,%%ymm3 \n" - "vpunpcklbw %%ymm5,%%ymm3,%%ymm2 \n" - "vpunpckhbw %%ymm5,%%ymm3,%%ymm3 \n" - "vpaddusw (%1),%%ymm2,%%ymm0 \n" - "vpaddusw 0x20(%1),%%ymm3,%%ymm1 \n" - "vmovdqu %%ymm0,(%1) \n" - "vmovdqu %%ymm1,0x20(%1) \n" - "lea 0x40(%1),%1 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "vmovdqu (%0),%%ymm3 \n" + "lea 0x20(%0),%0 \n" // src_ptr += 32 + "vpermq $0xd8,%%ymm3,%%ymm3 \n" + "vpunpcklbw %%ymm5,%%ymm3,%%ymm2 \n" + "vpunpckhbw %%ymm5,%%ymm3,%%ymm3 \n" + "vpaddusw (%1),%%ymm2,%%ymm0 \n" + "vpaddusw 0x20(%1),%%ymm3,%%ymm1 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "lea 0x40(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" "vzeroupper \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 @@ -866,69 +866,69 @@ void ScaleFilterCols_SSSE3(uint8_t* dst_ptr, int dx) { intptr_t x0, x1, temp_pixel; asm volatile( - "movd %6,%%xmm2 \n" - "movd %7,%%xmm3 \n" - 
"movl $0x04040000,%k2 \n" - "movd %k2,%%xmm5 \n" - "pcmpeqb %%xmm6,%%xmm6 \n" - "psrlw $0x9,%%xmm6 \n" // 0x007f007f - "pcmpeqb %%xmm7,%%xmm7 \n" - "psrlw $15,%%xmm7 \n" // 0x00010001 + "movd %6,%%xmm2 \n" + "movd %7,%%xmm3 \n" + "movl $0x04040000,%k2 \n" + "movd %k2,%%xmm5 \n" + "pcmpeqb %%xmm6,%%xmm6 \n" + "psrlw $0x9,%%xmm6 \n" // 0x007f007f + "pcmpeqb %%xmm7,%%xmm7 \n" + "psrlw $15,%%xmm7 \n" // 0x00010001 - "pextrw $0x1,%%xmm2,%k3 \n" - "subl $0x2,%5 \n" - "jl 29f \n" - "movdqa %%xmm2,%%xmm0 \n" - "paddd %%xmm3,%%xmm0 \n" - "punpckldq %%xmm0,%%xmm2 \n" - "punpckldq %%xmm3,%%xmm3 \n" - "paddd %%xmm3,%%xmm3 \n" - "pextrw $0x3,%%xmm2,%k4 \n" + "pextrw $0x1,%%xmm2,%k3 \n" + "subl $0x2,%5 \n" + "jl 29f \n" + "movdqa %%xmm2,%%xmm0 \n" + "paddd %%xmm3,%%xmm0 \n" + "punpckldq %%xmm0,%%xmm2 \n" + "punpckldq %%xmm3,%%xmm3 \n" + "paddd %%xmm3,%%xmm3 \n" + "pextrw $0x3,%%xmm2,%k4 \n" LABELALIGN "2: \n" - "movdqa %%xmm2,%%xmm1 \n" - "paddd %%xmm3,%%xmm2 \n" - "movzwl 0x00(%1,%3,1),%k2 \n" - "movd %k2,%%xmm0 \n" - "psrlw $0x9,%%xmm1 \n" - "movzwl 0x00(%1,%4,1),%k2 \n" - "movd %k2,%%xmm4 \n" - "pshufb %%xmm5,%%xmm1 \n" - "punpcklwd %%xmm4,%%xmm0 \n" - "psubb %8,%%xmm0 \n" // make pixels signed. - "pxor %%xmm6,%%xmm1 \n" // 128 - f = (f ^ 127 ) + + "movdqa %%xmm2,%%xmm1 \n" + "paddd %%xmm3,%%xmm2 \n" + "movzwl 0x00(%1,%3,1),%k2 \n" + "movd %k2,%%xmm0 \n" + "psrlw $0x9,%%xmm1 \n" + "movzwl 0x00(%1,%4,1),%k2 \n" + "movd %k2,%%xmm4 \n" + "pshufb %%xmm5,%%xmm1 \n" + "punpcklwd %%xmm4,%%xmm0 \n" + "psubb %8,%%xmm0 \n" // make pixels signed. + "pxor %%xmm6,%%xmm1 \n" // 128 - f = (f ^ 127 ) + // 1 - "paddusb %%xmm7,%%xmm1 \n" - "pmaddubsw %%xmm0,%%xmm1 \n" - "pextrw $0x1,%%xmm2,%k3 \n" - "pextrw $0x3,%%xmm2,%k4 \n" - "paddw %9,%%xmm1 \n" // make pixels unsigned. 
- "psrlw $0x7,%%xmm1 \n" - "packuswb %%xmm1,%%xmm1 \n" - "movd %%xmm1,%k2 \n" - "mov %w2,(%0) \n" - "lea 0x2(%0),%0 \n" - "subl $0x2,%5 \n" - "jge 2b \n" + "paddusb %%xmm7,%%xmm1 \n" + "pmaddubsw %%xmm0,%%xmm1 \n" + "pextrw $0x1,%%xmm2,%k3 \n" + "pextrw $0x3,%%xmm2,%k4 \n" + "paddw %9,%%xmm1 \n" // make pixels unsigned. + "psrlw $0x7,%%xmm1 \n" + "packuswb %%xmm1,%%xmm1 \n" + "movd %%xmm1,%k2 \n" + "mov %w2,(%0) \n" + "lea 0x2(%0),%0 \n" + "subl $0x2,%5 \n" + "jge 2b \n" LABELALIGN "29: \n" - "addl $0x1,%5 \n" - "jl 99f \n" - "movzwl 0x00(%1,%3,1),%k2 \n" - "movd %k2,%%xmm0 \n" - "psrlw $0x9,%%xmm2 \n" - "pshufb %%xmm5,%%xmm2 \n" - "psubb %8,%%xmm0 \n" // make pixels signed. - "pxor %%xmm6,%%xmm2 \n" - "paddusb %%xmm7,%%xmm2 \n" - "pmaddubsw %%xmm0,%%xmm2 \n" - "paddw %9,%%xmm2 \n" // make pixels unsigned. - "psrlw $0x7,%%xmm2 \n" - "packuswb %%xmm2,%%xmm2 \n" - "movd %%xmm2,%k2 \n" - "mov %b2,(%0) \n" + "addl $0x1,%5 \n" + "jl 99f \n" + "movzwl 0x00(%1,%3,1),%k2 \n" + "movd %k2,%%xmm0 \n" + "psrlw $0x9,%%xmm2 \n" + "pshufb %%xmm5,%%xmm2 \n" + "psubb %8,%%xmm0 \n" // make pixels signed. + "pxor %%xmm6,%%xmm2 \n" + "paddusb %%xmm7,%%xmm2 \n" + "pmaddubsw %%xmm0,%%xmm2 \n" + "paddw %9,%%xmm2 \n" // make pixels unsigned. 
+ "psrlw $0x7,%%xmm2 \n" + "packuswb %%xmm2,%%xmm2 \n" + "movd %%xmm2,%k2 \n" + "mov %b2,(%0) \n" "99: \n" : "+r"(dst_ptr), // %0 "+r"(src_ptr), // %1 @@ -966,16 +966,16 @@ void ScaleColsUp2_SSE2(uint8_t* dst_ptr, LABELALIGN "1: \n" - "movdqu (%1),%%xmm0 \n" - "lea 0x10(%1),%1 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "punpckhbw %%xmm1,%%xmm1 \n" - "movdqu %%xmm0,(%0) \n" - "movdqu %%xmm1,0x10(%0) \n" - "lea 0x20(%0),%0 \n" - "sub $0x20,%2 \n" - "jg 1b \n" + "movdqu (%1),%%xmm0 \n" + "lea 0x10(%1),%1 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm1 \n" + "movdqu %%xmm0,(%0) \n" + "movdqu %%xmm1,0x10(%0) \n" + "lea 0x20(%0),%0 \n" + "sub $0x20,%2 \n" + "jg 1b \n" : "+r"(dst_ptr), // %0 "+r"(src_ptr), // %1 @@ -993,14 +993,14 @@ void ScaleARGBRowDown2_SSE2(const uint8_t* src_argb, LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "shufps $0xdd,%%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "shufps $0xdd,%%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(dst_width) // %2 @@ -1017,17 +1017,17 @@ void ScaleARGBRowDown2Linear_SSE2(const uint8_t* src_argb, LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "lea 0x20(%0),%0 \n" - "movdqa %%xmm0,%%xmm2 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm2 \n" - "pavgb %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea 0x20(%0),%0 \n" + "movdqa %%xmm0,%%xmm2 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm2 \n" + "pavgb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : 
"+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(dst_width) // %2 @@ -1043,21 +1043,21 @@ void ScaleARGBRowDown2Box_SSE2(const uint8_t* src_argb, LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x00(%0,%3,1),%%xmm2 \n" - "movdqu 0x10(%0,%3,1),%%xmm3 \n" - "lea 0x20(%0),%0 \n" - "pavgb %%xmm2,%%xmm0 \n" - "pavgb %%xmm3,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm2 \n" - "pavgb %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "sub $0x4,%2 \n" - "jg 1b \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x00(%0,%3,1),%%xmm2 \n" + "movdqu 0x10(%0,%3,1),%%xmm3 \n" + "lea 0x20(%0),%0 \n" + "pavgb %%xmm2,%%xmm0 \n" + "pavgb %%xmm3,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm2 \n" + "pavgb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(dst_width) // %2 @@ -1076,23 +1076,23 @@ void ScaleARGBRowDownEven_SSE2(const uint8_t* src_argb, intptr_t src_stepx_x12; (void)src_stride; asm volatile( - "lea 0x00(,%1,4),%1 \n" - "lea 0x00(%1,%1,2),%4 \n" + "lea 0x00(,%1,4),%1 \n" + "lea 0x00(%1,%1,2),%4 \n" LABELALIGN "1: \n" - "movd (%0),%%xmm0 \n" - "movd 0x00(%0,%1,1),%%xmm1 \n" - "punpckldq %%xmm1,%%xmm0 \n" - "movd 0x00(%0,%1,2),%%xmm2 \n" - "movd 0x00(%0,%4,1),%%xmm3 \n" - "lea 0x00(%0,%1,4),%0 \n" - "punpckldq %%xmm3,%%xmm2 \n" - "punpcklqdq %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jg 1b \n" + "movd (%0),%%xmm0 \n" + "movd 0x00(%0,%1,1),%%xmm1 \n" + "punpckldq %%xmm1,%%xmm0 \n" + "movd 0x00(%0,%1,2),%%xmm2 \n" + "movd 0x00(%0,%4,1),%%xmm3 \n" + "lea 0x00(%0,%1,4),%0 \n" + "punpckldq %%xmm3,%%xmm2 \n" + "punpcklqdq %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jg 1b \n" : "+r"(src_argb), // %0 
"+r"(src_stepx_x4), // %1 "+r"(dst_argb), // %2 @@ -1113,32 +1113,32 @@ void ScaleARGBRowDownEvenBox_SSE2(const uint8_t* src_argb, intptr_t src_stepx_x12; intptr_t row1 = (intptr_t)(src_stride); asm volatile( - "lea 0x00(,%1,4),%1 \n" - "lea 0x00(%1,%1,2),%4 \n" - "lea 0x00(%0,%5,1),%5 \n" + "lea 0x00(,%1,4),%1 \n" + "lea 0x00(%1,%1,2),%4 \n" + "lea 0x00(%0,%5,1),%5 \n" LABELALIGN "1: \n" - "movq (%0),%%xmm0 \n" - "movhps 0x00(%0,%1,1),%%xmm0 \n" - "movq 0x00(%0,%1,2),%%xmm1 \n" - "movhps 0x00(%0,%4,1),%%xmm1 \n" - "lea 0x00(%0,%1,4),%0 \n" - "movq (%5),%%xmm2 \n" - "movhps 0x00(%5,%1,1),%%xmm2 \n" - "movq 0x00(%5,%1,2),%%xmm3 \n" - "movhps 0x00(%5,%4,1),%%xmm3 \n" - "lea 0x00(%5,%1,4),%5 \n" - "pavgb %%xmm2,%%xmm0 \n" - "pavgb %%xmm3,%%xmm1 \n" - "movdqa %%xmm0,%%xmm2 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm2 \n" - "pavgb %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%3 \n" - "jg 1b \n" + "movq (%0),%%xmm0 \n" + "movhps 0x00(%0,%1,1),%%xmm0 \n" + "movq 0x00(%0,%1,2),%%xmm1 \n" + "movhps 0x00(%0,%4,1),%%xmm1 \n" + "lea 0x00(%0,%1,4),%0 \n" + "movq (%5),%%xmm2 \n" + "movhps 0x00(%5,%1,1),%%xmm2 \n" + "movq 0x00(%5,%1,2),%%xmm3 \n" + "movhps 0x00(%5,%4,1),%%xmm3 \n" + "lea 0x00(%5,%1,4),%5 \n" + "pavgb %%xmm2,%%xmm0 \n" + "pavgb %%xmm3,%%xmm1 \n" + "movdqa %%xmm0,%%xmm2 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm2 \n" + "pavgb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%3 \n" + "jg 1b \n" : "+r"(src_argb), // %0 "+r"(src_stepx_x4), // %1 "+r"(dst_argb), // %2 @@ -1156,56 +1156,56 @@ void ScaleARGBCols_SSE2(uint8_t* dst_argb, int dx) { intptr_t x0, x1; asm volatile( - "movd %5,%%xmm2 \n" - "movd %6,%%xmm3 \n" - "pshufd $0x0,%%xmm2,%%xmm2 \n" - "pshufd $0x11,%%xmm3,%%xmm0 \n" - "paddd %%xmm0,%%xmm2 \n" - "paddd %%xmm3,%%xmm3 \n" - "pshufd $0x5,%%xmm3,%%xmm0 \n" - "paddd %%xmm0,%%xmm2 \n" - "paddd %%xmm3,%%xmm3 \n" - "pshufd $0x0,%%xmm3,%%xmm3 \n" - 
"pextrw $0x1,%%xmm2,%k0 \n" - "pextrw $0x3,%%xmm2,%k1 \n" - "cmp $0x0,%4 \n" - "jl 99f \n" - "sub $0x4,%4 \n" - "jl 49f \n" + "movd %5,%%xmm2 \n" + "movd %6,%%xmm3 \n" + "pshufd $0x0,%%xmm2,%%xmm2 \n" + "pshufd $0x11,%%xmm3,%%xmm0 \n" + "paddd %%xmm0,%%xmm2 \n" + "paddd %%xmm3,%%xmm3 \n" + "pshufd $0x5,%%xmm3,%%xmm0 \n" + "paddd %%xmm0,%%xmm2 \n" + "paddd %%xmm3,%%xmm3 \n" + "pshufd $0x0,%%xmm3,%%xmm3 \n" + "pextrw $0x1,%%xmm2,%k0 \n" + "pextrw $0x3,%%xmm2,%k1 \n" + "cmp $0x0,%4 \n" + "jl 99f \n" + "sub $0x4,%4 \n" + "jl 49f \n" LABELALIGN "40: \n" - "movd 0x00(%3,%0,4),%%xmm0 \n" - "movd 0x00(%3,%1,4),%%xmm1 \n" - "pextrw $0x5,%%xmm2,%k0 \n" - "pextrw $0x7,%%xmm2,%k1 \n" - "paddd %%xmm3,%%xmm2 \n" - "punpckldq %%xmm1,%%xmm0 \n" - "movd 0x00(%3,%0,4),%%xmm1 \n" - "movd 0x00(%3,%1,4),%%xmm4 \n" - "pextrw $0x1,%%xmm2,%k0 \n" - "pextrw $0x3,%%xmm2,%k1 \n" - "punpckldq %%xmm4,%%xmm1 \n" - "punpcklqdq %%xmm1,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" - "sub $0x4,%4 \n" - "jge 40b \n" + "movd 0x00(%3,%0,4),%%xmm0 \n" + "movd 0x00(%3,%1,4),%%xmm1 \n" + "pextrw $0x5,%%xmm2,%k0 \n" + "pextrw $0x7,%%xmm2,%k1 \n" + "paddd %%xmm3,%%xmm2 \n" + "punpckldq %%xmm1,%%xmm0 \n" + "movd 0x00(%3,%0,4),%%xmm1 \n" + "movd 0x00(%3,%1,4),%%xmm4 \n" + "pextrw $0x1,%%xmm2,%k0 \n" + "pextrw $0x3,%%xmm2,%k1 \n" + "punpckldq %%xmm4,%%xmm1 \n" + "punpcklqdq %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + "sub $0x4,%4 \n" + "jge 40b \n" "49: \n" - "test $0x2,%4 \n" - "je 29f \n" - "movd 0x00(%3,%0,4),%%xmm0 \n" - "movd 0x00(%3,%1,4),%%xmm1 \n" - "pextrw $0x5,%%xmm2,%k0 \n" - "punpckldq %%xmm1,%%xmm0 \n" - "movq %%xmm0,(%2) \n" - "lea 0x8(%2),%2 \n" + "test $0x2,%4 \n" + "je 29f \n" + "movd 0x00(%3,%0,4),%%xmm0 \n" + "movd 0x00(%3,%1,4),%%xmm1 \n" + "pextrw $0x5,%%xmm2,%k0 \n" + "punpckldq %%xmm1,%%xmm0 \n" + "movq %%xmm0,(%2) \n" + "lea 0x8(%2),%2 \n" "29: \n" - "test $0x1,%4 \n" - "je 99f \n" - "movd 0x00(%3,%0,4),%%xmm0 \n" - "movd %%xmm0,(%2) \n" + "test 
$0x1,%4 \n" + "je 99f \n" + "movd 0x00(%3,%0,4),%%xmm0 \n" + "movd %%xmm0,(%2) \n" "99: \n" : "=&a"(x0), // %0 "=&d"(x1), // %1 @@ -1230,16 +1230,16 @@ void ScaleARGBColsUp2_SSE2(uint8_t* dst_argb, LABELALIGN "1: \n" - "movdqu (%1),%%xmm0 \n" - "lea 0x10(%1),%1 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpckldq %%xmm0,%%xmm0 \n" - "punpckhdq %%xmm1,%%xmm1 \n" - "movdqu %%xmm0,(%0) \n" - "movdqu %%xmm1,0x10(%0) \n" - "lea 0x20(%0),%0 \n" - "sub $0x8,%2 \n" - "jg 1b \n" + "movdqu (%1),%%xmm0 \n" + "lea 0x10(%1),%1 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpckldq %%xmm0,%%xmm0 \n" + "punpckhdq %%xmm1,%%xmm1 \n" + "movdqu %%xmm0,(%0) \n" + "movdqu %%xmm1,0x10(%0) \n" + "lea 0x20(%0),%0 \n" + "sub $0x8,%2 \n" + "jg 1b \n" : "+r"(dst_argb), // %0 "+r"(src_argb), // %1 @@ -1267,63 +1267,64 @@ void ScaleARGBFilterCols_SSSE3(uint8_t* dst_argb, int dx) { intptr_t x0, x1; asm volatile( - "movdqa %0,%%xmm4 \n" - "movdqa %1,%%xmm5 \n" + "movdqa %0,%%xmm4 \n" + "movdqa %1,%%xmm5 \n" : : "m"(kShuffleColARGB), // %0 "m"(kShuffleFractions) // %1 ); asm volatile( - "movd %5,%%xmm2 \n" - "movd %6,%%xmm3 \n" - "pcmpeqb %%xmm6,%%xmm6 \n" - "psrlw $0x9,%%xmm6 \n" - "pextrw $0x1,%%xmm2,%k3 \n" - "sub $0x2,%2 \n" - "jl 29f \n" - "movdqa %%xmm2,%%xmm0 \n" - "paddd %%xmm3,%%xmm0 \n" - "punpckldq %%xmm0,%%xmm2 \n" - "punpckldq %%xmm3,%%xmm3 \n" - "paddd %%xmm3,%%xmm3 \n" - "pextrw $0x3,%%xmm2,%k4 \n" + "movd %5,%%xmm2 \n" + "movd %6,%%xmm3 \n" + "pcmpeqb %%xmm6,%%xmm6 \n" + "psrlw $0x9,%%xmm6 \n" + "pextrw $0x1,%%xmm2,%k3 \n" + "sub $0x2,%2 \n" + "jl 29f \n" + "movdqa %%xmm2,%%xmm0 \n" + "paddd %%xmm3,%%xmm0 \n" + "punpckldq %%xmm0,%%xmm2 \n" + "punpckldq %%xmm3,%%xmm3 \n" + "paddd %%xmm3,%%xmm3 \n" + "pextrw $0x3,%%xmm2,%k4 \n" LABELALIGN "2: \n" - "movdqa %%xmm2,%%xmm1 \n" - "paddd %%xmm3,%%xmm2 \n" - "movq 0x00(%1,%3,4),%%xmm0 \n" - "psrlw $0x9,%%xmm1 \n" - "movhps 0x00(%1,%4,4),%%xmm0 \n" - "pshufb %%xmm5,%%xmm1 \n" - "pshufb %%xmm4,%%xmm0 \n" - "pxor %%xmm6,%%xmm1 \n" - "pmaddubsw %%xmm1,%%xmm0 
\n" - "psrlw $0x7,%%xmm0 \n" - "pextrw $0x1,%%xmm2,%k3 \n" - "pextrw $0x3,%%xmm2,%k4 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movq %%xmm0,(%0) \n" - "lea 0x8(%0),%0 \n" - "sub $0x2,%2 \n" - "jge 2b \n" + "movdqa %%xmm2,%%xmm1 \n" + "paddd %%xmm3,%%xmm2 \n" + "movq 0x00(%1,%3,4),%%xmm0 \n" + "psrlw $0x9,%%xmm1 \n" + "movhps 0x00(%1,%4,4),%%xmm0 \n" + "pshufb %%xmm5,%%xmm1 \n" + "pshufb %%xmm4,%%xmm0 \n" + "pxor %%xmm6,%%xmm1 \n" + "pmaddubsw %%xmm1,%%xmm0 \n" + "psrlw $0x7,%%xmm0 \n" + "pextrw $0x1,%%xmm2,%k3 \n" + "pextrw $0x3,%%xmm2,%k4 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movq %%xmm0,(%0) \n" + "lea 0x8(%0),%0 \n" + "sub $0x2,%2 \n" + "jge 2b \n" LABELALIGN "29: \n" - "add $0x1,%2 \n" - "jl 99f \n" - "psrlw $0x9,%%xmm2 \n" - "movq 0x00(%1,%3,4),%%xmm0 \n" - "pshufb %%xmm5,%%xmm2 \n" - "pshufb %%xmm4,%%xmm0 \n" - "pxor %%xmm6,%%xmm2 \n" - "pmaddubsw %%xmm2,%%xmm0 \n" - "psrlw $0x7,%%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - "movd %%xmm0,(%0) \n" + "add $0x1,%2 \n" + "jl 99f \n" + "psrlw $0x9,%%xmm2 \n" + "movq 0x00(%1,%3,4),%%xmm0 \n" + "pshufb %%xmm5,%%xmm2 \n" + "pshufb %%xmm4,%%xmm0 \n" + "pxor %%xmm6,%%xmm2 \n" + "pmaddubsw %%xmm2,%%xmm0 \n" + "psrlw $0x7,%%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + "movd %%xmm0,(%0) \n" - LABELALIGN "99: \n" // clang-format error. + LABELALIGN + "99: \n" // clang-format error. 
: "+r"(dst_argb), // %0 "+r"(src_argb), // %1 @@ -1339,10 +1340,10 @@ void ScaleARGBFilterCols_SSSE3(uint8_t* dst_argb, int FixedDiv_X86(int num, int div) { asm volatile( "cdq \n" - "shld $0x10,%%eax,%%edx \n" - "shl $0x10,%%eax \n" - "idiv %1 \n" - "mov %0, %%eax \n" + "shld $0x10,%%eax,%%edx \n" + "shl $0x10,%%eax \n" + "idiv %1 \n" + "mov %0, %%eax \n" : "+a"(num) // %0 : "c"(div) // %1 : "memory", "cc", "edx"); @@ -1353,19 +1354,108 @@ int FixedDiv_X86(int num, int div) { int FixedDiv1_X86(int num, int div) { asm volatile( "cdq \n" - "shld $0x10,%%eax,%%edx \n" - "shl $0x10,%%eax \n" - "sub $0x10001,%%eax \n" - "sbb $0x0,%%edx \n" - "sub $0x1,%1 \n" - "idiv %1 \n" - "mov %0, %%eax \n" + "shld $0x10,%%eax,%%edx \n" + "shl $0x10,%%eax \n" + "sub $0x10001,%%eax \n" + "sbb $0x0,%%edx \n" + "sub $0x1,%1 \n" + "idiv %1 \n" + "mov %0, %%eax \n" : "+a"(num) // %0 : "c"(div) // %1 : "memory", "cc", "edx"); return num; } +#ifdef HAS_SCALEUVROWDOWN2BOX_SSSE3 +// Shuffle table for splitting UV into upper and lower part of register. 
+static const uvec8 kShuffleSplitUV = {0u, 2u, 4u, 6u, 8u, 10u, 12u, 14u, + 1u, 3u, 5u, 7u, 9u, 11u, 13u, 15u}; +static const uvec8 kShuffleMergeUV = {0u, 8u, 2u, 10u, 4u, 12u, + 6u, 14u, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80}; + +void ScaleUVRowDown2Box_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width) { + asm volatile( + "pcmpeqb %%xmm4,%%xmm4 \n" // 01010101 + "psrlw $0xf,%%xmm4 \n" + "packuswb %%xmm4,%%xmm4 \n" + "pxor %%xmm5, %%xmm5 \n" // zero + "movdqa %4,%%xmm1 \n" // split shuffler + "movdqa %5,%%xmm3 \n" // merge shuffler + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" // 8 UV row 0 + "movdqu 0x00(%0,%3,1),%%xmm2 \n" // 8 UV row 1 + "lea 0x10(%0),%0 \n" + "pshufb %%xmm1,%%xmm0 \n" // uuuuvvvv + "pshufb %%xmm1,%%xmm2 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" // horizontal add + "pmaddubsw %%xmm4,%%xmm2 \n" + "paddw %%xmm2,%%xmm0 \n" // vertical add + "psrlw $0x1,%%xmm0 \n" // round + "pavgw %%xmm5,%%xmm0 \n" + "pshufb %%xmm3,%%xmm0 \n" // merge uv + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" // 4 UV + "sub $0x4,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "m"(kShuffleSplitUV), // %4 + "m"(kShuffleMergeUV) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif // HAS_SCALEUVROWDOWN2BOX_SSSE3 + +#ifdef HAS_SCALEUVROWDOWN2BOX_AVX2 +void ScaleUVRowDown2Box_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width) { + asm volatile( + "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" // 01010101 + "vpsrlw $0xf,%%ymm4,%%ymm4 \n" + "vpackuswb %%ymm4,%%ymm4,%%ymm4 \n" + "vpxor %%ymm5,%%ymm5,%%ymm5 \n" // zero + "vbroadcastf128 %4,%%ymm1 \n" // split shuffler + "vbroadcastf128 %5,%%ymm3 \n" // merge shuffler + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" // 16 UV row 0 + "vmovdqu 0x00(%0,%3,1),%%ymm2 \n" // 16 UV row 1 + "lea 0x20(%0),%0 \n" + "vpshufb %%ymm1,%%ymm0,%%ymm0 \n" // 
uuuuvvvv + "vpshufb %%ymm1,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm4,%%ymm0,%%ymm0 \n" // horizontal add + "vpmaddubsw %%ymm4,%%ymm2,%%ymm2 \n" + "vpaddw %%ymm2,%%ymm0,%%ymm0 \n" // vertical add + "vpsrlw $0x1,%%ymm0,%%ymm0 \n" // round + "vpavgw %%ymm5,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm3,%%ymm0,%%ymm0 \n" // merge uv + "vpermq $0xd8,%%ymm0,%%ymm0 \n" // combine qwords + "vmovdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" // 8 UV + "sub $0x8,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "m"(kShuffleSplitUV), // %4 + "m"(kShuffleMergeUV) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif // HAS_SCALEUVROWDOWN2BOX_AVX2 + #endif // defined(__x86_64__) || defined(__i386__) #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc index 366b155ba..572b4bfa9 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc @@ -31,10 +31,10 @@ void ScaleRowDown2_NEON(const uint8_t* src_ptr, asm volatile( "1: \n" // load even pixels into q0, odd into q1 - "vld2.8 {q0, q1}, [%0]! \n" - "subs %2, %2, #16 \n" // 16 processed per loop - "vst1.8 {q1}, [%1]! \n" // store odd pixels - "bgt 1b \n" + "vld2.8 {q0, q1}, [%0]! \n" + "subs %2, %2, #16 \n" // 16 processed per loop + "vst1.8 {q1}, [%1]! \n" // store odd pixels + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst), // %1 "+r"(dst_width) // %2 @@ -51,11 +51,11 @@ void ScaleRowDown2Linear_NEON(const uint8_t* src_ptr, (void)src_stride; asm volatile( "1: \n" - "vld2.8 {q0, q1}, [%0]! \n" // load 32 pixels - "subs %2, %2, #16 \n" // 16 processed per loop - "vrhadd.u8 q0, q0, q1 \n" // rounding half add - "vst1.8 {q0}, [%1]! \n" - "bgt 1b \n" + "vld2.8 {q0, q1}, [%0]! 
\n" // load 32 pixels + "subs %2, %2, #16 \n" // 16 processed per loop + "vrhadd.u8 q0, q0, q1 \n" // rounding half add + "vst1.8 {q0}, [%1]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst), // %1 "+r"(dst_width) // %2 @@ -71,21 +71,21 @@ void ScaleRowDown2Box_NEON(const uint8_t* src_ptr, int dst_width) { asm volatile( // change the stride to row 2 pointer - "add %1, %0 \n" + "add %1, %0 \n" "1: \n" - "vld1.8 {q0, q1}, [%0]! \n" // load row 1 and post inc - "vld1.8 {q2, q3}, [%1]! \n" // load row 2 and post inc - "subs %3, %3, #16 \n" // 16 processed per loop - "vpaddl.u8 q0, q0 \n" // row 1 add adjacent - "vpaddl.u8 q1, q1 \n" - "vpadal.u8 q0, q2 \n" // row 2 add adjacent + + "vld1.8 {q0, q1}, [%0]! \n" // load row 1 and post inc + "vld1.8 {q2, q3}, [%1]! \n" // load row 2 and post inc + "subs %3, %3, #16 \n" // 16 processed per loop + "vpaddl.u8 q0, q0 \n" // row 1 add adjacent + "vpaddl.u8 q1, q1 \n" + "vpadal.u8 q0, q2 \n" // row 2 add adjacent + // row1 - "vpadal.u8 q1, q3 \n" - "vrshrn.u16 d0, q0, #2 \n" // downshift, round and + "vpadal.u8 q1, q3 \n" + "vrshrn.u16 d0, q0, #2 \n" // downshift, round and // pack - "vrshrn.u16 d1, q1, #2 \n" - "vst1.8 {q0}, [%2]! \n" - "bgt 1b \n" + "vrshrn.u16 d1, q1, #2 \n" + "vst1.8 {q0}, [%2]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(src_stride), // %1 "+r"(dst), // %2 @@ -102,10 +102,10 @@ void ScaleRowDown4_NEON(const uint8_t* src_ptr, (void)src_stride; asm volatile( "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 - "subs %2, %2, #8 \n" // 8 processed per loop - "vst1.8 {d2}, [%1]! \n" - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 + "subs %2, %2, #8 \n" // 8 processed per loop + "vst1.8 {d2}, [%1]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -122,20 +122,20 @@ void ScaleRowDown4Box_NEON(const uint8_t* src_ptr, const uint8_t* src_ptr3 = src_ptr + src_stride * 3; asm volatile( "1: \n" - "vld1.8 {q0}, [%0]! 
\n" // load up 16x4 - "vld1.8 {q1}, [%3]! \n" - "vld1.8 {q2}, [%4]! \n" - "vld1.8 {q3}, [%5]! \n" - "subs %2, %2, #4 \n" - "vpaddl.u8 q0, q0 \n" - "vpadal.u8 q0, q1 \n" - "vpadal.u8 q0, q2 \n" - "vpadal.u8 q0, q3 \n" - "vpaddl.u16 q0, q0 \n" - "vrshrn.u32 d0, q0, #4 \n" // divide by 16 w/rounding - "vmovn.u16 d0, q0 \n" - "vst1.32 {d0[0]}, [%1]! \n" - "bgt 1b \n" + "vld1.8 {q0}, [%0]! \n" // load up 16x4 + "vld1.8 {q1}, [%3]! \n" + "vld1.8 {q2}, [%4]! \n" + "vld1.8 {q3}, [%5]! \n" + "subs %2, %2, #4 \n" + "vpaddl.u8 q0, q0 \n" + "vpadal.u8 q0, q1 \n" + "vpadal.u8 q0, q2 \n" + "vpadal.u8 q0, q3 \n" + "vpaddl.u16 q0, q0 \n" + "vrshrn.u32 d0, q0, #4 \n" // divide by 16 w/rounding + "vmovn.u16 d0, q0 \n" + "vst1.32 {d0[0]}, [%1]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -156,11 +156,11 @@ void ScaleRowDown34_NEON(const uint8_t* src_ptr, (void)src_stride; asm volatile( "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 - "subs %2, %2, #24 \n" - "vmov d2, d3 \n" // order d0, d1, d2 - "vst3.8 {d0, d1, d2}, [%1]! \n" - "bgt 1b \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 + "subs %2, %2, #24 \n" + "vmov d2, d3 \n" // order d0, d1, d2 + "vst3.8 {d0, d1, d2}, [%1]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -173,49 +173,49 @@ void ScaleRowDown34_0_Box_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "vmov.u8 d24, #3 \n" - "add %3, %0 \n" + "vmov.u8 d24, #3 \n" + "add %3, %0 \n" "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 - "vld4.8 {d4, d5, d6, d7}, [%3]! \n" // src line 1 - "subs %2, %2, #24 \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 + "vld4.8 {d4, d5, d6, d7}, [%3]! 
\n" // src line 1 + "subs %2, %2, #24 \n" // filter src line 0 with src line 1 // expand chars to shorts to allow for room // when adding lines together - "vmovl.u8 q8, d4 \n" - "vmovl.u8 q9, d5 \n" - "vmovl.u8 q10, d6 \n" - "vmovl.u8 q11, d7 \n" + "vmovl.u8 q8, d4 \n" + "vmovl.u8 q9, d5 \n" + "vmovl.u8 q10, d6 \n" + "vmovl.u8 q11, d7 \n" // 3 * line_0 + line_1 - "vmlal.u8 q8, d0, d24 \n" - "vmlal.u8 q9, d1, d24 \n" - "vmlal.u8 q10, d2, d24 \n" - "vmlal.u8 q11, d3, d24 \n" + "vmlal.u8 q8, d0, d24 \n" + "vmlal.u8 q9, d1, d24 \n" + "vmlal.u8 q10, d2, d24 \n" + "vmlal.u8 q11, d3, d24 \n" // (3 * line_0 + line_1) >> 2 - "vqrshrn.u16 d0, q8, #2 \n" - "vqrshrn.u16 d1, q9, #2 \n" - "vqrshrn.u16 d2, q10, #2 \n" - "vqrshrn.u16 d3, q11, #2 \n" + "vqrshrn.u16 d0, q8, #2 \n" + "vqrshrn.u16 d1, q9, #2 \n" + "vqrshrn.u16 d2, q10, #2 \n" + "vqrshrn.u16 d3, q11, #2 \n" // a0 = (src[0] * 3 + s[1] * 1) >> 2 - "vmovl.u8 q8, d1 \n" - "vmlal.u8 q8, d0, d24 \n" - "vqrshrn.u16 d0, q8, #2 \n" + "vmovl.u8 q8, d1 \n" + "vmlal.u8 q8, d0, d24 \n" + "vqrshrn.u16 d0, q8, #2 \n" // a1 = (src[1] * 1 + s[2] * 1) >> 1 - "vrhadd.u8 d1, d1, d2 \n" + "vrhadd.u8 d1, d1, d2 \n" // a2 = (src[2] * 1 + s[3] * 3) >> 2 - "vmovl.u8 q8, d2 \n" - "vmlal.u8 q8, d3, d24 \n" - "vqrshrn.u16 d2, q8, #2 \n" + "vmovl.u8 q8, d2 \n" + "vmlal.u8 q8, d3, d24 \n" + "vqrshrn.u16 d2, q8, #2 \n" - "vst3.8 {d0, d1, d2}, [%1]! \n" + "vst3.8 {d0, d1, d2}, [%1]! \n" - "bgt 1b \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -230,31 +230,31 @@ void ScaleRowDown34_1_Box_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "vmov.u8 d24, #3 \n" - "add %3, %0 \n" + "vmov.u8 d24, #3 \n" + "add %3, %0 \n" "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 - "vld4.8 {d4, d5, d6, d7}, [%3]! \n" // src line 1 - "subs %2, %2, #24 \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // src line 0 + "vld4.8 {d4, d5, d6, d7}, [%3]! 
\n" // src line 1 + "subs %2, %2, #24 \n" // average src line 0 with src line 1 - "vrhadd.u8 q0, q0, q2 \n" - "vrhadd.u8 q1, q1, q3 \n" + "vrhadd.u8 q0, q0, q2 \n" + "vrhadd.u8 q1, q1, q3 \n" // a0 = (src[0] * 3 + s[1] * 1) >> 2 - "vmovl.u8 q3, d1 \n" - "vmlal.u8 q3, d0, d24 \n" - "vqrshrn.u16 d0, q3, #2 \n" + "vmovl.u8 q3, d1 \n" + "vmlal.u8 q3, d0, d24 \n" + "vqrshrn.u16 d0, q3, #2 \n" // a1 = (src[1] * 1 + s[2] * 1) >> 1 - "vrhadd.u8 d1, d1, d2 \n" + "vrhadd.u8 d1, d1, d2 \n" // a2 = (src[2] * 1 + s[3] * 3) >> 2 - "vmovl.u8 q3, d2 \n" - "vmlal.u8 q3, d3, d24 \n" - "vqrshrn.u16 d2, q3, #2 \n" + "vmovl.u8 q3, d2 \n" + "vmlal.u8 q3, d3, d24 \n" + "vqrshrn.u16 d2, q3, #2 \n" - "vst3.8 {d0, d1, d2}, [%1]! \n" - "bgt 1b \n" + "vst3.8 {d0, d1, d2}, [%1]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -282,15 +282,15 @@ void ScaleRowDown38_NEON(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "vld1.8 {q3}, [%3] \n" + "vld1.8 {q3}, [%3] \n" "1: \n" - "vld1.8 {d0, d1, d2, d3}, [%0]! \n" - "subs %2, %2, #12 \n" - "vtbl.u8 d4, {d0, d1, d2, d3}, d6 \n" - "vtbl.u8 d5, {d0, d1, d2, d3}, d7 \n" - "vst1.8 {d4}, [%1]! \n" - "vst1.32 {d5[0]}, [%1]! \n" - "bgt 1b \n" + "vld1.8 {d0, d1, d2, d3}, [%0]! \n" + "subs %2, %2, #12 \n" + "vtbl.u8 d4, {d0, d1, d2, d3}, d6 \n" + "vtbl.u8 d5, {d0, d1, d2, d3}, d7 \n" + "vst1.8 {d4}, [%1]! \n" + "vst1.32 {d5[0]}, [%1]! 
\n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -306,57 +306,57 @@ void OMITFP ScaleRowDown38_3_Box_NEON(const uint8_t* src_ptr, const uint8_t* src_ptr1 = src_ptr + src_stride * 2; asm volatile( - "vld1.16 {q13}, [%5] \n" - "vld1.8 {q14}, [%6] \n" - "vld1.8 {q15}, [%7] \n" - "add %3, %0 \n" + "vld1.16 {q13}, [%5] \n" + "vld1.8 {q14}, [%6] \n" + "vld1.8 {q15}, [%7] \n" + "add %3, %0 \n" "1: \n" // d0 = 00 40 01 41 02 42 03 43 // d1 = 10 50 11 51 12 52 13 53 // d2 = 20 60 21 61 22 62 23 63 // d3 = 30 70 31 71 32 72 33 73 - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" - "vld4.8 {d4, d5, d6, d7}, [%3]! \n" - "vld4.8 {d16, d17, d18, d19}, [%4]! \n" - "subs %2, %2, #12 \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" + "vld4.8 {d4, d5, d6, d7}, [%3]! \n" + "vld4.8 {d16, d17, d18, d19}, [%4]! \n" + "subs %2, %2, #12 \n" // Shuffle the input data around to get align the data // so adjacent data can be added. 0,1 - 2,3 - 4,5 - 6,7 // d0 = 00 10 01 11 02 12 03 13 // d1 = 40 50 41 51 42 52 43 53 - "vtrn.u8 d0, d1 \n" - "vtrn.u8 d4, d5 \n" - "vtrn.u8 d16, d17 \n" + "vtrn.u8 d0, d1 \n" + "vtrn.u8 d4, d5 \n" + "vtrn.u8 d16, d17 \n" // d2 = 20 30 21 31 22 32 23 33 // d3 = 60 70 61 71 62 72 63 73 - "vtrn.u8 d2, d3 \n" - "vtrn.u8 d6, d7 \n" - "vtrn.u8 d18, d19 \n" + "vtrn.u8 d2, d3 \n" + "vtrn.u8 d6, d7 \n" + "vtrn.u8 d18, d19 \n" // d0 = 00+10 01+11 02+12 03+13 // d2 = 40+50 41+51 42+52 43+53 - "vpaddl.u8 q0, q0 \n" - "vpaddl.u8 q2, q2 \n" - "vpaddl.u8 q8, q8 \n" + "vpaddl.u8 q0, q0 \n" + "vpaddl.u8 q2, q2 \n" + "vpaddl.u8 q8, q8 \n" // d3 = 60+70 61+71 62+72 63+73 - "vpaddl.u8 d3, d3 \n" - "vpaddl.u8 d7, d7 \n" - "vpaddl.u8 d19, d19 \n" + "vpaddl.u8 d3, d3 \n" + "vpaddl.u8 d7, d7 \n" + "vpaddl.u8 d19, d19 \n" // combine source lines - "vadd.u16 q0, q2 \n" - "vadd.u16 q0, q8 \n" - "vadd.u16 d4, d3, d7 \n" - "vadd.u16 d4, d19 \n" + "vadd.u16 q0, q2 \n" + "vadd.u16 q0, q8 \n" + "vadd.u16 d4, d3, d7 \n" + "vadd.u16 d4, d19 \n" // dst_ptr[3] = (s[6 + st * 0] + 
s[7 + st * 0] // + s[6 + st * 1] + s[7 + st * 1] // + s[6 + st * 2] + s[7 + st * 2]) / 6 "vqrdmulh.s16 q2, q2, q13 \n" - "vmovn.u16 d4, q2 \n" + "vmovn.u16 d4, q2 \n" // Shuffle 2,3 reg around so that 2 can be added to the // 0,1 reg and 3 can be added to the 4,5 reg. This @@ -364,24 +364,24 @@ void OMITFP ScaleRowDown38_3_Box_NEON(const uint8_t* src_ptr, // registers are already expanded. Then do transposes // to get aligned. // q2 = xx 20 xx 30 xx 21 xx 31 xx 22 xx 32 xx 23 xx 33 - "vmovl.u8 q1, d2 \n" - "vmovl.u8 q3, d6 \n" - "vmovl.u8 q9, d18 \n" + "vmovl.u8 q1, d2 \n" + "vmovl.u8 q3, d6 \n" + "vmovl.u8 q9, d18 \n" // combine source lines - "vadd.u16 q1, q3 \n" - "vadd.u16 q1, q9 \n" + "vadd.u16 q1, q3 \n" + "vadd.u16 q1, q9 \n" // d4 = xx 20 xx 30 xx 22 xx 32 // d5 = xx 21 xx 31 xx 23 xx 33 - "vtrn.u32 d2, d3 \n" + "vtrn.u32 d2, d3 \n" // d4 = xx 20 xx 21 xx 22 xx 23 // d5 = xx 30 xx 31 xx 32 xx 33 - "vtrn.u16 d2, d3 \n" + "vtrn.u16 d2, d3 \n" // 0+1+2, 3+4+5 - "vadd.u16 q0, q1 \n" + "vadd.u16 q0, q1 \n" // Need to divide, but can't downshift as the the value // isn't a power of 2. So multiply by 65536 / n @@ -390,14 +390,14 @@ void OMITFP ScaleRowDown38_3_Box_NEON(const uint8_t* src_ptr, // Align for table lookup, vtbl requires registers to // be adjacent - "vmov.u8 d2, d4 \n" + "vmov.u8 d2, d4 \n" - "vtbl.u8 d3, {d0, d1, d2}, d28 \n" - "vtbl.u8 d4, {d0, d1, d2}, d29 \n" + "vtbl.u8 d3, {d0, d1, d2}, d28 \n" + "vtbl.u8 d4, {d0, d1, d2}, d29 \n" - "vst1.8 {d3}, [%1]! \n" - "vst1.32 {d4[0]}, [%1]! \n" - "bgt 1b \n" + "vst1.8 {d3}, [%1]! \n" + "vst1.32 {d4[0]}, [%1]! 
\n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -416,46 +416,46 @@ void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "vld1.16 {q13}, [%4] \n" - "vld1.8 {q14}, [%5] \n" - "add %3, %0 \n" + "vld1.16 {q13}, [%4] \n" + "vld1.8 {q14}, [%5] \n" + "add %3, %0 \n" "1: \n" // d0 = 00 40 01 41 02 42 03 43 // d1 = 10 50 11 51 12 52 13 53 // d2 = 20 60 21 61 22 62 23 63 // d3 = 30 70 31 71 32 72 33 73 - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" - "vld4.8 {d4, d5, d6, d7}, [%3]! \n" - "subs %2, %2, #12 \n" + "vld4.8 {d0, d1, d2, d3}, [%0]! \n" + "vld4.8 {d4, d5, d6, d7}, [%3]! \n" + "subs %2, %2, #12 \n" // Shuffle the input data around to get align the data // so adjacent data can be added. 0,1 - 2,3 - 4,5 - 6,7 // d0 = 00 10 01 11 02 12 03 13 // d1 = 40 50 41 51 42 52 43 53 - "vtrn.u8 d0, d1 \n" - "vtrn.u8 d4, d5 \n" + "vtrn.u8 d0, d1 \n" + "vtrn.u8 d4, d5 \n" // d2 = 20 30 21 31 22 32 23 33 // d3 = 60 70 61 71 62 72 63 73 - "vtrn.u8 d2, d3 \n" - "vtrn.u8 d6, d7 \n" + "vtrn.u8 d2, d3 \n" + "vtrn.u8 d6, d7 \n" // d0 = 00+10 01+11 02+12 03+13 // d2 = 40+50 41+51 42+52 43+53 - "vpaddl.u8 q0, q0 \n" - "vpaddl.u8 q2, q2 \n" + "vpaddl.u8 q0, q0 \n" + "vpaddl.u8 q2, q2 \n" // d3 = 60+70 61+71 62+72 63+73 - "vpaddl.u8 d3, d3 \n" - "vpaddl.u8 d7, d7 \n" + "vpaddl.u8 d3, d3 \n" + "vpaddl.u8 d7, d7 \n" // combine source lines - "vadd.u16 q0, q2 \n" - "vadd.u16 d4, d3, d7 \n" + "vadd.u16 q0, q2 \n" + "vadd.u16 d4, d3, d7 \n" // dst_ptr[3] = (s[6] + s[7] + s[6+st] + s[7+st]) / 4 - "vqrshrn.u16 d4, q2, #2 \n" + "vqrshrn.u16 d4, q2, #2 \n" // Shuffle 2,3 reg around so that 2 can be added to the // 0,1 reg and 3 can be added to the 4,5 reg. This @@ -463,22 +463,22 @@ void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, // registers are already expanded. Then do transposes // to get aligned. 
// q2 = xx 20 xx 30 xx 21 xx 31 xx 22 xx 32 xx 23 xx 33 - "vmovl.u8 q1, d2 \n" - "vmovl.u8 q3, d6 \n" + "vmovl.u8 q1, d2 \n" + "vmovl.u8 q3, d6 \n" // combine source lines - "vadd.u16 q1, q3 \n" + "vadd.u16 q1, q3 \n" // d4 = xx 20 xx 30 xx 22 xx 32 // d5 = xx 21 xx 31 xx 23 xx 33 - "vtrn.u32 d2, d3 \n" + "vtrn.u32 d2, d3 \n" // d4 = xx 20 xx 21 xx 22 xx 23 // d5 = xx 30 xx 31 xx 32 xx 33 - "vtrn.u16 d2, d3 \n" + "vtrn.u16 d2, d3 \n" // 0+1+2, 3+4+5 - "vadd.u16 q0, q1 \n" + "vadd.u16 q0, q1 \n" // Need to divide, but can't downshift as the the value // isn't a power of 2. So multiply by 65536 / n @@ -487,14 +487,14 @@ void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, // Align for table lookup, vtbl requires registers to // be adjacent - "vmov.u8 d2, d4 \n" + "vmov.u8 d2, d4 \n" - "vtbl.u8 d3, {d0, d1, d2}, d28 \n" - "vtbl.u8 d4, {d0, d1, d2}, d29 \n" + "vtbl.u8 d3, {d0, d1, d2}, d28 \n" + "vtbl.u8 d4, {d0, d1, d2}, d29 \n" - "vst1.8 {d3}, [%1]! \n" - "vst1.32 {d4[0]}, [%1]! \n" - "bgt 1b \n" + "vst1.8 {d3}, [%1]! \n" + "vst1.32 {d4[0]}, [%1]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -511,13 +511,13 @@ void ScaleAddRow_NEON(const uint8_t* src_ptr, int src_width) { asm volatile( "1: \n" - "vld1.16 {q1, q2}, [%1] \n" // load accumulator - "vld1.8 {q0}, [%0]! \n" // load 16 bytes - "vaddw.u8 q2, q2, d1 \n" // add - "vaddw.u8 q1, q1, d0 \n" - "vst1.16 {q1, q2}, [%1]! \n" // store accumulator - "subs %2, %2, #16 \n" // 16 processed per loop - "bgt 1b \n" + "vld1.16 {q1, q2}, [%1] \n" // load accumulator + "vld1.8 {q0}, [%0]! \n" // load 16 bytes + "vaddw.u8 q2, q2, d1 \n" // add + "vaddw.u8 q1, q1, d0 \n" + "vst1.16 {q1, q2}, [%1]! 
\n" // store accumulator + "subs %2, %2, #16 \n" // 16 processed per loop + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(src_width) // %2 @@ -547,17 +547,17 @@ void ScaleFilterCols_NEON(uint8_t* dst_ptr, int* tmp = dx_offset; const uint8_t* src_tmp = src_ptr; asm volatile ( - "vdup.32 q0, %3 \n" // x - "vdup.32 q1, %4 \n" // dx - "vld1.32 {q2}, [%5] \n" // 0 1 2 3 - "vshl.i32 q3, q1, #2 \n" // 4 * dx - "vmul.s32 q1, q1, q2 \n" + "vdup.32 q0, %3 \n" // x + "vdup.32 q1, %4 \n" // dx + "vld1.32 {q2}, [%5] \n" // 0 1 2 3 + "vshl.i32 q3, q1, #2 \n" // 4 * dx + "vmul.s32 q1, q1, q2 \n" // x , x + 1 * dx, x + 2 * dx, x + 3 * dx - "vadd.s32 q1, q1, q0 \n" + "vadd.s32 q1, q1, q0 \n" // x + 4 * dx, x + 5 * dx, x + 6 * dx, x + 7 * dx - "vadd.s32 q2, q1, q3 \n" - "vshl.i32 q0, q3, #1 \n" // 8 * dx - "1: \n" + "vadd.s32 q2, q1, q3 \n" + "vshl.i32 q0, q3, #1 \n" // 8 * dx + "1: \n" LOAD2_DATA8_LANE(0) LOAD2_DATA8_LANE(1) LOAD2_DATA8_LANE(2) @@ -566,27 +566,27 @@ void ScaleFilterCols_NEON(uint8_t* dst_ptr, LOAD2_DATA8_LANE(5) LOAD2_DATA8_LANE(6) LOAD2_DATA8_LANE(7) - "vmov q10, q1 \n" - "vmov q11, q2 \n" - "vuzp.16 q10, q11 \n" - "vmovl.u8 q8, d6 \n" - "vmovl.u8 q9, d7 \n" - "vsubl.s16 q11, d18, d16 \n" - "vsubl.s16 q12, d19, d17 \n" - "vmovl.u16 q13, d20 \n" - "vmovl.u16 q10, d21 \n" - "vmul.s32 q11, q11, q13 \n" - "vmul.s32 q12, q12, q10 \n" - "vrshrn.s32 d18, q11, #16 \n" - "vrshrn.s32 d19, q12, #16 \n" - "vadd.s16 q8, q8, q9 \n" - "vmovn.s16 d6, q8 \n" + "vmov q10, q1 \n" + "vmov q11, q2 \n" + "vuzp.16 q10, q11 \n" + "vmovl.u8 q8, d6 \n" + "vmovl.u8 q9, d7 \n" + "vsubl.s16 q11, d18, d16 \n" + "vsubl.s16 q12, d19, d17 \n" + "vmovl.u16 q13, d20 \n" + "vmovl.u16 q10, d21 \n" + "vmul.s32 q11, q11, q13 \n" + "vmul.s32 q12, q12, q10 \n" + "vrshrn.s32 d18, q11, #16 \n" + "vrshrn.s32 d19, q12, #16 \n" + "vadd.s16 q8, q8, q9 \n" + "vmovn.s16 d6, q8 \n" - "vst1.8 {d6}, [%0]! 
\n" // store pixels - "vadd.s32 q1, q1, q0 \n" - "vadd.s32 q2, q2, q0 \n" - "subs %2, %2, #8 \n" // 8 processed per loop - "bgt 1b \n" + "vst1.8 {d6}, [%0]! \n" // store pixels + "vadd.s32 q1, q1, q0 \n" + "vadd.s32 q2, q2, q0 \n" + "subs %2, %2, #8 \n" // 8 processed per loop + "bgt 1b \n" : "+r"(dst_ptr), // %0 "+r"(src_ptr), // %1 "+r"(dst_width), // %2 @@ -609,75 +609,75 @@ void ScaleFilterRows_NEON(uint8_t* dst_ptr, int dst_width, int source_y_fraction) { asm volatile( - "cmp %4, #0 \n" - "beq 100f \n" - "add %2, %1 \n" - "cmp %4, #64 \n" - "beq 75f \n" - "cmp %4, #128 \n" - "beq 50f \n" - "cmp %4, #192 \n" - "beq 25f \n" + "cmp %4, #0 \n" + "beq 100f \n" + "add %2, %1 \n" + "cmp %4, #64 \n" + "beq 75f \n" + "cmp %4, #128 \n" + "beq 50f \n" + "cmp %4, #192 \n" + "beq 25f \n" - "vdup.8 d5, %4 \n" - "rsb %4, #256 \n" - "vdup.8 d4, %4 \n" + "vdup.8 d5, %4 \n" + "rsb %4, #256 \n" + "vdup.8 d4, %4 \n" // General purpose row blend. "1: \n" - "vld1.8 {q0}, [%1]! \n" - "vld1.8 {q1}, [%2]! \n" - "subs %3, %3, #16 \n" - "vmull.u8 q13, d0, d4 \n" - "vmull.u8 q14, d1, d4 \n" - "vmlal.u8 q13, d2, d5 \n" - "vmlal.u8 q14, d3, d5 \n" - "vrshrn.u16 d0, q13, #8 \n" - "vrshrn.u16 d1, q14, #8 \n" - "vst1.8 {q0}, [%0]! \n" - "bgt 1b \n" - "b 99f \n" + "vld1.8 {q0}, [%1]! \n" + "vld1.8 {q1}, [%2]! \n" + "subs %3, %3, #16 \n" + "vmull.u8 q13, d0, d4 \n" + "vmull.u8 q14, d1, d4 \n" + "vmlal.u8 q13, d2, d5 \n" + "vmlal.u8 q14, d3, d5 \n" + "vrshrn.u16 d0, q13, #8 \n" + "vrshrn.u16 d1, q14, #8 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 1b \n" + "b 99f \n" // Blend 25 / 75. "25: \n" - "vld1.8 {q0}, [%1]! \n" - "vld1.8 {q1}, [%2]! \n" - "subs %3, %3, #16 \n" - "vrhadd.u8 q0, q1 \n" - "vrhadd.u8 q0, q1 \n" - "vst1.8 {q0}, [%0]! \n" - "bgt 25b \n" - "b 99f \n" + "vld1.8 {q0}, [%1]! \n" + "vld1.8 {q1}, [%2]! \n" + "subs %3, %3, #16 \n" + "vrhadd.u8 q0, q1 \n" + "vrhadd.u8 q0, q1 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 25b \n" + "b 99f \n" // Blend 50 / 50. "50: \n" - "vld1.8 {q0}, [%1]! 
\n" - "vld1.8 {q1}, [%2]! \n" - "subs %3, %3, #16 \n" - "vrhadd.u8 q0, q1 \n" - "vst1.8 {q0}, [%0]! \n" - "bgt 50b \n" - "b 99f \n" + "vld1.8 {q0}, [%1]! \n" + "vld1.8 {q1}, [%2]! \n" + "subs %3, %3, #16 \n" + "vrhadd.u8 q0, q1 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 50b \n" + "b 99f \n" // Blend 75 / 25. "75: \n" - "vld1.8 {q1}, [%1]! \n" - "vld1.8 {q0}, [%2]! \n" - "subs %3, %3, #16 \n" - "vrhadd.u8 q0, q1 \n" - "vrhadd.u8 q0, q1 \n" - "vst1.8 {q0}, [%0]! \n" - "bgt 75b \n" - "b 99f \n" + "vld1.8 {q1}, [%1]! \n" + "vld1.8 {q0}, [%2]! \n" + "subs %3, %3, #16 \n" + "vrhadd.u8 q0, q1 \n" + "vrhadd.u8 q0, q1 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 75b \n" + "b 99f \n" // Blend 100 / 0 - Copy row unchanged. "100: \n" - "vld1.8 {q0}, [%1]! \n" - "subs %3, %3, #16 \n" - "vst1.8 {q0}, [%0]! \n" - "bgt 100b \n" + "vld1.8 {q0}, [%1]! \n" + "subs %3, %3, #16 \n" + "vst1.8 {q0}, [%0]! \n" + "bgt 100b \n" "99: \n" - "vst1.8 {d1[7]}, [%0] \n" + "vst1.8 {d1[7]}, [%0] \n" : "+r"(dst_ptr), // %0 "+r"(src_ptr), // %1 "+r"(src_stride), // %2 @@ -694,12 +694,12 @@ void ScaleARGBRowDown2_NEON(const uint8_t* src_ptr, (void)src_stride; asm volatile( "1: \n" - "vld4.32 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.32 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB - "subs %2, %2, #8 \n" // 8 processed per loop - "vmov q2, q1 \n" // load next 8 ARGB - "vst2.32 {q2, q3}, [%1]! \n" // store odd pixels - "bgt 1b \n" + "vld4.32 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.32 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB + "subs %2, %2, #8 \n" // 8 processed per loop + "vmov q2, q1 \n" // load next 8 ARGB + "vst2.32 {q2, q3}, [%1]! \n" // store odd pixels + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst), // %1 "+r"(dst_width) // %2 @@ -722,13 +722,13 @@ void ScaleARGBRowDown2Linear_NEON(const uint8_t* src_argb, (void)src_stride; asm volatile( "1: \n" - "vld4.32 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.32 {d1, d3, d5, d7}, [%0]! 
\n" // load next 8 ARGB - "subs %2, %2, #8 \n" // 8 processed per loop - "vrhadd.u8 q0, q0, q1 \n" // rounding half add - "vrhadd.u8 q1, q2, q3 \n" // rounding half add - "vst2.32 {q0, q1}, [%1]! \n" - "bgt 1b \n" + "vld4.32 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.32 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB + "subs %2, %2, #8 \n" // 8 processed per loop + "vrhadd.u8 q0, q0, q1 \n" // rounding half add + "vrhadd.u8 q1, q2, q3 \n" // rounding half add + "vst2.32 {q0, q1}, [%1]! \n" + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(dst_width) // %2 @@ -743,27 +743,27 @@ void ScaleARGBRowDown2Box_NEON(const uint8_t* src_ptr, int dst_width) { asm volatile( // change the stride to row 2 pointer - "add %1, %1, %0 \n" + "add %1, %1, %0 \n" "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB - "subs %3, %3, #8 \n" // 8 processed per loop. - "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. - "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. - "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. - "vpaddl.u8 q3, q3 \n" // A 16 bytes -> 8 shorts. - "vld4.8 {d16, d18, d20, d22}, [%1]! \n" // load 8 more ARGB - "vld4.8 {d17, d19, d21, d23}, [%1]! \n" // load last 8 ARGB - "vpadal.u8 q0, q8 \n" // B 16 bytes -> 8 shorts. - "vpadal.u8 q1, q9 \n" // G 16 bytes -> 8 shorts. - "vpadal.u8 q2, q10 \n" // R 16 bytes -> 8 shorts. - "vpadal.u8 q3, q11 \n" // A 16 bytes -> 8 shorts. - "vrshrn.u16 d0, q0, #2 \n" // round and pack to bytes - "vrshrn.u16 d1, q1, #2 \n" - "vrshrn.u16 d2, q2, #2 \n" - "vrshrn.u16 d3, q3, #2 \n" - "vst4.8 {d0, d1, d2, d3}, [%2]! \n" - "bgt 1b \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB + "subs %3, %3, #8 \n" // 8 processed per loop. + "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. 
+ "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. + "vpaddl.u8 q3, q3 \n" // A 16 bytes -> 8 shorts. + "vld4.8 {d16, d18, d20, d22}, [%1]! \n" // load 8 more ARGB + "vld4.8 {d17, d19, d21, d23}, [%1]! \n" // load last 8 ARGB + "vpadal.u8 q0, q8 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q1, q9 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q10 \n" // R 16 bytes -> 8 shorts. + "vpadal.u8 q3, q11 \n" // A 16 bytes -> 8 shorts. + "vrshrn.u16 d0, q0, #2 \n" // round and pack to bytes + "vrshrn.u16 d1, q1, #2 \n" + "vrshrn.u16 d2, q2, #2 \n" + "vrshrn.u16 d3, q3, #2 \n" + "vst4.8 {d0, d1, d2, d3}, [%2]! \n" + "bgt 1b \n" : "+r"(src_ptr), // %0 "+r"(src_stride), // %1 "+r"(dst), // %2 @@ -781,15 +781,15 @@ void ScaleARGBRowDownEven_NEON(const uint8_t* src_argb, int dst_width) { (void)src_stride; asm volatile( - "mov r12, %3, lsl #2 \n" + "mov r12, %3, lsl #2 \n" "1: \n" - "vld1.32 {d0[0]}, [%0], r12 \n" - "vld1.32 {d0[1]}, [%0], r12 \n" - "vld1.32 {d1[0]}, [%0], r12 \n" - "vld1.32 {d1[1]}, [%0], r12 \n" - "subs %2, %2, #4 \n" // 4 pixels per loop. - "vst1.8 {q0}, [%1]! \n" - "bgt 1b \n" + "vld1.32 {d0[0]}, [%0], r12 \n" + "vld1.32 {d0[1]}, [%0], r12 \n" + "vld1.32 {d1[0]}, [%0], r12 \n" + "vld1.32 {d1[1]}, [%0], r12 \n" + "subs %2, %2, #4 \n" // 4 pixels per loop. + "vst1.8 {q0}, [%1]! 
\n" + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(dst_width) // %2 @@ -805,30 +805,30 @@ void ScaleARGBRowDownEvenBox_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int dst_width) { asm volatile( - "mov r12, %4, lsl #2 \n" - "add %1, %1, %0 \n" + "mov r12, %4, lsl #2 \n" + "add %1, %1, %0 \n" "1: \n" - "vld1.8 {d0}, [%0], r12 \n" // 4 2x2 blocks -> 2x1 - "vld1.8 {d1}, [%1], r12 \n" - "vld1.8 {d2}, [%0], r12 \n" - "vld1.8 {d3}, [%1], r12 \n" - "vld1.8 {d4}, [%0], r12 \n" - "vld1.8 {d5}, [%1], r12 \n" - "vld1.8 {d6}, [%0], r12 \n" - "vld1.8 {d7}, [%1], r12 \n" - "vaddl.u8 q0, d0, d1 \n" - "vaddl.u8 q1, d2, d3 \n" - "vaddl.u8 q2, d4, d5 \n" - "vaddl.u8 q3, d6, d7 \n" - "vswp.8 d1, d2 \n" // ab_cd -> ac_bd - "vswp.8 d5, d6 \n" // ef_gh -> eg_fh - "vadd.u16 q0, q0, q1 \n" // (a+b)_(c+d) - "vadd.u16 q2, q2, q3 \n" // (e+f)_(g+h) - "vrshrn.u16 d0, q0, #2 \n" // first 2 pixels. - "vrshrn.u16 d1, q2, #2 \n" // next 2 pixels. - "subs %3, %3, #4 \n" // 4 pixels per loop. - "vst1.8 {q0}, [%2]! \n" - "bgt 1b \n" + "vld1.8 {d0}, [%0], r12 \n" // 4 2x2 blocks -> 2x1 + "vld1.8 {d1}, [%1], r12 \n" + "vld1.8 {d2}, [%0], r12 \n" + "vld1.8 {d3}, [%1], r12 \n" + "vld1.8 {d4}, [%0], r12 \n" + "vld1.8 {d5}, [%1], r12 \n" + "vld1.8 {d6}, [%0], r12 \n" + "vld1.8 {d7}, [%1], r12 \n" + "vaddl.u8 q0, d0, d1 \n" + "vaddl.u8 q1, d2, d3 \n" + "vaddl.u8 q2, d4, d5 \n" + "vaddl.u8 q3, d6, d7 \n" + "vswp.8 d1, d2 \n" // ab_cd -> ac_bd + "vswp.8 d5, d6 \n" // ef_gh -> eg_fh + "vadd.u16 q0, q0, q1 \n" // (a+b)_(c+d) + "vadd.u16 q2, q2, q3 \n" // (e+f)_(g+h) + "vrshrn.u16 d0, q0, #2 \n" // first 2 pixels. + "vrshrn.u16 d1, q2, #2 \n" // next 2 pixels. + "subs %3, %3, #4 \n" // 4 pixels per loop. + "vst1.8 {q0}, [%2]! \n" + "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(src_stride), // %1 "+r"(dst_argb), // %2 @@ -865,8 +865,8 @@ void ScaleARGBCols_NEON(uint8_t* dst_argb, LOAD1_DATA32_LANE(d3, 1) // clang-format on "vst1.32 {q0, q1}, [%0]! 
\n" // store pixels - "subs %2, %2, #8 \n" // 8 processed per loop - "bgt 1b \n" + "subs %2, %2, #8 \n" // 8 processed per loop + "bgt 1b \n" : "+r"(dst_argb), // %0 "+r"(src_argb), // %1 "+r"(dst_width), // %2 @@ -897,16 +897,16 @@ void ScaleARGBFilterCols_NEON(uint8_t* dst_argb, int* tmp = dx_offset; const uint8_t* src_tmp = src_argb; asm volatile ( - "vdup.32 q0, %3 \n" // x - "vdup.32 q1, %4 \n" // dx - "vld1.32 {q2}, [%5] \n" // 0 1 2 3 - "vshl.i32 q9, q1, #2 \n" // 4 * dx - "vmul.s32 q1, q1, q2 \n" - "vmov.i8 q3, #0x7f \n" // 0x7F - "vmov.i16 q15, #0x7f \n" // 0x7F + "vdup.32 q0, %3 \n" // x + "vdup.32 q1, %4 \n" // dx + "vld1.32 {q2}, [%5] \n" // 0 1 2 3 + "vshl.i32 q9, q1, #2 \n" // 4 * dx + "vmul.s32 q1, q1, q2 \n" + "vmov.i8 q3, #0x7f \n" // 0x7F + "vmov.i16 q15, #0x7f \n" // 0x7F // x , x + 1 * dx, x + 2 * dx, x + 3 * dx - "vadd.s32 q8, q1, q0 \n" - "1: \n" + "vadd.s32 q8, q1, q0 \n" + "1: \n" // d0, d1: a // d2, d3: b LOAD2_DATA32_LANE(d0, d2, 0) @@ -950,6 +950,64 @@ void ScaleARGBFilterCols_NEON(uint8_t* dst_argb, #undef LOAD2_DATA32_LANE +void ScaleUVRowDown2Box_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + asm volatile( + // change the stride to row 2 pointer + "add %1, %1, %0 \n" + "1: \n" + "vld2.8 {d0, d2}, [%0]! \n" // load 8 UV pixels. + "vld2.8 {d1, d3}, [%0]! \n" // load next 8 UV + "subs %3, %3, #8 \n" // 8 processed per loop. + "vpaddl.u8 q0, q0 \n" // U 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // V 16 bytes -> 8 shorts. + "vld2.8 {d16, d18}, [%1]! \n" // load 8 more UV + "vld2.8 {d17, d19}, [%1]! \n" // load last 8 UV + "vpadal.u8 q0, q8 \n" // U 16 bytes -> 8 shorts. + "vpadal.u8 q1, q9 \n" // V 16 bytes -> 8 shorts. + "vrshrn.u16 d0, q0, #2 \n" // round and pack to bytes + "vrshrn.u16 d1, q1, #2 \n" + "vst2.8 {d0, d1}, [%2]! 
\n" + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_stride), // %1 + "+r"(dst), // %2 + "+r"(dst_width) // %3 + : + : "memory", "cc", "q0", "q1", "q8", "q9"); +} + +// Reads 4 pixels at a time. +void ScaleUVRowDownEven_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, // pixel step + uint8_t* dst_ptr, + int dst_width) { + const uint8_t* src1_ptr = src_ptr + src_stepx * 2; + const uint8_t* src2_ptr = src_ptr + src_stepx * 4; + const uint8_t* src3_ptr = src_ptr + src_stepx * 6; + (void)src_stride; + asm volatile( + "1: \n" + "vld1.16 {d0[0]}, [%0], %6 \n" + "vld1.16 {d0[1]}, [%1], %6 \n" + "vld1.16 {d0[2]}, [%2], %6 \n" + "vld1.16 {d0[3]}, [%3], %6 \n" + "subs %5, %5, #4 \n" // 4 pixels per loop. + "vst1.8 {d0}, [%4]! \n" + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src1_ptr), // %1 + "+r"(src2_ptr), // %2 + "+r"(src3_ptr), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_width) // %5 + : "r"(src_stepx * 8) // %6 + : "memory", "cc", "d0"); +} + #endif // defined(__ARM_NEON__) && !defined(__aarch64__) #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc index 0a7b80ce1..185591cb5 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc @@ -29,10 +29,11 @@ void ScaleRowDown2_NEON(const uint8_t* src_ptr, asm volatile( "1: \n" // load even pixels into v0, odd into v1 - "ld2 {v0.16b,v1.16b}, [%0], #32 \n" - "subs %w2, %w2, #16 \n" // 16 processed per loop - "st1 {v1.16b}, [%1], #16 \n" // store odd pixels - "b.gt 1b \n" + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" + "subs %w2, %w2, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "st1 {v1.16b}, [%1], #16 \n" // store odd pixels + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst), // %1 "+r"(dst_width) // %2 @@ -50,11 +51,12 @@ void ScaleRowDown2Linear_NEON(const uint8_t* src_ptr, asm volatile( 
"1: \n" // load even pixels into v0, odd into v1 - "ld2 {v0.16b,v1.16b}, [%0], #32 \n" - "subs %w2, %w2, #16 \n" // 16 processed per loop - "urhadd v0.16b, v0.16b, v1.16b \n" // rounding half add - "st1 {v0.16b}, [%1], #16 \n" - "b.gt 1b \n" + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" + "subs %w2, %w2, #16 \n" // 16 processed per loop + "urhadd v0.16b, v0.16b, v1.16b \n" // rounding half add + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "st1 {v0.16b}, [%1], #16 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst), // %1 "+r"(dst_width) // %2 @@ -70,19 +72,21 @@ void ScaleRowDown2Box_NEON(const uint8_t* src_ptr, int dst_width) { asm volatile( // change the stride to row 2 pointer - "add %1, %1, %0 \n" + "add %1, %1, %0 \n" "1: \n" - "ld1 {v0.16b, v1.16b}, [%0], #32 \n" // load row 1 and post inc - "ld1 {v2.16b, v3.16b}, [%1], #32 \n" // load row 2 and post inc - "subs %w3, %w3, #16 \n" // 16 processed per loop - "uaddlp v0.8h, v0.16b \n" // row 1 add adjacent - "uaddlp v1.8h, v1.16b \n" - "uadalp v0.8h, v2.16b \n" // += row 2 add adjacent - "uadalp v1.8h, v3.16b \n" - "rshrn v0.8b, v0.8h, #2 \n" // round and pack - "rshrn2 v0.16b, v1.8h, #2 \n" - "st1 {v0.16b}, [%2], #16 \n" - "b.gt 1b \n" + "ld1 {v0.16b, v1.16b}, [%0], #32 \n" // load row 1 and post inc + "ld1 {v2.16b, v3.16b}, [%1], #32 \n" // load row 2 and post inc + "subs %w3, %w3, #16 \n" // 16 processed per loop + "uaddlp v0.8h, v0.16b \n" // row 1 add adjacent + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "uaddlp v1.8h, v1.16b \n" + "prfm pldl1keep, [%1, 448] \n" + "uadalp v0.8h, v2.16b \n" // += row 2 add adjacent + "uadalp v1.8h, v3.16b \n" + "rshrn v0.8b, v0.8h, #2 \n" // round and pack + "rshrn2 v0.16b, v1.8h, #2 \n" + "st1 {v0.16b}, [%2], #16 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(src_stride), // %1 "+r"(dst), // %2 @@ -99,10 +103,11 @@ void ScaleRowDown4_NEON(const uint8_t* src_ptr, (void)src_stride; asm volatile( "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // 
src line 0 - "subs %w2, %w2, #8 \n" // 8 processed per loop - "st1 {v2.8b}, [%1], #8 \n" - "b.gt 1b \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // src line 0 + "subs %w2, %w2, #8 \n" // 8 processed per loop + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "st1 {v2.8b}, [%1], #8 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -119,19 +124,23 @@ void ScaleRowDown4Box_NEON(const uint8_t* src_ptr, const uint8_t* src_ptr3 = src_ptr + src_stride * 3; asm volatile( "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load up 16x4 - "ld1 {v1.16b}, [%2], #16 \n" - "ld1 {v2.16b}, [%3], #16 \n" - "ld1 {v3.16b}, [%4], #16 \n" - "subs %w5, %w5, #4 \n" - "uaddlp v0.8h, v0.16b \n" - "uadalp v0.8h, v1.16b \n" - "uadalp v0.8h, v2.16b \n" - "uadalp v0.8h, v3.16b \n" - "addp v0.8h, v0.8h, v0.8h \n" - "rshrn v0.8b, v0.8h, #4 \n" // divide by 16 w/rounding - "st1 {v0.s}[0], [%1], #4 \n" - "b.gt 1b \n" + "ld1 {v0.16b}, [%0], #16 \n" // load up 16x4 + "ld1 {v1.16b}, [%2], #16 \n" + "ld1 {v2.16b}, [%3], #16 \n" + "ld1 {v3.16b}, [%4], #16 \n" + "subs %w5, %w5, #4 \n" + "uaddlp v0.8h, v0.16b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "uadalp v0.8h, v1.16b \n" + "prfm pldl1keep, [%2, 448] \n" + "uadalp v0.8h, v2.16b \n" + "prfm pldl1keep, [%3, 448] \n" + "uadalp v0.8h, v3.16b \n" + "prfm pldl1keep, [%4, 448] \n" + "addp v0.8h, v0.8h, v0.8h \n" + "rshrn v0.8b, v0.8h, #4 \n" // divide by 16 w/rounding + "st1 {v0.s}[0], [%1], #4 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(src_ptr1), // %2 @@ -151,12 +160,13 @@ void ScaleRowDown34_NEON(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // src line 0 - "subs %w2, %w2, #24 \n" - "orr v2.16b, v3.16b, v3.16b \n" // order v0,v1,v2 - "st3 {v0.8b,v1.8b,v2.8b}, [%1], #24 \n" - "b.gt 1b \n" + "1: \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // src line 0 + "subs %w2, %w2, #24 
\n" + "orr v2.16b, v3.16b, v3.16b \n" // order v0,v1,v2 + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "st3 {v0.8b,v1.8b,v2.8b}, [%1], #24 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -169,49 +179,51 @@ void ScaleRowDown34_0_Box_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "movi v20.8b, #3 \n" - "add %3, %3, %0 \n" - "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // src line 0 - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%3], #32 \n" // src line 1 - "subs %w2, %w2, #24 \n" + "movi v20.8b, #3 \n" + "add %3, %3, %0 \n" + "1: \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // src line 0 + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%3], #32 \n" // src line 1 + "subs %w2, %w2, #24 \n" // filter src line 0 with src line 1 // expand chars to shorts to allow for room // when adding lines together - "ushll v16.8h, v4.8b, #0 \n" - "ushll v17.8h, v5.8b, #0 \n" - "ushll v18.8h, v6.8b, #0 \n" - "ushll v19.8h, v7.8b, #0 \n" + "ushll v16.8h, v4.8b, #0 \n" + "ushll v17.8h, v5.8b, #0 \n" + "ushll v18.8h, v6.8b, #0 \n" + "ushll v19.8h, v7.8b, #0 \n" // 3 * line_0 + line_1 - "umlal v16.8h, v0.8b, v20.8b \n" - "umlal v17.8h, v1.8b, v20.8b \n" - "umlal v18.8h, v2.8b, v20.8b \n" - "umlal v19.8h, v3.8b, v20.8b \n" + "umlal v16.8h, v0.8b, v20.8b \n" + "umlal v17.8h, v1.8b, v20.8b \n" + "umlal v18.8h, v2.8b, v20.8b \n" + "umlal v19.8h, v3.8b, v20.8b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead // (3 * line_0 + line_1) >> 2 - "uqrshrn v0.8b, v16.8h, #2 \n" - "uqrshrn v1.8b, v17.8h, #2 \n" - "uqrshrn v2.8b, v18.8h, #2 \n" - "uqrshrn v3.8b, v19.8h, #2 \n" + "uqrshrn v0.8b, v16.8h, #2 \n" + "uqrshrn v1.8b, v17.8h, #2 \n" + "uqrshrn v2.8b, v18.8h, #2 \n" + "uqrshrn v3.8b, v19.8h, #2 \n" + "prfm pldl1keep, [%3, 448] \n" // a0 = (src[0] * 3 + s[1] * 1) >> 2 - "ushll v16.8h, v1.8b, #0 \n" - "umlal v16.8h, v0.8b, v20.8b \n" - "uqrshrn v0.8b, v16.8h, #2 \n" + "ushll v16.8h, v1.8b, #0 \n" + "umlal 
v16.8h, v0.8b, v20.8b \n" + "uqrshrn v0.8b, v16.8h, #2 \n" // a1 = (src[1] * 1 + s[2] * 1) >> 1 - "urhadd v1.8b, v1.8b, v2.8b \n" + "urhadd v1.8b, v1.8b, v2.8b \n" // a2 = (src[2] * 1 + s[3] * 3) >> 2 - "ushll v16.8h, v2.8b, #0 \n" - "umlal v16.8h, v3.8b, v20.8b \n" - "uqrshrn v2.8b, v16.8h, #2 \n" + "ushll v16.8h, v2.8b, #0 \n" + "umlal v16.8h, v3.8b, v20.8b \n" + "uqrshrn v2.8b, v16.8h, #2 \n" - "st3 {v0.8b,v1.8b,v2.8b}, [%1], #24 \n" + "st3 {v0.8b,v1.8b,v2.8b}, [%1], #24 \n" - "b.gt 1b \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -226,33 +238,35 @@ void ScaleRowDown34_1_Box_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { asm volatile( - "movi v20.8b, #3 \n" - "add %3, %3, %0 \n" - "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // src line 0 - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%3], #32 \n" // src line 1 - "subs %w2, %w2, #24 \n" + "movi v20.8b, #3 \n" + "add %3, %3, %0 \n" + "1: \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // src line 0 + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%3], #32 \n" // src line 1 + "subs %w2, %w2, #24 \n" // average src line 0 with src line 1 - "urhadd v0.8b, v0.8b, v4.8b \n" - "urhadd v1.8b, v1.8b, v5.8b \n" - "urhadd v2.8b, v2.8b, v6.8b \n" - "urhadd v3.8b, v3.8b, v7.8b \n" + "urhadd v0.8b, v0.8b, v4.8b \n" + "urhadd v1.8b, v1.8b, v5.8b \n" + "urhadd v2.8b, v2.8b, v6.8b \n" + "urhadd v3.8b, v3.8b, v7.8b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead // a0 = (src[0] * 3 + s[1] * 1) >> 2 - "ushll v4.8h, v1.8b, #0 \n" - "umlal v4.8h, v0.8b, v20.8b \n" - "uqrshrn v0.8b, v4.8h, #2 \n" + "ushll v4.8h, v1.8b, #0 \n" + "umlal v4.8h, v0.8b, v20.8b \n" + "uqrshrn v0.8b, v4.8h, #2 \n" + "prfm pldl1keep, [%3, 448] \n" // a1 = (src[1] * 1 + s[2] * 1) >> 1 - "urhadd v1.8b, v1.8b, v2.8b \n" + "urhadd v1.8b, v1.8b, v2.8b \n" // a2 = (src[2] * 1 + s[3] * 3) >> 2 - "ushll v4.8h, v2.8b, #0 \n" - "umlal v4.8h, v3.8b, v20.8b \n" - "uqrshrn v2.8b, v4.8h, #2 \n" + "ushll 
v4.8h, v2.8b, #0 \n" + "umlal v4.8h, v3.8b, v20.8b \n" + "uqrshrn v2.8b, v4.8h, #2 \n" - "st3 {v0.8b,v1.8b,v2.8b}, [%1], #24 \n" - "b.gt 1b \n" + "st3 {v0.8b,v1.8b,v2.8b}, [%1], #24 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width), // %2 @@ -279,14 +293,15 @@ void ScaleRowDown38_NEON(const uint8_t* src_ptr, int dst_width) { (void)src_stride; asm volatile( - "ld1 {v3.16b}, [%3] \n" - "1: \n" - "ld1 {v0.16b,v1.16b}, [%0], #32 \n" - "subs %w2, %w2, #12 \n" - "tbl v2.16b, {v0.16b,v1.16b}, v3.16b \n" - "st1 {v2.8b}, [%1], #8 \n" - "st1 {v2.s}[2], [%1], #4 \n" - "b.gt 1b \n" + "ld1 {v3.16b}, [%3] \n" + "1: \n" + "ld1 {v0.16b,v1.16b}, [%0], #32 \n" + "subs %w2, %w2, #12 \n" + "tbl v2.16b, {v0.16b,v1.16b}, v3.16b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "st1 {v2.8b}, [%1], #8 \n" + "st1 {v2.s}[2], [%1], #4 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(dst_width) // %2 @@ -303,68 +318,68 @@ void OMITFP ScaleRowDown38_3_Box_NEON(const uint8_t* src_ptr, ptrdiff_t tmp_src_stride = src_stride; asm volatile( - "ld1 {v29.8h}, [%5] \n" - "ld1 {v30.16b}, [%6] \n" - "ld1 {v31.8h}, [%7] \n" - "add %2, %2, %0 \n" - "1: \n" + "ld1 {v29.8h}, [%5] \n" + "ld1 {v30.16b}, [%6] \n" + "ld1 {v31.8h}, [%7] \n" + "add %2, %2, %0 \n" + "1: \n" // 00 40 01 41 02 42 03 43 // 10 50 11 51 12 52 13 53 // 20 60 21 61 22 62 23 63 // 30 70 31 71 32 72 33 73 - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%2], #32 \n" - "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%3], #32 \n" - "subs %w4, %w4, #12 \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%2], #32 \n" + "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%3], #32 \n" + "subs %w4, %w4, #12 \n" // Shuffle the input data around to get align the data // so adjacent data can be added. 
0,1 - 2,3 - 4,5 - 6,7 // 00 10 01 11 02 12 03 13 // 40 50 41 51 42 52 43 53 - "trn1 v20.8b, v0.8b, v1.8b \n" - "trn2 v21.8b, v0.8b, v1.8b \n" - "trn1 v22.8b, v4.8b, v5.8b \n" - "trn2 v23.8b, v4.8b, v5.8b \n" - "trn1 v24.8b, v16.8b, v17.8b \n" - "trn2 v25.8b, v16.8b, v17.8b \n" + "trn1 v20.8b, v0.8b, v1.8b \n" + "trn2 v21.8b, v0.8b, v1.8b \n" + "trn1 v22.8b, v4.8b, v5.8b \n" + "trn2 v23.8b, v4.8b, v5.8b \n" + "trn1 v24.8b, v16.8b, v17.8b \n" + "trn2 v25.8b, v16.8b, v17.8b \n" // 20 30 21 31 22 32 23 33 // 60 70 61 71 62 72 63 73 - "trn1 v0.8b, v2.8b, v3.8b \n" - "trn2 v1.8b, v2.8b, v3.8b \n" - "trn1 v4.8b, v6.8b, v7.8b \n" - "trn2 v5.8b, v6.8b, v7.8b \n" - "trn1 v16.8b, v18.8b, v19.8b \n" - "trn2 v17.8b, v18.8b, v19.8b \n" + "trn1 v0.8b, v2.8b, v3.8b \n" + "trn2 v1.8b, v2.8b, v3.8b \n" + "trn1 v4.8b, v6.8b, v7.8b \n" + "trn2 v5.8b, v6.8b, v7.8b \n" + "trn1 v16.8b, v18.8b, v19.8b \n" + "trn2 v17.8b, v18.8b, v19.8b \n" // 00+10 01+11 02+12 03+13 // 40+50 41+51 42+52 43+53 - "uaddlp v20.4h, v20.8b \n" - "uaddlp v21.4h, v21.8b \n" - "uaddlp v22.4h, v22.8b \n" - "uaddlp v23.4h, v23.8b \n" - "uaddlp v24.4h, v24.8b \n" - "uaddlp v25.4h, v25.8b \n" + "uaddlp v20.4h, v20.8b \n" + "uaddlp v21.4h, v21.8b \n" + "uaddlp v22.4h, v22.8b \n" + "uaddlp v23.4h, v23.8b \n" + "uaddlp v24.4h, v24.8b \n" + "uaddlp v25.4h, v25.8b \n" // 60+70 61+71 62+72 63+73 - "uaddlp v1.4h, v1.8b \n" - "uaddlp v5.4h, v5.8b \n" - "uaddlp v17.4h, v17.8b \n" + "uaddlp v1.4h, v1.8b \n" + "uaddlp v5.4h, v5.8b \n" + "uaddlp v17.4h, v17.8b \n" // combine source lines - "add v20.4h, v20.4h, v22.4h \n" - "add v21.4h, v21.4h, v23.4h \n" - "add v20.4h, v20.4h, v24.4h \n" - "add v21.4h, v21.4h, v25.4h \n" - "add v2.4h, v1.4h, v5.4h \n" - "add v2.4h, v2.4h, v17.4h \n" + "add v20.4h, v20.4h, v22.4h \n" + "add v21.4h, v21.4h, v23.4h \n" + "add v20.4h, v20.4h, v24.4h \n" + "add v21.4h, v21.4h, v25.4h \n" + "add v2.4h, v1.4h, v5.4h \n" + "add v2.4h, v2.4h, v17.4h \n" // dst_ptr[3] = (s[6 + st * 0] + s[7 + st * 0] // + 
s[6 + st * 1] + s[7 + st * 1] // + s[6 + st * 2] + s[7 + st * 2]) / 6 - "sqrdmulh v2.8h, v2.8h, v29.8h \n" - "xtn v2.8b, v2.8h \n" + "sqrdmulh v2.8h, v2.8h, v29.8h \n" + "xtn v2.8b, v2.8h \n" // Shuffle 2,3 reg around so that 2 can be added to the // 0,1 reg and 3 can be added to the 4,5 reg. This @@ -372,35 +387,38 @@ void OMITFP ScaleRowDown38_3_Box_NEON(const uint8_t* src_ptr, // registers are already expanded. Then do transposes // to get aligned. // xx 20 xx 30 xx 21 xx 31 xx 22 xx 32 xx 23 xx 33 - "ushll v16.8h, v16.8b, #0 \n" - "uaddl v0.8h, v0.8b, v4.8b \n" + "ushll v16.8h, v16.8b, #0 \n" + "uaddl v0.8h, v0.8b, v4.8b \n" // combine source lines - "add v0.8h, v0.8h, v16.8h \n" + "add v0.8h, v0.8h, v16.8h \n" // xx 20 xx 21 xx 22 xx 23 // xx 30 xx 31 xx 32 xx 33 - "trn1 v1.8h, v0.8h, v0.8h \n" - "trn2 v4.8h, v0.8h, v0.8h \n" - "xtn v0.4h, v1.4s \n" - "xtn v4.4h, v4.4s \n" + "trn1 v1.8h, v0.8h, v0.8h \n" + "trn2 v4.8h, v0.8h, v0.8h \n" + "xtn v0.4h, v1.4s \n" + "xtn v4.4h, v4.4s \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead // 0+1+2, 3+4+5 - "add v20.8h, v20.8h, v0.8h \n" - "add v21.8h, v21.8h, v4.8h \n" + "add v20.8h, v20.8h, v0.8h \n" + "add v21.8h, v21.8h, v4.8h \n" + "prfm pldl1keep, [%2, 448] \n" // Need to divide, but can't downshift as the the value // isn't a power of 2. So multiply by 65536 / n // and take the upper 16 bits. 
- "sqrdmulh v0.8h, v20.8h, v31.8h \n" - "sqrdmulh v1.8h, v21.8h, v31.8h \n" + "sqrdmulh v0.8h, v20.8h, v31.8h \n" + "sqrdmulh v1.8h, v21.8h, v31.8h \n" + "prfm pldl1keep, [%3, 448] \n" // Align for table lookup, vtbl requires registers to be adjacent - "tbl v3.16b, {v0.16b, v1.16b, v2.16b}, v30.16b \n" + "tbl v3.16b, {v0.16b, v1.16b, v2.16b}, v30.16b \n" - "st1 {v3.8b}, [%1], #8 \n" - "st1 {v3.s}[2], [%1], #4 \n" - "b.gt 1b \n" + "st1 {v3.8b}, [%1], #8 \n" + "st1 {v3.s}[2], [%1], #4 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(tmp_src_stride), // %2 @@ -422,53 +440,53 @@ void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, // TODO(fbarchard): use src_stride directly for clang 3.5+. ptrdiff_t tmp_src_stride = src_stride; asm volatile( - "ld1 {v30.8h}, [%4] \n" - "ld1 {v31.16b}, [%5] \n" - "add %2, %2, %0 \n" - "1: \n" + "ld1 {v30.8h}, [%4] \n" + "ld1 {v31.16b}, [%5] \n" + "add %2, %2, %0 \n" + "1: \n" // 00 40 01 41 02 42 03 43 // 10 50 11 51 12 52 13 53 // 20 60 21 61 22 62 23 63 // 30 70 31 71 32 72 33 73 - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" - "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%2], #32 \n" - "subs %w3, %w3, #12 \n" + "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" + "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%2], #32 \n" + "subs %w3, %w3, #12 \n" // Shuffle the input data around to get align the data // so adjacent data can be added. 
0,1 - 2,3 - 4,5 - 6,7 // 00 10 01 11 02 12 03 13 // 40 50 41 51 42 52 43 53 - "trn1 v16.8b, v0.8b, v1.8b \n" - "trn2 v17.8b, v0.8b, v1.8b \n" - "trn1 v18.8b, v4.8b, v5.8b \n" - "trn2 v19.8b, v4.8b, v5.8b \n" + "trn1 v16.8b, v0.8b, v1.8b \n" + "trn2 v17.8b, v0.8b, v1.8b \n" + "trn1 v18.8b, v4.8b, v5.8b \n" + "trn2 v19.8b, v4.8b, v5.8b \n" // 20 30 21 31 22 32 23 33 // 60 70 61 71 62 72 63 73 - "trn1 v0.8b, v2.8b, v3.8b \n" - "trn2 v1.8b, v2.8b, v3.8b \n" - "trn1 v4.8b, v6.8b, v7.8b \n" - "trn2 v5.8b, v6.8b, v7.8b \n" + "trn1 v0.8b, v2.8b, v3.8b \n" + "trn2 v1.8b, v2.8b, v3.8b \n" + "trn1 v4.8b, v6.8b, v7.8b \n" + "trn2 v5.8b, v6.8b, v7.8b \n" // 00+10 01+11 02+12 03+13 // 40+50 41+51 42+52 43+53 - "uaddlp v16.4h, v16.8b \n" - "uaddlp v17.4h, v17.8b \n" - "uaddlp v18.4h, v18.8b \n" - "uaddlp v19.4h, v19.8b \n" + "uaddlp v16.4h, v16.8b \n" + "uaddlp v17.4h, v17.8b \n" + "uaddlp v18.4h, v18.8b \n" + "uaddlp v19.4h, v19.8b \n" // 60+70 61+71 62+72 63+73 - "uaddlp v1.4h, v1.8b \n" - "uaddlp v5.4h, v5.8b \n" + "uaddlp v1.4h, v1.8b \n" + "uaddlp v5.4h, v5.8b \n" // combine source lines - "add v16.4h, v16.4h, v18.4h \n" - "add v17.4h, v17.4h, v19.4h \n" - "add v2.4h, v1.4h, v5.4h \n" + "add v16.4h, v16.4h, v18.4h \n" + "add v17.4h, v17.4h, v19.4h \n" + "add v2.4h, v1.4h, v5.4h \n" // dst_ptr[3] = (s[6] + s[7] + s[6+st] + s[7+st]) / 4 - "uqrshrn v2.8b, v2.8h, #2 \n" + "uqrshrn v2.8b, v2.8h, #2 \n" // Shuffle 2,3 reg around so that 2 can be added to the // 0,1 reg and 3 can be added to the 4,5 reg. 
This @@ -478,33 +496,35 @@ void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, // xx 20 xx 30 xx 21 xx 31 xx 22 xx 32 xx 23 xx 33 // combine source lines - "uaddl v0.8h, v0.8b, v4.8b \n" + "uaddl v0.8h, v0.8b, v4.8b \n" // xx 20 xx 21 xx 22 xx 23 // xx 30 xx 31 xx 32 xx 33 - "trn1 v1.8h, v0.8h, v0.8h \n" - "trn2 v4.8h, v0.8h, v0.8h \n" - "xtn v0.4h, v1.4s \n" - "xtn v4.4h, v4.4s \n" + "trn1 v1.8h, v0.8h, v0.8h \n" + "trn2 v4.8h, v0.8h, v0.8h \n" + "xtn v0.4h, v1.4s \n" + "xtn v4.4h, v4.4s \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead // 0+1+2, 3+4+5 - "add v16.8h, v16.8h, v0.8h \n" - "add v17.8h, v17.8h, v4.8h \n" + "add v16.8h, v16.8h, v0.8h \n" + "add v17.8h, v17.8h, v4.8h \n" + "prfm pldl1keep, [%2, 448] \n" // Need to divide, but can't downshift as the the value // isn't a power of 2. So multiply by 65536 / n // and take the upper 16 bits. - "sqrdmulh v0.8h, v16.8h, v30.8h \n" - "sqrdmulh v1.8h, v17.8h, v30.8h \n" + "sqrdmulh v0.8h, v16.8h, v30.8h \n" + "sqrdmulh v1.8h, v17.8h, v30.8h \n" // Align for table lookup, vtbl requires registers to // be adjacent - "tbl v3.16b, {v0.16b, v1.16b, v2.16b}, v31.16b \n" + "tbl v3.16b, {v0.16b, v1.16b, v2.16b}, v31.16b \n" - "st1 {v3.8b}, [%1], #8 \n" - "st1 {v3.s}[2], [%1], #4 \n" - "b.gt 1b \n" + "st1 {v3.8b}, [%1], #8 \n" + "st1 {v3.s}[2], [%1], #4 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(tmp_src_stride), // %2 @@ -522,13 +542,14 @@ void ScaleAddRow_NEON(const uint8_t* src_ptr, int src_width) { asm volatile( "1: \n" - "ld1 {v1.8h, v2.8h}, [%1] \n" // load accumulator - "ld1 {v0.16b}, [%0], #16 \n" // load 16 bytes - "uaddw2 v2.8h, v2.8h, v0.16b \n" // add - "uaddw v1.8h, v1.8h, v0.8b \n" - "st1 {v1.8h, v2.8h}, [%1], #32 \n" // store accumulator - "subs %w2, %w2, #16 \n" // 16 processed per loop - "b.gt 1b \n" + "ld1 {v1.8h, v2.8h}, [%1] \n" // load accumulator + "ld1 {v0.16b}, [%0], #16 \n" // load 16 bytes + "uaddw2 v2.8h, v2.8h, v0.16b \n" // add + "prfm pldl1keep, [%0, 
448] \n" // prefetch 7 lines ahead + "uaddw v1.8h, v1.8h, v0.8b \n" + "st1 {v1.8h, v2.8h}, [%1], #32 \n" // store accumulator + "subs %w2, %w2, #16 \n" // 16 processed per loop + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst_ptr), // %1 "+r"(src_width) // %2 @@ -560,17 +581,17 @@ void ScaleFilterCols_NEON(uint8_t* dst_ptr, int64_t x64 = (int64_t)x; // NOLINT int64_t dx64 = (int64_t)dx; // NOLINT asm volatile ( - "dup v0.4s, %w3 \n" // x - "dup v1.4s, %w4 \n" // dx - "ld1 {v2.4s}, [%5] \n" // 0 1 2 3 - "shl v3.4s, v1.4s, #2 \n" // 4 * dx - "mul v1.4s, v1.4s, v2.4s \n" + "dup v0.4s, %w3 \n" // x + "dup v1.4s, %w4 \n" // dx + "ld1 {v2.4s}, [%5] \n" // 0 1 2 3 + "shl v3.4s, v1.4s, #2 \n" // 4 * dx + "mul v1.4s, v1.4s, v2.4s \n" // x , x + 1 * dx, x + 2 * dx, x + 3 * dx - "add v1.4s, v1.4s, v0.4s \n" + "add v1.4s, v1.4s, v0.4s \n" // x + 4 * dx, x + 5 * dx, x + 6 * dx, x + 7 * dx - "add v2.4s, v1.4s, v3.4s \n" - "shl v0.4s, v3.4s, #1 \n" // 8 * dx - "1: \n" + "add v2.4s, v1.4s, v3.4s \n" + "shl v0.4s, v3.4s, #1 \n" // 8 * dx + "1: \n" LOAD2_DATA8_LANE(0) LOAD2_DATA8_LANE(1) LOAD2_DATA8_LANE(2) @@ -579,27 +600,27 @@ void ScaleFilterCols_NEON(uint8_t* dst_ptr, LOAD2_DATA8_LANE(5) LOAD2_DATA8_LANE(6) LOAD2_DATA8_LANE(7) - "mov v6.16b, v1.16b \n" - "mov v7.16b, v2.16b \n" - "uzp1 v6.8h, v6.8h, v7.8h \n" - "ushll v4.8h, v4.8b, #0 \n" - "ushll v5.8h, v5.8b, #0 \n" - "ssubl v16.4s, v5.4h, v4.4h \n" - "ssubl2 v17.4s, v5.8h, v4.8h \n" - "ushll v7.4s, v6.4h, #0 \n" - "ushll2 v6.4s, v6.8h, #0 \n" - "mul v16.4s, v16.4s, v7.4s \n" - "mul v17.4s, v17.4s, v6.4s \n" - "rshrn v6.4h, v16.4s, #16 \n" - "rshrn2 v6.8h, v17.4s, #16 \n" - "add v4.8h, v4.8h, v6.8h \n" - "xtn v4.8b, v4.8h \n" + "mov v6.16b, v1.16b \n" + "mov v7.16b, v2.16b \n" + "uzp1 v6.8h, v6.8h, v7.8h \n" + "ushll v4.8h, v4.8b, #0 \n" + "ushll v5.8h, v5.8b, #0 \n" + "ssubl v16.4s, v5.4h, v4.4h \n" + "ssubl2 v17.4s, v5.8h, v4.8h \n" + "ushll v7.4s, v6.4h, #0 \n" + "ushll2 v6.4s, v6.8h, #0 \n" + "mul v16.4s, v16.4s, v7.4s \n" + 
"mul v17.4s, v17.4s, v6.4s \n" + "rshrn v6.4h, v16.4s, #16 \n" + "rshrn2 v6.8h, v17.4s, #16 \n" + "add v4.8h, v4.8h, v6.8h \n" + "xtn v4.8b, v4.8h \n" - "st1 {v4.8b}, [%0], #8 \n" // store pixels - "add v1.4s, v1.4s, v0.4s \n" - "add v2.4s, v2.4s, v0.4s \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop - "b.gt 1b \n" + "st1 {v4.8b}, [%0], #8 \n" // store pixels + "add v1.4s, v1.4s, v0.4s \n" + "add v2.4s, v2.4s, v0.4s \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop + "b.gt 1b \n" : "+r"(dst_ptr), // %0 "+r"(src_ptr), // %1 "+r"(dst_width), // %2 @@ -623,74 +644,83 @@ void ScaleFilterRows_NEON(uint8_t* dst_ptr, int source_y_fraction) { int y_fraction = 256 - source_y_fraction; asm volatile( - "cmp %w4, #0 \n" - "b.eq 100f \n" - "add %2, %2, %1 \n" - "cmp %w4, #64 \n" - "b.eq 75f \n" - "cmp %w4, #128 \n" - "b.eq 50f \n" - "cmp %w4, #192 \n" - "b.eq 25f \n" + "cmp %w4, #0 \n" + "b.eq 100f \n" + "add %2, %2, %1 \n" + "cmp %w4, #64 \n" + "b.eq 75f \n" + "cmp %w4, #128 \n" + "b.eq 50f \n" + "cmp %w4, #192 \n" + "b.eq 25f \n" - "dup v5.8b, %w4 \n" - "dup v4.8b, %w5 \n" + "dup v5.8b, %w4 \n" + "dup v4.8b, %w5 \n" // General purpose row blend. "1: \n" - "ld1 {v0.16b}, [%1], #16 \n" - "ld1 {v1.16b}, [%2], #16 \n" - "subs %w3, %w3, #16 \n" - "umull v6.8h, v0.8b, v4.8b \n" - "umull2 v7.8h, v0.16b, v4.16b \n" - "umlal v6.8h, v1.8b, v5.8b \n" - "umlal2 v7.8h, v1.16b, v5.16b \n" - "rshrn v0.8b, v6.8h, #8 \n" - "rshrn2 v0.16b, v7.8h, #8 \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 1b \n" - "b 99f \n" + "ld1 {v0.16b}, [%1], #16 \n" + "ld1 {v1.16b}, [%2], #16 \n" + "subs %w3, %w3, #16 \n" + "umull v6.8h, v0.8b, v4.8b \n" + "umull2 v7.8h, v0.16b, v4.16b \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + "umlal v6.8h, v1.8b, v5.8b \n" + "umlal2 v7.8h, v1.16b, v5.16b \n" + "prfm pldl1keep, [%2, 448] \n" + "rshrn v0.8b, v6.8h, #8 \n" + "rshrn2 v0.16b, v7.8h, #8 \n" + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 1b \n" + "b 99f \n" // Blend 25 / 75. 
"25: \n" - "ld1 {v0.16b}, [%1], #16 \n" - "ld1 {v1.16b}, [%2], #16 \n" - "subs %w3, %w3, #16 \n" - "urhadd v0.16b, v0.16b, v1.16b \n" - "urhadd v0.16b, v0.16b, v1.16b \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 25b \n" - "b 99f \n" + "ld1 {v0.16b}, [%1], #16 \n" + "ld1 {v1.16b}, [%2], #16 \n" + "subs %w3, %w3, #16 \n" + "urhadd v0.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + "urhadd v0.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%2, 448] \n" + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 25b \n" + "b 99f \n" // Blend 50 / 50. "50: \n" - "ld1 {v0.16b}, [%1], #16 \n" - "ld1 {v1.16b}, [%2], #16 \n" - "subs %w3, %w3, #16 \n" - "urhadd v0.16b, v0.16b, v1.16b \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 50b \n" - "b 99f \n" + "ld1 {v0.16b}, [%1], #16 \n" + "ld1 {v1.16b}, [%2], #16 \n" + "subs %w3, %w3, #16 \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + "urhadd v0.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%2, 448] \n" + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 50b \n" + "b 99f \n" // Blend 75 / 25. "75: \n" - "ld1 {v1.16b}, [%1], #16 \n" - "ld1 {v0.16b}, [%2], #16 \n" - "subs %w3, %w3, #16 \n" - "urhadd v0.16b, v0.16b, v1.16b \n" - "urhadd v0.16b, v0.16b, v1.16b \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 75b \n" - "b 99f \n" + "ld1 {v1.16b}, [%1], #16 \n" + "ld1 {v0.16b}, [%2], #16 \n" + "subs %w3, %w3, #16 \n" + "urhadd v0.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + "urhadd v0.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%2, 448] \n" + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 75b \n" + "b 99f \n" // Blend 100 / 0 - Copy row unchanged. 
"100: \n" - "ld1 {v0.16b}, [%1], #16 \n" - "subs %w3, %w3, #16 \n" - "st1 {v0.16b}, [%0], #16 \n" - "b.gt 100b \n" + "ld1 {v0.16b}, [%1], #16 \n" + "subs %w3, %w3, #16 \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + "st1 {v0.16b}, [%0], #16 \n" + "b.gt 100b \n" "99: \n" - "st1 {v0.b}[15], [%0] \n" + "st1 {v0.b}[15], [%0] \n" : "+r"(dst_ptr), // %0 "+r"(src_ptr), // %1 "+r"(src_stride), // %2 @@ -709,11 +739,12 @@ void ScaleARGBRowDown2_NEON(const uint8_t* src_ptr, asm volatile( "1: \n" // load 16 ARGB pixels with even pixels into q0/q2, odd into q1/q3 - "ld4 {v0.4s,v1.4s,v2.4s,v3.4s}, [%0], #64 \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop - "mov v2.16b, v3.16b \n" - "st2 {v1.4s,v2.4s}, [%1], #32 \n" // store 8 odd pixels - "b.gt 1b \n" + "ld4 {v0.4s,v1.4s,v2.4s,v3.4s}, [%0], #64 \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop + "mov v2.16b, v3.16b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "st2 {v1.4s,v2.4s}, [%1], #32 \n" // store 8 odd pixels + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(dst), // %1 "+r"(dst_width) // %2 @@ -730,13 +761,14 @@ void ScaleARGBRowDown2Linear_NEON(const uint8_t* src_argb, asm volatile( "1: \n" // load 16 ARGB pixels with even pixels into q0/q2, odd into q1/q3 - "ld4 {v0.4s,v1.4s,v2.4s,v3.4s}, [%0], #64 \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop + "ld4 {v0.4s,v1.4s,v2.4s,v3.4s}, [%0], #64 \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop - "urhadd v0.16b, v0.16b, v1.16b \n" // rounding half add - "urhadd v1.16b, v2.16b, v3.16b \n" - "st2 {v0.4s,v1.4s}, [%1], #32 \n" // store 8 pixels - "b.gt 1b \n" + "urhadd v0.16b, v0.16b, v1.16b \n" // rounding half add + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "urhadd v1.16b, v2.16b, v3.16b \n" + "st2 {v0.4s,v1.4s}, [%1], #32 \n" // store 8 pixels + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(dst_width) // %2 @@ -751,25 +783,27 @@ void ScaleARGBRowDown2Box_NEON(const uint8_t* src_ptr, int dst_width) 
{ asm volatile( // change the stride to row 2 pointer - "add %1, %1, %0 \n" + "add %1, %1, %0 \n" "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 8 ARGB - "subs %w3, %w3, #8 \n" // 8 processed per loop. - "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. - "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. - "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. - "uaddlp v3.8h, v3.16b \n" // A 16 bytes -> 8 shorts. - "ld4 {v16.16b,v17.16b,v18.16b,v19.16b}, [%1], #64 \n" // load 8 - "uadalp v0.8h, v16.16b \n" // B 16 bytes -> 8 shorts. - "uadalp v1.8h, v17.16b \n" // G 16 bytes -> 8 shorts. - "uadalp v2.8h, v18.16b \n" // R 16 bytes -> 8 shorts. - "uadalp v3.8h, v19.16b \n" // A 16 bytes -> 8 shorts. - "rshrn v0.8b, v0.8h, #2 \n" // round and pack - "rshrn v1.8b, v1.8h, #2 \n" - "rshrn v2.8b, v2.8h, #2 \n" - "rshrn v3.8b, v3.8h, #2 \n" - "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" - "b.gt 1b \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ARGB + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. + "uaddlp v3.8h, v3.16b \n" // A 16 bytes -> 8 shorts. + "ld4 {v16.16b,v17.16b,v18.16b,v19.16b}, [%1], #64 \n" // load 8 + "uadalp v0.8h, v16.16b \n" // B 16 bytes -> 8 shorts. + "uadalp v1.8h, v17.16b \n" // G 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "uadalp v2.8h, v18.16b \n" // R 16 bytes -> 8 shorts. + "uadalp v3.8h, v19.16b \n" // A 16 bytes -> 8 shorts. 
+ "prfm pldl1keep, [%1, 448] \n" + "rshrn v0.8b, v0.8h, #2 \n" // round and pack + "rshrn v1.8b, v1.8h, #2 \n" + "rshrn v2.8b, v2.8h, #2 \n" + "rshrn v3.8b, v3.8h, #2 \n" + "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(src_stride), // %1 "+r"(dst), // %2 @@ -788,13 +822,14 @@ void ScaleARGBRowDownEven_NEON(const uint8_t* src_argb, (void)src_stride; asm volatile( "1: \n" - "ld1 {v0.s}[0], [%0], %3 \n" - "ld1 {v0.s}[1], [%0], %3 \n" - "ld1 {v0.s}[2], [%0], %3 \n" - "ld1 {v0.s}[3], [%0], %3 \n" - "subs %w2, %w2, #4 \n" // 4 pixels per loop. - "st1 {v0.16b}, [%1], #16 \n" - "b.gt 1b \n" + "ld1 {v0.s}[0], [%0], %3 \n" + "ld1 {v0.s}[1], [%0], %3 \n" + "ld1 {v0.s}[2], [%0], %3 \n" + "ld1 {v0.s}[3], [%0], %3 \n" + "subs %w2, %w2, #4 \n" // 4 pixels per loop. + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "st1 {v0.16b}, [%1], #16 \n" + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb), // %1 "+r"(dst_width) // %2 @@ -812,33 +847,35 @@ void ScaleARGBRowDownEvenBox_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int dst_width) { asm volatile( - "add %1, %1, %0 \n" + "add %1, %1, %0 \n" "1: \n" - "ld1 {v0.8b}, [%0], %4 \n" // Read 4 2x2 -> 2x1 - "ld1 {v1.8b}, [%1], %4 \n" - "ld1 {v2.8b}, [%0], %4 \n" - "ld1 {v3.8b}, [%1], %4 \n" - "ld1 {v4.8b}, [%0], %4 \n" - "ld1 {v5.8b}, [%1], %4 \n" - "ld1 {v6.8b}, [%0], %4 \n" - "ld1 {v7.8b}, [%1], %4 \n" - "uaddl v0.8h, v0.8b, v1.8b \n" - "uaddl v2.8h, v2.8b, v3.8b \n" - "uaddl v4.8h, v4.8b, v5.8b \n" - "uaddl v6.8h, v6.8b, v7.8b \n" - "mov v16.d[1], v0.d[1] \n" // ab_cd -> ac_bd - "mov v0.d[1], v2.d[0] \n" - "mov v2.d[0], v16.d[1] \n" - "mov v16.d[1], v4.d[1] \n" // ef_gh -> eg_fh - "mov v4.d[1], v6.d[0] \n" - "mov v6.d[0], v16.d[1] \n" - "add v0.8h, v0.8h, v2.8h \n" // (a+b)_(c+d) - "add v4.8h, v4.8h, v6.8h \n" // (e+f)_(g+h) - "rshrn v0.8b, v0.8h, #2 \n" // first 2 pixels. - "rshrn2 v0.16b, v4.8h, #2 \n" // next 2 pixels. - "subs %w3, %w3, #4 \n" // 4 pixels per loop. 
- "st1 {v0.16b}, [%2], #16 \n" - "b.gt 1b \n" + "ld1 {v0.8b}, [%0], %4 \n" // Read 4 2x2 -> 2x1 + "ld1 {v1.8b}, [%1], %4 \n" + "ld1 {v2.8b}, [%0], %4 \n" + "ld1 {v3.8b}, [%1], %4 \n" + "ld1 {v4.8b}, [%0], %4 \n" + "ld1 {v5.8b}, [%1], %4 \n" + "ld1 {v6.8b}, [%0], %4 \n" + "ld1 {v7.8b}, [%1], %4 \n" + "uaddl v0.8h, v0.8b, v1.8b \n" + "uaddl v2.8h, v2.8b, v3.8b \n" + "uaddl v4.8h, v4.8b, v5.8b \n" + "uaddl v6.8h, v6.8b, v7.8b \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "mov v16.d[1], v0.d[1] \n" // ab_cd -> ac_bd + "mov v0.d[1], v2.d[0] \n" + "mov v2.d[0], v16.d[1] \n" + "mov v16.d[1], v4.d[1] \n" // ef_gh -> eg_fh + "mov v4.d[1], v6.d[0] \n" + "mov v6.d[0], v16.d[1] \n" + "prfm pldl1keep, [%1, 448] \n" + "add v0.8h, v0.8h, v2.8h \n" // (a+b)_(c+d) + "add v4.8h, v4.8h, v6.8h \n" // (e+f)_(g+h) + "rshrn v0.8b, v0.8h, #2 \n" // first 2 pixels. + "rshrn2 v0.16b, v4.8h, #2 \n" // next 2 pixels. + "subs %w3, %w3, #4 \n" // 4 pixels per loop. + "st1 {v0.16b}, [%2], #16 \n" + "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(src_stride), // %1 "+r"(dst_argb), // %2 @@ -875,10 +912,11 @@ void ScaleARGBCols_NEON(uint8_t* dst_argb, LOAD1_DATA32_LANE(v1, 1) LOAD1_DATA32_LANE(v1, 2) LOAD1_DATA32_LANE(v1, 3) + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead // clang-format on - "st1 {v0.4s, v1.4s}, [%0], #32 \n" // store pixels - "subs %w2, %w2, #8 \n" // 8 processed per loop - "b.gt 1b \n" + "st1 {v0.4s, v1.4s}, [%0], #32 \n" // store pixels + "subs %w2, %w2, #8 \n" // 8 processed per loop + "b.gt 1b \n" : "+r"(dst_argb), // %0 "+r"(src_argb), // %1 "+r"(dst_width), // %2 @@ -911,16 +949,16 @@ void ScaleARGBFilterCols_NEON(uint8_t* dst_argb, int64_t x64 = (int64_t)x; // NOLINT int64_t dx64 = (int64_t)dx; // NOLINT asm volatile ( - "dup v0.4s, %w3 \n" // x - "dup v1.4s, %w4 \n" // dx - "ld1 {v2.4s}, [%5] \n" // 0 1 2 3 - "shl v6.4s, v1.4s, #2 \n" // 4 * dx - "mul v1.4s, v1.4s, v2.4s \n" - "movi v3.16b, #0x7f \n" // 0x7F - "movi v4.8h, #0x7f \n" // 0x7F + "dup 
v0.4s, %w3 \n" // x + "dup v1.4s, %w4 \n" // dx + "ld1 {v2.4s}, [%5] \n" // 0 1 2 3 + "shl v6.4s, v1.4s, #2 \n" // 4 * dx + "mul v1.4s, v1.4s, v2.4s \n" + "movi v3.16b, #0x7f \n" // 0x7F + "movi v4.8h, #0x7f \n" // 0x7F // x , x + 1 * dx, x + 2 * dx, x + 3 * dx - "add v5.4s, v1.4s, v0.4s \n" - "1: \n" + "add v5.4s, v1.4s, v0.4s \n" + "1: \n" // d0, d1: a // d2, d3: b LOAD2_DATA32_LANE(v0, v1, 0) @@ -941,15 +979,15 @@ void ScaleARGBFilterCols_NEON(uint8_t* dst_argb, "umull2 v17.8h, v0.16b, v7.16b \n" "umull v18.8h, v1.8b, v2.8b \n" "umull2 v19.8h, v1.16b, v2.16b \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead "add v16.8h, v16.8h, v18.8h \n" "add v17.8h, v17.8h, v19.8h \n" "shrn v0.8b, v16.8h, #7 \n" "shrn2 v0.16b, v17.8h, #7 \n" - "st1 {v0.4s}, [%0], #16 \n" // store pixels "add v5.4s, v5.4s, v6.4s \n" "subs %w2, %w2, #4 \n" // 4 processed per loop - "b.gt 1b \n" + "b.gt 1b \n" : "+r"(dst_argb), // %0 "+r"(src_argb), // %1 "+r"(dst_width), // %2 @@ -972,19 +1010,21 @@ void ScaleRowDown2Box_16_NEON(const uint16_t* src_ptr, int dst_width) { asm volatile( // change the stride to row 2 pointer - "add %1, %0, %1, lsl #1 \n" // ptr + stide * 2 + "add %1, %0, %1, lsl #1 \n" // ptr + stide * 2 "1: \n" - "ld1 {v0.8h, v1.8h}, [%0], #32 \n" // load row 1 and post inc - "ld1 {v2.8h, v3.8h}, [%1], #32 \n" // load row 2 and post inc - "subs %w3, %w3, #8 \n" // 8 processed per loop - "uaddlp v0.4s, v0.8h \n" // row 1 add adjacent - "uaddlp v1.4s, v1.8h \n" - "uadalp v0.4s, v2.8h \n" // +row 2 add adjacent - "uadalp v1.4s, v3.8h \n" - "rshrn v0.4h, v0.4s, #2 \n" // round and pack - "rshrn2 v0.8h, v1.4s, #2 \n" - "st1 {v0.8h}, [%2], #16 \n" - "b.gt 1b \n" + "ld1 {v0.8h, v1.8h}, [%0], #32 \n" // load row 1 and post inc + "ld1 {v2.8h, v3.8h}, [%1], #32 \n" // load row 2 and post inc + "subs %w3, %w3, #8 \n" // 8 processed per loop + "uaddlp v0.4s, v0.8h \n" // row 1 add adjacent + "uaddlp v1.4s, v1.8h \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + 
"uadalp v0.4s, v2.8h \n" // +row 2 add adjacent + "uadalp v1.4s, v3.8h \n" + "prfm pldl1keep, [%1, 448] \n" + "rshrn v0.4h, v0.4s, #2 \n" // round and pack + "rshrn2 v0.8h, v1.4s, #2 \n" + "st1 {v0.8h}, [%2], #16 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(src_stride), // %1 "+r"(dst), // %2 @@ -1001,38 +1041,40 @@ void ScaleRowUp2_16_NEON(const uint16_t* src_ptr, uint16_t* dst, int dst_width) { asm volatile( - "add %1, %0, %1, lsl #1 \n" // ptr + stide * 2 - "movi v0.8h, #9 \n" // constants - "movi v1.4s, #3 \n" + "add %1, %0, %1, lsl #1 \n" // ptr + stide * 2 + "movi v0.8h, #9 \n" // constants + "movi v1.4s, #3 \n" "1: \n" - "ld1 {v3.8h}, [%0], %4 \n" // TL read first 8 - "ld1 {v4.8h}, [%0], %5 \n" // TR read 8 offset by 1 - "ld1 {v5.8h}, [%1], %4 \n" // BL read 8 from next row - "ld1 {v6.8h}, [%1], %5 \n" // BR offset by 1 - "subs %w3, %w3, #16 \n" // 16 dst pixels per loop - "umull v16.4s, v3.4h, v0.4h \n" - "umull2 v7.4s, v3.8h, v0.8h \n" - "umull v18.4s, v4.4h, v0.4h \n" - "umull2 v17.4s, v4.8h, v0.8h \n" - "uaddw v16.4s, v16.4s, v6.4h \n" - "uaddl2 v19.4s, v6.8h, v3.8h \n" - "uaddl v3.4s, v6.4h, v3.4h \n" - "uaddw2 v6.4s, v7.4s, v6.8h \n" - "uaddl2 v7.4s, v5.8h, v4.8h \n" - "uaddl v4.4s, v5.4h, v4.4h \n" - "uaddw v18.4s, v18.4s, v5.4h \n" - "mla v16.4s, v4.4s, v1.4s \n" - "mla v18.4s, v3.4s, v1.4s \n" - "mla v6.4s, v7.4s, v1.4s \n" - "uaddw2 v4.4s, v17.4s, v5.8h \n" - "uqrshrn v16.4h, v16.4s, #4 \n" - "mla v4.4s, v19.4s, v1.4s \n" - "uqrshrn2 v16.8h, v6.4s, #4 \n" - "uqrshrn v17.4h, v18.4s, #4 \n" - "uqrshrn2 v17.8h, v4.4s, #4 \n" - "st2 {v16.8h-v17.8h}, [%2], #32 \n" - "b.gt 1b \n" + "ld1 {v3.8h}, [%0], %4 \n" // TL read first 8 + "ld1 {v4.8h}, [%0], %5 \n" // TR read 8 offset by 1 + "ld1 {v5.8h}, [%1], %4 \n" // BL read 8 from next row + "ld1 {v6.8h}, [%1], %5 \n" // BR offset by 1 + "subs %w3, %w3, #16 \n" // 16 dst pixels per loop + "umull v16.4s, v3.4h, v0.4h \n" + "umull2 v7.4s, v3.8h, v0.8h \n" + "umull v18.4s, v4.4h, v0.4h \n" + "umull2 v17.4s, 
v4.8h, v0.8h \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "uaddw v16.4s, v16.4s, v6.4h \n" + "uaddl2 v19.4s, v6.8h, v3.8h \n" + "uaddl v3.4s, v6.4h, v3.4h \n" + "uaddw2 v6.4s, v7.4s, v6.8h \n" + "uaddl2 v7.4s, v5.8h, v4.8h \n" + "uaddl v4.4s, v5.4h, v4.4h \n" + "uaddw v18.4s, v18.4s, v5.4h \n" + "prfm pldl1keep, [%1, 448] \n" + "mla v16.4s, v4.4s, v1.4s \n" + "mla v18.4s, v3.4s, v1.4s \n" + "mla v6.4s, v7.4s, v1.4s \n" + "uaddw2 v4.4s, v17.4s, v5.8h \n" + "uqrshrn v16.4h, v16.4s, #4 \n" + "mla v4.4s, v19.4s, v1.4s \n" + "uqrshrn2 v16.8h, v6.4s, #4 \n" + "uqrshrn v17.4h, v18.4s, #4 \n" + "uqrshrn2 v17.8h, v4.4s, #4 \n" + "st2 {v16.8h-v17.8h}, [%2], #32 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(src_stride), // %1 "+r"(dst), // %2 @@ -1044,6 +1086,64 @@ void ScaleRowUp2_16_NEON(const uint16_t* src_ptr, ); } +void ScaleUVRowDown2Box_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + asm volatile( + // change the stride to row 2 pointer + "add %1, %1, %0 \n" + "1: \n" + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 UV + "subs %w3, %w3, #8 \n" // 8 processed per loop. + "uaddlp v0.8h, v0.16b \n" // U 16 bytes -> 8 shorts. + "uaddlp v1.8h, v1.16b \n" // V 16 bytes -> 8 shorts. + "ld2 {v16.16b,v17.16b}, [%1], #32 \n" // load 16 + "uadalp v0.8h, v16.16b \n" // U 16 bytes -> 8 shorts. + "uadalp v1.8h, v17.16b \n" // V 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "rshrn v0.8b, v0.8h, #2 \n" // round and pack + "prfm pldl1keep, [%1, 448] \n" + "rshrn v1.8b, v1.8h, #2 \n" + "st2 {v0.8b,v1.8b}, [%2], #16 \n" + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_stride), // %1 + "+r"(dst), // %2 + "+r"(dst_width) // %3 + : + : "memory", "cc", "v0", "v1", "v16", "v17"); +} + +// Reads 4 pixels at a time. 
+void ScaleUVRowDownEven_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + int src_stepx, // pixel step + uint8_t* dst_ptr, + int dst_width) { + const uint8_t* src1_ptr = src_ptr + src_stepx * 2; + const uint8_t* src2_ptr = src_ptr + src_stepx * 4; + const uint8_t* src3_ptr = src_ptr + src_stepx * 6; + (void)src_stride; + asm volatile( + "1: \n" + "ld1 {v0.h}[0], [%0], %6 \n" + "ld1 {v1.h}[0], [%1], %6 \n" + "ld1 {v2.h}[0], [%2], %6 \n" + "ld1 {v3.h}[0], [%3], %6 \n" + "subs %w5, %w5, #4 \n" // 4 pixels per loop. + "st4 {v0.h, v1.h, v2.h, v3.h}[0], [%4], #8 \n" + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src1_ptr), // %1 + "+r"(src2_ptr), // %2 + "+r"(src3_ptr), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_width) // %5 + : "r"((int64_t)(src_stepx * 8)) // %6 + : "memory", "cc", "v0", "v1", "v2", "v3"); +} + #endif // !defined(LIBYUV_DISABLE_NEON) && defined(__aarch64__) #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_uv.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_uv.cc new file mode 100644 index 000000000..b0469f09b --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_uv.cc @@ -0,0 +1,891 @@ +/* + * Copyright 2020 The LibYuv Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "libyuv/scale.h" + +#include +#include + +#include "libyuv/cpu_id.h" +#include "libyuv/planar_functions.h" // For CopyUV +#include "libyuv/row.h" +#include "libyuv/scale_row.h" + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +// Macros to enable specialized scalers + +#ifndef HAS_SCALEUVDOWN2 +#define HAS_SCALEUVDOWN2 1 +#endif +#ifndef HAS_SCALEUVDOWN4BOX +#define HAS_SCALEUVDOWN4BOX 1 +#endif +#ifndef HAS_SCALEUVDOWNEVEN +#define HAS_SCALEUVDOWNEVEN 1 +#endif +#ifndef HAS_SCALEUVBILINEARDOWN +#define HAS_SCALEUVBILINEARDOWN 1 +#endif +#ifndef HAS_SCALEUVBILINEARUP +#define HAS_SCALEUVBILINEARUP 1 +#endif +#ifndef HAS_UVCOPY +#define HAS_UVCOPY 1 +#endif +#ifndef HAS_SCALEPLANEVERTICAL +#define HAS_SCALEPLANEVERTICAL 1 +#endif + +static __inline int Abs(int v) { + return v >= 0 ? v : -v; +} + +// ScaleUV, 1/2 +// This is an optimized version for scaling down a UV to 1/2 of +// its original size. +#if HAS_SCALEUVDOWN2 +static void ScaleUVDown2(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_uv, + uint8_t* dst_uv, + int x, + int dx, + int y, + int dy, + enum FilterMode filtering) { + int j; + int row_stride = src_stride * (dy >> 16); + void (*ScaleUVRowDown2)(const uint8_t* src_uv, ptrdiff_t src_stride, + uint8_t* dst_uv, int dst_width) = + filtering == kFilterNone + ? ScaleUVRowDown2_C + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_C + : ScaleUVRowDown2Box_C); + (void)src_width; + (void)src_height; + (void)dx; + assert(dx == 65536 * 2); // Test scale factor of 2. + assert((dy & 0x1ffff) == 0); // Test vertical scale is multiple of 2. + // Advance to odd row, even column. 
+ if (filtering == kFilterBilinear) { + src_uv += (y >> 16) * src_stride + (x >> 16) * 2; + } else { + src_uv += (y >> 16) * src_stride + ((x >> 16) - 1) * 2; + } + +#if defined(HAS_SCALEUVROWDOWN2BOX_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3) && filtering) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_Any_SSSE3; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_SSSE3; + } + } +#endif +#if defined(HAS_SCALEUVROWDOWN2BOX_AVX2) + if (TestCpuFlag(kCpuHasAVX2) && filtering) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_Any_AVX2; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_AVX2; + } + } +#endif +#if defined(HAS_SCALEUVROWDOWN2BOX_NEON) + if (TestCpuFlag(kCpuHasNEON) && filtering) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_Any_NEON; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_NEON; + } + } +#endif + +// This code is not enabled. Only box filter is available at this time. +#if defined(HAS_SCALEUVROWDOWN2_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_Any_SSSE3 + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_Any_SSSE3 + : ScaleUVRowDown2Box_Any_SSSE3); + if (IS_ALIGNED(dst_width, 2)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_SSSE3 + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_SSSE3 + : ScaleUVRowDown2Box_SSSE3); + } + } +#endif +// This code is not enabled. Only box filter is available at this time. +#if defined(HAS_SCALEUVROWDOWN2_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_Any_NEON + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_Any_NEON + : ScaleUVRowDown2Box_Any_NEON); + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_NEON + : (filtering == kFilterLinear ? 
ScaleUVRowDown2Linear_NEON + : ScaleUVRowDown2Box_NEON); + } + } +#endif +#if defined(HAS_SCALEUVROWDOWN2_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_Any_MMI + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_Any_MMI + : ScaleUVRowDown2Box_Any_MMI); + if (IS_ALIGNED(dst_width, 2)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_MMI + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_MMI + : ScaleUVRowDown2Box_MMI); + } + } +#endif +#if defined(HAS_SCALEUVROWDOWN2_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_Any_MSA + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_Any_MSA + : ScaleUVRowDown2Box_Any_MSA); + if (IS_ALIGNED(dst_width, 2)) { + ScaleUVRowDown2 = + filtering == kFilterNone + ? ScaleUVRowDown2_MSA + : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_MSA + : ScaleUVRowDown2Box_MSA); + } + } +#endif + + if (filtering == kFilterLinear) { + src_stride = 0; + } + for (j = 0; j < dst_height; ++j) { + ScaleUVRowDown2(src_uv, src_stride, dst_uv, dst_width); + src_uv += row_stride; + dst_uv += dst_stride; + } +} +#endif // HAS_SCALEUVDOWN2 + +// ScaleUV, 1/4 +// This is an optimized version for scaling down a UV to 1/4 of +// its original size. +#if HAS_SCALEUVDOWN4BOX +static void ScaleUVDown4Box(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_uv, + uint8_t* dst_uv, + int x, + int dx, + int y, + int dy) { + int j; + // Allocate 2 rows of UV. + const int kRowSize = (dst_width * 2 * 2 + 15) & ~15; + align_buffer_64(row, kRowSize * 2); + int row_stride = src_stride * (dy >> 16); + void (*ScaleUVRowDown2)(const uint8_t* src_uv, ptrdiff_t src_stride, + uint8_t* dst_uv, int dst_width) = + ScaleUVRowDown2Box_C; + // Advance to odd row, even column. 
+ src_uv += (y >> 16) * src_stride + (x >> 16) * 2; + (void)src_width; + (void)src_height; + (void)dx; + assert(dx == 65536 * 4); // Test scale factor of 4. + assert((dy & 0x3ffff) == 0); // Test vertical scale is multiple of 4. + +#if defined(HAS_SCALEUVROWDOWN2BOX_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_Any_SSSE3; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_SSSE3; + } + } +#endif +#if defined(HAS_SCALEUVROWDOWN2BOX_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_Any_AVX2; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_AVX2; + } + } +#endif +#if defined(HAS_SCALEUVROWDOWN2BOX_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_Any_NEON; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVRowDown2 = ScaleUVRowDown2Box_NEON; + } + } +#endif + + for (j = 0; j < dst_height; ++j) { + ScaleUVRowDown2(src_uv, src_stride, row, dst_width * 2); + ScaleUVRowDown2(src_uv + src_stride * 2, src_stride, row + kRowSize, + dst_width * 2); + ScaleUVRowDown2(row, kRowSize, dst_uv, dst_width); + src_uv += row_stride; + dst_uv += dst_stride; + } + free_aligned_buffer_64(row); +} +#endif // HAS_SCALEUVDOWN4BOX + +// ScaleUV Even +// This is an optimized version for scaling down a UV to even +// multiple of its original size. +#if HAS_SCALEUVDOWNEVEN +static void ScaleUVDownEven(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_uv, + uint8_t* dst_uv, + int x, + int dx, + int y, + int dy, + enum FilterMode filtering) { + int j; + int col_step = dx >> 16; + int row_stride = (dy >> 16) * src_stride; + void (*ScaleUVRowDownEven)(const uint8_t* src_uv, ptrdiff_t src_stride, + int src_step, uint8_t* dst_uv, int dst_width) = + filtering ? 
ScaleUVRowDownEvenBox_C : ScaleUVRowDownEven_C; + (void)src_width; + (void)src_height; + assert(IS_ALIGNED(src_width, 2)); + assert(IS_ALIGNED(src_height, 2)); + src_uv += (y >> 16) * src_stride + (x >> 16) * 2; +#if defined(HAS_SCALEUVROWDOWNEVEN_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleUVRowDownEven = filtering ? ScaleUVRowDownEvenBox_Any_SSSE3 + : ScaleUVRowDownEven_Any_SSSE3; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVRowDownEven = + filtering ? ScaleUVRowDownEvenBox_SSE2 : ScaleUVRowDownEven_SSSE3; + } + } +#endif +#if defined(HAS_SCALEUVROWDOWNEVEN_NEON) + if (TestCpuFlag(kCpuHasNEON) && !filtering) { + ScaleUVRowDownEven = ScaleUVRowDownEven_Any_NEON; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVRowDownEven = ScaleUVRowDownEven_NEON; + } + } +#endif// TODO(fbarchard): Enable Box filter +#if defined(HAS_SCALEUVROWDOWNEVENBOX_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleUVRowDownEven = filtering ? ScaleUVRowDownEvenBox_Any_NEON + : ScaleUVRowDownEven_Any_NEON; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVRowDownEven = + filtering ? ScaleUVRowDownEvenBox_NEON : ScaleUVRowDownEven_NEON; + } + } +#endif +#if defined(HAS_SCALEUVROWDOWNEVEN_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + ScaleUVRowDownEven = + filtering ? ScaleUVRowDownEvenBox_Any_MMI : ScaleUVRowDownEven_Any_MMI; + if (IS_ALIGNED(dst_width, 2)) { + ScaleUVRowDownEven = + filtering ? ScaleUVRowDownEvenBox_MMI : ScaleUVRowDownEven_MMI; + } + } +#endif +#if defined(HAS_SCALEUVROWDOWNEVEN_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleUVRowDownEven = + filtering ? ScaleUVRowDownEvenBox_Any_MSA : ScaleUVRowDownEven_Any_MSA; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVRowDownEven = + filtering ? 
ScaleUVRowDownEvenBox_MSA : ScaleUVRowDownEven_MSA; + } + } +#endif + + if (filtering == kFilterLinear) { + src_stride = 0; + } + for (j = 0; j < dst_height; ++j) { + ScaleUVRowDownEven(src_uv, src_stride, col_step, dst_uv, dst_width); + src_uv += row_stride; + dst_uv += dst_stride; + } +} +#endif + +// Scale UV down with bilinear interpolation. +#if HAS_SCALEUVBILINEARDOWN +static void ScaleUVBilinearDown(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_uv, + uint8_t* dst_uv, + int x, + int dx, + int y, + int dy, + enum FilterMode filtering) { + int j; + void (*InterpolateRow)(uint8_t * dst_uv, const uint8_t* src_uv, + ptrdiff_t src_stride, int dst_width, + int source_y_fraction) = InterpolateRow_C; + void (*ScaleUVFilterCols)(uint8_t * dst_uv, const uint8_t* src_uv, + int dst_width, int x, int dx) = + (src_width >= 32768) ? ScaleUVFilterCols64_C : ScaleUVFilterCols_C; + int64_t xlast = x + (int64_t)(dst_width - 1) * dx; + int64_t xl = (dx >= 0) ? x : xlast; + int64_t xr = (dx >= 0) ? xlast : x; + int clip_src_width; + xl = (xl >> 16) & ~3; // Left edge aligned. + xr = (xr >> 16) + 1; // Right most pixel used. Bilinear uses 2 pixels. + xr = (xr + 1 + 3) & ~3; // 1 beyond 4 pixel aligned right most pixel. + if (xr > src_width) { + xr = src_width; + } + clip_src_width = (int)(xr - xl) * 2; // Width aligned to 2. 
+ src_uv += xl * 2; + x -= (int)(xl << 16); +#if defined(HAS_INTERPOLATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + InterpolateRow = InterpolateRow_Any_SSSE3; + if (IS_ALIGNED(clip_src_width, 16)) { + InterpolateRow = InterpolateRow_SSSE3; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + InterpolateRow = InterpolateRow_Any_AVX2; + if (IS_ALIGNED(clip_src_width, 32)) { + InterpolateRow = InterpolateRow_AVX2; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + InterpolateRow = InterpolateRow_Any_NEON; + if (IS_ALIGNED(clip_src_width, 16)) { + InterpolateRow = InterpolateRow_NEON; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(clip_src_width, 32)) { + InterpolateRow = InterpolateRow_MSA; + } + } +#endif +#if defined(HAS_SCALEUVFILTERCOLS_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { + ScaleUVFilterCols = ScaleUVFilterCols_SSSE3; + } +#endif +#if defined(HAS_SCALEUVFILTERCOLS_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleUVFilterCols = ScaleUVFilterCols_Any_NEON; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVFilterCols = ScaleUVFilterCols_NEON; + } + } +#endif +#if defined(HAS_SCALEUVFILTERCOLS_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleUVFilterCols = ScaleUVFilterCols_Any_MSA; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVFilterCols = ScaleUVFilterCols_MSA; + } + } +#endif + // TODO(fbarchard): Consider not allocating row buffer for kFilterLinear. + // Allocate a row of UV. 
+ { + align_buffer_64(row, clip_src_width * 2); + + const int max_y = (src_height - 1) << 16; + if (y > max_y) { + y = max_y; + } + for (j = 0; j < dst_height; ++j) { + int yi = y >> 16; + const uint8_t* src = src_uv + yi * src_stride; + if (filtering == kFilterLinear) { + ScaleUVFilterCols(dst_uv, src, dst_width, x, dx); + } else { + int yf = (y >> 8) & 255; + InterpolateRow(row, src, src_stride, clip_src_width, yf); + ScaleUVFilterCols(dst_uv, row, dst_width, x, dx); + } + dst_uv += dst_stride; + y += dy; + if (y > max_y) { + y = max_y; + } + } + free_aligned_buffer_64(row); + } +} +#endif + +// Scale UV up with bilinear interpolation. +#if HAS_SCALEUVBILINEARUP +static void ScaleUVBilinearUp(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_uv, + uint8_t* dst_uv, + int x, + int dx, + int y, + int dy, + enum FilterMode filtering) { + int j; + void (*InterpolateRow)(uint8_t * dst_uv, const uint8_t* src_uv, + ptrdiff_t src_stride, int dst_width, + int source_y_fraction) = InterpolateRow_C; + void (*ScaleUVFilterCols)(uint8_t * dst_uv, const uint8_t* src_uv, + int dst_width, int x, int dx) = + filtering ? 
ScaleUVFilterCols_C : ScaleUVCols_C; + const int max_y = (src_height - 1) << 16; +#if defined(HAS_INTERPOLATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + InterpolateRow = InterpolateRow_Any_SSSE3; + if (IS_ALIGNED(dst_width, 4)) { + InterpolateRow = InterpolateRow_SSSE3; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + InterpolateRow = InterpolateRow_Any_AVX2; + if (IS_ALIGNED(dst_width, 8)) { + InterpolateRow = InterpolateRow_AVX2; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + InterpolateRow = InterpolateRow_Any_NEON; + if (IS_ALIGNED(dst_width, 4)) { + InterpolateRow = InterpolateRow_NEON; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + InterpolateRow = InterpolateRow_Any_MMI; + if (IS_ALIGNED(dst_width, 2)) { + InterpolateRow = InterpolateRow_MMI; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(dst_width, 8)) { + InterpolateRow = InterpolateRow_MSA; + } + } +#endif + if (src_width >= 32768) { + ScaleUVFilterCols = filtering ? 
ScaleUVFilterCols64_C : ScaleUVCols64_C; + } +#if defined(HAS_SCALEUVFILTERCOLS_SSSE3) + if (filtering && TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { + ScaleUVFilterCols = ScaleUVFilterCols_SSSE3; + } +#endif +#if defined(HAS_SCALEUVFILTERCOLS_NEON) + if (filtering && TestCpuFlag(kCpuHasNEON)) { + ScaleUVFilterCols = ScaleUVFilterCols_Any_NEON; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVFilterCols = ScaleUVFilterCols_NEON; + } + } +#endif +#if defined(HAS_SCALEUVFILTERCOLS_MSA) + if (filtering && TestCpuFlag(kCpuHasMSA)) { + ScaleUVFilterCols = ScaleUVFilterCols_Any_MSA; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVFilterCols = ScaleUVFilterCols_MSA; + } + } +#endif +#if defined(HAS_SCALEUVCOLS_SSSE3) + if (!filtering && TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { + ScaleUVFilterCols = ScaleUVCols_SSSE3; + } +#endif +#if defined(HAS_SCALEUVCOLS_NEON) + if (!filtering && TestCpuFlag(kCpuHasNEON)) { + ScaleUVFilterCols = ScaleUVCols_Any_NEON; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVFilterCols = ScaleUVCols_NEON; + } + } +#endif +#if defined(HAS_SCALEUVCOLS_MMI) + if (!filtering && TestCpuFlag(kCpuHasMMI)) { + ScaleUVFilterCols = ScaleUVCols_Any_MMI; + if (IS_ALIGNED(dst_width, 1)) { + ScaleUVFilterCols = ScaleUVCols_MMI; + } + } +#endif +#if defined(HAS_SCALEUVCOLS_MSA) + if (!filtering && TestCpuFlag(kCpuHasMSA)) { + ScaleUVFilterCols = ScaleUVCols_Any_MSA; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVFilterCols = ScaleUVCols_MSA; + } + } +#endif + if (!filtering && src_width * 2 == dst_width && x < 0x8000) { + ScaleUVFilterCols = ScaleUVColsUp2_C; +#if defined(HAS_SCALEUVCOLSUP2_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3) && IS_ALIGNED(dst_width, 8)) { + ScaleUVFilterCols = ScaleUVColsUp2_SSSE3; + } +#endif +#if defined(HAS_SCALEUVCOLSUP2_MMI) + if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { + ScaleUVFilterCols = ScaleUVColsUp2_MMI; + } +#endif + } + + if (y > max_y) { + y = max_y; + } + + { + int yi = y >> 16; + const uint8_t* src = src_uv + yi * 
src_stride; + + // Allocate 2 rows of UV. + const int kRowSize = (dst_width * 2 + 15) & ~15; + align_buffer_64(row, kRowSize * 2); + + uint8_t* rowptr = row; + int rowstride = kRowSize; + int lasty = yi; + + ScaleUVFilterCols(rowptr, src, dst_width, x, dx); + if (src_height > 1) { + src += src_stride; + } + ScaleUVFilterCols(rowptr + rowstride, src, dst_width, x, dx); + src += src_stride; + + for (j = 0; j < dst_height; ++j) { + yi = y >> 16; + if (yi != lasty) { + if (y > max_y) { + y = max_y; + yi = y >> 16; + src = src_uv + yi * src_stride; + } + if (yi != lasty) { + ScaleUVFilterCols(rowptr, src, dst_width, x, dx); + rowptr += rowstride; + rowstride = -rowstride; + lasty = yi; + src += src_stride; + } + } + if (filtering == kFilterLinear) { + InterpolateRow(dst_uv, rowptr, 0, dst_width * 2, 0); + } else { + int yf = (y >> 8) & 255; + InterpolateRow(dst_uv, rowptr, rowstride, dst_width * 2, yf); + } + dst_uv += dst_stride; + y += dy; + } + free_aligned_buffer_64(row); + } +} +#endif // HAS_SCALEUVBILINEARUP + +// Scale UV to/from any dimensions, without interpolation. +// Fixed point math is used for performance: The upper 16 bits +// of x and dx is the integer part of the source position and +// the lower 16 bits are the fixed decimal part. + +static void ScaleUVSimple(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_uv, + uint8_t* dst_uv, + int x, + int dx, + int y, + int dy) { + int j; + void (*ScaleUVCols)(uint8_t * dst_uv, const uint8_t* src_uv, int dst_width, + int x, int dx) = + (src_width >= 32768) ? 
ScaleUVCols64_C : ScaleUVCols_C; + (void)src_height; +#if defined(HAS_SCALEUVCOLS_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { + ScaleUVCols = ScaleUVCols_SSSE3; + } +#endif +#if defined(HAS_SCALEUVCOLS_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleUVCols = ScaleUVCols_Any_NEON; + if (IS_ALIGNED(dst_width, 8)) { + ScaleUVCols = ScaleUVCols_NEON; + } + } +#endif +#if defined(HAS_SCALEUVCOLS_MMI) + if (TestCpuFlag(kCpuHasMMI)) { + ScaleUVCols = ScaleUVCols_Any_MMI; + if (IS_ALIGNED(dst_width, 1)) { + ScaleUVCols = ScaleUVCols_MMI; + } + } +#endif +#if defined(HAS_SCALEUVCOLS_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ScaleUVCols = ScaleUVCols_Any_MSA; + if (IS_ALIGNED(dst_width, 4)) { + ScaleUVCols = ScaleUVCols_MSA; + } + } +#endif + if (src_width * 2 == dst_width && x < 0x8000) { + ScaleUVCols = ScaleUVColsUp2_C; +#if defined(HAS_SCALEUVCOLSUP2_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3) && IS_ALIGNED(dst_width, 8)) { + ScaleUVCols = ScaleUVColsUp2_SSSE3; + } +#endif +#if defined(HAS_SCALEUVCOLSUP2_MMI) + if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { + ScaleUVCols = ScaleUVColsUp2_MMI; + } +#endif + } + + for (j = 0; j < dst_height; ++j) { + ScaleUVCols(dst_uv, src_uv + (y >> 16) * src_stride, dst_width, x, dx); + dst_uv += dst_stride; + y += dy; + } +} + +// Copy UV with optional flipping +#if HAS_UVCOPY +static int UVCopy(const uint8_t* src_UV, + int src_stride_UV, + uint8_t* dst_UV, + int dst_stride_UV, + int width, + int height) { + if (!src_UV || !dst_UV || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_UV = src_UV + (height - 1) * src_stride_UV; + src_stride_UV = -src_stride_UV; + } + + CopyPlane(src_UV, src_stride_UV, dst_UV, dst_stride_UV, width * 2, height); + return 0; +} +#endif // HAS_UVCOPY + +// Scale a UV plane (from NV12) +// This function in turn calls a scaling function +// suitable for handling the desired resolutions. 
+static void ScaleUV(const uint8_t* src, + int src_stride, + int src_width, + int src_height, + uint8_t* dst, + int dst_stride, + int dst_width, + int dst_height, + int clip_x, + int clip_y, + int clip_width, + int clip_height, + enum FilterMode filtering) { + // Initial source x/y coordinate and step values as 16.16 fixed point. + int x = 0; + int y = 0; + int dx = 0; + int dy = 0; + // UV does not support box filter yet, but allow the user to pass it. + // Simplify filtering when possible. + filtering = ScaleFilterReduce(src_width, src_height, dst_width, dst_height, + filtering); + + // Negative src_height means invert the image. + if (src_height < 0) { + src_height = -src_height; + src = src + (src_height - 1) * src_stride; + src_stride = -src_stride; + } + ScaleSlope(src_width, src_height, dst_width, dst_height, filtering, &x, &y, + &dx, &dy); + src_width = Abs(src_width); + if (clip_x) { + int64_t clipf = (int64_t)(clip_x)*dx; + x += (clipf & 0xffff); + src += (clipf >> 16) * 2; + dst += clip_x * 2; + } + if (clip_y) { + int64_t clipf = (int64_t)(clip_y)*dy; + y += (clipf & 0xffff); + src += (clipf >> 16) * src_stride; + dst += clip_y * dst_stride; + } + + // Special case for integer step values. + if (((dx | dy) & 0xffff) == 0) { + if (!dx || !dy) { // 1 pixel wide and/or tall. + filtering = kFilterNone; + } else { + // Optimized even scale down. ie 2, 4, 6, 8, 10x. + if (!(dx & 0x10000) && !(dy & 0x10000)) { +#if HAS_SCALEUVDOWN2 + if (dx == 0x20000) { + // Optimized 1/2 downsample. + ScaleUVDown2(src_width, src_height, clip_width, clip_height, + src_stride, dst_stride, src, dst, x, dx, y, dy, + filtering); + return; + } +#endif +#if HAS_SCALEUVDOWN4BOX + if (dx == 0x40000 && filtering == kFilterBox) { + // Optimized 1/4 box downsample. 
+ ScaleUVDown4Box(src_width, src_height, clip_width, clip_height, + src_stride, dst_stride, src, dst, x, dx, y, dy); + return; + } +#endif +#if HAS_SCALEUVDOWNEVEN + ScaleUVDownEven(src_width, src_height, clip_width, clip_height, + src_stride, dst_stride, src, dst, x, dx, y, dy, + filtering); + return; +#endif + } + // Optimized odd scale down. ie 3, 5, 7, 9x. + if ((dx & 0x10000) && (dy & 0x10000)) { + filtering = kFilterNone; +#ifdef HAS_UVCOPY + if (dx == 0x10000 && dy == 0x10000) { + // Straight copy. + UVCopy(src + (y >> 16) * src_stride + (x >> 16) * 2, src_stride, dst, + dst_stride, clip_width, clip_height); + return; + } +#endif + } + } + } + // HAS_SCALEPLANEVERTICAL + if (dx == 0x10000 && (x & 0xffff) == 0) { + // Arbitrary scale vertically, but unscaled horizontally. + ScalePlaneVertical(src_height, clip_width, clip_height, src_stride, + dst_stride, src, dst, x, y, dy, 4, filtering); + return; + } + +#if HAS_SCALEUVBILINEARUP + if (filtering && dy < 65536) { + ScaleUVBilinearUp(src_width, src_height, clip_width, clip_height, + src_stride, dst_stride, src, dst, x, dx, y, dy, + filtering); + return; + } +#endif +#if HAS_SCALEUVBILINEARDOWN + if (filtering) { + ScaleUVBilinearDown(src_width, src_height, clip_width, clip_height, + src_stride, dst_stride, src, dst, x, dx, y, dy, + filtering); + return; + } +#endif + ScaleUVSimple(src_width, src_height, clip_width, clip_height, src_stride, + dst_stride, src, dst, x, dx, y, dy); +} + +// Scale an UV image. 
+LIBYUV_API +int UVScale(const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height, + enum FilterMode filtering) { + if (!src_uv || src_width == 0 || src_height == 0 || src_width > 32768 || + src_height > 32768 || !dst_uv || dst_width <= 0 || dst_height <= 0) { + return -1; + } + ScaleUV(src_uv, src_stride_uv, src_width, src_height, dst_uv, dst_stride_uv, + dst_width, dst_height, 0, 0, dst_width, dst_height, filtering); + return 0; +} + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif diff --git a/TMessagesProj/jni/voip/CMakeLists.txt b/TMessagesProj/jni/voip/CMakeLists.txt index bd97ae2ac..3873854aa 100644 --- a/TMessagesProj/jni/voip/CMakeLists.txt +++ b/TMessagesProj/jni/voip/CMakeLists.txt @@ -221,6 +221,8 @@ add_library(tgcalls_tp STATIC third_party/libvpx/source/libvpx/vp9/encoder/vp9_temporal_filter.c third_party/libvpx/source/libvpx/vp9/encoder/vp9_tokenize.c third_party/libvpx/source/libvpx/vp9/encoder/vp9_treewriter.c + third_party/libvpx/source/libvpx/vp9/encoder/vp9_ext_ratectrl.c + third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.cc third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.c third_party/libvpx/source/libvpx/vp9/vp9_dx_iface.c third_party/libvpx/source/libvpx/vp9/vp9_iface_common.c @@ -738,6 +740,7 @@ add_library(tgcalls STATIC voip/tgcalls/JsonConfig.cpp voip/tgcalls/reference/InstanceImplReference.cpp voip/tgcalls/legacy/InstanceImplLegacy.cpp + voip/tgcalls/group/GroupInstanceImpl.cpp voip/webrtc/rtc_base/async_invoker.cc voip/webrtc/rtc_base/async_packet_socket.cc @@ -801,7 +804,6 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/numerics/moving_average.cc voip/webrtc/rtc_base/numerics/sample_counter.cc voip/webrtc/rtc_base/numerics/sample_stats.cc - voip/webrtc/rtc_base/numerics/samples_stats_counter.cc voip/webrtc/rtc_base/openssl_adapter.cc voip/webrtc/rtc_base/openssl_certificate.cc voip/webrtc/rtc_base/openssl_digest.cc 
@@ -837,8 +839,6 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/strings/audio_format_to_string.cc voip/webrtc/rtc_base/strings/string_builder.cc voip/webrtc/rtc_base/strings/string_format.cc - voip/webrtc/rtc_base/synchronization/rw_lock_posix.cc - voip/webrtc/rtc_base/synchronization/rw_lock_wrapper.cc voip/webrtc/rtc_base/synchronization/mutex.cc voip/webrtc/rtc_base/synchronization/yield.cc voip/webrtc/rtc_base/synchronization/sequence_checker.cc @@ -860,6 +860,7 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/unique_id_generator.cc voip/webrtc/rtc_base/weak_ptr.cc voip/webrtc/rtc_base/zero_memory.cc + voip/webrtc/rtc_base/callback_list.cc voip/webrtc/rtc_base/deprecated/recursive_critical_section.cc voip/webrtc/rtc_base/deprecated/signal_thread.cc voip/webrtc/api/audio/audio_frame.cc @@ -934,6 +935,7 @@ add_library(tgcalls STATIC voip/webrtc/api/video/video_adaptation_counters.cc voip/webrtc/api/video/video_frame_metadata.cc voip/webrtc/api/voip/voip_engine_factory.cc + voip/webrtc/api/numerics/samples_stats_counter.cc voip/webrtc/call/adaptation/adaptation_constraint.cc voip/webrtc/call/adaptation/broadcast_resource_listener.cc voip/webrtc/call/adaptation/degradation_preference_provider.cc @@ -964,6 +966,7 @@ add_library(tgcalls STATIC voip/webrtc/api/video/video_content_type.cc voip/webrtc/api/video/video_frame.cc voip/webrtc/api/video/video_frame_buffer.cc + voip/webrtc/api/video/nv12_buffer.cc voip/webrtc/api/video/video_source_interface.cc voip/webrtc/api/video/video_stream_decoder_create.cc voip/webrtc/api/video/video_stream_encoder_create.cc @@ -981,6 +984,7 @@ add_library(tgcalls STATIC voip/webrtc/api/video_codecs/vp8_frame_config.cc voip/webrtc/api/video_codecs/vp8_temporal_layers.cc voip/webrtc/api/video_codecs/vp8_temporal_layers_factory.cc + voip/webrtc/api/video_codecs/spatial_layer.cc voip/webrtc/pc/audio_rtp_receiver.cc voip/webrtc/pc/audio_track.cc voip/webrtc/pc/channel.cc @@ -1037,6 +1041,12 @@ add_library(tgcalls STATIC 
voip/webrtc/pc/video_track_source.cc voip/webrtc/pc/webrtc_sdp.cc voip/webrtc/pc/webrtc_session_description_factory.cc + voip/webrtc/pc/connection_context.cc + voip/webrtc/pc/peer_connection_message_handler.cc + voip/webrtc/pc/rtp_transmission_manager.cc + voip/webrtc/pc/sdp_offer_answer.cc + voip/webrtc/pc/transceiver_list.cc + voip/webrtc/pc/usage_pattern.cc voip/webrtc/media/base/adapted_video_track_source.cc voip/webrtc/media/base/codec.cc voip/webrtc/media/base/h264_profile_level_id.cc @@ -1176,9 +1186,6 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc - voip/webrtc/modules/audio_coding/codecs/opus/test/audio_ring_buffer.cc - voip/webrtc/modules/audio_coding/codecs/opus/test/blocker.cc - voip/webrtc/modules/audio_coding/codecs/opus/test/lapped_transform.cc voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc voip/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b_common.cc @@ -1353,6 +1360,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.cc voip/webrtc/modules/audio_processing/aec3/suppression_filter.cc voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc + voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc voip/webrtc/modules/audio_processing/aec_dump/null_aec_dump_factory.cc voip/webrtc/modules/audio_processing/aecm/aecm_core.cc voip/webrtc/modules/audio_processing/aecm/aecm_core_c.cc @@ -1367,7 +1375,6 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc - 
voip/webrtc/modules/audio_processing/agc2/agc2_common.cc voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc voip/webrtc/modules/audio_processing/agc2/compute_interpolated_gain_curve.cc @@ -1472,7 +1479,6 @@ add_library(tgcalls STATIC voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc - voip/webrtc/modules/include/module_common_types.cc voip/webrtc/modules/pacing/bitrate_prober.cc voip/webrtc/modules/pacing/interval_budget.cc voip/webrtc/modules/pacing/paced_sender.cc @@ -1575,6 +1581,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc + voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc voip/webrtc/modules/utility/source/helpers_android.cc voip/webrtc/modules/utility/source/jvm_android.cc voip/webrtc/modules/utility/source/process_thread_impl.cc @@ -1646,10 +1653,23 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc voip/webrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc - voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc + voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc + 
voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc + voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc + voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc voip/webrtc/modules/video_processing/util/denoiser_filter.cc voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc voip/webrtc/modules/video_processing/util/noise_estimation.cc @@ -1751,11 +1771,11 @@ add_library(tgcalls STATIC voip/webrtc/common_video/h265/h265_pps_parser.cc voip/webrtc/common_video/h265/h265_sps_parser.cc voip/webrtc/common_video/h265/h265_vps_parser.cc - voip/webrtc/common_video/i420_buffer_pool.cc voip/webrtc/common_video/incoming_video_stream.cc voip/webrtc/common_video/libyuv/webrtc_libyuv.cc voip/webrtc/common_video/video_frame_buffer.cc voip/webrtc/common_video/video_render_frames.cc + voip/webrtc/common_video/video_frame_buffer_pool.cc voip/webrtc/p2p/base/async_stun_tcp_socket.cc voip/webrtc/p2p/base/basic_async_resolver_factory.cc voip/webrtc/p2p/base/basic_ice_controller.cc @@ -1792,6 +1812,7 @@ add_library(tgcalls STATIC voip/webrtc/video/adaptation/quality_rampup_experiment_helper.cc voip/webrtc/modules/video_coding/deprecated/nack_module.cc voip/webrtc/modules/video_coding/nack_module2.cc + voip/webrtc/modules/async_audio_processing/async_audio_processing.cc voip/webrtc/logging/rtc_event_log/encoder/blob_encoding.cc 
voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_common.cc @@ -1830,6 +1851,8 @@ add_library(tgcalls STATIC voip/webrtc/video/adaptation/quality_scaler_resource.cc voip/webrtc/video/adaptation/video_stream_encoder_resource.cc voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc + voip/webrtc/video/adaptation/balanced_constraint.cc + voip/webrtc/video/adaptation/bitrate_constraint.cc voip/webrtc/video/buffered_frame_decryptor.cc voip/webrtc/video/call_stats.cc voip/webrtc/video/encoder_bitrate_adjuster.cc @@ -1864,6 +1887,7 @@ add_library(tgcalls STATIC voip/webrtc/video/rtp_streams_synchronizer2.cc voip/webrtc/video/receive_statistics_proxy2.cc voip/webrtc/video/call_stats2.cc + voip/webrtc/video/alignment_adjuster.cc voip/webrtc/audio/audio_level.cc voip/webrtc/audio/audio_receive_stream.cc voip/webrtc/audio/audio_send_stream.cc diff --git a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp index 8a852a6b0..b1e6485fb 100644 --- a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp +++ b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp @@ -10,6 +10,9 @@ #include #include #include +#include + +#include #include "pc/video_track.h" #include "legacy/InstanceImplLegacy.h" @@ -69,12 +72,14 @@ public: struct InstanceHolder { std::unique_ptr nativeInstance; + std::unique_ptr groupNativeInstance; jobject javaInstance; std::shared_ptr _videoCapture; std::shared_ptr _platformContext; }; jclass TrafficStatsClass; +jclass FingerprintClass; jclass FinalStateClass; jclass NativeInstanceClass; jmethodID FinalStateInitMethod; @@ -87,10 +92,6 @@ InstanceHolder *getInstanceHolder(JNIEnv *env, jobject obj) { return reinterpret_cast(getInstanceHolderId(env, obj)); } -Instance *getInstance(JNIEnv *env, jobject obj) { - return getInstanceHolder(env, obj)->nativeInstance.get(); -} - jint 
throwNewJavaException(JNIEnv *env, const char *className, const char *message) { return env->ThrowNew(env->FindClass(className), message); } @@ -220,6 +221,14 @@ jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) { return env->NewObject(FinalStateClass, FinalStateInitMethod, persistentState, debugLog, trafficStats, isRatingSuggested); } +jobject asJavaFingerprint(JNIEnv *env, std::string hash, std::string setup, std::string fingerprint) { + jstring hashStr = env->NewStringUTF(hash.c_str()); + jstring setupStr = env->NewStringUTF(setup.c_str()); + jstring fingerprintStr = env->NewStringUTF(fingerprint.c_str()); + jmethodID initMethodId = env->GetMethodID(FingerprintClass, "", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); + return env->NewObject(FingerprintClass, initMethodId, hashStr, setupStr, fingerprintStr); +} + extern "C" { bool webrtcLoaded = false; @@ -237,10 +246,129 @@ void initWebRTC(JNIEnv *env) { NativeInstanceClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/NativeInstance"))); TrafficStatsClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats"))); + FingerprintClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$Fingerprint"))); FinalStateClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$FinalState"))); FinalStateInitMethod = env->GetMethodID(FinalStateClass, "", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V"); } +JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGroupNativeInstance(JNIEnv *env, jclass clazz, jobject instanceObj, jboolean highQuality) { + initWebRTC(env); + + jobject globalRef = env->NewGlobalRef(instanceObj); + std::shared_ptr platformContext = std::make_shared(env); + + GroupInstanceDescriptor descriptor = { + .networkStateUpdated = [globalRef](bool state) { + 
tgvoip::jni::DoWithJNI([globalRef, state](JNIEnv *env) { + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onNetworkStateUpdated", "(Z)V"), state); + }); + }, + .audioLevelsUpdated = [globalRef](GroupLevelsUpdate const &update) { + tgvoip::jni::DoWithJNI([globalRef, update](JNIEnv *env) { + unsigned int size = update.updates.size(); + jintArray intArray = env->NewIntArray(size); + jfloatArray floatArray = env->NewFloatArray(size); + jbooleanArray boolArray = env->NewBooleanArray(size); + + jint intFill[size]; + jfloat floatFill[size]; + jboolean boolFill[size]; + for (int a = 0; a < size; a++) { + intFill[a] = update.updates[a].ssrc; + floatFill[a] = update.updates[a].value.level; + boolFill[a] = update.updates[a].value.voice; + } + env->SetIntArrayRegion(intArray, 0, size, intFill); + env->SetFloatArrayRegion(floatArray, 0, size, floatFill); + env->SetBooleanArrayRegion(boolArray, 0, size, boolFill); + + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onAudioLevelsUpdated", "([I[F[Z)V"), intArray, floatArray, boolArray); + env->DeleteLocalRef(intArray); + env->DeleteLocalRef(floatArray); + env->DeleteLocalRef(boolArray); + }); + }, + .platformContext = platformContext + }; + + auto *holder = new InstanceHolder; + holder->groupNativeInstance = std::make_unique(std::move(descriptor)); + holder->javaInstance = globalRef; + holder->_platformContext = platformContext; + holder->groupNativeInstance->emitJoinPayload([globalRef](const GroupJoinPayload& payload) { + JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); + jobjectArray array = env->NewObjectArray(payload.fingerprints.size(), FingerprintClass, 0); + for (int a = 0; a < payload.fingerprints.size(); a++) { + env->SetObjectArrayElement(array, a, asJavaFingerprint(env, payload.fingerprints[a].hash, payload.fingerprints[a].setup, payload.fingerprints[a].fingerprint)); + } + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onEmitJoinPayload", 
"(Ljava/lang/String;Ljava/lang/String;[Lorg/telegram/messenger/voip/Instance$Fingerprint;I)V"), env->NewStringUTF(payload.ufrag.c_str()), env->NewStringUTF(payload.pwd.c_str()), array, (jint) payload.ssrc); + }); + return reinterpret_cast(holder); +} + +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setJoinResponsePayload(JNIEnv *env, jobject obj, jstring ufrag, jstring pwd, jobjectArray fingerprints, jobjectArray candidates) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->groupNativeInstance == nullptr) { + return; + } + std::vector fingerprintsArray; + std::vector candidatesArray; + + jsize size = env->GetArrayLength(fingerprints); + for (int i = 0; i < size; i++) { + JavaObject fingerprintObject(env, env->GetObjectArrayElement(fingerprints, i)); + fingerprintsArray.push_back( + { + .hash = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("hash")), + .setup = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("setup")), + .fingerprint = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("fingerprint")) + }); + } + size = env->GetArrayLength(candidates); + for (int i = 0; i < size; i++) { + JavaObject candidateObject(env, env->GetObjectArrayElement(candidates, i)); + candidatesArray.push_back( + { + .port = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("port")), + .protocol = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("protocol")), + .network = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("network")), + .generation = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("generation")), + .id = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("id")), + .component = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("component")), + .foundation = tgvoip::jni::JavaStringToStdString(env, 
candidateObject.getStringField("foundation")), + .priority = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("priority")), + .ip = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("ip")), + .type = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("type")), + .tcpType = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("tcpType")), + .relAddr = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("relAddr")), + .relPort = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("relPort")), + }); + } + + instance->groupNativeInstance->setJoinResponsePayload( + { + .ufrag = tgvoip::jni::JavaStringToStdString(env, ufrag), + .pwd = tgvoip::jni::JavaStringToStdString(env, pwd), + .fingerprints = fingerprintsArray, + .candidates = candidatesArray, + }); +} + +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_removeSsrcs(JNIEnv *env, jobject obj, jintArray ssrcs) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->groupNativeInstance == nullptr) { + return; + } + jsize size = env->GetArrayLength(ssrcs); + + std::vector ssrcsArray; + ssrcsArray.resize(size); + for (int i = 0; i < size; i++) { + env->GetIntArrayRegion(ssrcs, 0, size, reinterpret_cast(ssrcsArray.data())); + } + instance->groupNativeInstance->removeSsrcs(ssrcsArray); +} + JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNativeInstance(JNIEnv *env, jclass clazz, jstring version, jobject instanceObj, jobject config, jstring persistentStateFilePath, jobjectArray endpoints, jobject proxyClass, jint networkType, jobject encryptionKey, jobject remoteSink, jlong videoCapturer, jfloat aspectRatio) { initWebRTC(env); @@ -369,43 +497,83 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setBuffer } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setNetworkType(JNIEnv *env, jobject obj, 
jint networkType) { - getInstance(env, obj)->setNetworkType(parseNetworkType(networkType)); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return; + } + instance->nativeInstance->setNetworkType(parseNetworkType(networkType)); } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setMuteMicrophone(JNIEnv *env, jobject obj, jboolean muteMicrophone) { - getInstance(env, obj)->setMuteMicrophone(muteMicrophone); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance != nullptr) { + instance->nativeInstance->setMuteMicrophone(muteMicrophone); + } else if (instance->groupNativeInstance != nullptr) { + instance->groupNativeInstance->setIsMuted(muteMicrophone); + } } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setAudioOutputGainControlEnabled(JNIEnv *env, jobject obj, jboolean enabled) { - getInstance(env, obj)->setAudioOutputGainControlEnabled(enabled); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return; + } + instance->nativeInstance->setAudioOutputGainControlEnabled(enabled); } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setEchoCancellationStrength(JNIEnv *env, jobject obj, jint strength) { - getInstance(env, obj)->setEchoCancellationStrength(strength); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return; + } + instance->nativeInstance->setEchoCancellationStrength(strength); } JNIEXPORT jstring JNICALL Java_org_telegram_messenger_voip_NativeInstance_getLastError(JNIEnv *env, jobject obj) { - return env->NewStringUTF(getInstance(env, obj)->getLastError().c_str()); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return nullptr; + } + return env->NewStringUTF(instance->nativeInstance->getLastError().c_str()); } JNIEXPORT jstring JNICALL 
Java_org_telegram_messenger_voip_NativeInstance_getDebugInfo(JNIEnv *env, jobject obj) { - return env->NewStringUTF(getInstance(env, obj)->getDebugInfo().c_str()); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return nullptr; + } + return env->NewStringUTF(instance->nativeInstance->getDebugInfo().c_str()); } JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_getPreferredRelayId(JNIEnv *env, jobject obj) { - return getInstance(env, obj)->getPreferredRelayId(); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return 0; + } + return instance->nativeInstance->getPreferredRelayId(); } JNIEXPORT jobject JNICALL Java_org_telegram_messenger_voip_NativeInstance_getTrafficStats(JNIEnv *env, jobject obj) { - return asJavaTrafficStats(env, getInstance(env, obj)->getTrafficStats()); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return nullptr; + } + return asJavaTrafficStats(env, instance->nativeInstance->getTrafficStats()); } JNIEXPORT jbyteArray JNICALL Java_org_telegram_messenger_voip_NativeInstance_getPersistentState(JNIEnv *env, jobject obj) { - return copyVectorToJavaByteArray(env, getInstance(env, obj)->getPersistentState().value); + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return nullptr; + } + return copyVectorToJavaByteArray(env, instance->nativeInstance->getPersistentState().value); } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNative(JNIEnv *env, jobject obj) { InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return; + } instance->nativeInstance->stop([instance](FinalState finalState) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, 
instance->javaInstance).getStringField("persistentStateFilePath")); @@ -416,6 +584,17 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNativ }); } +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopGroupNative(JNIEnv *env, jobject obj) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->groupNativeInstance == nullptr) { + return; + } + instance->groupNativeInstance->stop(); + instance->groupNativeInstance.reset(); + env->DeleteGlobalRef(instance->javaInstance); + delete instance; +} + JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink, jboolean front) { initWebRTC(env); std::unique_ptr capture = tgcalls::VideoCaptureInterface::Create(front ? "front" : "back", std::make_shared(env)); @@ -441,6 +620,9 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setVideoS JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCamera(JNIEnv *env, jobject obj, jboolean front) { InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return; + } if (instance->_videoCapture == nullptr) { return; } @@ -449,6 +631,9 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCam JNIEXPORT void Java_org_telegram_messenger_voip_NativeInstance_setVideoState(JNIEnv *env, jobject obj, jint state) { InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return; + } if (instance->_videoCapture == nullptr) { return; } @@ -457,6 +642,9 @@ JNIEXPORT void Java_org_telegram_messenger_voip_NativeInstance_setVideoState(JNI JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutgoingVideo(JNIEnv *env, jobject obj, jobject localSink, jboolean front) { InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + 
return; + } if (instance->_videoCapture) { return; } @@ -468,6 +656,9 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutg JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onSignalingDataReceive(JNIEnv *env, jobject obj, jbyteArray value) { InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance == nullptr) { + return; + } auto *valueBytes = (uint8_t *) env->GetByteArrayElements(value, nullptr); const size_t size = env->GetArrayLength(value); @@ -477,5 +668,4 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onSignali env->ReleaseByteArrayElements(value, (jbyte *) valueBytes, JNI_ABORT); } - } \ No newline at end of file diff --git a/TMessagesProj/jni/voip/tgcalls/CryptoHelper.cpp b/TMessagesProj/jni/voip/tgcalls/CryptoHelper.cpp index 67603734e..a9c8cd3af 100644 --- a/TMessagesProj/jni/voip/tgcalls/CryptoHelper.cpp +++ b/TMessagesProj/jni/voip/tgcalls/CryptoHelper.cpp @@ -1,6 +1,7 @@ #include "CryptoHelper.h" #include +#include namespace tgcalls { diff --git a/TMessagesProj/jni/voip/tgcalls/Instance.h b/TMessagesProj/jni/voip/tgcalls/Instance.h index 6a48907cd..f91c270a1 100644 --- a/TMessagesProj/jni/voip/tgcalls/Instance.h +++ b/TMessagesProj/jni/voip/tgcalls/Instance.h @@ -23,11 +23,13 @@ namespace tgcalls { class VideoCaptureInterface; class PlatformContext; +struct FilePath { #ifndef _WIN32 -using FilePath = std::string; + std::string data; #else -using FilePath = std::wstring; + std::wstring data; #endif +}; struct Proxy { std::string host; @@ -217,6 +219,7 @@ struct Descriptor { std::shared_ptr videoCapture; std::function stateUpdated; std::function signalBarsUpdated; + std::function audioLevelUpdated; std::function remoteBatteryLevelIsLowUpdated; std::function remoteMediaStateUpdated; std::function remotePrefferedAspectRatioUpdated; diff --git a/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp index 
0abdecdd6..508954928 100644 --- a/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp @@ -25,11 +25,11 @@ rtc::Thread *getManagerThread() { } // namespace InstanceImpl::InstanceImpl(Descriptor &&descriptor) -: _logSink(std::make_unique(descriptor.config)) { +: _logSink(std::make_unique(descriptor.config.logPath)) { rtc::LogMessage::LogToDebug(rtc::LS_INFO); rtc::LogMessage::SetLogToStderr(false); rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO); - + auto networkType = descriptor.initialNetworkType; _manager.reset(new ThreadLocalObject(getManagerThread(), [descriptor = std::move(descriptor)]() mutable { @@ -38,7 +38,7 @@ InstanceImpl::InstanceImpl(Descriptor &&descriptor) _manager->perform(RTC_FROM_HERE, [](Manager *manager) { manager->start(); }); - + setNetworkType(networkType); } @@ -74,7 +74,7 @@ void InstanceImpl::setNetworkType(NetworkType networkType) { default: break; } - + _manager->perform(RTC_FROM_HERE, [isLowCostNetwork](Manager *manager) { manager->setIsLocalNetworkLowCost(isLowCostNetwork); }); @@ -154,7 +154,7 @@ PersistentState InstanceImpl::getPersistentState() { void InstanceImpl::stop(std::function completion) { std::string debugLog = _logSink->result(); - + _manager->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](Manager *manager) { manager->getNetworkStats([completion, debugLog = std::move(debugLog)](TrafficStats stats, CallStats callStats) { FinalState finalState; @@ -162,7 +162,7 @@ void InstanceImpl::stop(std::function completion) { finalState.isRatingSuggested = false; finalState.trafficStats = stats; finalState.callStats = callStats; - + completion(finalState); }); }); diff --git a/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp b/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp index 77755c319..b7e53e07f 100644 --- a/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp @@ -11,9 +11,9 @@ namespace tgcalls { 
-LogSinkImpl::LogSinkImpl(const Config &config) { - if (!config.logPath.empty()) { - _file.open(config.logPath); +LogSinkImpl::LogSinkImpl(const FilePath &logPath) { + if (!logPath.data.empty()) { + _file.open(logPath.data); } } @@ -64,7 +64,7 @@ void LogSinkImpl::OnLogMessage(const std::string &message) { << ":" << timeinfo.tm_sec << ":" << milliseconds << " " << message; - + #if DEBUG printf("%d-%d-%d %d:%d:%d:%d %s\n", timeinfo.tm_year + 1900, timeinfo.tm_mon + 1, timeinfo.tm_mday, timeinfo.tm_hour, timeinfo.tm_min, timeinfo.tm_sec, milliseconds, message.c_str()); #endif diff --git a/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.h b/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.h index 5c6aa1d11..5d480ffb0 100644 --- a/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.h @@ -6,11 +6,11 @@ namespace tgcalls { -struct Config; +struct FilePath; class LogSinkImpl final : public rtc::LogSink { public: - LogSinkImpl(const Config &config); + LogSinkImpl(const FilePath &logPath); void OnLogMessage(const std::string &msg, rtc::LoggingSeverity severity, const char *tag) override; void OnLogMessage(const std::string &message, rtc::LoggingSeverity severity) override; diff --git a/TMessagesProj/jni/voip/tgcalls/Manager.cpp b/TMessagesProj/jni/voip/tgcalls/Manager.cpp index affc0c346..a789d088c 100644 --- a/TMessagesProj/jni/voip/tgcalls/Manager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/Manager.cpp @@ -27,11 +27,11 @@ rtc::Thread *makeMediaThread() { } void dumpStatsLog(const FilePath &path, const CallStats &stats) { - if (path.empty()) { + if (path.data.empty()) { return; } std::ofstream file; - file.open(path); + file.open(path.data); file << "{"; file << "\"v\":\"" << 1 << "\""; @@ -112,6 +112,7 @@ _enableStunMarking(descriptor.config.enableStunMarking), _protocolVersion(descriptor.config.protocolVersion), _statsLogPath(descriptor.config.statsLogPath), _rtcServers(std::move(descriptor.rtcServers)), +_proxy(std::move(descriptor.proxy)), 
_mediaDevicesConfig(std::move(descriptor.mediaDevicesConfig)), _videoCapture(std::move(descriptor.videoCapture)), _stateUpdated(std::move(descriptor.stateUpdated)), @@ -120,6 +121,7 @@ _remoteBatteryLevelIsLowUpdated(std::move(descriptor.remoteBatteryLevelIsLowUpda _remotePrefferedAspectRatioUpdated(std::move(descriptor.remotePrefferedAspectRatioUpdated)), _signalingDataEmitted(std::move(descriptor.signalingDataEmitted)), _signalBarsUpdated(std::move(descriptor.signalBarsUpdated)), +_audioLevelUpdated(std::move(descriptor.audioLevelUpdated)), _enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo), _dataSaving(descriptor.config.dataSaving), _platformContext(descriptor.platformContext) { @@ -177,7 +179,7 @@ void Manager::start() { strong->_sendSignalingMessage(std::move(message)); }); }; - _networkManager.reset(new ThreadLocalObject(getNetworkThread(), [weak, thread, sendSignalingMessage, encryptionKey = _encryptionKey, enableP2P = _enableP2P, enableTCP = _enableTCP, enableStunMarking = _enableStunMarking, rtcServers = _rtcServers] { + _networkManager.reset(new ThreadLocalObject(getNetworkThread(), [weak, thread, sendSignalingMessage, encryptionKey = _encryptionKey, enableP2P = _enableP2P, enableTCP = _enableTCP, enableStunMarking = _enableStunMarking, rtcServers = _rtcServers, proxy = std::move(_proxy)] () mutable { return new NetworkManager( getNetworkThread(), encryptionKey, @@ -185,6 +187,7 @@ void Manager::start() { enableTCP, enableStunMarking, rtcServers, + std::move(proxy), [=](const NetworkManager::State &state) { thread->PostTask(RTC_FROM_HERE, [=] { const auto strong = weak.lock(); @@ -242,7 +245,7 @@ void Manager::start() { }); })); bool isOutgoing = _encryptionKey.isOutgoing; - _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [weak, isOutgoing, protocolVersion = _protocolVersion, thread, sendSignalingMessage, videoCapture = _videoCapture, mediaDevicesConfig = _mediaDevicesConfig, enableHighBitrateVideo = _enableHighBitrateVideo, 
signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs, platformContext = _platformContext]() { + _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [weak, isOutgoing, protocolVersion = _protocolVersion, thread, sendSignalingMessage, videoCapture = _videoCapture, mediaDevicesConfig = _mediaDevicesConfig, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, audioLevelUpdated = _audioLevelUpdated, preferredCodecs = _preferredCodecs, platformContext = _platformContext]() { return new MediaManager( getMediaThread(), isOutgoing, @@ -260,6 +263,7 @@ void Manager::start() { }); }, signalBarsUpdated, + audioLevelUpdated, enableHighBitrateVideo, preferredCodecs, platformContext); diff --git a/TMessagesProj/jni/voip/tgcalls/Manager.h b/TMessagesProj/jni/voip/tgcalls/Manager.h index bc720fbb9..621936c1a 100644 --- a/TMessagesProj/jni/voip/tgcalls/Manager.h +++ b/TMessagesProj/jni/voip/tgcalls/Manager.h @@ -57,6 +57,7 @@ private: ProtocolVersion _protocolVersion = ProtocolVersion::V0; FilePath _statsLogPath; std::vector _rtcServers; + std::unique_ptr _proxy; MediaDevicesConfig _mediaDevicesConfig; std::shared_ptr _videoCapture; std::function _stateUpdated; @@ -65,6 +66,7 @@ private: std::function _remotePrefferedAspectRatioUpdated; std::function &)> _signalingDataEmitted; std::function _signalBarsUpdated; + std::function _audioLevelUpdated; std::function _sendSignalingMessage; std::function _sendTransportMessage; std::unique_ptr> _networkManager; diff --git a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp index 1a7eed12b..64377fcbb 100644 --- a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp @@ -17,6 +17,8 @@ #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "call/call.h" #include "modules/rtp_rtcp/source/rtp_utility.h" +#include "api/call/audio_sink.h" +#include 
"modules/audio_processing/audio_buffer.h" namespace tgcalls { namespace { @@ -82,6 +84,47 @@ private: }; +class AudioTrackSinkInterfaceImpl: public webrtc::AudioSinkInterface { +private: + std::function _update; + + int _peakCount = 0; + uint16_t _peak = 0; + +public: + AudioTrackSinkInterfaceImpl(std::function update) : + _update(update) { + } + + virtual ~AudioTrackSinkInterfaceImpl() { + } + + virtual void OnData(const Data& audio) override { + if (audio.channels == 1) { + int16_t *samples = (int16_t *)audio.data; + int numberOfSamplesInFrame = (int)audio.samples_per_channel; + + for (int i = 0; i < numberOfSamplesInFrame; i++) { + int16_t sample = samples[i]; + if (sample < 0) { + sample = -sample; + } + if (_peak < sample) { + _peak = sample; + } + _peakCount += 1; + } + + if (_peakCount >= 1200) { + float level = ((float)(_peak)) / 4000.0f; + _peak = 0; + _peakCount = 0; + _update(level); + } + } + } +}; + rtc::Thread *MediaManager::getWorkerThread() { static rtc::Thread *value = makeWorkerThread(); return value; @@ -96,6 +139,7 @@ MediaManager::MediaManager( std::function sendSignalingMessage, std::function sendTransportMessage, std::function signalBarsUpdated, + std::function audioLevelUpdated, bool enableHighBitrateVideo, std::vector preferredCodecs, std::shared_ptr platformContext) : @@ -105,6 +149,7 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), _sendSignalingMessage(std::move(sendSignalingMessage)), _sendTransportMessage(std::move(sendTransportMessage)), _signalBarsUpdated(std::move(signalBarsUpdated)), +_audioLevelUpdated(std::move(audioLevelUpdated)), _protocolVersion(protocolVersion), _outgoingVideoState(videoCapture ? 
VideoState::Active : VideoState::Inactive), _videoCapture(std::move(videoCapture)), @@ -264,6 +309,27 @@ rtc::scoped_refptr MediaManager::createAudioDeviceMod } void MediaManager::start() { + const auto weak = std::weak_ptr(shared_from_this()); + + // Here we hope that thread outlives the sink + rtc::Thread *thread = _thread; + std::unique_ptr incomingSink(new AudioTrackSinkInterfaceImpl([weak, thread](float level) { + thread->PostTask(RTC_FROM_HERE, [weak, level] { + if (const auto strong = weak.lock()) { + strong->_currentAudioLevel = level; + } + }); + })); + std::unique_ptr outgoingSink(new AudioTrackSinkInterfaceImpl([weak, thread](float level) { + thread->PostTask(RTC_FROM_HERE, [weak, level] { + if (const auto strong = weak.lock()) { + strong->_currentMyAudioLevel = level; + } + }); + })); + _audioChannel->SetRawAudioSink(_ssrcAudio.incoming, std::move(incomingSink)); + _audioChannel->SetRawAudioSink(_ssrcAudio.outgoing, std::move(outgoingSink)); + _sendSignalingMessage({ _myVideoFormats }); if (_videoCapture != nullptr) { @@ -271,6 +337,9 @@ void MediaManager::start() { } beginStatsTimer(3000); + if (_audioLevelUpdated != nullptr) { + beginLevelsTimer(50); + } } MediaManager::~MediaManager() { @@ -369,6 +438,21 @@ void MediaManager::beginStatsTimer(int timeoutMs) { }, timeoutMs); } +void MediaManager::beginLevelsTimer(int timeoutMs) { + const auto weak = std::weak_ptr(shared_from_this()); + _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + float effectiveLevel = fmaxf(strong->_currentAudioLevel, strong->_currentMyAudioLevel); + strong->_audioLevelUpdated(effectiveLevel); + + strong->beginLevelsTimer(50); + }, timeoutMs); +} + void MediaManager::collectStats() { auto stats = _call->GetStats(); float bitrateNorm = 16.0f; diff --git a/TMessagesProj/jni/voip/tgcalls/MediaManager.h b/TMessagesProj/jni/voip/tgcalls/MediaManager.h index 1bd372a33..154a7f2cf 100644 --- 
a/TMessagesProj/jni/voip/tgcalls/MediaManager.h +++ b/TMessagesProj/jni/voip/tgcalls/MediaManager.h @@ -47,6 +47,7 @@ public: std::function sendSignalingMessage, std::function sendTransportMessage, std::function signalBarsUpdated, + std::function audioLevelUpdated, bool enableHighBitrateVideo, std::vector preferredCodecs, std::shared_ptr platformContext); @@ -115,6 +116,7 @@ private: rtc::scoped_refptr createAudioDeviceModule(); void beginStatsTimer(int timeoutMs); + void beginLevelsTimer(int timeoutMs); void collectStats(); rtc::Thread *_thread = nullptr; @@ -124,6 +126,7 @@ private: std::function _sendSignalingMessage; std::function _sendTransportMessage; std::function _signalBarsUpdated; + std::function _audioLevelUpdated; SSRC _ssrcAudio; SSRC _ssrcVideo; @@ -158,6 +161,9 @@ private: bool _enableHighBitrateVideo = false; bool _isLowCostNetwork = false; bool _isDataSavingActive = false; + + float _currentAudioLevel = 0.0f; + float _currentMyAudioLevel = 0.0f; std::unique_ptr _audioNetworkInterface; std::unique_ptr _videoNetworkInterface; diff --git a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp index 4df4b14c7..e1fd6624f 100644 --- a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp @@ -23,6 +23,41 @@ extern "C" { namespace tgcalls { +class TgCallsCryptStringImpl : public rtc::CryptStringImpl { +public: + TgCallsCryptStringImpl(std::string const &value) : + _value(value) { + } + + virtual ~TgCallsCryptStringImpl() override { + } + + virtual size_t GetLength() const override { + return _value.size(); + } + + virtual void CopyTo(char* dest, bool nullterminate) const override { + memcpy(dest, _value.data(), _value.size()); + if (nullterminate) { + dest[_value.size()] = 0; + } + } + virtual std::string UrlEncode() const override { + return _value; + } + virtual CryptStringImpl* Copy() const override { + return new TgCallsCryptStringImpl(_value); + } + + 
virtual void CopyRawTo(std::vector* dest) const override { + dest->resize(_value.size()); + memcpy(dest->data(), _value.data(), _value.size()); + } + +private: + std::string _value; +}; + class TurnCustomizerImpl : public webrtc::TurnCustomizer { public: TurnCustomizerImpl() { @@ -48,6 +83,7 @@ NetworkManager::NetworkManager( bool enableTCP, bool enableStunMarking, std::vector const &rtcServers, + std::unique_ptr proxy, std::function stateUpdated, std::function transportMessageReceived, std::function sendSignalingMessage, @@ -57,6 +93,7 @@ _enableP2P(enableP2P), _enableTCP(enableTCP), _enableStunMarking(enableStunMarking), _rtcServers(rtcServers), +_proxy(std::move(proxy)), _transport( EncryptedConnection::Type::Transport, encryptionKey, @@ -100,6 +137,16 @@ void NetworkManager::start() { flags |= cricket::PORTALLOCATOR_DISABLE_UDP; flags |= cricket::PORTALLOCATOR_DISABLE_STUN; } + + if (_proxy) { + rtc::ProxyInfo proxyInfo; + proxyInfo.type = rtc::ProxyType::PROXY_SOCKS5; + proxyInfo.address = rtc::SocketAddress(_proxy->host, _proxy->port); + proxyInfo.username = _proxy->login; + proxyInfo.password = rtc::CryptString(TgCallsCryptStringImpl(_proxy->password)); + _portAllocator->set_proxy("t/1.0", proxyInfo); + } + _portAllocator->set_flags(_portAllocator->flags() | flags); _portAllocator->Initialize(); diff --git a/TMessagesProj/jni/voip/tgcalls/NetworkManager.h b/TMessagesProj/jni/voip/tgcalls/NetworkManager.h index 7f657f0bf..53c53d28a 100644 --- a/TMessagesProj/jni/voip/tgcalls/NetworkManager.h +++ b/TMessagesProj/jni/voip/tgcalls/NetworkManager.h @@ -55,6 +55,7 @@ public: bool enableTCP, bool enableStunMarking, std::vector const &rtcServers, + std::unique_ptr proxy, std::function stateUpdated, std::function transportMessageReceived, std::function sendSignalingMessage, @@ -85,6 +86,7 @@ private: bool _enableTCP = false; bool _enableStunMarking = false; std::vector _rtcServers; + std::unique_ptr _proxy; EncryptedConnection _transport; bool _isOutgoing = false; 
std::function _stateUpdated; diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.cpp new file mode 100644 index 000000000..fcd5be934 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.cpp @@ -0,0 +1,2125 @@ +#include "GroupInstanceImpl.h" + +#include +#include "api/scoped_refptr.h" +#include "rtc_base/thread.h" +#include "rtc_base/logging.h" +#include "api/peer_connection_interface.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "media/engine/webrtc_media_engine.h" +#include "api/audio_codecs/audio_decoder_factory_template.h" +#include "api/audio_codecs/audio_encoder_factory_template.h" +#include "api/audio_codecs/opus/audio_decoder_opus.h" +#include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/peer_connection_interface.h" +#include "api/video_track_source_proxy.h" +#include "system_wrappers/include/field_trial.h" +#include "api/stats/rtcstats_objects.h" +#include "modules/audio_processing/audio_buffer.h" +#include "common_audio/include/audio_util.h" +#include "common_audio/vad/include/webrtc_vad.h" +#include "modules/audio_processing/agc2/vad_with_level.h" + +#include "ThreadLocalObject.h" +#include "Manager.h" +#include "NetworkManager.h" +#include "VideoCaptureInterfaceImpl.h" +#include "platform/PlatformInterface.h" +#include "LogSinkImpl.h" + +#include +#include +#include + +namespace tgcalls { + +namespace { + +static std::vector splitSdpLines(std::string const &sdp) { + std::vector result; + + std::istringstream sdpStream(sdp); + + std::string s; + while (std::getline(sdpStream, s, '\n')) { + if (s.size() == 0) { + continue; + } + if (s[s.size() - 1] == '\r') { + s.resize(s.size() - 1); + } + result.push_back(s); + } + + return result; +} + +static 
std::vector splitFingerprintLines(std::string const &line) { + std::vector result; + + std::istringstream sdpStream(line); + + std::string s; + while (std::getline(sdpStream, s, ' ')) { + if (s.size() == 0) { + continue; + } + result.push_back(s); + } + + return result; +} + +static std::vector getLines(std::vector const &lines, std::string prefix) { + std::vector result; + + for (auto &line : lines) { + if (line.find(prefix) == 0) { + auto cleanLine = line; + cleanLine.replace(0, prefix.size(), ""); + result.push_back(cleanLine); + } + } + + return result; +} + +static absl::optional parseSdpIntoJoinPayload(std::string const &sdp) { + GroupJoinPayload result; + + auto lines = splitSdpLines(sdp); + + std::vector audioLines; + bool isAudioLine = false; + for (auto &line : lines) { + if (line.find("m=audio") == 0) { + isAudioLine = true; + } + if (isAudioLine) { + audioLines.push_back(line); + } + } + + /*std::vector audioSources; + for (auto &line : getLines(audioLines, "a=ssrc:")) { + std::istringstream iss(line); + uint32_t value = 0; + iss >> value; + if (std::find(audioSources.begin(), audioSources.end(), value) == audioSources.end()) { + audioSources.push_back(value); + } + } + + if (audioSources.size() != 1) { + return absl::nullopt; + } + result.ssrc = audioSources[0];*/ + result.ssrc = 0; + + auto ufragLines = getLines(lines, "a=ice-ufrag:"); + if (ufragLines.size() != 1) { + return absl::nullopt; + } + result.ufrag = ufragLines[0]; + + auto pwdLines = getLines(lines, "a=ice-pwd:"); + if (pwdLines.size() != 1) { + return absl::nullopt; + } + result.pwd = pwdLines[0]; + + for (auto &line : getLines(lines, "a=fingerprint:")) { + auto fingerprintComponents = splitFingerprintLines(line); + if (fingerprintComponents.size() != 2) { + continue; + } + + GroupJoinPayloadFingerprint fingerprint; + fingerprint.hash = fingerprintComponents[0]; + fingerprint.fingerprint = fingerprintComponents[1]; + fingerprint.setup = "active"; + 
result.fingerprints.push_back(fingerprint); + } + + return result; +} + +struct StreamSpec { + bool isMain = false; + uint32_t streamId = 0; + uint32_t audioSsrcOrZero = 0; + bool isRemoved = false; +}; + +static void appendSdp(std::vector &lines, std::string const &line) { + lines.push_back(line); +} + +static std::string createSdp(uint32_t sessionId, GroupJoinResponsePayload const &payload, bool isAnswer, std::vector const &bundleStreams) { + std::vector sdp; + + appendSdp(sdp, "v=0"); + + std::ostringstream sessionIdString; + sessionIdString << "o=- "; + sessionIdString << sessionId; + sessionIdString << " 2 IN IP4 0.0.0.0"; + appendSdp(sdp, sessionIdString.str()); + + appendSdp(sdp, "s=-"); + appendSdp(sdp, "t=0 0"); + + std::ostringstream bundleString; + bundleString << "a=group:BUNDLE"; + for (auto &stream : bundleStreams) { + bundleString << " "; + if (stream.isMain) { + bundleString << "0"; + } else { + bundleString << "audio"; + bundleString << stream.streamId; + } + } + appendSdp(sdp, bundleString.str()); + + appendSdp(sdp, "a=ice-lite"); + + for (auto &stream : bundleStreams) { + std::ostringstream audioMidString; + if (stream.isMain) { + audioMidString << "0"; + } else { + audioMidString << "audio"; + audioMidString << stream.streamId; + } + + std::ostringstream mLineString; + mLineString << "m=audio "; + if (stream.isMain) { + mLineString << "1"; + } else { + mLineString << "0"; + } + mLineString << " RTP/SAVPF 111 126"; + + appendSdp(sdp, mLineString.str()); + + if (stream.isMain) { + appendSdp(sdp, "c=IN IP4 0.0.0.0"); + } + + std::ostringstream mLineMidString; + mLineMidString << "a=mid:"; + mLineMidString << audioMidString.str(); + appendSdp(sdp, mLineMidString.str()); + + if (stream.isMain) { + std::ostringstream ufragString; + ufragString << "a=ice-ufrag:"; + ufragString << payload.ufrag; + appendSdp(sdp, ufragString.str()); + + std::ostringstream pwdString; + pwdString << "a=ice-pwd:"; + pwdString << payload.pwd; + appendSdp(sdp, 
pwdString.str()); + + for (auto &fingerprint : payload.fingerprints) { + std::ostringstream fingerprintString; + fingerprintString << "a=fingerprint:"; + fingerprintString << fingerprint.hash; + fingerprintString << " "; + fingerprintString << fingerprint.fingerprint; + appendSdp(sdp, fingerprintString.str()); + appendSdp(sdp, "a=setup:passive"); + } + + for (auto &candidate : payload.candidates) { + std::ostringstream candidateString; + candidateString << "a=candidate:"; + candidateString << candidate.foundation; + candidateString << " "; + candidateString << candidate.component; + candidateString << " "; + candidateString << candidate.protocol; + candidateString << " "; + candidateString << candidate.priority; + candidateString << " "; + candidateString << candidate.ip; + candidateString << " "; + candidateString << candidate.port; + candidateString << " "; + candidateString << "typ "; + candidateString << candidate.type; + candidateString << " "; + + if (candidate.type == "srflx" || candidate.type == "prflx" || candidate.type == "relay") { + if (candidate.relAddr.size() != 0 && candidate.relPort.size() != 0) { + candidateString << "raddr "; + candidateString << candidate.relAddr; + candidateString << " "; + candidateString << "rport "; + candidateString << candidate.relPort; + candidateString << " "; + } + } + + if (candidate.protocol == "tcp") { + if (candidate.tcpType.size() != 0) { + candidateString << "tcptype "; + candidateString << candidate.tcpType; + candidateString << " "; + } + } + + candidateString << "generation "; + candidateString << candidate.generation; + + appendSdp(sdp, candidateString.str()); + } + } + + appendSdp(sdp, "a=rtpmap:111 opus/48000/2"); + appendSdp(sdp, "a=rtpmap:126 telephone-event/8000"); + appendSdp(sdp, "a=fmtp:111 minptime=10; useinbandfec=1"); + appendSdp(sdp, "a=rtcp:1 IN IP4 0.0.0.0"); + appendSdp(sdp, "a=rtcp-mux"); + appendSdp(sdp, "a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level"); + appendSdp(sdp, "a=extmap:3 
http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"); + appendSdp(sdp, "a=extmap:5 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"); + appendSdp(sdp, "a=rtcp-fb:111 transport-cc"); + + if (isAnswer && stream.isMain) { + appendSdp(sdp, "a=recvonly"); + } else { + if (stream.isMain) { + appendSdp(sdp, "a=sendrecv"); + } else { + appendSdp(sdp, "a=sendonly"); + appendSdp(sdp, "a=bundle-only"); + } + + /*std::ostringstream ssrcGroupString; + ssrcGroupString << "a=ssrc-group:FID "; + ssrcGroupString << stream.audioSsrc; + appendSdp(sdp, ssrcGroupString.str());*/ + + if (stream.isRemoved) { + appendSdp(sdp, "a=inactive"); + } else { + std::ostringstream cnameString; + cnameString << "a=ssrc:"; + cnameString << stream.audioSsrcOrZero; + cnameString << " cname:stream"; + cnameString << stream.streamId; + appendSdp(sdp, cnameString.str()); + + std::ostringstream msidString; + msidString << "a=ssrc:"; + msidString << stream.audioSsrcOrZero; + msidString << " msid:stream"; + msidString << stream.streamId; + msidString << " audio" << stream.streamId; + appendSdp(sdp, msidString.str()); + + std::ostringstream mslabelString; + mslabelString << "a=ssrc:"; + mslabelString << stream.audioSsrcOrZero; + mslabelString << " mslabel:audio"; + mslabelString << stream.streamId; + appendSdp(sdp, mslabelString.str()); + + std::ostringstream labelString; + labelString << "a=ssrc:"; + labelString << stream.audioSsrcOrZero; + labelString << " label:audio"; + labelString << stream.streamId; + appendSdp(sdp, labelString.str()); + } + } + } + + std::ostringstream result; + for (auto &line : sdp) { + result << line << "\n"; + } + + return result.str(); +} + +static std::string parseJoinResponseIntoSdp(uint32_t sessionId, uint32_t mainStreamAudioSsrc, GroupJoinResponsePayload const &payload, bool isAnswer, std::vector const &allOtherSsrcs, std::set const &activeOtherSsrcs) { + + std::vector bundleStreams; + + StreamSpec mainStream; + mainStream.isMain = true; + 
mainStream.streamId = 0; + mainStream.audioSsrcOrZero = mainStreamAudioSsrc; + mainStream.isRemoved = false; + bundleStreams.push_back(mainStream); + + uint32_t numStreamsToAllocate = (uint32_t)allOtherSsrcs.size(); + /*if (numStreamsToAllocate < 10) { + numStreamsToAllocate = 10; + }*/ + + for (uint32_t i = 0; i < numStreamsToAllocate; i++) { + StreamSpec stream; + stream.isMain = false; + if (i < allOtherSsrcs.size()) { + uint32_t ssrc = allOtherSsrcs[i]; + stream.audioSsrcOrZero = ssrc; + stream.isRemoved = activeOtherSsrcs.find(ssrc) == activeOtherSsrcs.end(); + stream.streamId = ssrc; + } else { + stream.audioSsrcOrZero = 0; + stream.isRemoved = true; + stream.streamId = 1 + (uint32_t)i; + } + bundleStreams.push_back(stream); + } + + return createSdp(sessionId, payload, isAnswer, bundleStreams); +} + +rtc::Thread *makeNetworkThread() { + static std::unique_ptr value = rtc::Thread::CreateWithSocketServer(); + value->SetName("WebRTC-Group-Network", nullptr); + value->Start(); + return value.get(); +} + +rtc::Thread *getNetworkThread() { + static rtc::Thread *value = makeNetworkThread(); + return value; +} + +rtc::Thread *makeWorkerThread() { + static std::unique_ptr value = rtc::Thread::Create(); + value->SetName("WebRTC-Group-Worker", nullptr); + value->Start(); + return value.get(); +} + +rtc::Thread *getWorkerThread() { + static rtc::Thread *value = makeWorkerThread(); + return value; +} + +rtc::Thread *getSignalingThread() { + return Manager::getMediaThread(); +} + +rtc::Thread *getMediaThread() { + return Manager::getMediaThread(); +} + +class FrameEncryptorImpl : public webrtc::FrameEncryptorInterface { +public: + FrameEncryptorImpl() { + } + + virtual int Encrypt(cricket::MediaType media_type, + uint32_t ssrc, + rtc::ArrayView additional_data, + rtc::ArrayView frame, + rtc::ArrayView encrypted_frame, + size_t* bytes_written) override { + memcpy(encrypted_frame.data(), frame.data(), frame.size()); + for (auto it = encrypted_frame.begin(); it != 
encrypted_frame.end(); it++) { + *it ^= 123; + } + *bytes_written = frame.size(); + return 0; + } + + virtual size_t GetMaxCiphertextByteSize(cricket::MediaType media_type, + size_t frame_size) override { + return frame_size; + } +}; + +class FrameDecryptorImpl : public webrtc::FrameDecryptorInterface { +public: + FrameDecryptorImpl() { + } + + virtual webrtc::FrameDecryptorInterface::Result Decrypt(cricket::MediaType media_type, + const std::vector& csrcs, + rtc::ArrayView additional_data, + rtc::ArrayView encrypted_frame, + rtc::ArrayView frame) override { + memcpy(frame.data(), encrypted_frame.data(), encrypted_frame.size()); + for (auto it = frame.begin(); it != frame.end(); it++) { + *it ^= 123; + } + return webrtc::FrameDecryptorInterface::Result(webrtc::FrameDecryptorInterface::Status::kOk, encrypted_frame.size()); + } + + virtual size_t GetMaxPlaintextByteSize(cricket::MediaType media_type, + size_t encrypted_frame_size) override { + return encrypted_frame_size; + } +}; + +class PeerConnectionObserverImpl : public webrtc::PeerConnectionObserver { +private: + std::function _discoveredIceCandidate; + std::function _connectionStateChanged; + std::function)> _onTrackAdded; + std::function)> _onTrackRemoved; + std::function _onMissingSsrc; + +public: + PeerConnectionObserverImpl( + std::function discoveredIceCandidate, + std::function connectionStateChanged, + std::function)> onTrackAdded, + std::function)> onTrackRemoved, + std::function onMissingSsrc + ) : + _discoveredIceCandidate(discoveredIceCandidate), + _connectionStateChanged(connectionStateChanged), + _onTrackAdded(onTrackAdded), + _onTrackRemoved(onTrackRemoved), + _onMissingSsrc(onMissingSsrc) { + } + + virtual void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) override { + } + + virtual void OnAddStream(rtc::scoped_refptr stream) override { + } + + virtual void OnRemoveStream(rtc::scoped_refptr stream) override { + } + + virtual void OnDataChannel(rtc::scoped_refptr 
data_channel) override { + } + + virtual void OnRenegotiationNeeded() override { + } + + virtual void OnIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state) override { + bool isConnected = false; + switch (new_state) { + case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionConnected: + case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionCompleted: + isConnected = true; + break; + default: + break; + } + _connectionStateChanged(isConnected); + } + + virtual void OnStandardizedIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state) override { + } + + virtual void OnConnectionChange(webrtc::PeerConnectionInterface::PeerConnectionState new_state) override { + } + + virtual void OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGatheringState new_state) override { + } + + virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override { + std::string sdp; + candidate->ToString(&sdp); + _discoveredIceCandidate(sdp, candidate->sdp_mline_index(), candidate->sdp_mid()); + } + + virtual void OnIceCandidateError(const std::string& host_candidate, const std::string& url, int error_code, const std::string& error_text) override { + } + + virtual void OnIceCandidateError(const std::string& address, + int port, + const std::string& url, + int error_code, + const std::string& error_text) override { + } + + virtual void OnIceCandidatesRemoved(const std::vector& candidates) override { + } + + virtual void OnIceConnectionReceivingChange(bool receiving) override { + } + + virtual void OnIceSelectedCandidatePairChanged(const cricket::CandidatePairChangeEvent& event) override { + } + + virtual void OnAddTrack(rtc::scoped_refptr receiver, const std::vector>& streams) override { + } + + virtual void OnTrack(rtc::scoped_refptr transceiver) override { + /*if (transceiver->receiver()) { + rtc::scoped_refptr decryptor(new rtc::RefCountedObject()); + 
transceiver->receiver()->SetFrameDecryptor(decryptor); + }*/ + + _onTrackAdded(transceiver); + } + + virtual void OnRemoveTrack(rtc::scoped_refptr receiver) override { + _onTrackRemoved(receiver); + } + + virtual void OnInterestingUsage(int usage_pattern) override { + } + + virtual void OnErrorDemuxingPacket(uint32_t ssrc) override { + _onMissingSsrc(ssrc); + } +}; + +class RTCStatsCollectorCallbackImpl : public webrtc::RTCStatsCollectorCallback { +public: + RTCStatsCollectorCallbackImpl(std::function &)> completion) : + _completion(completion) { + } + + virtual void OnStatsDelivered(const rtc::scoped_refptr &report) override { + _completion(report); + } + +private: + std::function &)> _completion; +}; + +static const int kVadResultHistoryLength = 8; + +class CombinedVad { +private: + webrtc::VadLevelAnalyzer _vadWithLevel; + float _vadResultHistory[kVadResultHistoryLength]; + +public: + CombinedVad() { + for (int i = 0; i < kVadResultHistoryLength; i++) { + _vadResultHistory[i] = 0.0f; + } + } + + ~CombinedVad() { + } + + bool update(webrtc::AudioBuffer *buffer) { + webrtc::AudioFrameView frameView(buffer->channels(), buffer->num_channels(), buffer->num_frames()); + auto result = _vadWithLevel.AnalyzeFrame(frameView); + for (int i = 1; i < kVadResultHistoryLength; i++) { + _vadResultHistory[i - 1] = _vadResultHistory[i]; + } + _vadResultHistory[kVadResultHistoryLength - 1] = result.speech_probability; + + float movingAverage = 0.0f; + for (int i = 0; i < kVadResultHistoryLength; i++) { + movingAverage += _vadResultHistory[i]; + } + movingAverage /= (float)kVadResultHistoryLength; + + bool vadResult = false; + if (movingAverage > 0.8f) { + vadResult = true; + } + + return vadResult; + } +}; + +class AudioTrackSinkInterfaceImpl: public webrtc::AudioTrackSinkInterface { +private: + std::function _update; + + int _peakCount = 0; + uint16_t _peak = 0; + + CombinedVad _vad; + +public: + AudioTrackSinkInterfaceImpl(std::function update) : + _update(update) { + } + + 
virtual ~AudioTrackSinkInterfaceImpl() { + } + + virtual void OnData(const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames) override { + if (bits_per_sample == 16 && number_of_channels == 1) { + int16_t *samples = (int16_t *)audio_data; + int numberOfSamplesInFrame = (int)number_of_frames; + + webrtc::AudioBuffer buffer(sample_rate, 1, 48000, 1, 48000, 1); + webrtc::StreamConfig config(sample_rate, 1); + buffer.CopyFrom(samples, config); + + bool vadResult = _vad.update(&buffer); + + for (int i = 0; i < numberOfSamplesInFrame; i++) { + int16_t sample = samples[i]; + if (sample < 0) { + sample = -sample; + } + if (_peak < sample) { + _peak = sample; + } + _peakCount += 1; + } + + if (_peakCount >= 1200) { + float level = ((float)(_peak)) / 4000.0f; + _peak = 0; + _peakCount = 0; + _update(level, vadResult); + } + } + } +}; + +class CreateSessionDescriptionObserverImpl : public webrtc::CreateSessionDescriptionObserver { +private: + std::function _completion; + +public: + CreateSessionDescriptionObserverImpl(std::function completion) : + _completion(completion) { + } + + virtual void OnSuccess(webrtc::SessionDescriptionInterface* desc) override { + if (desc) { + std::string sdp; + desc->ToString(&sdp); + + _completion(sdp, desc->type()); + } + } + + virtual void OnFailure(webrtc::RTCError error) override { + } +}; + +class SetSessionDescriptionObserverImpl : public webrtc::SetSessionDescriptionObserver { +private: + std::function _completion; + std::function _error; + +public: + SetSessionDescriptionObserverImpl(std::function completion, std::function error) : + _completion(completion), _error(error) { + } + + virtual void OnSuccess() override { + _completion(); + } + + virtual void OnFailure(webrtc::RTCError error) override { + _error(error); + } +}; + +class AudioCaptureAnalyzer : public webrtc::CustomAudioAnalyzer { +private: + void Initialize(int sample_rate_hz, int num_channels) override { + + } + // 
Analyzes the given capture or render signal. + void Analyze(const webrtc::AudioBuffer* audio) override { + _analyze(audio); + } + // Returns a string representation of the module state. + std::string ToString() const override { + return "analyzing"; + } + + std::function _analyze; + +public: + AudioCaptureAnalyzer(std::function analyze) : + _analyze(analyze) { + } + + virtual ~AudioCaptureAnalyzer() = default; +}; + +class WrappedAudioDeviceModule : public webrtc::AudioDeviceModule { +private: + rtc::scoped_refptr _impl; + +public: + WrappedAudioDeviceModule(rtc::scoped_refptr impl) : + _impl(impl) { + } + + virtual ~WrappedAudioDeviceModule() { + } + + virtual int32_t ActiveAudioLayer(AudioLayer *audioLayer) const override { + return _impl->ActiveAudioLayer(audioLayer); + } + + virtual int32_t RegisterAudioCallback(webrtc::AudioTransport *audioCallback) override { + return _impl->RegisterAudioCallback(audioCallback); + } + + virtual int32_t Init() override { + return _impl->Init(); + } + + virtual int32_t Terminate() override { + return _impl->Terminate(); + } + + virtual bool Initialized() const override { + return _impl->Initialized(); + } + + virtual int16_t PlayoutDevices() override { + return _impl->PlayoutDevices(); + } + + virtual int16_t RecordingDevices() override { + return _impl->RecordingDevices(); + } + + virtual int32_t PlayoutDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override { + return _impl->PlayoutDeviceName(index, name, guid); + } + + virtual int32_t RecordingDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override { + return _impl->RecordingDeviceName(index, name, guid); + } + + virtual int32_t SetPlayoutDevice(uint16_t index) override { + return _impl->SetPlayoutDevice(index); + } + + virtual int32_t SetPlayoutDevice(WindowsDeviceType device) override { + return _impl->SetPlayoutDevice(device); + } + + virtual int32_t 
SetRecordingDevice(uint16_t index) override { + return _impl->SetRecordingDevice(index); + } + + virtual int32_t SetRecordingDevice(WindowsDeviceType device) override { + return _impl->SetRecordingDevice(device); + } + + virtual int32_t PlayoutIsAvailable(bool *available) override { + return _impl->PlayoutIsAvailable(available); + } + + virtual int32_t InitPlayout() override { + return _impl->InitPlayout(); + } + + virtual bool PlayoutIsInitialized() const override { + return _impl->PlayoutIsInitialized(); + } + + virtual int32_t RecordingIsAvailable(bool *available) override { + return _impl->RecordingIsAvailable(available); + } + + virtual int32_t InitRecording() override { + return _impl->InitRecording(); + } + + virtual bool RecordingIsInitialized() const override { + return _impl->RecordingIsInitialized(); + } + + virtual int32_t StartPlayout() override { + return _impl->StartPlayout(); + } + + virtual int32_t StopPlayout() override { + return _impl->StopPlayout(); + } + + virtual bool Playing() const override { + return _impl->Playing(); + } + + virtual int32_t StartRecording() override { + return _impl->StartRecording(); + } + + virtual int32_t StopRecording() override { + return _impl->StopRecording(); + } + + virtual bool Recording() const override { + return _impl->Recording(); + } + + virtual int32_t InitSpeaker() override { + return _impl->InitSpeaker(); + } + + virtual bool SpeakerIsInitialized() const override { + return _impl->SpeakerIsInitialized(); + } + + virtual int32_t InitMicrophone() override { + return _impl->InitMicrophone(); + } + + virtual bool MicrophoneIsInitialized() const override { + return _impl->MicrophoneIsInitialized(); + } + + virtual int32_t SpeakerVolumeIsAvailable(bool *available) override { + return _impl->SpeakerVolumeIsAvailable(available); + } + + virtual int32_t SetSpeakerVolume(uint32_t volume) override { + return _impl->SetSpeakerVolume(volume); + } + + virtual int32_t SpeakerVolume(uint32_t* volume) const override { + 
return _impl->SpeakerVolume(volume); + } + + virtual int32_t MaxSpeakerVolume(uint32_t *maxVolume) const override { + return _impl->MaxSpeakerVolume(maxVolume); + } + + virtual int32_t MinSpeakerVolume(uint32_t *minVolume) const override { + return _impl->MinSpeakerVolume(minVolume); + } + + virtual int32_t MicrophoneVolumeIsAvailable(bool *available) override { + return _impl->MicrophoneVolumeIsAvailable(available); + } + + virtual int32_t SetMicrophoneVolume(uint32_t volume) override { + return _impl->SetMicrophoneVolume(volume); + } + + virtual int32_t MicrophoneVolume(uint32_t *volume) const override { + return _impl->MicrophoneVolume(volume); + } + + virtual int32_t MaxMicrophoneVolume(uint32_t *maxVolume) const override { + return _impl->MaxMicrophoneVolume(maxVolume); + } + + virtual int32_t MinMicrophoneVolume(uint32_t *minVolume) const override { + return _impl->MinMicrophoneVolume(minVolume); + } + + virtual int32_t SpeakerMuteIsAvailable(bool *available) override { + return _impl->SpeakerMuteIsAvailable(available); + } + + virtual int32_t SetSpeakerMute(bool enable) override { + return _impl->SetSpeakerMute(enable); + } + + virtual int32_t SpeakerMute(bool *enabled) const override { + return _impl->SpeakerMute(enabled); + } + + virtual int32_t MicrophoneMuteIsAvailable(bool *available) override { + return _impl->MicrophoneMuteIsAvailable(available); + } + + virtual int32_t SetMicrophoneMute(bool enable) override { + return _impl->SetMicrophoneMute(enable); + } + + virtual int32_t MicrophoneMute(bool *enabled) const override { + return _impl->MicrophoneMute(enabled); + } + + virtual int32_t StereoPlayoutIsAvailable(bool *available) const override { + return _impl->StereoPlayoutIsAvailable(available); + } + + virtual int32_t SetStereoPlayout(bool enable) override { + return _impl->SetStereoPlayout(enable); + } + + virtual int32_t StereoPlayout(bool *enabled) const override { + return _impl->StereoPlayout(enabled); + } + + virtual int32_t 
StereoRecordingIsAvailable(bool *available) const override { + return _impl->StereoRecordingIsAvailable(available); + } + + virtual int32_t SetStereoRecording(bool enable) override { + return _impl->SetStereoRecording(enable); + } + + virtual int32_t StereoRecording(bool *enabled) const override { + return _impl->StereoRecording(enabled); + } + + virtual int32_t PlayoutDelay(uint16_t* delayMS) const override { + return _impl->PlayoutDelay(delayMS); + } + + virtual bool BuiltInAECIsAvailable() const override { + return _impl->BuiltInAECIsAvailable(); + } + + virtual bool BuiltInAGCIsAvailable() const override { + return _impl->BuiltInAGCIsAvailable(); + } + + virtual bool BuiltInNSIsAvailable() const override { + return _impl->BuiltInNSIsAvailable(); + } + + virtual int32_t EnableBuiltInAEC(bool enable) override { + return _impl->EnableBuiltInAEC(enable); + } + + virtual int32_t EnableBuiltInAGC(bool enable) override { + return _impl->EnableBuiltInAGC(enable); + } + + virtual int32_t EnableBuiltInNS(bool enable) override { + return _impl->EnableBuiltInNS(enable); + } + + virtual int32_t GetPlayoutUnderrunCount() const override { + return _impl->GetPlayoutUnderrunCount(); + } + +#if defined(WEBRTC_IOS) + virtual int GetPlayoutAudioParameters(webrtc::AudioParameters *params) const override { + return _impl->GetPlayoutAudioParameters(params); + } + virtual int GetRecordAudioParameters(webrtc::AudioParameters *params) const override { + return _impl->GetRecordAudioParameters(params); + } +#endif // WEBRTC_IOS +}; + +template +void split(const std::string &s, char delim, Out result) { + std::istringstream iss(s); + std::string item; + while (std::getline(iss, item, delim)) { + *result++ = item; + } +} + +std::vector split(const std::string &s, char delim) { + std::vector elems; + split(s, delim, std::back_inserter(elems)); + return elems; +} + +std::string adjustLocalDescription(const std::string &sdp) { + std::vector lines = split(sdp, '\n'); + + std::string pattern = 
"c=IN "; + + bool foundAudio = false; + std::stringstream result; + for (const auto &it : lines) { + result << it << "\n"; + if (!foundAudio && it.compare(0, pattern.size(), pattern) == 0) { + foundAudio = true; + result << "b=AS:" << 32 << "\n"; + } + } + + return result.str(); +} + +} // namespace + + + +class GroupInstanceManager : public std::enable_shared_from_this { +public: + GroupInstanceManager(GroupInstanceDescriptor &&descriptor) : + _networkStateUpdated(descriptor.networkStateUpdated), + _audioLevelsUpdated(descriptor.audioLevelsUpdated), + _initialInputDeviceId(descriptor.initialInputDeviceId), + _initialOutputDeviceId(descriptor.initialOutputDeviceId), + _platformContext(descriptor.platformContext) { + auto generator = std::mt19937(std::random_device()()); + auto distribution = std::uniform_int_distribution(); + do { + _mainStreamAudioSsrc = distribution(generator); + } while (!_mainStreamAudioSsrc); + } + + ~GroupInstanceManager() { + assert(getMediaThread()->IsCurrent()); + + destroyAudioDeviceModule(); + if (_peerConnection) { + _peerConnection->Close(); + } + } + + void generateAndInsertFakeIncomingSsrc() { + // At least on Windows recording can't be started without playout. + // We keep a fake incoming stream, so that playout is always started. 
+ auto generator = std::mt19937(std::random_device()()); + auto distribution = std::uniform_int_distribution(); + while (true) { + _fakeIncomingSsrc = distribution(generator); + if (_fakeIncomingSsrc != 0 + && _fakeIncomingSsrc != _mainStreamAudioSsrc + && std::find(_allOtherSsrcs.begin(), _allOtherSsrcs.end(), _fakeIncomingSsrc) == _allOtherSsrcs.end()) { + break; + } + } + _activeOtherSsrcs.emplace(_fakeIncomingSsrc); + _allOtherSsrcs.emplace_back(_fakeIncomingSsrc); + } + + bool createAudioDeviceModule( + const webrtc::PeerConnectionFactoryDependencies &dependencies) { + _adm_thread = dependencies.worker_thread; + if (!_adm_thread) { + return false; + } + _adm_thread->Invoke(RTC_FROM_HERE, [&] { + const auto check = [&](webrtc::AudioDeviceModule::AudioLayer layer) { + auto result = webrtc::AudioDeviceModule::Create( + layer, + dependencies.task_queue_factory.get()); + return (result && (result->Init() == 0)) ? result : nullptr; + }; + if (auto result = check(webrtc::AudioDeviceModule::kPlatformDefaultAudio)) { + _adm_use_withAudioDeviceModule = new rtc::RefCountedObject(result); +#ifdef WEBRTC_LINUX + } else if (auto result = check(webrtc::AudioDeviceModule::kLinuxAlsaAudio)) { + _adm_use_withAudioDeviceModule = new rtc::RefCountedObject(result); +#endif // WEBRTC_LINUX + } + }); + return (_adm_use_withAudioDeviceModule != nullptr); + } + void destroyAudioDeviceModule() { + if (!_adm_thread) { + return; + } + _adm_thread->Invoke(RTC_FROM_HERE, [&] { + _adm_use_withAudioDeviceModule = nullptr; + }); + } + + void start() { + const auto weak = std::weak_ptr(shared_from_this()); + + webrtc::field_trial::InitFieldTrialsFromString( + //"WebRTC-Audio-SendSideBwe/Enabled/" + "WebRTC-Audio-Allocation/min:6kbps,max:32kbps/" + "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/" + //"WebRTC-FlexFEC-03/Enabled/" + //"WebRTC-FlexFEC-03-Advertised/Enabled/" + "WebRTC-PcFactoryDefaultBitrates/min:6kbps,start:32kbps,max:32kbps/" + ); + + 
PlatformInterface::SharedInstance()->configurePlatformAudio(); + + webrtc::PeerConnectionFactoryDependencies dependencies; + dependencies.network_thread = getNetworkThread(); + dependencies.worker_thread = getWorkerThread(); + dependencies.signaling_thread = getSignalingThread(); + dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); + + if (!createAudioDeviceModule(dependencies)) { + return; + } + + cricket::MediaEngineDependencies mediaDeps; + mediaDeps.task_queue_factory = dependencies.task_queue_factory.get(); + mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); + mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); + mediaDeps.adm = _adm_use_withAudioDeviceModule; + + std::shared_ptr myVad(new CombinedVad()); + + auto analyzer = new AudioCaptureAnalyzer([&, weak, myVad](const webrtc::AudioBuffer* buffer) { + if (!buffer) { + return; + } + if (buffer->num_channels() != 1) { + return; + } + + float peak = 0; + int peakCount = 0; + const float *samples = buffer->channels_const()[0]; + for (int i = 0; i < buffer->num_frames(); i++) { + float sample = samples[i]; + if (sample < 0) { + sample = -sample; + } + if (peak < sample) { + peak = sample; + } + peakCount += 1; + } + + bool vadStatus = myVad->update((webrtc::AudioBuffer *)buffer); + + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, peak, peakCount, vadStatus](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->_myAudioLevelPeakCount += peakCount; + if (strong->_myAudioLevelPeak < peak) { + strong->_myAudioLevelPeak = peak; + } + if (strong->_myAudioLevelPeakCount >= 1200) { + float level = strong->_myAudioLevelPeak / 4000.0f; + if (strong->_isMuted) { + level = 0.0f; + } + strong->_myAudioLevelPeak 
= 0; + strong->_myAudioLevelPeakCount = 0; + strong->_myAudioLevel = GroupLevelValue{ + level, + vadStatus, + }; + } + }); + }); + + webrtc::AudioProcessingBuilder builder; + builder.SetCaptureAnalyzer(std::unique_ptr(analyzer)); + webrtc::AudioProcessing *apm = builder.Create(); + + webrtc::AudioProcessing::Config audioConfig; + webrtc::AudioProcessing::Config::NoiseSuppression noiseSuppression; + noiseSuppression.enabled = true; + noiseSuppression.level = webrtc::AudioProcessing::Config::NoiseSuppression::kHigh; + audioConfig.noise_suppression = noiseSuppression; + + audioConfig.high_pass_filter.enabled = true; + + audioConfig.voice_detection.enabled = true; + + apm->ApplyConfig(audioConfig); + + mediaDeps.audio_processing = apm; + + mediaDeps.onUnknownAudioSsrc = [weak](uint32_t ssrc) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->onMissingSsrc(ssrc); + }); + }; + + dependencies.media_engine = cricket::CreateMediaEngine(std::move(mediaDeps)); + dependencies.call_factory = webrtc::CreateCallFactory(); + dependencies.event_log_factory = + std::make_unique(dependencies.task_queue_factory.get()); + dependencies.network_controller_factory = nullptr; + + _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); + + webrtc::PeerConnectionInterface::RTCConfiguration config; + config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; + //config.continual_gathering_policy = webrtc::PeerConnectionInterface::ContinualGatheringPolicy::GATHER_CONTINUALLY; + config.audio_jitter_buffer_fast_accelerate = true; + config.prioritize_most_likely_ice_candidate_pairs = true; + config.presume_writable_when_fully_relayed = true; + //config.audio_jitter_buffer_enable_rtx_handling = true; + + /*webrtc::CryptoOptions cryptoOptions; + webrtc::CryptoOptions::SFrame sframe; + sframe.require_frame_encryption = true; + cryptoOptions.sframe = sframe; + config.crypto_options = 
cryptoOptions;*/ + + _observer.reset(new PeerConnectionObserverImpl( + [weak](std::string sdp, int mid, std::string sdpMid) { + /*getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, mid, sdpMid](){ + auto strong = weak.lock(); + if (strong) { + //strong->emitIceCandidate(sdp, mid, sdpMid); + } + });*/ + }, + [weak](bool isConnected) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, isConnected](){ + auto strong = weak.lock(); + if (strong) { + strong->updateIsConnected(isConnected); + } + }); + }, + [weak](rtc::scoped_refptr transceiver) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, transceiver](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->onTrackAdded(transceiver); + }); + }, + [weak](rtc::scoped_refptr receiver) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, receiver](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->onTrackRemoved(receiver); + }); + }, + [weak](uint32_t ssrc) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->onMissingSsrc(ssrc); + }); + } + )); + _peerConnection = _nativeFactory->CreatePeerConnection(config, nullptr, nullptr, _observer.get()); + assert(_peerConnection != nullptr); + + cricket::AudioOptions options; + rtc::scoped_refptr audioSource = _nativeFactory->CreateAudioSource(options); + std::stringstream name; + name << "audio"; + name << 0; + std::vector streamIds; + streamIds.push_back(name.str()); + _localAudioTrack = _nativeFactory->CreateAudioTrack(name.str(), audioSource); + _localAudioTrack->set_enabled(false); + auto addedTrack = _peerConnection->AddTrack(_localAudioTrack, streamIds); + + if (addedTrack.ok()) { + _localAudioTrackSender = addedTrack.value(); + for (auto &it : _peerConnection->GetTransceivers()) { + if (it->media_type() == cricket::MediaType::MEDIA_TYPE_AUDIO) { + if (_localAudioTrackSender.get() == it->sender().get()) { + 
it->SetDirection(webrtc::RtpTransceiverDirection::kRecvOnly); + } + + break; + } + } + } + + setAudioInputDevice(_initialInputDeviceId); + setAudioOutputDevice(_initialOutputDeviceId); + + // At least on Windows recording doesn't work without started playout. + withAudioDeviceModule([weak](webrtc::AudioDeviceModule *adm) { +#ifdef WEBRTC_WIN + // At least on Windows starting/stopping playout while recording + // is active leads to errors in recording and assertion violation. + adm->EnableBuiltInAEC(false); +#endif // WEBRTC_WIN + + if (adm->InitPlayout()) { + adm->StartPlayout(); + } else { + getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->withAudioDeviceModule([](webrtc::AudioDeviceModule *adm) { + if (adm->InitPlayout()) { + adm->StartPlayout(); + } + }); + }, 2000); + } + }); + + //beginStatsTimer(100); + beginLevelsTimer(50); + } + + + void setAudioInputDevice(std::string id) { +#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) + withAudioDeviceModule([&](webrtc::AudioDeviceModule *adm) { + const auto recording = adm->Recording(); + if (recording) { + adm->StopRecording(); + } + const auto finish = [&] { + if (recording) { + adm->InitRecording(); + adm->StartRecording(); + } + }; + if (id == "default" || id.empty()) { + if (const auto result = adm->SetRecordingDevice(webrtc::AudioDeviceModule::kDefaultCommunicationDevice)) { + RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << "): SetRecordingDevice(kDefaultCommunicationDevice) failed: " << result << "."; + } else { + RTC_LOG(LS_INFO) << "setAudioInputDevice(" << id << "): SetRecordingDevice(kDefaultCommunicationDevice) success."; + } + return finish(); + } + const auto count = adm + ? 
adm->RecordingDevices() + : int16_t(-666); + if (count <= 0) { + RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << "): Could not get recording devices count: " << count << "."; + return finish(); + } + for (auto i = 0; i != count; ++i) { + char name[webrtc::kAdmMaxDeviceNameSize + 1] = { 0 }; + char guid[webrtc::kAdmMaxGuidSize + 1] = { 0 }; + adm->RecordingDeviceName(i, name, guid); + if (id == guid) { + const auto result = adm->SetRecordingDevice(i); + if (result != 0) { + RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << ") name '" << std::string(name) << "' failed: " << result << "."; + } else { + RTC_LOG(LS_INFO) << "setAudioInputDevice(" << id << ") name '" << std::string(name) << "' success."; + } + return finish(); + } + } + RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << "): Could not find recording device."; + return finish(); + }); +#endif + } + + void setAudioOutputDevice(std::string id) { +#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) + withAudioDeviceModule([&](webrtc::AudioDeviceModule *adm) { + const auto playing = adm->Playing(); + if (playing) { + adm->StopPlayout(); + } + const auto finish = [&] { + if (playing) { + adm->InitPlayout(); + adm->StartPlayout(); + } + }; + if (id == "default" || id.empty()) { + if (const auto result = adm->SetPlayoutDevice(webrtc::AudioDeviceModule::kDefaultCommunicationDevice)) { + RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << "): SetPlayoutDevice(kDefaultCommunicationDevice) failed: " << result << "."; + } else { + RTC_LOG(LS_INFO) << "setAudioOutputDevice(" << id << "): SetPlayoutDevice(kDefaultCommunicationDevice) success."; + } + return finish(); + } + const auto count = adm + ? 
adm->PlayoutDevices() + : int16_t(-666); + if (count <= 0) { + RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << "): Could not get playout devices count: " << count << "."; + return finish(); + } + for (auto i = 0; i != count; ++i) { + char name[webrtc::kAdmMaxDeviceNameSize + 1] = { 0 }; + char guid[webrtc::kAdmMaxGuidSize + 1] = { 0 }; + adm->PlayoutDeviceName(i, name, guid); + if (id == guid) { + const auto result = adm->SetPlayoutDevice(i); + if (result != 0) { + RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << ") name '" << std::string(name) << "' failed: " << result << "."; + } else { + RTC_LOG(LS_INFO) << "setAudioOutputDevice(" << id << ") name '" << std::string(name) << "' success."; + } + return finish(); + } + } + RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << "): Could not find playout device."; + return finish(); + }); +#endif + } + + void updateIsConnected(bool isConnected) { + _isConnected = isConnected; + + auto timestamp = rtc::TimeMillis(); + + _isConnectedUpdateValidTaskId++; + + if (!isConnected && _appliedOfferTimestamp > timestamp - 1000) { + auto taskId = _isConnectedUpdateValidTaskId; + const auto weak = std::weak_ptr(shared_from_this()); + getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, taskId]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + if (strong->_isConnectedUpdateValidTaskId == taskId) { + strong->_networkStateUpdated(strong->_isConnected); + } + }, 1000); + } else { + _networkStateUpdated(_isConnected); + } + } + + void stop() { + _peerConnection->Close(); + } + + void emitJoinPayload(std::function completion) { + const auto weak = std::weak_ptr(shared_from_this()); + webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; + rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, completion](std::string sdp, std::string type) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, type, completion](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + + auto lines 
= splitSdpLines(sdp); + std::vector resultSdp; + + std::ostringstream generatedSsrcStringStream; + generatedSsrcStringStream << strong->_mainStreamAudioSsrc; + auto generatedSsrcString = generatedSsrcStringStream.str(); + + for (auto &line : lines) { + auto adjustedLine = line; + if (adjustedLine.find("a=ssrc:") == 0) { + int startIndex = 7; + int i = startIndex; + while (i < adjustedLine.size()) { + if (!isdigit(adjustedLine[i])) { + break; + } + i++; + } + if (i >= startIndex) { + adjustedLine.replace(startIndex, i - startIndex, generatedSsrcString); + } + } + appendSdp(resultSdp, adjustedLine); + } + + std::ostringstream result; + for (auto &line : resultSdp) { + result << line << "\n"; + } + + auto adjustedSdp = result.str(); + + RTC_LOG(LoggingSeverity::WARNING) << "----- setLocalDescription join -----"; + RTC_LOG(LoggingSeverity::WARNING) << adjustedSdp; + RTC_LOG(LoggingSeverity::WARNING) << "-----"; + + webrtc::SdpParseError error; + webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type, adjustLocalDescription(adjustedSdp), &error); + if (sessionDescription != nullptr) { + rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, adjustedSdp, completion]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + auto payload = parseSdpIntoJoinPayload(adjustedSdp); + if (payload) { + payload->ssrc = strong->_mainStreamAudioSsrc; + completion(payload.value()); + } + }, [](webrtc::RTCError error) { + })); + strong->_peerConnection->SetLocalDescription(observer, sessionDescription); + } else { + return; + } + }); + })); + _peerConnection->CreateOffer(observer, options); + } + + void setJoinResponsePayload(GroupJoinResponsePayload payload) { + _joinPayload = payload; + auto sdp = parseJoinResponseIntoSdp(_sessionId, _mainStreamAudioSsrc, payload, true, _allOtherSsrcs, _activeOtherSsrcs); + setOfferSdp(sdp, true, true, false); + } + + void removeSsrcs(std::vector ssrcs) { + if (!_joinPayload) { + return; + } + + 
bool updated = false; + for (auto ssrc : ssrcs) { + if (std::find(_allOtherSsrcs.begin(), _allOtherSsrcs.end(), ssrc) != _allOtherSsrcs.end() && std::find(_activeOtherSsrcs.begin(), _activeOtherSsrcs.end(), ssrc) != _activeOtherSsrcs.end()) { + if (!_fakeIncomingSsrc || ssrc == _fakeIncomingSsrc) { + generateAndInsertFakeIncomingSsrc(); + } + _activeOtherSsrcs.erase(ssrc); + updated = true; + } + } + + if (updated) { + auto sdp = parseJoinResponseIntoSdp(_sessionId, _mainStreamAudioSsrc, _joinPayload.value(), false, _allOtherSsrcs, _activeOtherSsrcs); + setOfferSdp(sdp, false, false, false); + } + } + + void addSsrcsInternal(std::vector const &ssrcs, bool completeMissingSsrcSetup) { + if (!_joinPayload) { + if (completeMissingSsrcSetup) { + completeProcessingMissingSsrcs(); + } + return; + } + + for (auto ssrc : ssrcs) { + if (std::find(_allOtherSsrcs.begin(), _allOtherSsrcs.end(), ssrc) == _allOtherSsrcs.end()) { + _allOtherSsrcs.push_back(ssrc); + _activeOtherSsrcs.insert(ssrc); + } + } + + auto sdp = parseJoinResponseIntoSdp(_sessionId, _mainStreamAudioSsrc, _joinPayload.value(), false, _allOtherSsrcs, _activeOtherSsrcs); + setOfferSdp(sdp, false, false, completeMissingSsrcSetup); + } + + void applyLocalSdp() { + const auto weak = std::weak_ptr(shared_from_this()); + webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; + rtc::scoped_refptr observer(new rtc::RefCountedObject([weak](std::string sdp, std::string type) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, type](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + + auto lines = splitSdpLines(sdp); + std::vector resultSdp; + + std::ostringstream generatedSsrcStringStream; + generatedSsrcStringStream << strong->_mainStreamAudioSsrc; + auto generatedSsrcString = generatedSsrcStringStream.str(); + + for (auto &line : lines) { + auto adjustedLine = line; + if (adjustedLine.find("a=ssrc:") == 0) { + int startIndex = 7; + int i = startIndex; + while (i < adjustedLine.size()) { + 
if (!isdigit(adjustedLine[i])) { + break; + } + i++; + } + if (i >= startIndex) { + adjustedLine.replace(startIndex, i - startIndex, generatedSsrcString); + } + } + appendSdp(resultSdp, adjustedLine); + } + + std::ostringstream result; + for (auto &line : resultSdp) { + result << line << "\n"; + } + + auto adjustedSdp = result.str(); + + RTC_LOG(LoggingSeverity::WARNING) << "----- setLocalDescription applyLocalSdp -----"; + RTC_LOG(LoggingSeverity::WARNING) << adjustedSdp; + RTC_LOG(LoggingSeverity::WARNING) << "-----"; + + webrtc::SdpParseError error; + webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type, adjustLocalDescription(adjustedSdp), &error); + if (sessionDescription != nullptr) { + rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, adjustedSdp]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + if (!strong->_joinPayload) { + return; + } + + auto sdp = parseJoinResponseIntoSdp(strong->_sessionId, strong->_mainStreamAudioSsrc, strong->_joinPayload.value(), true, strong->_allOtherSsrcs, strong->_activeOtherSsrcs); + strong->setOfferSdp(sdp, false, true, false); + }, [](webrtc::RTCError error) { + })); + strong->_peerConnection->SetLocalDescription(observer, sessionDescription); + } else { + return; + } + }); + })); + _peerConnection->CreateOffer(observer, options); + } + + void setOfferSdp(std::string const &offerSdp, bool isInitialJoinAnswer, bool isAnswer, bool completeMissingSsrcSetup) { + if (!isAnswer && _appliedRemoteRescription == offerSdp) { + if (completeMissingSsrcSetup) { + completeProcessingMissingSsrcs(); + } + return; + } + _appliedRemoteRescription = offerSdp; + + RTC_LOG(LoggingSeverity::WARNING) << "----- setOfferSdp " << (isAnswer ? 
"answer" : "offer") << " -----"; + RTC_LOG(LoggingSeverity::WARNING) << offerSdp; + RTC_LOG(LoggingSeverity::WARNING) << "-----"; + + webrtc::SdpParseError error; + webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(isAnswer ? "answer" : "offer", adjustLocalDescription(offerSdp), &error); + if (!sessionDescription) { + if (completeMissingSsrcSetup) { + completeProcessingMissingSsrcs(); + } + return; + } + + if (!isAnswer) { + _appliedOfferTimestamp = rtc::TimeMillis(); + } + + const auto weak = std::weak_ptr(shared_from_this()); + rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, isInitialJoinAnswer, isAnswer, completeMissingSsrcSetup]() { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, isInitialJoinAnswer, isAnswer, completeMissingSsrcSetup](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + if (!isAnswer) { + strong->emitAnswer(completeMissingSsrcSetup); + } else { + if (isInitialJoinAnswer) { + strong->completedInitialSetup(); + } + + if (completeMissingSsrcSetup) { + strong->completeProcessingMissingSsrcs(); + } + } + }); + }, [weak, completeMissingSsrcSetup](webrtc::RTCError error) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, completeMissingSsrcSetup](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + if (completeMissingSsrcSetup) { + strong->completeProcessingMissingSsrcs(); + } + }); + })); + + _peerConnection->SetRemoteDescription(observer, sessionDescription); + } + + void beginStatsTimer(int timeoutMs) { + const auto weak = std::weak_ptr(shared_from_this()); + getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->collectStats(); + }); + }, timeoutMs); + } + + void beginLevelsTimer(int timeoutMs) { + const auto weak = std::weak_ptr(shared_from_this()); + getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + auto strong = 
weak.lock(); + if (!strong) { + return; + } + + GroupLevelsUpdate levelsUpdate; + levelsUpdate.updates.reserve(strong->_audioLevels.size() + 1); + for (auto &it : strong->_audioLevels) { + if (it.second.level > 0.001f) { + levelsUpdate.updates.push_back(GroupLevelUpdate{ + it.first, + it.second, + }); + } + } + levelsUpdate.updates.push_back(GroupLevelUpdate{ 0, strong->_myAudioLevel }); + + strong->_audioLevels.clear(); + strong->_audioLevelsUpdated(levelsUpdate); + + strong->beginLevelsTimer(50); + }, timeoutMs); + } + + void collectStats() { + const auto weak = std::weak_ptr(shared_from_this()); + + rtc::scoped_refptr observer(new rtc::RefCountedObject([weak](const rtc::scoped_refptr &stats) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, stats](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->reportStats(stats); + strong->beginStatsTimer(100); + }); + })); + _peerConnection->GetStats(observer); + } + + void reportStats(const rtc::scoped_refptr &stats) { + } + + void onTrackAdded(rtc::scoped_refptr transceiver) { + if (transceiver->direction() == webrtc::RtpTransceiverDirection::kRecvOnly && transceiver->media_type() == cricket::MediaType::MEDIA_TYPE_AUDIO) { + if (transceiver->mid()) { + auto streamId = transceiver->mid().value(); + if (streamId.find("audio") != 0) { + return; + } + streamId.replace(0, 5, ""); + std::istringstream iss(streamId); + uint32_t ssrc = 0; + iss >> ssrc; + + auto remoteAudioTrack = static_cast(transceiver->receiver()->track().get()); + if (_audioTrackSinks.find(ssrc) == _audioTrackSinks.end()) { + const auto weak = std::weak_ptr(shared_from_this()); + std::shared_ptr sink(new AudioTrackSinkInterfaceImpl([weak, ssrc](float level, bool hasSpeech) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc, level, hasSpeech]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + auto current = strong->_audioLevels.find(ssrc); + if (current != strong->_audioLevels.end()) { + if (current->second.level 
< level) { + strong->_audioLevels[ssrc] = GroupLevelValue{ + level, + hasSpeech, + }; + } + } else { + strong->_audioLevels.emplace( + ssrc, + GroupLevelValue{ + level, + hasSpeech, + }); + } + }); + })); + _audioTrackSinks[ssrc] = sink; + remoteAudioTrack->AddSink(sink.get()); + } + } + } + } + + void onTrackRemoved(rtc::scoped_refptr receiver) { + } + + void onMissingSsrc(uint32_t ssrc) { + if (_processedMissingSsrcs.find(ssrc) == _processedMissingSsrcs.end()) { + _processedMissingSsrcs.insert(ssrc); + + _missingSsrcQueue.insert(ssrc); + if (!_isProcessingMissingSsrcs) { + beginProcessingMissingSsrcs(); + } + } + } + + void beginProcessingMissingSsrcs() { + if (_isProcessingMissingSsrcs) { + return; + } + _isProcessingMissingSsrcs = true; + auto timestamp = rtc::TimeMillis(); + if (timestamp > _missingSsrcsProcessedTimestamp + 200) { + applyMissingSsrcs(); + } else { + const auto weak = std::weak_ptr(shared_from_this()); + getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->applyMissingSsrcs(); + }, 200); + } + } + + void applyMissingSsrcs() { + assert(_isProcessingMissingSsrcs); + if (_missingSsrcQueue.size() == 0) { + completeProcessingMissingSsrcs(); + return; + } + + std::vector addSsrcs; + for (auto ssrc : _missingSsrcQueue) { + addSsrcs.push_back(ssrc); + } + _missingSsrcQueue.clear(); + + const auto weak = std::weak_ptr(shared_from_this()); + addSsrcsInternal(addSsrcs, true); + } + + void completeProcessingMissingSsrcs() { + assert(_isProcessingMissingSsrcs); + _isProcessingMissingSsrcs = false; + _missingSsrcsProcessedTimestamp = rtc::TimeMillis(); + + if (_missingSsrcQueue.size() != 0) { + beginProcessingMissingSsrcs(); + } + } + + void completedInitialSetup() { + //beginDebugSsrcTimer(1000); + } + + uint32_t _nextTestSsrc = 100; + + void beginDebugSsrcTimer(int timeout) { + const auto weak = std::weak_ptr(shared_from_this()); + getMediaThread()->PostDelayedTask(RTC_FROM_HERE, 
[weak]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + if (strong->_nextTestSsrc >= 100 + 50) { + return; + } + + strong->_nextTestSsrc++; + strong->onMissingSsrc(strong->_nextTestSsrc); + + strong->beginDebugSsrcTimer(20); + }, timeout); + } + + void setIsMuted(bool isMuted) { + if (!_localAudioTrackSender) { + return; + } + if (_isMuted == isMuted) { + return; + } + + for (auto &it : _peerConnection->GetTransceivers()) { + if (it->media_type() == cricket::MediaType::MEDIA_TYPE_AUDIO) { + if (_localAudioTrackSender.get() == it->sender().get()) { + if (isMuted) { + /*if (it->direction() == webrtc::RtpTransceiverDirection::kSendRecv) { + it->SetDirection(webrtc::RtpTransceiverDirection::kRecvOnly); + + applyLocalSdp(); + + break; + }*/ + } else { + if (it->direction() == webrtc::RtpTransceiverDirection::kRecvOnly) { + it->SetDirection(webrtc::RtpTransceiverDirection::kSendRecv); + + applyLocalSdp(); + + break; + } + } + } + + break; + } + } + + _isMuted = isMuted; + _localAudioTrack->set_enabled(!isMuted); + + RTC_LOG(LoggingSeverity::WARNING) << "setIsMuted: " << isMuted; + } + + void emitAnswer(bool completeMissingSsrcSetup) { + const auto weak = std::weak_ptr(shared_from_this()); + + webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; + rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, completeMissingSsrcSetup](std::string sdp, std::string type) { + getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, type, completeMissingSsrcSetup](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + + RTC_LOG(LoggingSeverity::WARNING) << "----- setLocalDescription answer -----"; + RTC_LOG(LoggingSeverity::WARNING) << sdp; + RTC_LOG(LoggingSeverity::WARNING) << "-----"; + + webrtc::SdpParseError error; + webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type, adjustLocalDescription(sdp), &error); + if (sessionDescription != nullptr) { + rtc::scoped_refptr observer(new 
rtc::RefCountedObject([weak, sdp, completeMissingSsrcSetup]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + if (completeMissingSsrcSetup) { + strong->completeProcessingMissingSsrcs(); + } + }, [weak, completeMissingSsrcSetup](webrtc::RTCError error) { + auto strong = weak.lock(); + if (!strong) { + return; + } + + if (completeMissingSsrcSetup) { + strong->completeProcessingMissingSsrcs(); + } + })); + strong->_peerConnection->SetLocalDescription(observer, sessionDescription); + } else { + if (completeMissingSsrcSetup) { + strong->completeProcessingMissingSsrcs(); + } + } + }); + })); + _peerConnection->CreateAnswer(observer, options); + } + +private: + void withAudioDeviceModule(std::function callback) { + _adm_thread->Invoke(RTC_FROM_HERE, [&] { + callback(_adm_use_withAudioDeviceModule.get()); + }); + } + + std::function _networkStateUpdated; + std::function _audioLevelsUpdated; + + int32_t _myAudioLevelPeakCount = 0; + float _myAudioLevelPeak = 0; + GroupLevelValue _myAudioLevel; + + std::string _initialInputDeviceId; + std::string _initialOutputDeviceId; + + uint32_t _sessionId = 6543245; + uint32_t _mainStreamAudioSsrc = 0; + uint32_t _fakeIncomingSsrc = 0; + absl::optional _joinPayload; + + int64_t _appliedOfferTimestamp = 0; + bool _isConnected = false; + int _isConnectedUpdateValidTaskId = 0; + + bool _isMuted = true; + + std::vector _allOtherSsrcs; + std::set _activeOtherSsrcs; + std::set _processedMissingSsrcs; + + int64_t _missingSsrcsProcessedTimestamp = 0; + bool _isProcessingMissingSsrcs = false; + std::set _missingSsrcQueue; + + std::string _appliedRemoteRescription; + + rtc::scoped_refptr _nativeFactory; + std::unique_ptr _observer; + rtc::scoped_refptr _peerConnection; + std::unique_ptr _localAudioTrackSink; + rtc::scoped_refptr _localAudioTrack; + rtc::scoped_refptr _localAudioTrackSender; + + rtc::Thread *_adm_thread = nullptr; + rtc::scoped_refptr _adm_use_withAudioDeviceModule; + + std::map> _audioTrackSinks; + std::map 
_audioLevels; + + std::shared_ptr _platformContext; +}; + +GroupInstanceImpl::GroupInstanceImpl(GroupInstanceDescriptor &&descriptor) +: _logSink(std::make_unique(descriptor.config.logPath)) { + rtc::LogMessage::LogToDebug(rtc::LS_INFO); + rtc::LogMessage::SetLogToStderr(true); + if (_logSink) { + rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO); + } + + _manager.reset(new ThreadLocalObject(getMediaThread(), [descriptor = std::move(descriptor)]() mutable { + return new GroupInstanceManager(std::move(descriptor)); + })); + _manager->perform(RTC_FROM_HERE, [](GroupInstanceManager *manager) { + manager->start(); + }); +} + +GroupInstanceImpl::~GroupInstanceImpl() { + if (_logSink) { + rtc::LogMessage::RemoveLogToStream(_logSink.get()); + } + _manager = nullptr; + + // Wait until _manager is destroyed, otherwise there is a race condition + // in destruction of PeerConnection on media thread and network thread. + getMediaThread()->Invoke(RTC_FROM_HERE, [] {}); +} + +void GroupInstanceImpl::stop() { + _manager->perform(RTC_FROM_HERE, [](GroupInstanceManager *manager) { + manager->stop(); + }); +} + +void GroupInstanceImpl::emitJoinPayload(std::function completion) { + _manager->perform(RTC_FROM_HERE, [completion](GroupInstanceManager *manager) { + manager->emitJoinPayload(completion); + }); +} + +void GroupInstanceImpl::setJoinResponsePayload(GroupJoinResponsePayload payload) { + _manager->perform(RTC_FROM_HERE, [payload](GroupInstanceManager *manager) { + manager->setJoinResponsePayload(payload); + }); +} + +void GroupInstanceImpl::removeSsrcs(std::vector ssrcs) { + _manager->perform(RTC_FROM_HERE, [ssrcs](GroupInstanceManager *manager) { + manager->removeSsrcs(ssrcs); + }); +} + +void GroupInstanceImpl::setIsMuted(bool isMuted) { + _manager->perform(RTC_FROM_HERE, [isMuted](GroupInstanceManager *manager) { + manager->setIsMuted(isMuted); + }); +} + +void GroupInstanceImpl::setAudioInputDevice(std::string id) { + _manager->perform(RTC_FROM_HERE, 
[id](GroupInstanceManager *manager) { + manager->setAudioInputDevice(id); + }); +} +void GroupInstanceImpl::setAudioOutputDevice(std::string id) { + _manager->perform(RTC_FROM_HERE, [id](GroupInstanceManager *manager) { + manager->setAudioOutputDevice(id); + }); +} + +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h new file mode 100644 index 000000000..8a8ed96ae --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h @@ -0,0 +1,120 @@ +#ifndef TGCALLS_GROUP_INSTANCE_IMPL_H +#define TGCALLS_GROUP_INSTANCE_IMPL_H + +#include +#include +#include +#include +#include + +#include "../Instance.h" + +namespace webrtc { +class AudioDeviceModule; +class TaskQueueFactory; +} + +namespace tgcalls { + +class LogSinkImpl; +class GroupInstanceManager; + +struct GroupConfig { + FilePath logPath; +}; + +struct GroupLevelValue { + float level = 0.; + bool voice = false; +}; + +struct GroupLevelUpdate { + uint32_t ssrc = 0; + GroupLevelValue value; +}; + +struct GroupLevelsUpdate { + std::vector updates; +}; + +struct GroupInstanceDescriptor { + GroupConfig config; + std::function networkStateUpdated; + std::function audioLevelsUpdated; + std::string initialInputDeviceId; + std::string initialOutputDeviceId; + bool debugIgnoreMissingSsrcs = false; + std::shared_ptr platformContext; +}; + +struct GroupJoinPayloadFingerprint { + std::string hash; + std::string setup; + std::string fingerprint; +}; + +struct GroupJoinPayload { + std::string ufrag; + std::string pwd; + std::vector fingerprints; + + uint32_t ssrc = 0; +}; + +struct GroupJoinResponseCandidate { + std::string port; + std::string protocol; + std::string network; + std::string generation; + std::string id; + std::string component; + std::string foundation; + std::string priority; + std::string ip; + std::string type; + + std::string tcpType; + std::string relAddr; + std::string relPort; +}; + +struct 
GroupJoinResponsePayload { + std::string ufrag; + std::string pwd; + std::vector fingerprints; + std::vector candidates; +}; + +template +class ThreadLocalObject; + +class GroupInstanceImpl final { +public: + explicit GroupInstanceImpl(GroupInstanceDescriptor &&descriptor); + ~GroupInstanceImpl(); + + void stop(); + + void emitJoinPayload(std::function completion); + void setJoinResponsePayload(GroupJoinResponsePayload payload); + void removeSsrcs(std::vector ssrcs); + + void setIsMuted(bool isMuted); + void setAudioOutputDevice(std::string id); + void setAudioInputDevice(std::string id); + + struct AudioDevice { + enum class Type {Input, Output}; + std::string name; + std::string guid; + }; + static std::vector getAudioDevices(AudioDevice::Type type); +private: + std::unique_ptr> _manager; + std::unique_ptr _logSink; + +}; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/legacy/InstanceImplLegacy.cpp b/TMessagesProj/jni/voip/tgcalls/legacy/InstanceImplLegacy.cpp index 03735d5ca..ef66e29ba 100644 --- a/TMessagesProj/jni/voip/tgcalls/legacy/InstanceImplLegacy.cpp +++ b/TMessagesProj/jni/voip/tgcalls/legacy/InstanceImplLegacy.cpp @@ -138,7 +138,7 @@ onSignalBarsUpdated_(std::move(descriptor.signalBarsUpdated)) { descriptor.config.enableCallUpgrade ); mappedConfig.enableVolumeControl = descriptor.config.enableVolumeControl; - mappedConfig.logFilePath = descriptor.config.logPath; + mappedConfig.logFilePath = descriptor.config.logPath.data; mappedConfig.statsDumpFilePath = {}; controller_->SetConfig(mappedConfig); diff --git a/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp b/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp index aaa9e7dfe..4ab539429 100644 --- a/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp +++ b/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp @@ -934,7 +934,7 @@ private: }; InstanceImplReference::InstanceImplReference(Descriptor &&descriptor) : 
- logSink_(std::make_unique(descriptor.config)) { + logSink_(std::make_unique(descriptor.config.logPath)) { rtc::LogMessage::AddLogToStream(logSink_.get(), rtc::LS_INFO); internal_.reset(new ThreadLocalObject(getMediaThread(), [descriptor = std::move(descriptor)]() { diff --git a/TMessagesProj/jni/voip/webrtc/api/adaptation/resource.cc b/TMessagesProj/jni/voip/webrtc/api/adaptation/resource.cc index 0a9c83a31..dac03fe01 100644 --- a/TMessagesProj/jni/voip/webrtc/api/adaptation/resource.cc +++ b/TMessagesProj/jni/voip/webrtc/api/adaptation/resource.cc @@ -10,6 +10,8 @@ #include "api/adaptation/resource.h" +#include "rtc_base/checks.h" + namespace webrtc { const char* ResourceUsageStateToString(ResourceUsageState usage_state) { @@ -19,6 +21,7 @@ const char* ResourceUsageStateToString(ResourceUsageState usage_state) { case ResourceUsageState::kUnderuse: return "kUnderuse"; } + RTC_CHECK_NOTREACHED(); } ResourceListener::~ResourceListener() {} diff --git a/TMessagesProj/jni/voip/webrtc/api/array_view.h b/TMessagesProj/jni/voip/webrtc/api/array_view.h index a66369a3d..df365cb74 100644 --- a/TMessagesProj/jni/voip/webrtc/api/array_view.h +++ b/TMessagesProj/jni/voip/webrtc/api/array_view.h @@ -13,6 +13,7 @@ #include #include +#include #include #include "rtc_base/checks.h" @@ -258,6 +259,18 @@ class ArrayView final : public impl::ArrayViewBase { T* end() const { return this->data() + this->size(); } const T* cbegin() const { return this->data(); } const T* cend() const { return this->data() + this->size(); } + std::reverse_iterator rbegin() const { + return std::make_reverse_iterator(end()); + } + std::reverse_iterator rend() const { + return std::make_reverse_iterator(begin()); + } + std::reverse_iterator crbegin() const { + return std::make_reverse_iterator(cend()); + } + std::reverse_iterator crend() const { + return std::make_reverse_iterator(cbegin()); + } ArrayView subview(size_t offset, size_t size) const { return offset < this->size() diff --git 
a/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame_processor.h b/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame_processor.h new file mode 100644 index 000000000..bc21d1485 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame_processor.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_FRAME_PROCESSOR_H_ +#define API_AUDIO_AUDIO_FRAME_PROCESSOR_H_ + +#include +#include + +namespace webrtc { + +class AudioFrame; + +// If passed into PeerConnectionFactory, will be used for additional +// processing of captured audio frames, performed before encoding. +// Implementations must be thread-safe. +class AudioFrameProcessor { + public: + using OnAudioFrameCallback = std::function)>; + virtual ~AudioFrameProcessor() = default; + + // Processes the frame received from WebRTC, is called by WebRTC off the + // realtime audio capturing path. AudioFrameProcessor must reply with + // processed frames by calling |sink_callback| if it was provided in SetSink() + // call. |sink_callback| can be called in the context of Process(). + virtual void Process(std::unique_ptr frame) = 0; + + // Atomically replaces the current sink with the new one. Before the + // first call to this function, or if the provided |sink_callback| is nullptr, + // processed frames are simply discarded. 
+ virtual void SetSink(OnAudioFrameCallback sink_callback) = 0; +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_FRAME_PROCESSOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h index a50562553..3ed11ff8b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h @@ -143,6 +143,7 @@ struct RTC_EXPORT EchoCanceller3Config { float noise_gate_slope = 0.3f; size_t render_pre_window_size = 1; size_t render_post_window_size = 1; + bool model_reverb_in_nonlinear_mode = true; } echo_model; struct ComfortNoise { @@ -215,11 +216,12 @@ struct RTC_EXPORT EchoCanceller3Config { struct HighBandsSuppression { float enr_threshold = 1.f; float max_gain_during_echo = 1.f; - float anti_howling_activation_threshold = 25.f; - float anti_howling_gain = 0.01f; + float anti_howling_activation_threshold = 400.f; + float anti_howling_gain = 1.f; } high_bands_suppression; float floor_first_increase = 0.00001f; + bool conservative_hf_suppression = false; } suppressor; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc index f5c124967..907b47271 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc @@ -302,6 +302,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.echo_model.render_pre_window_size); ReadParam(section, "render_post_window_size", &cfg.echo_model.render_post_window_size); + ReadParam(section, "model_reverb_in_nonlinear_mode", + &cfg.echo_model.model_reverb_in_nonlinear_mode); } if (rtc::GetValueFromJsonObject(aec3_root, "comfort_noise", §ion)) { @@ -381,6 +383,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, 
ReadParam(section, "floor_first_increase", &cfg.suppressor.floor_first_increase); + ReadParam(section, "conservative_hf_suppression", + &cfg.suppressor.conservative_hf_suppression); } } @@ -585,7 +589,9 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { ost << "\"render_pre_window_size\": " << config.echo_model.render_pre_window_size << ","; ost << "\"render_post_window_size\": " - << config.echo_model.render_post_window_size; + << config.echo_model.render_post_window_size << ","; + ost << "\"model_reverb_in_nonlinear_mode\": " + << (config.echo_model.model_reverb_in_nonlinear_mode ? "true" : "false"); ost << "},"; ost << "\"comfort_noise\": {"; @@ -672,7 +678,10 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { ost << "\"anti_howling_gain\": " << config.suppressor.high_bands_suppression.anti_howling_gain; ost << "},"; - ost << "\"floor_first_increase\": " << config.suppressor.floor_first_increase; + ost << "\"floor_first_increase\": " << config.suppressor.floor_first_increase + << ","; + ost << "\"conservative_hf_suppression\": " + << config.suppressor.conservative_hf_suppression; ost << "}"; ost << "}"; ost << "}"; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory.h index c36a0e103..2811f6704 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory.h @@ -38,6 +38,8 @@ class AudioDecoderFactory : public rtc::RefCountInterface { // communication between the AudioEncoder and AudioDecoder instances, which is // needed for some codecs with built-in bandwidth adaptation.) // + // Returns null if the format isn't supported. + // // Note: Implementations need to be robust against combinations other than // one encoder, one decoder getting the same ID; such decoders must still // work. 
diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory.h index 48995a876..6128b1b6f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory.h @@ -44,6 +44,8 @@ class AudioEncoderFactory : public rtc::RefCountInterface { // communication between the AudioEncoder and AudioDecoder instances, which is // needed for some codecs with built-in bandwidth adaptation.) // + // Returns null if the format isn't supported. + // // Note: Implementations need to be robust against combinations other than // one encoder, one decoder getting the same ID; such encoders must still // work. diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc index bd653b797..035b0dc34 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc @@ -32,7 +32,7 @@ int GetIlbcBitrate(int ptime) { // 50 bytes per frame of 30 ms => (approx) 13333 bits/s. 
return 13333; default: - FATAL(); + RTC_CHECK_NOTREACHED(); } } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc index 622315007..008fce3e8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc @@ -18,6 +18,7 @@ #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" #include "media/base/media_engine.h" #include "media/engine/webrtc_media_engine.h" #include "modules/audio_device/include/audio_device.h" @@ -36,7 +37,8 @@ rtc::scoped_refptr CreatePeerConnectionFactory( std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing) { + rtc::scoped_refptr audio_processing, + AudioFrameProcessor* audio_frame_processor) { PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = network_thread; dependencies.worker_thread = worker_thread; @@ -45,12 +47,14 @@ rtc::scoped_refptr CreatePeerConnectionFactory( dependencies.call_factory = CreateCallFactory(); dependencies.event_log_factory = std::make_unique( dependencies.task_queue_factory.get()); + dependencies.trials = std::make_unique(); cricket::MediaEngineDependencies media_dependencies; media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); media_dependencies.adm = std::move(default_adm); media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory); media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory); + media_dependencies.audio_frame_processor = audio_frame_processor; if (audio_processing) { media_dependencies.audio_processing = std::move(audio_processing); } else { @@ -59,6 +63,7 @@ rtc::scoped_refptr 
CreatePeerConnectionFactory( media_dependencies.audio_mixer = std::move(audio_mixer); media_dependencies.video_encoder_factory = std::move(video_encoder_factory); media_dependencies.video_decoder_factory = std::move(video_decoder_factory); + media_dependencies.trials = dependencies.trials.get(); dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_dependencies)); diff --git a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h index ac50736b8..4eb0a00e5 100644 --- a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h @@ -31,6 +31,7 @@ class Thread; namespace webrtc { class AudioDeviceModule; +class AudioFrameProcessor; class AudioProcessing; // Create a new instance of PeerConnectionFactoryInterface with optional video @@ -47,7 +48,8 @@ CreatePeerConnectionFactory( std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing); + rtc::scoped_refptr audio_processing, + AudioFrameProcessor* audio_frame_processor = nullptr); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h index 516967998..8ee33ca0e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h @@ -22,7 +22,7 @@ namespace webrtc { // are called on is an implementation detail. 
BEGIN_SIGNALING_PROXY_MAP(MediaStream) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_METHOD0(AudioTrackVector, GetAudioTracks) PROXY_METHOD0(VideoTrackVector, GetVideoTracks) PROXY_METHOD1(rtc::scoped_refptr, diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream_track.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_track.h similarity index 88% rename from TMessagesProj/jni/voip/webrtc/pc/media_stream_track.h rename to TMessagesProj/jni/voip/webrtc/api/media_stream_track.h index 358d89a25..738f03414 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream_track.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_track.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef PC_MEDIA_STREAM_TRACK_H_ -#define PC_MEDIA_STREAM_TRACK_H_ +#ifndef API_MEDIA_STREAM_TRACK_H_ +#define API_MEDIA_STREAM_TRACK_H_ #include @@ -38,6 +38,7 @@ class MediaStreamTrack : public Notifier { } return fire_on_change; } + void set_ended() { set_state(MediaStreamTrackInterface::TrackState::kEnded); } protected: explicit MediaStreamTrack(const std::string& id) @@ -53,10 +54,10 @@ class MediaStreamTrack : public Notifier { private: bool enabled_; - std::string id_; + const std::string id_; MediaStreamTrackInterface::TrackState state_; }; } // namespace webrtc -#endif // PC_MEDIA_STREAM_TRACK_H_ +#endif // API_MEDIA_STREAM_TRACK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h index d3dc25504..59dcb7724 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h @@ -26,8 +26,8 @@ namespace webrtc { BEGIN_SIGNALING_PROXY_MAP(AudioTrack) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(std::string, kind) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(std::string, kind) 
+BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(TrackState, state) PROXY_CONSTMETHOD0(bool, enabled) PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource) @@ -42,8 +42,8 @@ END_PROXY_MAP() BEGIN_PROXY_MAP(VideoTrack) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(std::string, kind) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(std::string, kind) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(TrackState, state) PROXY_CONSTMETHOD0(bool, enabled) PROXY_METHOD1(bool, set_enabled, bool) diff --git a/TMessagesProj/jni/voip/webrtc/api/media_types.cc b/TMessagesProj/jni/voip/webrtc/api/media_types.cc index 6bc693860..3453ce390 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_types.cc +++ b/TMessagesProj/jni/voip/webrtc/api/media_types.cc @@ -26,10 +26,12 @@ std::string MediaTypeToString(MediaType type) { return kMediaTypeVideo; case MEDIA_TYPE_DATA: return kMediaTypeData; + case MEDIA_TYPE_UNSUPPORTED: + // Unsupported media stores the m= differently. + RTC_NOTREACHED(); + return ""; } - FATAL(); - // Not reachable; avoids compile warning. 
- return ""; + RTC_CHECK_NOTREACHED(); } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/api/media_types.h b/TMessagesProj/jni/voip/webrtc/api/media_types.h index 8c6ba3d1e..b2ff08c0c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_types.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_types.h @@ -20,7 +20,12 @@ namespace cricket { -enum MediaType { MEDIA_TYPE_AUDIO, MEDIA_TYPE_VIDEO, MEDIA_TYPE_DATA }; +enum MediaType { + MEDIA_TYPE_AUDIO, + MEDIA_TYPE_VIDEO, + MEDIA_TYPE_DATA, + MEDIA_TYPE_UNSUPPORTED +}; extern const char kMediaTypeAudio[]; extern const char kMediaTypeVideo[]; diff --git a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h index 15ad3aac0..9781377ca 100644 --- a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h +++ b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h @@ -36,7 +36,6 @@ struct NetEqNetworkStatistics { uint16_t preferred_buffer_size_ms; // Target buffer size in ms. uint16_t jitter_peaks_found; // 1 if adding extra delay due to peaky // jitter; 0 otherwise. - uint16_t packet_loss_rate; // Loss rate (network + late) in Q14. uint16_t expand_rate; // Fraction (of original stream) of synthesized // audio inserted through expansion (in Q14). uint16_t speech_expand_rate; // Fraction (of original stream) of synthesized @@ -49,7 +48,6 @@ struct NetEqNetworkStatistics { // decoding (in Q14). uint16_t secondary_discarded_rate; // Fraction of discarded FEC/RED data (in // Q14). - size_t added_zero_samples; // Number of zero samples added in "off" mode. // Statistics for packet waiting times, i.e., the time between a packet // arrives until it is decoded. int mean_waiting_time_ms; @@ -274,6 +272,9 @@ class NetEq { // after the call. virtual int NetworkStatistics(NetEqNetworkStatistics* stats) = 0; + // Current values only, not resetting any state. + virtual NetEqNetworkStatistics CurrentNetworkStatistics() const = 0; + // Returns a copy of this class's lifetime statistics. 
These statistics are // never reset. virtual NetEqLifetimeStatistics GetLifetimeStatistics() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h index 1d47eaca7..2c09c3e15 100644 --- a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h +++ b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h @@ -97,6 +97,14 @@ class NetEqController { size_t sync_buffer_samples; }; + struct PacketArrivedInfo { + size_t packet_length_samples; + uint32_t main_timestamp; + uint16_t main_sequence_number; + bool is_cng_or_dtmf; + bool is_dtx; + }; + virtual ~NetEqController() = default; // Resets object to a clean state. @@ -152,16 +160,17 @@ class NetEqController { virtual void AddSampleMemory(int32_t value) = 0; // Returns the target buffer level in ms. - virtual int TargetLevelMs() = 0; + virtual int TargetLevelMs() const = 0; // Notify the NetEqController that a packet has arrived. Returns the relative // arrival delay, if it can be computed. - virtual absl::optional PacketArrived(bool last_cng_or_dtmf, - size_t packet_length_samples, + virtual absl::optional PacketArrived(int fs_hz, bool should_update_stats, - uint16_t main_sequence_number, - uint32_t main_timestamp, - int fs_hz) = 0; + const PacketArrivedInfo& info) = 0; + + // Notify the NetEqController that we are currently in muted state. + // TODO(ivoc): Make pure virtual when downstream is updated. + virtual void NotifyMutedState() {} // Returns true if a peak was found. 
virtual bool PeakFound() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/samples_stats_counter.cc b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.cc similarity index 97% rename from TMessagesProj/jni/voip/webrtc/rtc_base/numerics/samples_stats_counter.cc rename to TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.cc index 9b98a3181..36871a671 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/samples_stats_counter.cc +++ b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.cc @@ -8,8 +8,9 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/numerics/samples_stats_counter.h" +#include "api/numerics/samples_stats_counter.h" +#include #include #include "absl/algorithm/container.h" diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/samples_stats_counter.h b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.h similarity index 92% rename from TMessagesProj/jni/voip/webrtc/rtc_base/numerics/samples_stats_counter.h rename to TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.h index a4ec443d3..283c1e4ed 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/samples_stats_counter.h +++ b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_NUMERICS_SAMPLES_STATS_COUNTER_H_ -#define RTC_BASE_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#ifndef API_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#define API_NUMERICS_SAMPLES_STATS_COUNTER_H_ #include @@ -45,6 +45,8 @@ class SamplesStatsCounter { // Returns if there are any values in O(1) time. bool IsEmpty() const { return samples_.empty(); } + // Returns the amount of samples added into counter in O(1) time. + int64_t NumSamples() const { return stats_.Size(); } // Returns min in O(1) time. This function may not be called if there are no // samples. 
@@ -98,7 +100,7 @@ class SamplesStatsCounter { } private: - RunningStatistics stats_; + webrtc_impl::RunningStatistics stats_; std::vector samples_; bool sorted_ = false; }; @@ -116,4 +118,4 @@ SamplesStatsCounter operator/(const SamplesStatsCounter& counter, double value); } // namespace webrtc -#endif // RTC_BASE_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#endif // API_NUMERICS_SAMPLES_STATS_COUNTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h index 09317b828..c5b04b25e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h @@ -105,6 +105,7 @@ #include "api/transport/bitrate_settings.h" #include "api/transport/enums.h" #include "api/transport/network_control.h" +#include "api/transport/sctp_transport_factory_interface.h" #include "api/transport/webrtc_key_value_config.h" #include "api/turn_customizer.h" #include "media/base/media_config.h" @@ -639,6 +640,9 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Whether network condition based codec switching is allowed. absl::optional allow_codec_switching; + // The delay before doing a usage histogram report for long-lived + // PeerConnections. Used for testing only. + absl::optional report_usage_pattern_delay_ms; // // Don't forget to update operator== if adding something. // @@ -1003,6 +1007,16 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) {} + // According to spec, we must only fire "negotiationneeded" if the Operations + // Chain is empty. This method takes care of validating an event previously + // generated with PeerConnectionObserver::OnNegotiationNeededEvent() to make + // sure that even if there was a delay (e.g. 
due to a PostTask) between the + // event being generated and the time of firing, the Operations Chain is empty + // and the event is still valid to be fired. + virtual bool ShouldFireNegotiationNeededEvent(uint32_t event_id) { + return true; + } + virtual PeerConnectionInterface::RTCConfiguration GetConfiguration() = 0; // Sets the PeerConnection's global configuration to |config|. @@ -1176,7 +1190,17 @@ class PeerConnectionObserver { // Triggered when renegotiation is needed. For example, an ICE restart // has begun. - virtual void OnRenegotiationNeeded() = 0; + // TODO(hbos): Delete in favor of OnNegotiationNeededEvent() when downstream + // projects have migrated. + virtual void OnRenegotiationNeeded() {} + // Used to fire spec-compliant onnegotiationneeded events, which should only + // fire when the Operations Chain is empty. The observer is responsible for + // queuing a task (e.g. Chromium: jump to main thread) to maybe fire the + // event. The event identified using |event_id| must only fire if + // PeerConnection::ShouldFireNegotiationNeededEvent() returns true since it is + // possible for the event to become invalidated by operations subsequently + // chained. + virtual void OnNegotiationNeededEvent(uint32_t event_id) {} // Called any time the legacy IceConnectionState changes. // @@ -1272,8 +1296,25 @@ class PeerConnectionObserver { // The heuristics for defining what constitutes "interesting" are // implementation-defined. virtual void OnInterestingUsage(int usage_pattern) {} + + virtual void OnErrorDemuxingPacket(uint32_t ssrc) {} }; +class ErrorDemuxingPacketObserver : public rtc::RefCountInterface { +public: + ErrorDemuxingPacketObserver(PeerConnectionObserver *observer) : + observer_(observer) { + } + + void OnErrorDemuxingPacket(uint32_t ssrc) { + observer_->OnErrorDemuxingPacket(ssrc); + } + +private: + PeerConnectionObserver *observer_ = nullptr; +}; + + // PeerConnectionDependencies holds all of PeerConnections dependencies. 
// A dependency is distinct from a configuration as it defines significant // executable code that can be provided by a user of the API. @@ -1343,6 +1384,7 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { // used. std::unique_ptr network_monitor_factory; std::unique_ptr neteq_factory; + std::unique_ptr sctp_factory; std::unique_ptr trials; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h b/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h index 0cc3b3b8e..2d4cb5cad 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h @@ -116,6 +116,7 @@ PROXY_METHOD2(void, SetRemoteDescription, SetSessionDescriptionObserver*, SessionDescriptionInterface*) +PROXY_METHOD1(bool, ShouldFireNegotiationNeededEvent, uint32_t) PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration) PROXY_METHOD1(RTCError, SetConfiguration, diff --git a/TMessagesProj/jni/voip/webrtc/api/proxy.cc b/TMessagesProj/jni/voip/webrtc/api/proxy.cc index e668285ba..67318e7da 100644 --- a/TMessagesProj/jni/voip/webrtc/api/proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/api/proxy.cc @@ -10,28 +10,3 @@ #include "api/proxy.h" -namespace webrtc { -namespace internal { - -SynchronousMethodCall::SynchronousMethodCall(rtc::MessageHandler* proxy) - : proxy_(proxy) {} - -SynchronousMethodCall::~SynchronousMethodCall() = default; - -void SynchronousMethodCall::Invoke(const rtc::Location& posted_from, - rtc::Thread* t) { - if (t->IsCurrent()) { - proxy_->OnMessage(nullptr); - } else { - t->Post(posted_from, this, 0); - e_.Wait(rtc::Event::kForever); - } -} - -void SynchronousMethodCall::OnMessage(rtc::Message*) { - proxy_->OnMessage(nullptr); - e_.Set(); -} - -} // namespace internal -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/proxy.h b/TMessagesProj/jni/voip/webrtc/api/proxy.h index 0e5d622eb..05f7414bc 100644 --- a/TMessagesProj/jni/voip/webrtc/api/proxy.h 
+++ b/TMessagesProj/jni/voip/webrtc/api/proxy.h @@ -59,6 +59,8 @@ #include #include "api/scoped_refptr.h" +#include "api/task_queue/queued_task.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/event.h" #include "rtc_base/message_handler.h" #include "rtc_base/ref_counted_object.h" @@ -96,27 +98,8 @@ class ReturnType { void moved_result() {} }; -namespace internal { - -class RTC_EXPORT SynchronousMethodCall : public rtc::MessageData, - public rtc::MessageHandler { - public: - explicit SynchronousMethodCall(rtc::MessageHandler* proxy); - ~SynchronousMethodCall() override; - - void Invoke(const rtc::Location& posted_from, rtc::Thread* t); - - private: - void OnMessage(rtc::Message*) override; - - rtc::Event e_; - rtc::MessageHandler* proxy_; -}; - -} // namespace internal - template -class MethodCall : public rtc::Message, public rtc::MessageHandler { +class MethodCall : public QueuedTask { public: typedef R (C::*Method)(Args...); MethodCall(C* c, Method m, Args&&... args) @@ -125,12 +108,21 @@ class MethodCall : public rtc::Message, public rtc::MessageHandler { args_(std::forward_as_tuple(std::forward(args)...)) {} R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { - internal::SynchronousMethodCall(this).Invoke(posted_from, t); + if (t->IsCurrent()) { + Invoke(std::index_sequence_for()); + } else { + t->PostTask(std::unique_ptr(this)); + event_.Wait(rtc::Event::kForever); + } return r_.moved_result(); } private: - void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for()); } + bool Run() override { + Invoke(std::index_sequence_for()); + event_.Set(); + return false; + } template void Invoke(std::index_sequence) { @@ -141,10 +133,11 @@ class MethodCall : public rtc::Message, public rtc::MessageHandler { Method m_; ReturnType r_; std::tuple args_; + rtc::Event event_; }; template -class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { +class ConstMethodCall : public QueuedTask { public: typedef R 
(C::*Method)(Args...) const; ConstMethodCall(const C* c, Method m, Args&&... args) @@ -153,12 +146,21 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { args_(std::forward_as_tuple(std::forward(args)...)) {} R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { - internal::SynchronousMethodCall(this).Invoke(posted_from, t); + if (t->IsCurrent()) { + Invoke(std::index_sequence_for()); + } else { + t->PostTask(std::unique_ptr(this)); + event_.Wait(rtc::Event::kForever); + } return r_.moved_result(); } private: - void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for()); } + bool Run() override { + Invoke(std::index_sequence_for()); + event_.Set(); + return false; + } template void Invoke(std::index_sequence) { @@ -169,6 +171,7 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { Method m_; ReturnType r_; std::tuple args_; + rtc::Event event_; }; // Helper macros to reduce code duplication. diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h index b8cb7f0bc..d24737c25 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h @@ -137,7 +137,7 @@ class RTC_EXPORT RTCError { RTCErrorDetailType error_detail() const { return error_detail_; } void set_error_detail(RTCErrorDetailType detail) { error_detail_ = detail; } - absl::optional sctp_cause_code() { return sctp_cause_code_; } + absl::optional sctp_cause_code() const { return sctp_cause_code_; } void set_sctp_cause_code(uint16_t cause_code) { sctp_cause_code_ = cause_code; } diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc index 201358439..fdf267b7b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc @@ -14,6 +14,7 @@ #include 
#include "rtc_base/checks.h" +#include "system_wrappers/include/field_trial.h" #ifdef WEBRTC_ENABLE_RTC_EVENT_LOG #include "logging/rtc_event_log/rtc_event_log_impl.h" @@ -29,6 +30,9 @@ RtcEventLogFactory::RtcEventLogFactory(TaskQueueFactory* task_queue_factory) std::unique_ptr RtcEventLogFactory::CreateRtcEventLog( RtcEventLog::EncodingType encoding_type) { #ifdef WEBRTC_ENABLE_RTC_EVENT_LOG + if (field_trial::IsEnabled("WebRTC-RtcEventLogKillSwitch")) { + return std::make_unique(); + } return std::make_unique(encoding_type, task_queue_factory_); #else return std::make_unique(); diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h b/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h index 454149ca6..b9a97c885 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h @@ -23,7 +23,6 @@ #include "api/video/video_content_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "common_types.h" // NOLINT (build/include) namespace webrtc { @@ -142,7 +141,7 @@ struct RTPHeaderExtension { bool has_video_timing; VideoSendTiming video_timing; - PlayoutDelay playout_delay = {-1, -1}; + VideoPlayoutDelay playout_delay; // For identification of a stream when ssrc is not signaled. 
See // https://tools.ietf.org/html/draft-ietf-avtext-rid-09 diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc index 28acb68be..92f99e9bb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc @@ -30,6 +30,7 @@ const char* DegradationPreferenceToString( case DegradationPreference::BALANCED: return "balanced"; } + RTC_CHECK_NOTREACHED(); } const double kDefaultBitratePriority = 1.0; @@ -121,6 +122,7 @@ constexpr char RtpExtension::kVideoContentTypeUri[]; constexpr char RtpExtension::kVideoTimingUri[]; constexpr char RtpExtension::kGenericFrameDescriptorUri00[]; constexpr char RtpExtension::kDependencyDescriptorUri[]; +constexpr char RtpExtension::kVideoLayersAllocationUri[]; constexpr char RtpExtension::kTransportSequenceNumberUri[]; constexpr char RtpExtension::kTransportSequenceNumberV2Uri[]; constexpr char RtpExtension::kPlayoutDelayUri[]; @@ -161,7 +163,8 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) { uri == webrtc::RtpExtension::kDependencyDescriptorUri || uri == webrtc::RtpExtension::kColorSpaceUri || uri == webrtc::RtpExtension::kRidUri || - uri == webrtc::RtpExtension::kRepairedRidUri; + uri == webrtc::RtpExtension::kRepairedRidUri || + uri == webrtc::RtpExtension::kVideoLayersAllocationUri; } bool RtpExtension::IsEncryptionSupported(absl::string_view uri) { @@ -183,7 +186,8 @@ bool RtpExtension::IsEncryptionSupported(absl::string_view uri) { uri == webrtc::RtpExtension::kVideoContentTypeUri || uri == webrtc::RtpExtension::kMidUri || uri == webrtc::RtpExtension::kRidUri || - uri == webrtc::RtpExtension::kRepairedRidUri; + uri == webrtc::RtpExtension::kRepairedRidUri || + uri == webrtc::RtpExtension::kVideoLayersAllocationUri; } const RtpExtension* RtpExtension::FindHeaderExtensionByUri( diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h 
index b667bf812..df0e7a93b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h @@ -318,6 +318,10 @@ struct RTC_EXPORT RtpExtension { "https://aomediacodec.github.io/av1-rtp-spec/" "#dependency-descriptor-rtp-header-extension"; + // Experimental extension for signalling target bitrate per layer. + static constexpr char kVideoLayersAllocationUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-layers-allocation00"; + // Header extension for transport sequence number, see url for details: // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions static constexpr char kTransportSequenceNumberUri[] = @@ -461,6 +465,9 @@ struct RTC_EXPORT RtpEncodingParameters { // For video, scale the resolution down by this factor. absl::optional scale_resolution_down_by; + // https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters + absl::optional scalability_mode; + // For an RtpSender, set to true to cause this encoding to be encoded and // sent, and false for it not to be encoded and sent. 
This allows control // across multiple encodings of a sender for turning simulcast layers on and diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h index a15864e34..786ea3ace 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h @@ -128,8 +128,8 @@ PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector>, streams) -PROXY_CONSTMETHOD0(cricket::MediaType, media_type) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(RtpParameters, GetParameters) PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*) PROXY_METHOD1(void, SetJitterBufferMinimumDelay, absl::optional) diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h index bdbd6dc64..a33b80042 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h @@ -110,8 +110,8 @@ PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*) PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) PROXY_CONSTMETHOD0(uint32_t, ssrc) -PROXY_CONSTMETHOD0(cricket::MediaType, media_type) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector, init_send_encodings) PROXY_CONSTMETHOD0(RtpParameters, GetParameters) diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h index cdda34b19..fd3555fb4 100644 --- 
a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h @@ -111,7 +111,8 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction // TODO(hta): Deprecate SetDirection without error and rename // SetDirectionWithError to SetDirection, remove default implementations. - virtual void SetDirection(RtpTransceiverDirection new_direction); + RTC_DEPRECATED virtual void SetDirection( + RtpTransceiverDirection new_direction); virtual RTCError SetDirectionWithError(RtpTransceiverDirection new_direction); // The current_direction attribute indicates the current direction negotiated diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h index dc1593769..94bd813b0 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h @@ -84,8 +84,8 @@ class RTC_EXPORT RTCStatsReport : public rtc::RefCountInterface { // Removes the stats object from the report, returning ownership of it or null // if there is no object with |id|. std::unique_ptr Take(const std::string& id); - // Takes ownership of all the stats in |victim|, leaving it empty. - void TakeMembersFrom(rtc::scoped_refptr victim); + // Takes ownership of all the stats in |other|, leaving it empty. + void TakeMembersFrom(rtc::scoped_refptr other); // Stats iterators. Stats are ordered lexicographically on |RTCStats::id|. 
ConstIterator begin() const; diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h index 7d8f5f5f9..ee3d70727 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h @@ -115,6 +115,7 @@ class RTC_EXPORT RTCCodecStats final : public RTCStats { RTCCodecStats(const RTCCodecStats& other); ~RTCCodecStats() override; + RTCStatsMember transport_id; RTCStatsMember payload_type; RTCStatsMember mime_type; RTCStatsMember clock_rate; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc b/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc index 4cece5b28..6f0674299 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc @@ -31,10 +31,13 @@ #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" #include "api/test/mock_audio_mixer.h" +#include "api/test/mock_data_channel.h" #include "api/test/mock_frame_decryptor.h" #include "api/test/mock_frame_encryptor.h" +#include "api/test/mock_media_stream_interface.h" #include "api/test/mock_peer_connection_factory_interface.h" #include "api/test/mock_peerconnectioninterface.h" +#include "api/test/mock_rtp_transceiver.h" #include "api/test/mock_rtpreceiver.h" #include "api/test/mock_rtpsender.h" #include "api/test/mock_transformable_video_frame.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/test/create_peer_connection_quality_test_frame_generator.cc b/TMessagesProj/jni/voip/webrtc/api/test/create_peer_connection_quality_test_frame_generator.cc new file mode 100644 index 000000000..7f0ba20c8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/create_peer_connection_quality_test_frame_generator.cc @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/create_peer_connection_quality_test_frame_generator.h" + +#include +#include + +#include "api/test/create_frame_generator.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "rtc_base/checks.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +using VideoConfig = + ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; +using ScreenShareConfig = ::webrtc::webrtc_pc_e2e:: + PeerConnectionE2EQualityTestFixture::ScreenShareConfig; + +void ValidateScreenShareConfig(const VideoConfig& video_config, + const ScreenShareConfig& screen_share_config) { + if (screen_share_config.slides_yuv_file_names.empty()) { + if (screen_share_config.scrolling_params) { + // If we have scrolling params, then its |source_width| and |source_heigh| + // will be used as width and height of video input, so we have to validate + // it against width and height of default input. 
+ RTC_CHECK_EQ(screen_share_config.scrolling_params->source_width, + kDefaultSlidesWidth); + RTC_CHECK_EQ(screen_share_config.scrolling_params->source_height, + kDefaultSlidesHeight); + } else { + RTC_CHECK_EQ(video_config.width, kDefaultSlidesWidth); + RTC_CHECK_EQ(video_config.height, kDefaultSlidesHeight); + } + } + if (screen_share_config.scrolling_params) { + RTC_CHECK_LE(screen_share_config.scrolling_params->duration, + screen_share_config.slide_change_interval); + RTC_CHECK_GE(screen_share_config.scrolling_params->source_width, + video_config.width); + RTC_CHECK_GE(screen_share_config.scrolling_params->source_height, + video_config.height); + } +} + +std::unique_ptr CreateSquareFrameGenerator( + const VideoConfig& video_config, + absl::optional type) { + return test::CreateSquareFrameGenerator( + video_config.width, video_config.height, std::move(type), absl::nullopt); +} + +std::unique_ptr CreateFromYuvFileFrameGenerator( + const VideoConfig& video_config, + std::string filename) { + return test::CreateFromYuvFileFrameGenerator( + {std::move(filename)}, video_config.width, video_config.height, + /*frame_repeat_count=*/1); +} + +std::unique_ptr CreateScreenShareFrameGenerator( + const VideoConfig& video_config, + const ScreenShareConfig& screen_share_config) { + ValidateScreenShareConfig(video_config, screen_share_config); + if (screen_share_config.generate_slides) { + return test::CreateSlideFrameGenerator( + video_config.width, video_config.height, + screen_share_config.slide_change_interval.seconds() * video_config.fps); + } + std::vector slides = screen_share_config.slides_yuv_file_names; + if (slides.empty()) { + // If slides is empty we need to add default slides as source. In such case + // video width and height is validated to be equal to kDefaultSlidesWidth + // and kDefaultSlidesHeight. 
+ slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); + } + if (!screen_share_config.scrolling_params) { + // Cycle image every slide_change_interval seconds. + return test::CreateFromYuvFileFrameGenerator( + slides, video_config.width, video_config.height, + screen_share_config.slide_change_interval.seconds() * video_config.fps); + } + + TimeDelta pause_duration = screen_share_config.slide_change_interval - + screen_share_config.scrolling_params->duration; + RTC_DCHECK(pause_duration >= TimeDelta::Zero()); + return test::CreateScrollingInputFromYuvFilesFrameGenerator( + Clock::GetRealTimeClock(), slides, + screen_share_config.scrolling_params->source_width, + screen_share_config.scrolling_params->source_height, video_config.width, + video_config.height, screen_share_config.scrolling_params->duration.ms(), + pause_duration.ms()); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/create_peer_connection_quality_test_frame_generator.h b/TMessagesProj/jni/voip/webrtc/api/test/create_peer_connection_quality_test_frame_generator.h new file mode 100644 index 000000000..ff8733120 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/create_peer_connection_quality_test_frame_generator.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ +#define API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/peerconnection_quality_test_fixture.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// Creates a frame generator that produces frames with small squares that move +// randomly towards the lower right corner. |type| has the default value +// FrameGeneratorInterface::OutputType::I420. video_config specifies frame +// weight and height. +std::unique_ptr CreateSquareFrameGenerator( + const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + absl::optional type); + +// Creates a frame generator that plays frames from the yuv file. +std::unique_ptr CreateFromYuvFileFrameGenerator( + const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + std::string filename); + +// Creates a proper frame generator for testing screen sharing. 
+std::unique_ptr CreateScreenShareFrameGenerator( + const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + const PeerConnectionE2EQualityTestFixture::ScreenShareConfig& + screen_share_config); + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h b/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h index 0ca7d3f1b..4d17aeddd 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h @@ -36,7 +36,7 @@ class DummyPeerConnection : public PeerConnectionInterface { bool AddStream(MediaStreamInterface* stream) override { return false; } void RemoveStream(MediaStreamInterface* stream) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } RTCErrorOr> AddTrack( @@ -100,17 +100,17 @@ class DummyPeerConnection : public PeerConnectionInterface { } void GetStats(RTCStatsCollectorCallback* callback) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void GetStats( rtc::scoped_refptr selector, rtc::scoped_refptr callback) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void GetStats( rtc::scoped_refptr selector, rtc::scoped_refptr callback) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void ClearStatsCache() override {} @@ -145,33 +145,33 @@ class DummyPeerConnection : public PeerConnectionInterface { return nullptr; } - void RestartIce() override { FATAL() << "Not implemented"; } + void RestartIce() override { RTC_CHECK_NOTREACHED(); } // Create a new offer. // The CreateSessionDescriptionObserver callback will be called when done. 
void CreateOffer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void CreateAnswer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void SetLocalDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void SetRemoteDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void SetRemoteDescription( std::unique_ptr desc, rtc::scoped_refptr observer) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } PeerConnectionInterface::RTCConfiguration GetConfiguration() override { @@ -194,10 +194,8 @@ class DummyPeerConnection : public PeerConnectionInterface { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - void SetAudioPlayout(bool playout) override { FATAL() << "Not implemented"; } - void SetAudioRecording(bool recording) override { - FATAL() << "Not implemented"; - } + void SetAudioPlayout(bool playout) override { RTC_CHECK_NOTREACHED(); } + void SetAudioRecording(bool recording) override { RTC_CHECK_NOTREACHED(); } rtc::scoped_refptr LookupDtlsTransportByMid( const std::string& mid) override { @@ -235,7 +233,7 @@ class DummyPeerConnection : public PeerConnectionInterface { return false; } - void StopRtcEventLog() { FATAL() << "Not implemented"; } + void StopRtcEventLog() { RTC_CHECK_NOTREACHED(); } void Close() override {} diff --git a/TMessagesProj/jni/voip/webrtc/api/test/frame_generator_interface.cc b/TMessagesProj/jni/voip/webrtc/api/test/frame_generator_interface.cc new file mode 100644 index 000000000..356fe3af5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/frame_generator_interface.cc @@ -0,0 +1,34 
@@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/frame_generator_interface.h" + +namespace webrtc { +namespace test { + +// static +const char* FrameGeneratorInterface::OutputTypeToString( + FrameGeneratorInterface::OutputType type) { + switch (type) { + case OutputType::kI420: + return "I420"; + case OutputType::kI420A: + return "I420A"; + case OutputType::kI010: + return "I010"; + case OutputType::kNV12: + return "NV12"; + default: + RTC_NOTREACHED(); + } +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/frame_generator_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/frame_generator_interface.h index 691b6ee3f..90e60deba 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/frame_generator_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/frame_generator_interface.h @@ -32,7 +32,8 @@ class FrameGeneratorInterface { absl::optional update_rect; }; - enum class OutputType { kI420, kI420A, kI010 }; + enum class OutputType { kI420, kI420A, kI010, kNV12 }; + static const char* OutputTypeToString(OutputType type); virtual ~FrameGeneratorInterface() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h new file mode 100644 index 000000000..9346ffd63 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h @@ -0,0 +1,60 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_DATA_CHANNEL_H_ +#define API_TEST_MOCK_DATA_CHANNEL_H_ + +#include + +#include "api/data_channel_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockDataChannelInterface final + : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockDataChannelInterface(); + } + + MOCK_METHOD(void, + RegisterObserver, + (DataChannelObserver * observer), + (override)); + MOCK_METHOD(void, UnregisterObserver, (), (override)); + MOCK_METHOD(std::string, label, (), (const, override)); + MOCK_METHOD(bool, reliable, (), (const, override)); + MOCK_METHOD(bool, ordered, (), (const, override)); + MOCK_METHOD(uint16_t, maxRetransmitTime, (), (const, override)); + MOCK_METHOD(uint16_t, maxRetransmits, (), (const, override)); + MOCK_METHOD(absl::optional, maxRetransmitsOpt, (), (const, override)); + MOCK_METHOD(absl::optional, maxPacketLifeTime, (), (const, override)); + MOCK_METHOD(std::string, protocol, (), (const, override)); + MOCK_METHOD(bool, negotiated, (), (const, override)); + MOCK_METHOD(int, id, (), (const, override)); + MOCK_METHOD(Priority, priority, (), (const, override)); + MOCK_METHOD(DataState, state, (), (const, override)); + MOCK_METHOD(RTCError, error, (), (const, override)); + MOCK_METHOD(uint32_t, messages_sent, (), (const, override)); + MOCK_METHOD(uint64_t, bytes_sent, (), (const, override)); + MOCK_METHOD(uint32_t, messages_received, (), (const, override)); + MOCK_METHOD(uint64_t, bytes_received, (), (const, override)); + MOCK_METHOD(uint64_t, buffered_amount, (), (const, override)); + MOCK_METHOD(void, Close, (), (override)); + MOCK_METHOD(bool, 
Send, (const DataBuffer& buffer), (override)); + + protected: + MockDataChannelInterface() = default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_DATA_CHANNEL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h new file mode 100644 index 000000000..29521e6e2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h @@ -0,0 +1,89 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ +#define API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ + +#include + +#include "api/media_stream_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockAudioSource final + : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockAudioSource(); + } + + MOCK_METHOD(void, + RegisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(void, + UnregisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(SourceState, state, (), (const, override)); + MOCK_METHOD(bool, remote, (), (const, override)); + MOCK_METHOD(void, SetVolume, (double volume), (override)); + MOCK_METHOD(void, + RegisterAudioObserver, + (AudioObserver * observer), + (override)); + MOCK_METHOD(void, + UnregisterAudioObserver, + (AudioObserver * observer), + (override)); + MOCK_METHOD(void, AddSink, (AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(void, RemoveSink, (AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(const cricket::AudioOptions, options, (), (const, 
override)); + + private: + MockAudioSource() = default; +}; + +class MockAudioTrack final : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockAudioTrack(); + } + + MOCK_METHOD(void, + RegisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(void, + UnregisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(std::string, kind, (), (const, override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(bool, enabled, (), (const, override)); + MOCK_METHOD(bool, set_enabled, (bool enable), (override)); + MOCK_METHOD(TrackState, state, (), (const, override)); + MOCK_METHOD(AudioSourceInterface*, GetSource, (), (const, override)); + MOCK_METHOD(void, AddSink, (AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(void, RemoveSink, (AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(bool, GetSignalLevel, (int* level), (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetAudioProcessor, + (), + (override)); + + private: + MockAudioTrack() = default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h index 19c3f4063..7319cebbc 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h @@ -22,7 +22,7 @@ namespace webrtc { class MockPeerConnectionFactoryInterface final : public rtc::RefCountedObject { public: - rtc::scoped_refptr Create() { + static rtc::scoped_refptr Create() { return new MockPeerConnectionFactoryInterface(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h new file mode 100644 index 000000000..a0a08c477 --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h @@ -0,0 +1,85 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_RTP_TRANSCEIVER_H_ +#define API_TEST_MOCK_RTP_TRANSCEIVER_H_ + +#include +#include + +#include "api/rtp_transceiver_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockRtpTransceiver final + : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockRtpTransceiver(); + } + + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(absl::optional, mid, (), (const, override)); + MOCK_METHOD(rtc::scoped_refptr, + sender, + (), + (const, override)); + MOCK_METHOD(rtc::scoped_refptr, + receiver, + (), + (const, override)); + MOCK_METHOD(bool, stopped, (), (const, override)); + MOCK_METHOD(bool, stopping, (), (const, override)); + MOCK_METHOD(RtpTransceiverDirection, direction, (), (const, override)); + MOCK_METHOD(void, + SetDirection, + (RtpTransceiverDirection new_direction), + (override)); + MOCK_METHOD(RTCError, + SetDirectionWithError, + (RtpTransceiverDirection new_direction), + (override)); + MOCK_METHOD(absl::optional, + current_direction, + (), + (const, override)); + MOCK_METHOD(absl::optional, + fired_direction, + (), + (const, override)); + MOCK_METHOD(RTCError, StopStandard, (), (override)); + MOCK_METHOD(void, StopInternal, (), (override)); + MOCK_METHOD(void, Stop, (), (override)); + MOCK_METHOD(RTCError, + SetCodecPreferences, + (rtc::ArrayView codecs), + (override)); + MOCK_METHOD(std::vector, + codec_preferences, + (), + (const, override)); + 
MOCK_METHOD(std::vector, + HeaderExtensionsToOffer, + (), + (const, override)); + MOCK_METHOD(webrtc::RTCError, + SetOfferedRtpHeaderExtensions, + (rtc::ArrayView + header_extensions_to_offer), + (override)); + + private: + MockRtpTransceiver() = default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_RTP_TRANSCEIVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.cc b/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.cc index ea5be8517..82b27e546 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.cc @@ -21,6 +21,24 @@ namespace webrtc { namespace test { +namespace { +NetEqTestFactory::Config convertConfig( + const NetEqSimulatorFactory::Config& simulation_config, + absl::string_view replacement_audio_filename) { + NetEqTestFactory::Config config; + config.replacement_audio_file = std::string(replacement_audio_filename); + config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; + config.initial_dummy_packets = simulation_config.initial_dummy_packets; + config.skip_get_audio_events = simulation_config.skip_get_audio_events; + config.field_trial_string = simulation_config.field_trial_string; + config.output_audio_filename = simulation_config.output_audio_filename; + config.pythonplot = simulation_config.python_plot_filename.has_value(); + config.plot_scripts_basename = simulation_config.python_plot_filename; + config.textlog = simulation_config.text_log_filename.has_value(); + config.textlog_filename = simulation_config.text_log_filename; + return config; +} +} // namespace NetEqSimulatorFactory::NetEqSimulatorFactory() : factory_(std::make_unique()) {} @@ -31,13 +49,8 @@ std::unique_ptr NetEqSimulatorFactory::CreateSimulatorFromFile( absl::string_view event_log_filename, absl::string_view replacement_audio_filename, Config simulation_config) { - NetEqTestFactory::Config config; - 
config.replacement_audio_file = std::string(replacement_audio_filename); - config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; - config.initial_dummy_packets = simulation_config.initial_dummy_packets; - config.skip_get_audio_events = simulation_config.skip_get_audio_events; - config.field_trial_string = simulation_config.field_trial_string; - config.output_audio_filename = simulation_config.output_audio_filename; + NetEqTestFactory::Config config = + convertConfig(simulation_config, replacement_audio_filename); return factory_->InitializeTestFromFile( std::string(event_log_filename), simulation_config.neteq_factory, config); } @@ -47,12 +60,8 @@ NetEqSimulatorFactory::CreateSimulatorFromString( absl::string_view event_log_file_contents, absl::string_view replacement_audio_filename, Config simulation_config) { - NetEqTestFactory::Config config; - config.replacement_audio_file = std::string(replacement_audio_filename); - config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; - config.initial_dummy_packets = simulation_config.initial_dummy_packets; - config.skip_get_audio_events = simulation_config.skip_get_audio_events; - config.field_trial_string = simulation_config.field_trial_string; + NetEqTestFactory::Config config = + convertConfig(simulation_config, replacement_audio_filename); return factory_->InitializeTestFromString( std::string(event_log_file_contents), simulation_config.neteq_factory, config); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.h b/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.h index b3c77b140..2a716e665 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/neteq_simulator_factory.h @@ -44,6 +44,10 @@ class NetEqSimulatorFactory { std::string field_trial_string; // A filename for the generated output audio file. 
absl::optional output_audio_filename; + // A filename for the python plot. + absl::optional python_plot_filename; + // A filename for the text log. + absl::optional text_log_filename; // A custom NetEqFactory can be used. NetEqFactory* neteq_factory = nullptr; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h index db1f9ada6..36fb99654 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" @@ -69,6 +70,12 @@ class EmulatedNetworkOutgoingStats { virtual DataSize BytesSent() const = 0; + // Returns the timestamped sizes of all sent packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& SentPacketsSizeCounter() const = 0; + virtual DataSize FirstSentPacketSize() const = 0; // Returns time of the first packet sent or infinite value if no packets were @@ -91,10 +98,21 @@ class EmulatedNetworkIncomingStats { virtual int64_t PacketsReceived() const = 0; // Total amount of bytes in received packets. virtual DataSize BytesReceived() const = 0; + // Returns the timestamped sizes of all received packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. 
+ virtual const SamplesStatsCounter& ReceivedPacketsSizeCounter() const = 0; // Total amount of packets that were received, but no destination was found. virtual int64_t PacketsDropped() const = 0; // Total amount of bytes in dropped packets. virtual DataSize BytesDropped() const = 0; + // Returns the timestamped sizes of all packets that were received, + // but no destination was found if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& DroppedPacketsSizeCounter() const = 0; virtual DataSize FirstReceivedPacketSize() const = 0; @@ -120,6 +138,17 @@ class EmulatedNetworkStats { virtual int64_t PacketsSent() const = 0; virtual DataSize BytesSent() const = 0; + // Returns the timestamped sizes of all sent packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& SentPacketsSizeCounter() const = 0; + // Returns the timestamped duration between packet was received on + // network interface and was dispatched to the network in microseconds if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& SentPacketsQueueWaitTimeUs() const = 0; virtual DataSize FirstSentPacketSize() const = 0; // Returns time of the first packet sent or infinite value if no packets were @@ -134,10 +163,21 @@ class EmulatedNetworkStats { virtual int64_t PacketsReceived() const = 0; // Total amount of bytes in received packets. 
virtual DataSize BytesReceived() const = 0; + // Returns the timestamped sizes of all received packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& ReceivedPacketsSizeCounter() const = 0; // Total amount of packets that were received, but no destination was found. virtual int64_t PacketsDropped() const = 0; // Total amount of bytes in dropped packets. virtual DataSize BytesDropped() const = 0; + // Returns counter with timestamped sizes of all packets that were received, + // but no destination was found if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& DroppedPacketsSizeCounter() const = 0; virtual DataSize FirstReceivedPacketSize() const = 0; // Returns time of the first packet received or infinite value if no packets @@ -159,7 +199,8 @@ class EmulatedNetworkStats { }; // EmulatedEndpoint is an abstraction for network interface on device. Instances -// of this are created by NetworkEmulationManager::CreateEndpoint. +// of this are created by NetworkEmulationManager::CreateEndpoint and +// thread safe. class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { public: // Send packet into network. 
diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h index 90441e4f2..8619f3630 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h @@ -13,6 +13,7 @@ #include #include +#include #include #include "api/array_view.h" @@ -46,6 +47,13 @@ class EmulatedRoute; struct EmulatedEndpointConfig { enum class IpAddressFamily { kIpv4, kIpv6 }; + enum class StatsGatheringMode { + // Gather main network stats counters. + kDefault, + // kDefault + also gather per packet statistics. In this mode more memory + // will be used. + kDebug + }; IpAddressFamily generated_ip_family = IpAddressFamily::kIpv4; // If specified will be used as IP address for endpoint node. Must be unique @@ -56,8 +64,40 @@ struct EmulatedEndpointConfig { bool start_as_enabled = true; // Network type which will be used to represent endpoint to WebRTC. rtc::AdapterType type = rtc::AdapterType::ADAPTER_TYPE_UNKNOWN; + StatsGatheringMode stats_gathering_mode = StatsGatheringMode::kDefault; }; +struct EmulatedTURNServerConfig { + EmulatedEndpointConfig client_config; + EmulatedEndpointConfig peer_config; +}; + +// EmulatedTURNServer is an abstraction for a TURN server. +class EmulatedTURNServerInterface { + public: + struct IceServerConfig { + std::string username; + std::string password; + std::string url; + }; + + virtual ~EmulatedTURNServerInterface() {} + + // Get an IceServer configuration suitable to add to a PeerConnection. + virtual IceServerConfig GetIceServerConfig() const = 0; + + // Get non-null client endpoint, an endpoint that accepts TURN allocations. + // This shall typically be connected to one or more webrtc endpoint. + virtual EmulatedEndpoint* GetClientEndpoint() const = 0; + + // Returns socket address, which client should use to connect to TURN server + // and do TURN allocation. 
+ virtual rtc::SocketAddress GetClientEndpointAddress() const = 0; + + // Get non-null peer endpoint, that is "connected to the internet". + // This shall typically be connected to another TURN server. + virtual EmulatedEndpoint* GetPeerEndpoint() const = 0; +}; // Provide interface to obtain all required objects to inject network emulation // layer into PeerConnection. Also contains information about network interfaces @@ -79,7 +119,8 @@ class EmulatedNetworkManagerInterface { virtual std::vector endpoints() const = 0; // Passes summarized network stats for endpoints for this manager into - // specified |stats_callback|. + // specified |stats_callback|. Callback will be executed on network emulation + // internal task queue. virtual void GetStats( std::function)> stats_callback) const = 0; @@ -194,12 +235,20 @@ class NetworkEmulationManager { CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) = 0; - // Passes summarized network stats for specified |endpoints| into specifield - // |stats_callback|. + // Passes summarized network stats for specified |endpoints| into specified + // |stats_callback|. Callback will be executed on network emulation + // internal task queue. virtual void GetStats( rtc::ArrayView endpoints, std::function)> stats_callback) = 0; + + // Create a EmulatedTURNServer. + // The TURN server has 2 endpoints that need to be connected with routes, + // - GetClientEndpoint() - the endpoint that accepts TURN allocations. + // - GetPeerEndpoint() - the endpoint that is "connected to the internet". 
+ virtual EmulatedTURNServerInterface* CreateTURNServer( + EmulatedTURNServerConfig config) = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/simulcast_test_fixture.h b/TMessagesProj/jni/voip/webrtc/api/test/simulcast_test_fixture.h index 5270d1330..cd470703c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/simulcast_test_fixture.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/simulcast_test_fixture.h @@ -34,6 +34,8 @@ class SimulcastTestFixture { virtual void TestSpatioTemporalLayers321PatternEncoder() = 0; virtual void TestStrideEncodeDecode() = 0; virtual void TestDecodeWidthHeightSet() = 0; + virtual void + TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation() = 0; }; } // namespace test diff --git a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h index 2ed40910b..395c5cb80 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h @@ -138,6 +138,9 @@ class VideoCodecTestFixture { bool save_encoded_ivf = false; bool save_decoded_y4m = false; } visualization_params; + + // Enables quality analysis for dropped frames. + bool analyze_quality_of_dropped_frames = false; }; virtual ~VideoCodecTestFixture() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h index 63e15768d..df1aed73a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h @@ -67,6 +67,7 @@ class VideoCodecTestStats { int qp = -1; // Quality. 
+ bool quality_analysis_successful = false; float psnr_y = 0.0f; float psnr_u = 0.0f; float psnr_v = 0.0f; diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h b/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h index 6fc1f7c0d..c2b005e71 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h @@ -61,42 +61,42 @@ class NetworkControllerInterface { virtual ~NetworkControllerInterface() = default; // Called when network availabilty changes. - virtual NetworkControlUpdate OnNetworkAvailability(NetworkAvailability) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnNetworkAvailability( + NetworkAvailability) = 0; // Called when the receiving or sending endpoint changes address. - virtual NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnNetworkRouteChange( + NetworkRouteChange) = 0; // Called periodically with a periodicy as specified by // NetworkControllerFactoryInterface::GetProcessInterval. - virtual NetworkControlUpdate OnProcessInterval(ProcessInterval) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnProcessInterval( + ProcessInterval) = 0; // Called when remotely calculated bitrate is received. - virtual NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnRemoteBitrateReport( + RemoteBitrateReport) = 0; // Called round trip time has been calculated by protocol specific mechanisms. - virtual NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnRoundTripTimeUpdate( + RoundTripTimeUpdate) = 0; // Called when a packet is sent on the network. 
- virtual NetworkControlUpdate OnSentPacket(SentPacket) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnSentPacket( + SentPacket) = 0; // Called when a packet is received from the remote client. - virtual NetworkControlUpdate OnReceivedPacket(ReceivedPacket) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnReceivedPacket( + ReceivedPacket) = 0; // Called when the stream specific configuration has been updated. - virtual NetworkControlUpdate OnStreamsConfig(StreamsConfig) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnStreamsConfig( + StreamsConfig) = 0; // Called when target transfer rate constraints has been changed. - virtual NetworkControlUpdate OnTargetRateConstraints(TargetRateConstraints) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnTargetRateConstraints( + TargetRateConstraints) = 0; // Called when a protocol specific calculation of packet loss has been made. - virtual NetworkControlUpdate OnTransportLossReport(TransportLossReport) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnTransportLossReport( + TransportLossReport) = 0; // Called with per packet feedback regarding receive time. - virtual NetworkControlUpdate OnTransportPacketsFeedback( - TransportPacketsFeedback) ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnTransportPacketsFeedback( + TransportPacketsFeedback) = 0; // Called with network state estimate updates. 
- virtual NetworkControlUpdate OnNetworkStateEstimate(NetworkStateEstimate) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnNetworkStateEstimate( + NetworkStateEstimate) = 0; }; // NetworkControllerFactoryInterface is an interface for creating a network diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/sctp_transport_factory_interface.h b/TMessagesProj/jni/voip/webrtc/api/transport/sctp_transport_factory_interface.h new file mode 100644 index 000000000..912be3a37 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/transport/sctp_transport_factory_interface.h @@ -0,0 +1,42 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_SCTP_TRANSPORT_FACTORY_INTERFACE_H_ +#define API_TRANSPORT_SCTP_TRANSPORT_FACTORY_INTERFACE_H_ + +#include + +// These classes are not part of the API, and are treated as opaque pointers. +namespace cricket { +class SctpTransportInternal; +} // namespace cricket + +namespace rtc { +class PacketTransportInternal; +} // namespace rtc + +namespace webrtc { + +// Factory class which can be used to allow fake SctpTransports to be injected +// for testing. An application is not intended to implement this interface nor +// 'cricket::SctpTransportInternal' because SctpTransportInternal is not +// guaranteed to remain stable in future WebRTC versions. +class SctpTransportFactoryInterface { + public: + virtual ~SctpTransportFactoryInterface() = default; + + // Create an SCTP transport using |channel| for the underlying transport. 
+ virtual std::unique_ptr CreateSctpTransport( + rtc::PacketTransportInternal* channel) = 0; +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_SCTP_TRANSPORT_FACTORY_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc b/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc index b083f1583..c3f589a69 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc +++ b/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc @@ -11,8 +11,9 @@ #include "api/transport/stun.h" #include - #include +#include +#include #include #include @@ -25,8 +26,14 @@ using rtc::ByteBufferReader; using rtc::ByteBufferWriter; +namespace cricket { + namespace { +const int k127Utf8CharactersLengthInBytes = 508; +const int kDefaultMaxAttributeLength = 508; +const int kMessageIntegrityAttributeLength = 20; + uint32_t ReduceTransactionId(const std::string& transaction_id) { RTC_DCHECK(transaction_id.length() == cricket::kStunTransactionIdLength || transaction_id.length() == @@ -40,9 +47,46 @@ uint32_t ReduceTransactionId(const std::string& transaction_id) { return result; } -} // namespace +// Check the maximum length of a BYTE_STRING attribute against specifications. +bool LengthValid(int type, int length) { + // "Less than 509 bytes" is intended to indicate a maximum of 127 + // UTF-8 characters, which may take up to 4 bytes per character. + switch (type) { + case STUN_ATTR_USERNAME: + return length <= + k127Utf8CharactersLengthInBytes; // RFC 8489 section 14.3 + case STUN_ATTR_MESSAGE_INTEGRITY: + return length == + kMessageIntegrityAttributeLength; // RFC 8489 section 14.5 + case STUN_ATTR_REALM: + return length <= + k127Utf8CharactersLengthInBytes; // RFC 8489 section 14.9 + case STUN_ATTR_NONCE: + return length <= + k127Utf8CharactersLengthInBytes; // RFC 8489 section 14.10 + case STUN_ATTR_SOFTWARE: + return length <= + k127Utf8CharactersLengthInBytes; // RFC 8489 section 14.14 + case STUN_ATTR_ORIGIN: + // 0x802F is unassigned by IANA. 
+ // RESPONSE-ORIGIN is defined in RFC 5780 section 7.3, but does not + // specify a maximum length. It's an URL, so return an arbitrary + // restriction. + return length <= kDefaultMaxAttributeLength; + case STUN_ATTR_DATA: + // No length restriction in RFC; it's the content of an UDP datagram, + // which in theory can be up to 65.535 bytes. + // TODO(bugs.webrtc.org/12179): Write a test to find the real limit. + return length <= 65535; + default: + // Return an arbitrary restriction for all other types. + return length <= kDefaultMaxAttributeLength; + } + RTC_NOTREACHED(); + return true; +} -namespace cricket { +} // namespace const char STUN_ERROR_REASON_TRY_ALTERNATE_SERVER[] = "Try Alternate Server"; const char STUN_ERROR_REASON_BAD_REQUEST[] = "Bad Request"; @@ -555,7 +599,7 @@ StunAttributeValueType StunMessage::GetAttributeValueType(int type) const { return STUN_VALUE_BYTE_STRING; case STUN_ATTR_RETRANSMIT_COUNT: return STUN_VALUE_UINT32; - case STUN_ATTR_LAST_ICE_CHECK_RECEIVED: + case STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED: return STUN_VALUE_BYTE_STRING; case STUN_ATTR_GOOG_MISC_INFO: return STUN_VALUE_UINT16_LIST; @@ -993,6 +1037,10 @@ bool StunByteStringAttribute::Read(ByteBufferReader* buf) { } bool StunByteStringAttribute::Write(ByteBufferWriter* buf) const { + // Check that length is legal according to specs + if (!LengthValid(type(), length())) { + return false; + } buf->WriteBytes(bytes_, length()); WritePadding(buf); return true; @@ -1309,7 +1357,7 @@ StunMessage* TurnMessage::CreateNew() const { StunAttributeValueType IceMessage::GetAttributeValueType(int type) const { switch (type) { case STUN_ATTR_PRIORITY: - case STUN_ATTR_NETWORK_INFO: + case STUN_ATTR_GOOG_NETWORK_INFO: case STUN_ATTR_NOMINATION: return STUN_VALUE_UINT32; case STUN_ATTR_USE_CANDIDATE: diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/stun.h b/TMessagesProj/jni/voip/webrtc/api/transport/stun.h index 51ca30653..8893b2a1f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/api/transport/stun.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/stun.h @@ -16,11 +16,13 @@ #include #include - +#include #include #include #include +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/ip_address.h" #include "rtc_base/socket_address.h" @@ -133,7 +135,6 @@ class StunAddressAttribute; class StunAttribute; class StunByteStringAttribute; class StunErrorCodeAttribute; - class StunUInt16ListAttribute; class StunUInt32Attribute; class StunUInt64Attribute; @@ -667,11 +668,16 @@ enum IceAttributeType { STUN_ATTR_NOMINATION = 0xC001, // UInt32 // UInt32. The higher 16 bits are the network ID. The lower 16 bits are the // network cost. - STUN_ATTR_NETWORK_INFO = 0xC057, + STUN_ATTR_GOOG_NETWORK_INFO = 0xC057, // Experimental: Transaction ID of the last connectivity check received. - STUN_ATTR_LAST_ICE_CHECK_RECEIVED = 0xC058, + STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED = 0xC058, // Uint16List. Miscellaneous attributes for future extension. STUN_ATTR_GOOG_MISC_INFO = 0xC059, + // Obsolete. + STUN_ATTR_GOOG_OBSOLETE_1 = 0xC05A, + STUN_ATTR_GOOG_CONNECTION_ID = 0xC05B, // Not yet implemented. + STUN_ATTR_GOOG_DELTA = 0xC05C, // Not yet implemented. + STUN_ATTR_GOOG_DELTA_ACK = 0xC05D, // Not yet implemented. // MESSAGE-INTEGRITY truncated to 32-bit. STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 = 0xC060, }; diff --git a/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h b/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h index 8436d4f9e..30543b68b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h @@ -8,42 +8,34 @@ * be found in the AUTHORS file in the root of the source tree. */ -// This file contains enums related to IPv4/IPv6 metrics. +// This file contains enums related to Chrome UMA histograms. 
See +// https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md#requirements +// for requirements when adding or changing metrics. #ifndef API_UMA_METRICS_H_ #define API_UMA_METRICS_H_ -#include "rtc_base/ref_count.h" - namespace webrtc { -// Currently this contains information related to WebRTC network/transport -// information. - -// The difference between PeerConnectionEnumCounter and -// PeerConnectionMetricsName is that the "EnumCounter" is only counting the -// occurrences of events, while "Name" has a value associated with it which is -// used to form a histogram. - -// This enum is backed by Chromium's histograms.xml, -// chromium/src/tools/metrics/histograms/histograms.xml -// Existing values cannot be re-ordered and new enums must be added -// before kBoundary. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum PeerConnectionAddressFamilyCounter { - kPeerConnection_IPv4, - kPeerConnection_IPv6, - kBestConnections_IPv4, - kBestConnections_IPv6, - kPeerConnectionAddressFamilyCounter_Max, + kPeerConnection_IPv4 = 0, + kPeerConnection_IPv6 = 1, + kBestConnections_IPv4 = 2, + kBestConnections_IPv6 = 3, + kPeerConnectionAddressFamilyCounter_Max }; // This enum defines types for UMA samples, which will have a range. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum PeerConnectionMetricsName { - kNetworkInterfaces_IPv4, // Number of IPv4 interfaces. - kNetworkInterfaces_IPv6, // Number of IPv6 interfaces. - kTimeToConnect, // In milliseconds. - kLocalCandidates_IPv4, // Number of IPv4 local candidates. - kLocalCandidates_IPv6, // Number of IPv6 local candidates. + kNetworkInterfaces_IPv4 = 0, // Number of IPv4 interfaces. + kNetworkInterfaces_IPv6 = 1, // Number of IPv6 interfaces. + kTimeToConnect = 2, // In milliseconds. 
+ kLocalCandidates_IPv4 = 3, // Number of IPv4 local candidates. + kLocalCandidates_IPv6 = 4, // Number of IPv6 local candidates. kPeerConnectionMetricsName_Max }; @@ -51,109 +43,134 @@ enum PeerConnectionMetricsName { // _. It is recorded based on the // type of candidate pair used when the PeerConnection first goes to a completed // state. When BUNDLE is enabled, only the first transport gets recorded. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum IceCandidatePairType { // HostHost is deprecated. It was replaced with the set of types at the bottom // to report private or public host IP address. - kIceCandidatePairHostHost, - kIceCandidatePairHostSrflx, - kIceCandidatePairHostRelay, - kIceCandidatePairHostPrflx, - kIceCandidatePairSrflxHost, - kIceCandidatePairSrflxSrflx, - kIceCandidatePairSrflxRelay, - kIceCandidatePairSrflxPrflx, - kIceCandidatePairRelayHost, - kIceCandidatePairRelaySrflx, - kIceCandidatePairRelayRelay, - kIceCandidatePairRelayPrflx, - kIceCandidatePairPrflxHost, - kIceCandidatePairPrflxSrflx, - kIceCandidatePairPrflxRelay, + kIceCandidatePairHostHost = 0, + kIceCandidatePairHostSrflx = 1, + kIceCandidatePairHostRelay = 2, + kIceCandidatePairHostPrflx = 3, + kIceCandidatePairSrflxHost = 4, + kIceCandidatePairSrflxSrflx = 5, + kIceCandidatePairSrflxRelay = 6, + kIceCandidatePairSrflxPrflx = 7, + kIceCandidatePairRelayHost = 8, + kIceCandidatePairRelaySrflx = 9, + kIceCandidatePairRelayRelay = 10, + kIceCandidatePairRelayPrflx = 11, + kIceCandidatePairPrflxHost = 12, + kIceCandidatePairPrflxSrflx = 13, + kIceCandidatePairPrflxRelay = 14, // The following 9 types tell whether local and remote hosts have hostname, // private or public IP addresses. 
- kIceCandidatePairHostPrivateHostPrivate, - kIceCandidatePairHostPrivateHostPublic, - kIceCandidatePairHostPublicHostPrivate, - kIceCandidatePairHostPublicHostPublic, - kIceCandidatePairHostNameHostName, - kIceCandidatePairHostNameHostPrivate, - kIceCandidatePairHostNameHostPublic, - kIceCandidatePairHostPrivateHostName, - kIceCandidatePairHostPublicHostName, + kIceCandidatePairHostPrivateHostPrivate = 15, + kIceCandidatePairHostPrivateHostPublic = 16, + kIceCandidatePairHostPublicHostPrivate = 17, + kIceCandidatePairHostPublicHostPublic = 18, + kIceCandidatePairHostNameHostName = 19, + kIceCandidatePairHostNameHostPrivate = 20, + kIceCandidatePairHostNameHostPublic = 21, + kIceCandidatePairHostPrivateHostName = 22, + kIceCandidatePairHostPublicHostName = 23, kIceCandidatePairMax }; +// The difference between PeerConnectionEnumCounter and +// PeerConnectionMetricsName is that the "EnumCounter" is only counting the +// occurrences of events, while "Name" has a value associated with it which is +// used to form a histogram. + +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum KeyExchangeProtocolType { - kEnumCounterKeyProtocolDtls, - kEnumCounterKeyProtocolSdes, + kEnumCounterKeyProtocolDtls = 0, + kEnumCounterKeyProtocolSdes = 1, kEnumCounterKeyProtocolMax }; +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. 
enum KeyExchangeProtocolMedia { - kEnumCounterKeyProtocolMediaTypeDtlsAudio, - kEnumCounterKeyProtocolMediaTypeDtlsVideo, - kEnumCounterKeyProtocolMediaTypeDtlsData, - kEnumCounterKeyProtocolMediaTypeSdesAudio, - kEnumCounterKeyProtocolMediaTypeSdesVideo, - kEnumCounterKeyProtocolMediaTypeSdesData, + kEnumCounterKeyProtocolMediaTypeDtlsAudio = 0, + kEnumCounterKeyProtocolMediaTypeDtlsVideo = 1, + kEnumCounterKeyProtocolMediaTypeDtlsData = 2, + kEnumCounterKeyProtocolMediaTypeSdesAudio = 3, + kEnumCounterKeyProtocolMediaTypeSdesVideo = 4, + kEnumCounterKeyProtocolMediaTypeSdesData = 5, kEnumCounterKeyProtocolMediaTypeMax }; +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum SdpSemanticRequested { - kSdpSemanticRequestDefault, - kSdpSemanticRequestPlanB, - kSdpSemanticRequestUnifiedPlan, + kSdpSemanticRequestDefault = 0, + kSdpSemanticRequestPlanB = 1, + kSdpSemanticRequestUnifiedPlan = 2, kSdpSemanticRequestMax }; +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum SdpSemanticNegotiated { - kSdpSemanticNegotiatedNone, - kSdpSemanticNegotiatedPlanB, - kSdpSemanticNegotiatedUnifiedPlan, - kSdpSemanticNegotiatedMixed, + kSdpSemanticNegotiatedNone = 0, + kSdpSemanticNegotiatedPlanB = 1, + kSdpSemanticNegotiatedUnifiedPlan = 2, + kSdpSemanticNegotiatedMixed = 3, kSdpSemanticNegotiatedMax }; // Metric which records the format of the received SDP for tracking how much the // difference between Plan B and Unified Plan affect users. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum SdpFormatReceived { // No audio or video tracks. This is worth special casing since it seems to be // the most common scenario (data-channel only). - kSdpFormatReceivedNoTracks, + kSdpFormatReceivedNoTracks = 0, // No more than one audio and one video track. 
Should be compatible with both // Plan B and Unified Plan endpoints. - kSdpFormatReceivedSimple, + kSdpFormatReceivedSimple = 1, // More than one audio track or more than one video track in the Plan B format // (e.g., one audio media section with multiple streams). - kSdpFormatReceivedComplexPlanB, + kSdpFormatReceivedComplexPlanB = 2, // More than one audio track or more than one video track in the Unified Plan // format (e.g., two audio media sections). - kSdpFormatReceivedComplexUnifiedPlan, + kSdpFormatReceivedComplexUnifiedPlan = 3, kSdpFormatReceivedMax }; // Metric for counting the outcome of adding an ICE candidate +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum AddIceCandidateResult { - kAddIceCandidateSuccess, - kAddIceCandidateFailClosed, - kAddIceCandidateFailNoRemoteDescription, - kAddIceCandidateFailNullCandidate, - kAddIceCandidateFailNotValid, - kAddIceCandidateFailNotReady, - kAddIceCandidateFailInAddition, - kAddIceCandidateFailNotUsable, + kAddIceCandidateSuccess = 0, + kAddIceCandidateFailClosed = 1, + kAddIceCandidateFailNoRemoteDescription = 2, + kAddIceCandidateFailNullCandidate = 3, + kAddIceCandidateFailNotValid = 4, + kAddIceCandidateFailNotReady = 5, + kAddIceCandidateFailInAddition = 6, + kAddIceCandidateFailNotUsable = 7, kAddIceCandidateMax }; // Metric for recording which api surface was used to enable simulcast. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. 
enum SimulcastApiVersion { - kSimulcastApiVersionNone, - kSimulcastApiVersionLegacy, - kSimulcastApiVersionSpecCompliant, - kSimulcastApiVersionMax, + kSimulcastApiVersionNone = 0, + kSimulcastApiVersionLegacy = 1, + kSimulcastApiVersionSpecCompliant = 2, + kSimulcastApiVersionMax }; +// When adding new metrics please consider using the style described in +// https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md#usage +// instead of the legacy enums used above. + } // namespace webrtc #endif // API_UMA_METRICS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/builtin_video_bitrate_allocator_factory.cc b/TMessagesProj/jni/voip/webrtc/api/video/builtin_video_bitrate_allocator_factory.cc index 96b2545d2..593bca60f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/builtin_video_bitrate_allocator_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/builtin_video_bitrate_allocator_factory.cc @@ -15,7 +15,7 @@ #include "absl/base/macros.h" #include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_codec.h" -#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" +#include "modules/video_coding/svc/svc_rate_allocator.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" namespace webrtc { @@ -30,16 +30,14 @@ class BuiltinVideoBitrateAllocatorFactory std::unique_ptr CreateVideoBitrateAllocator( const VideoCodec& codec) override { - std::unique_ptr rate_allocator; switch (codec.codecType) { + case kVideoCodecAV1: case kVideoCodecVP9: - rate_allocator.reset(new SvcRateAllocator(codec)); - break; - // TODO: add an allocator here for H.265 + // TODO: add an allocator here for H.265 + return std::make_unique(codec); default: - rate_allocator.reset(new SimulcastRateAllocator(codec)); + return std::make_unique(codec); } - return rate_allocator; } }; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc index 
13d57ef5f..1c73bdabe 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc @@ -61,7 +61,7 @@ void EncodedImageBuffer::Realloc(size_t size) { size_ = size; } -EncodedImage::EncodedImage() : EncodedImage(nullptr, 0, 0) {} +EncodedImage::EncodedImage() = default; EncodedImage::EncodedImage(EncodedImage&&) = default; EncodedImage::EncodedImage(const EncodedImage&) = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h index 35c2584df..650766ab6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h @@ -25,7 +25,6 @@ #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "common_types.h" // NOLINT(build/include_directory) #include "rtc_base/checks.h" #include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" @@ -73,9 +72,8 @@ class RTC_EXPORT EncodedImage { public: EncodedImage(); EncodedImage(EncodedImage&&); - // Discouraged: potentially expensive. EncodedImage(const EncodedImage&); - EncodedImage(uint8_t* buffer, size_t length, size_t capacity); + RTC_DEPRECATED EncodedImage(uint8_t* buffer, size_t length, size_t capacity); ~EncodedImage(); @@ -130,11 +128,6 @@ class RTC_EXPORT EncodedImage { RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity()); size_ = new_size; } - // TODO(nisse): Delete, provide only read-only access to the buffer. - size_t capacity() const { - return buffer_ ? capacity_ : (encoded_data_ ? encoded_data_->size() : 0); - } - void SetEncodedData( rtc::scoped_refptr encoded_data) { encoded_data_ = encoded_data; @@ -154,11 +147,6 @@ class RTC_EXPORT EncodedImage { return encoded_data_; } - // TODO(nisse): Delete, provide only read-only access to the buffer. - uint8_t* data() { - return buffer_ ? buffer_ - : (encoded_data_ ? 
encoded_data_->data() : nullptr); - } const uint8_t* data() const { return buffer_ ? buffer_ : (encoded_data_ ? encoded_data_->data() : nullptr); @@ -177,13 +165,12 @@ class RTC_EXPORT EncodedImage { VideoFrameType _frameType = VideoFrameType::kVideoFrameDelta; VideoRotation rotation_ = kVideoRotation_0; VideoContentType content_type_ = VideoContentType::UNSPECIFIED; - bool _completeFrame = false; int qp_ = -1; // Quantizer value. // When an application indicates non-zero values here, it is taken as an // indication that all future frames will be constrained with those limits // until the application indicates a change again. - PlayoutDelay playout_delay_ = {-1, -1}; + VideoPlayoutDelay playout_delay_; struct Timing { uint8_t flags = VideoSendTiming::kInvalid; @@ -198,14 +185,18 @@ class RTC_EXPORT EncodedImage { } timing_; private: + size_t capacity() const { + return buffer_ ? capacity_ : (encoded_data_ ? encoded_data_->size() : 0); + } + // TODO(bugs.webrtc.org/9378): We're transitioning to always owning the // encoded data. rtc::scoped_refptr encoded_data_; - size_t size_; // Size of encoded frame data. + size_t size_ = 0; // Size of encoded frame data. // Non-null when used with an un-owned buffer. - uint8_t* buffer_; + uint8_t* buffer_ = nullptr; // Allocated size of _buffer; relevant only if it's non-null. - size_t capacity_; + size_t capacity_ = 0; uint32_t timestamp_rtp_ = 0; absl::optional spatial_index_; std::map spatial_layer_frame_size_bytes_; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc new file mode 100644 index 000000000..cfa85ac52 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/nv12_buffer.h" + +#include "api/video/i420_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +namespace webrtc { + +namespace { + +static const int kBufferAlignment = 64; + +int NV12DataSize(int height, int stride_y, int stride_uv) { + return stride_y * height + stride_uv * ((height + 1) / 2); +} + +} // namespace + +NV12Buffer::NV12Buffer(int width, int height) + : NV12Buffer(width, height, width, width + width % 2) {} + +NV12Buffer::NV12Buffer(int width, int height, int stride_y, int stride_uv) + : width_(width), + height_(height), + stride_y_(stride_y), + stride_uv_(stride_uv), + data_(static_cast( + AlignedMalloc(NV12DataSize(height_, stride_y_, stride_uv), + kBufferAlignment))) { + RTC_DCHECK_GT(width, 0); + RTC_DCHECK_GT(height, 0); + RTC_DCHECK_GE(stride_y, width); + RTC_DCHECK_GE(stride_uv, (width + width % 2)); +} + +NV12Buffer::~NV12Buffer() = default; + +// static +rtc::scoped_refptr NV12Buffer::Create(int width, int height) { + return new rtc::RefCountedObject(width, height); +} + +// static +rtc::scoped_refptr NV12Buffer::Create(int width, + int height, + int stride_y, + int stride_uv) { + return new rtc::RefCountedObject(width, height, stride_y, + stride_uv); +} + +// static +rtc::scoped_refptr NV12Buffer::Copy( + const I420BufferInterface& i420_buffer) { + rtc::scoped_refptr buffer = + NV12Buffer::Create(i420_buffer.width(), i420_buffer.height()); + libyuv::I420ToNV12( + i420_buffer.DataY(), i420_buffer.StrideY(), i420_buffer.DataU(), + i420_buffer.StrideU(), i420_buffer.DataV(), i420_buffer.StrideV(), + buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataUV(), + buffer->StrideUV(), 
buffer->width(), buffer->height()); + return buffer; +} + +rtc::scoped_refptr NV12Buffer::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + libyuv::NV12ToI420(DataY(), StrideY(), DataUV(), StrideUV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + +int NV12Buffer::width() const { + return width_; +} +int NV12Buffer::height() const { + return height_; +} + +int NV12Buffer::StrideY() const { + return stride_y_; +} +int NV12Buffer::StrideUV() const { + return stride_uv_; +} + +const uint8_t* NV12Buffer::DataY() const { + return data_.get(); +} + +const uint8_t* NV12Buffer::DataUV() const { + return data_.get() + UVOffset(); +} + +uint8_t* NV12Buffer::MutableDataY() { + return data_.get(); +} + +uint8_t* NV12Buffer::MutableDataUV() { + return data_.get() + UVOffset(); +} + +size_t NV12Buffer::UVOffset() const { + return stride_y_ * height_; +} + +void NV12Buffer::InitializeData() { + memset(data_.get(), 0, NV12DataSize(height_, stride_y_, stride_uv_)); +} + +void NV12Buffer::CropAndScaleFrom(const NV12BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + RTC_CHECK_LE(crop_width, src.width()); + RTC_CHECK_LE(crop_height, src.height()); + RTC_CHECK_LE(crop_width + offset_x, src.width()); + RTC_CHECK_LE(crop_height + offset_y, src.height()); + RTC_CHECK_GE(offset_x, 0); + RTC_CHECK_GE(offset_y, 0); + + // Make sure offset is even so that u/v plane becomes aligned. 
+ const int uv_offset_x = offset_x / 2; + const int uv_offset_y = offset_y / 2; + offset_x = uv_offset_x * 2; + offset_y = uv_offset_y * 2; + + const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x; + const uint8_t* uv_plane = + src.DataUV() + src.StrideUV() * uv_offset_y + uv_offset_x * 2; + + // kFilterBox is unsupported in libyuv, so using kFilterBilinear instead. + int res = libyuv::NV12Scale(y_plane, src.StrideY(), uv_plane, src.StrideUV(), + crop_width, crop_height, MutableDataY(), + StrideY(), MutableDataUV(), StrideUV(), width(), + height(), libyuv::kFilterBilinear); + + RTC_DCHECK_EQ(res, 0); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h new file mode 100644 index 000000000..cb989e84b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_NV12_BUFFER_H_ +#define API_VIDEO_NV12_BUFFER_H_ + +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// NV12 is a biplanar encoding format, with full-resolution Y and +// half-resolution interleved UV. More information can be found at +// http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12. 
+class RTC_EXPORT NV12Buffer : public NV12BufferInterface { + public: + static rtc::scoped_refptr Create(int width, int height); + static rtc::scoped_refptr Create(int width, + int height, + int stride_y, + int stride_uv); + static rtc::scoped_refptr Copy( + const I420BufferInterface& i420_buffer); + + rtc::scoped_refptr ToI420() override; + + int width() const override; + int height() const override; + + int StrideY() const override; + int StrideUV() const override; + + const uint8_t* DataY() const override; + const uint8_t* DataUV() const override; + + uint8_t* MutableDataY(); + uint8_t* MutableDataUV(); + + // Sets all three planes to all zeros. Used to work around for + // quirks in memory checkers + // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and + // ffmpeg (http://crbug.com/390941). + // TODO(nisse): Deprecated. Should be deleted if/when those issues + // are resolved in a better way. Or in the mean time, use SetBlack. + void InitializeData(); + + // Scale the cropped area of |src| to the size of |this| buffer, and + // write the result into |this|. + void CropAndScaleFrom(const NV12BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + protected: + NV12Buffer(int width, int height); + NV12Buffer(int width, int height, int stride_y, int stride_uv); + + ~NV12Buffer() override; + + private: + size_t UVOffset() const; + + const int width_; + const int height_; + const int stride_y_; + const int stride_uv_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_NV12_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/test/mock_recordable_encoded_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/test/mock_recordable_encoded_frame.h deleted file mode 100644 index 2178932d2..000000000 --- a/TMessagesProj/jni/voip/webrtc/api/video/test/mock_recordable_encoded_frame.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ -#define API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ - -#include "api/video/recordable_encoded_frame.h" -#include "test/gmock.h" - -namespace webrtc { -class MockRecordableEncodedFrame : public RecordableEncodedFrame { - public: - MOCK_METHOD(rtc::scoped_refptr, - encoded_buffer, - (), - (const, override)); - MOCK_METHOD(absl::optional, - color_space, - (), - (const, override)); - MOCK_METHOD(VideoCodecType, codec, (), (const, override)); - MOCK_METHOD(bool, is_key_frame, (), (const, override)); - MOCK_METHOD(EncodedResolution, resolution, (), (const, override)); - MOCK_METHOD(Timestamp, render_time, (), (const, override)); -}; -} // namespace webrtc -#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_codec_constants.h b/TMessagesProj/jni/voip/webrtc/api/video/video_codec_constants.h index 6b6feee4c..5859f9b4c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_codec_constants.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_codec_constants.h @@ -17,6 +17,7 @@ enum : int { kMaxEncoderBuffers = 8 }; enum : int { kMaxSimulcastStreams = 3 }; enum : int { kMaxSpatialLayers = 5 }; enum : int { kMaxTemporalStreams = 4 }; +enum : int { kMaxPreferredPixelFormats = 5 }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h index 08c939d91..e62aae8e5 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h @@ -186,6 +186,16 
@@ class RTC_EXPORT VideoFrame { color_space_ = color_space; } + // max_composition_delay_in_frames() is used in an experiment of a low-latency + // renderer algorithm see crbug.com/1138888. + absl::optional max_composition_delay_in_frames() const { + return max_composition_delay_in_frames_; + } + void set_max_composition_delay_in_frames( + absl::optional max_composition_delay_in_frames) { + max_composition_delay_in_frames_ = max_composition_delay_in_frames; + } + // Get render time in milliseconds. // TODO(nisse): Deprecated. Migrate all users to timestamp_us(). int64_t render_time_ms() const; @@ -255,6 +265,7 @@ class RTC_EXPORT VideoFrame { int64_t timestamp_us_; VideoRotation rotation_; absl::optional color_space_; + absl::optional max_composition_delay_in_frames_; // Updated since the last frame area. If present it means that the bounding // box of all the changes is within the rectangular area and is close to it. // If absent, it means that there's no information about the change at all and diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc index b9fd9cd92..64f339448 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc @@ -10,10 +10,25 @@ #include "api/video/video_frame_buffer.h" +#include "api/video/i420_buffer.h" #include "rtc_base/checks.h" namespace webrtc { +rtc::scoped_refptr VideoFrameBuffer::CropAndScale( + int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) { + rtc::scoped_refptr result = + I420Buffer::Create(scaled_width, scaled_height); + result->CropAndScaleFrom(*this->ToI420(), offset_x, offset_y, crop_width, + crop_height); + return result; +} + const I420BufferInterface* VideoFrameBuffer::GetI420() const { // Overridden by subclasses that can return an I420 buffer without any // conversion, in particular, 
I420BufferInterface. @@ -35,10 +50,40 @@ const I010BufferInterface* VideoFrameBuffer::GetI010() const { return static_cast(this); } +const NV12BufferInterface* VideoFrameBuffer::GetNV12() const { + RTC_CHECK(type() == Type::kNV12); + return static_cast(this); +} + +rtc::scoped_refptr VideoFrameBuffer::GetMappedFrameBuffer( + rtc::ArrayView types) { + RTC_CHECK(type() == Type::kNative); + return nullptr; +} + VideoFrameBuffer::Type I420BufferInterface::type() const { return Type::kI420; } +const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type) { + switch (type) { + case VideoFrameBuffer::Type::kNative: + return "kNative"; + case VideoFrameBuffer::Type::kI420: + return "kI420"; + case VideoFrameBuffer::Type::kI420A: + return "kI420A"; + case VideoFrameBuffer::Type::kI444: + return "kI444"; + case VideoFrameBuffer::Type::kI010: + return "kI010"; + case VideoFrameBuffer::Type::kNV12: + return "kNV12"; + default: + RTC_NOTREACHED(); + } +} + int I420BufferInterface::ChromaWidth() const { return (width() + 1) / 2; } @@ -83,4 +128,15 @@ int I010BufferInterface::ChromaHeight() const { return (height() + 1) / 2; } +VideoFrameBuffer::Type NV12BufferInterface::type() const { + return Type::kNV12; +} + +int NV12BufferInterface::ChromaWidth() const { + return (width() + 1) / 2; +} + +int NV12BufferInterface::ChromaHeight() const { + return (height() + 1) / 2; +} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h index d87a4230a..67b879732 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h @@ -13,6 +13,7 @@ #include +#include "api/array_view.h" #include "api/scoped_refptr.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -23,6 +24,7 @@ class I420BufferInterface; class I420ABufferInterface; class I444BufferInterface; class I010BufferInterface; 
+class NV12BufferInterface; // Base class for frame buffers of different types of pixel format and storage. // The tag in type() indicates how the data is represented, and each type is @@ -50,6 +52,7 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { kI420A, kI444, kI010, + kNV12, }; // This function specifies in what pixel format the data is stored in. @@ -72,18 +75,50 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { // WebrtcVideoFrameAdapter in Chrome - it's I420 buffer backed by a shared // memory buffer. Therefore it must have type kNative. Yet, ToI420() // doesn't affect binary data at all. Another example is any I420A buffer. + // TODO(https://crbug.com/webrtc/12021): Make this method non-virtual and + // behave as the other GetXXX methods below. virtual const I420BufferInterface* GetI420() const; + // A format specific scale function. Default implementation works by + // converting to I420. But more efficient implementations may override it, + // especially for kNative. + // First, the image is cropped to |crop_width| and |crop_height| and then + // scaled to |scaled_width| and |scaled_height|. + virtual rtc::scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height); + + // Alias for common use case. + rtc::scoped_refptr Scale(int scaled_width, + int scaled_height) { + return CropAndScale(0, 0, width(), height(), scaled_width, scaled_height); + } + // These functions should only be called if type() is of the correct type. // Calling with a different type will result in a crash. 
const I420ABufferInterface* GetI420A() const; const I444BufferInterface* GetI444() const; const I010BufferInterface* GetI010() const; + const NV12BufferInterface* GetNV12() const; + + // From a kNative frame, returns a VideoFrameBuffer with a pixel format in + // the list of types that is in the main memory with a pixel perfect + // conversion for encoding with a software encoder. Returns nullptr if the + // frame type is not supported, mapping is not possible, or if the kNative + // frame has not implemented this method. Only callable if type() is kNative. + virtual rtc::scoped_refptr GetMappedFrameBuffer( + rtc::ArrayView types); protected: ~VideoFrameBuffer() override {} }; +// Update when VideoFrameBuffer::Type is updated. +const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type); + // This interface represents planar formats. class PlanarYuvBuffer : public VideoFrameBuffer { public: @@ -175,6 +210,42 @@ class I010BufferInterface : public PlanarYuv16BBuffer { ~I010BufferInterface() override {} }; +class BiplanarYuvBuffer : public VideoFrameBuffer { + public: + virtual int ChromaWidth() const = 0; + virtual int ChromaHeight() const = 0; + + // Returns the number of steps(in terms of Data*() return type) between + // successive rows for a given plane. + virtual int StrideY() const = 0; + virtual int StrideUV() const = 0; + + protected: + ~BiplanarYuvBuffer() override {} +}; + +class BiplanarYuv8Buffer : public BiplanarYuvBuffer { + public: + virtual const uint8_t* DataY() const = 0; + virtual const uint8_t* DataUV() const = 0; + + protected: + ~BiplanarYuv8Buffer() override {} +}; + +// Represents Type::kNV12. NV12 is full resolution Y and half-resolution +// interleved UV. 
+class RTC_EXPORT NV12BufferInterface : public BiplanarYuv8Buffer { + public: + Type type() const override; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + protected: + ~NV12BufferInterface() override {} +}; + } // namespace webrtc #endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_layers_allocation.h b/TMessagesProj/jni/voip/webrtc/api/video/video_layers_allocation.h new file mode 100644 index 000000000..39734151a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_layers_allocation.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_ +#define API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "api/units/data_rate.h" + +namespace webrtc { + +// This struct contains additional stream-level information needed by a +// Selective Forwarding Middlebox to make relay decisions of RTP streams. 
+struct VideoLayersAllocation { + static constexpr int kMaxSpatialIds = 4; + static constexpr int kMaxTemporalIds = 4; + + friend bool operator==(const VideoLayersAllocation& lhs, + const VideoLayersAllocation& rhs) { + return lhs.rtp_stream_index == rhs.rtp_stream_index && + lhs.resolution_and_frame_rate_is_valid == + rhs.resolution_and_frame_rate_is_valid && + lhs.active_spatial_layers == rhs.active_spatial_layers; + } + + friend bool operator!=(const VideoLayersAllocation& lhs, + const VideoLayersAllocation& rhs) { + return !(lhs == rhs); + } + + struct SpatialLayer { + friend bool operator==(const SpatialLayer& lhs, const SpatialLayer& rhs) { + return lhs.rtp_stream_index == rhs.rtp_stream_index && + lhs.spatial_id == rhs.spatial_id && + lhs.target_bitrate_per_temporal_layer == + rhs.target_bitrate_per_temporal_layer && + lhs.width == rhs.width && lhs.height == rhs.height && + lhs.frame_rate_fps == rhs.frame_rate_fps; + } + + friend bool operator!=(const SpatialLayer& lhs, const SpatialLayer& rhs) { + return !(lhs == rhs); + } + int rtp_stream_index = 0; + // Index of the spatial layer per `rtp_stream_index`. + int spatial_id = 0; + // Target bitrate per decode target. + absl::InlinedVector + target_bitrate_per_temporal_layer; + + // These fields are only valid if `resolution_and_frame_rate_is_valid` is + // true + uint16_t width = 0; + uint16_t height = 0; + // Max frame rate used in any temporal layer of this spatial layer. + uint8_t frame_rate_fps = 0; + }; + + // Index of the rtp stream this allocation is sent on. Used for mapping + // a SpatialLayer to a rtp stream. 
+ int rtp_stream_index = 0; + bool resolution_and_frame_rate_is_valid = false; + absl::InlinedVector active_spatial_layers; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h index 8f27fa4db..4bf8b985c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h @@ -17,6 +17,7 @@ #include "api/units/time_delta.h" #include "api/video/encoded_frame.h" +#include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory.h" @@ -29,6 +30,11 @@ class VideoStreamDecoderInterface { public: virtual ~Callbacks() = default; + struct FrameInfo { + absl::optional qp; + VideoContentType content_type; + }; + // Called when the VideoStreamDecoder enters a non-decodable state. virtual void OnNonDecodableState() = 0; @@ -36,10 +42,8 @@ class VideoStreamDecoderInterface { virtual void OnContinuousUntil( const video_coding::VideoLayerFrameId& key) = 0; - // Called with the decoded frame. 
- virtual void OnDecodedFrame(VideoFrame decodedImage, - absl::optional decode_time_ms, - absl::optional qp) = 0; + virtual void OnDecodedFrame(VideoFrame frame, + const FrameInfo& frame_info) = 0; }; virtual ~VideoStreamDecoderInterface() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_interface.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_interface.h index d8dd8e159..34fa6421c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_interface.h @@ -19,6 +19,7 @@ #include "api/scoped_refptr.h" #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocator.h" +#include "api/video/video_layers_allocation.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video_codecs/video_encoder.h" @@ -49,6 +50,12 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface { bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps) = 0; + + virtual void OnBitrateAllocationUpdated( + const VideoBitrateAllocation& allocation) = 0; + + virtual void OnVideoLayersAllocationUpdated( + VideoLayersAllocation allocation) = 0; }; // If the resource is overusing, the VideoStreamEncoder will try to reduce @@ -110,11 +117,6 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface { int64_t round_trip_time_ms, double cwnd_reduce_ratio) = 0; - // Register observer for the bitrate allocation between the temporal - // and spatial layers. - virtual void SetBitrateAllocationObserver( - VideoBitrateAllocationObserver* bitrate_observer) = 0; - // Set a FecControllerOverride, through which the encoder may override // decisions made by FecController. 
virtual void SetFecControllerOverride( diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h index 743524b35..cbeed3d07 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h @@ -39,6 +39,12 @@ class EncoderSwitchRequestCallback { }; struct VideoStreamEncoderSettings { + enum class BitrateAllocationCallbackType { + kVideoBitrateAllocation, + kVideoBitrateAllocationWhenScreenSharing, + kVideoLayersAllocation + }; + explicit VideoStreamEncoderSettings( const VideoEncoder::Capabilities& capabilities) : capabilities(capabilities) {} @@ -59,6 +65,11 @@ struct VideoStreamEncoderSettings { // Negotiated capabilities which the VideoEncoder may expect the other // side to use. VideoEncoder::Capabilities capabilities; + + // TODO(bugs.webrtc.org/12000): Reporting of VideoBitrateAllocation is beeing + // deprecated. Instead VideoLayersAllocation should be reported. + BitrateAllocationCallbackType allocation_cb_type = + BitrateAllocationCallbackType::kVideoBitrateAllocationWhenScreenSharing; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h index 4cc75dd0b..fbd92254a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h @@ -100,6 +100,30 @@ struct TimingFrameInfo { uint8_t flags; // Flags indicating validity and/or why tracing was triggered. }; +// Minimum and maximum playout delay values from capture to render. +// These are best effort values. +// +// A value < 0 indicates no change from previous valid value. +// +// min = max = 0 indicates that the receiver should try and render +// frame as soon as possible. 
+// +// min = x, max = y indicates that the receiver is free to adapt +// in the range (x, y) based on network jitter. +struct VideoPlayoutDelay { + VideoPlayoutDelay() = default; + VideoPlayoutDelay(int min_ms, int max_ms) : min_ms(min_ms), max_ms(max_ms) {} + int min_ms = -1; + int max_ms = -1; + + bool operator==(const VideoPlayoutDelay& rhs) const { + return min_ms == rhs.min_ms && max_ms == rhs.max_ms; + } +}; + +// TODO(bugs.webrtc.org/7660): Old name, delete after downstream use is updated. +using PlayoutDelay = VideoPlayoutDelay; + } // namespace webrtc #endif // API_VIDEO_VIDEO_TIMING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/spatial_layer.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/spatial_layer.cc new file mode 100644 index 000000000..25ccdfeb4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/spatial_layer.cc @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/spatial_layer.h" + +namespace webrtc { + +bool SpatialLayer::operator==(const SpatialLayer& other) const { + return (width == other.width && height == other.height && + maxFramerate == other.maxFramerate && + numberOfTemporalLayers == other.numberOfTemporalLayers && + maxBitrate == other.maxBitrate && + targetBitrate == other.targetBitrate && + minBitrate == other.minBitrate && qpMax == other.qpMax && + active == other.active); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/spatial_layer.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/spatial_layer.h new file mode 100644 index 000000000..5a1b42542 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/spatial_layer.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_SPATIAL_LAYER_H_ +#define API_VIDEO_CODECS_SPATIAL_LAYER_H_ + +namespace webrtc { + +struct SpatialLayer { + bool operator==(const SpatialLayer& other) const; + bool operator!=(const SpatialLayer& other) const { return !(*this == other); } + + unsigned short width; // NOLINT(runtime/int) + unsigned short height; // NOLINT(runtime/int) + float maxFramerate; // fps. + unsigned char numberOfTemporalLayers; + unsigned int maxBitrate; // kilobits/sec. + unsigned int targetBitrate; // kilobits/sec. + unsigned int minBitrate; // kilobits/sec. + unsigned int qpMax; // minimum quality + bool active; // encoded and sent. 
+}; + +} // namespace webrtc +#endif // API_VIDEO_CODECS_SPATIAL_LAYER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc index a21e692aa..d7ee165e6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc @@ -70,19 +70,8 @@ bool VideoCodecH265::operator==(const VideoCodecH265& other) const { } #endif -bool SpatialLayer::operator==(const SpatialLayer& other) const { - return (width == other.width && height == other.height && - maxFramerate == other.maxFramerate && - numberOfTemporalLayers == other.numberOfTemporalLayers && - maxBitrate == other.maxBitrate && - targetBitrate == other.targetBitrate && - minBitrate == other.minBitrate && qpMax == other.qpMax && - active == other.active); -} - VideoCodec::VideoCodec() : codecType(kVideoCodecGeneric), - plType(0), width(0), height(0), startBitrate(0), @@ -162,6 +151,7 @@ const char* CodecTypeToPayloadString(VideoCodecType type) { default: return kPayloadNameGeneric; } + RTC_CHECK_NOTREACHED(); } VideoCodecType PayloadStringToCodecType(const std::string& name) { diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h index 52315282b..14da6006e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h @@ -16,10 +16,11 @@ #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_type.h" -#include "common_types.h" // NOLINT(build/include_directory) +#include "api/video_codecs/spatial_layer.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -121,11 +122,16 @@ class RTC_EXPORT VideoCodec { public: VideoCodec(); + // Scalability mode as described in + // 
https://www.w3.org/TR/webrtc-svc/#scalabilitymodes* + // or value 'NONE' to indicate no scalability. + absl::string_view ScalabilityMode() const { return scalability_mode_; } + void SetScalabilityMode(absl::string_view scalability_mode) { + scalability_mode_ = std::string(scalability_mode); + } + // Public variables. TODO(hta): Make them private with accessors. VideoCodecType codecType; - // TODO(nisse): Unused in webrtc, delete as soon as downstream projects are - // updated. - unsigned char plType; // TODO(nisse): Change to int, for consistency. uint16_t width; @@ -143,7 +149,7 @@ class RTC_EXPORT VideoCodec { unsigned int qpMax; unsigned char numberOfSimulcastStreams; - SimulcastStream simulcastStream[kMaxSimulcastStreams]; + SpatialLayer simulcastStream[kMaxSimulcastStreams]; SpatialLayer spatialLayers[kMaxSpatialLayers]; VideoCodecMode mode; @@ -193,6 +199,7 @@ class RTC_EXPORT VideoCodec { // TODO(hta): Consider replacing the union with a pointer type. // This will allow removing the VideoCodec* types from this file. 
VideoCodecUnion codec_specific_; + std::string scalability_mode_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc index 3f881e261..b03933a5c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc @@ -18,19 +18,6 @@ namespace webrtc { -EncodedImageCallback::Result EncodedImageCallback::OnEncodedImage( - const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* /*fragmentation*/) { - return OnEncodedImage(encoded_image, codec_specific_info); -} - -EncodedImageCallback::Result EncodedImageCallback::OnEncodedImage( - const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) { - return OnEncodedImage(encoded_image, codec_specific_info, nullptr); -} - // TODO(mflodman): Add default complexity for VP9 and VP9. 
VideoCodecVP8 VideoEncoder::GetDefaultVp8Settings() { VideoCodecVP8 vp8_settings; @@ -124,6 +111,7 @@ bool VideoEncoder::ResolutionBitrateLimits::operator==( VideoEncoder::EncoderInfo::EncoderInfo() : scaling_settings(VideoEncoder::ScalingSettings::kOff), requested_resolution_alignment(1), + apply_alignment_to_all_simulcast_layers(false), supports_native_handle(false), implementation_name("unknown"), has_trusted_rate_controller(false), @@ -132,7 +120,8 @@ VideoEncoder::EncoderInfo::EncoderInfo() fps_allocation{absl::InlinedVector( 1, kMaxFramerateFraction)}, - supports_simulcast(false) {} + supports_simulcast(false), + preferred_pixel_formats{VideoFrameBuffer::Type::kI420} {} VideoEncoder::EncoderInfo::EncoderInfo(const EncoderInfo&) = default; @@ -153,6 +142,8 @@ std::string VideoEncoder::EncoderInfo::ToString() const { oss << "min_pixels_per_frame = " << scaling_settings.min_pixels_per_frame << " }"; oss << ", requested_resolution_alignment = " << requested_resolution_alignment + << ", apply_alignment_to_all_simulcast_layers = " + << apply_alignment_to_all_simulcast_layers << ", supports_native_handle = " << supports_native_handle << ", implementation_name = '" << implementation_name << "'" @@ -196,7 +187,15 @@ std::string VideoEncoder::EncoderInfo::ToString() const { } oss << "] " ", supports_simulcast = " - << supports_simulcast << "}"; + << supports_simulcast; + oss << ", preferred_pixel_formats = ["; + for (size_t i = 0; i < preferred_pixel_formats.size(); ++i) { + if (i > 0) + oss << ", "; + oss << VideoFrameBufferTypeToString(preferred_pixel_formats.at(i)); + } + oss << "]"; + oss << "}"; return oss.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h index e8af1dd75..3c9c2376a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h @@ -30,7 +30,6 @@ namespace webrtc { 
-class RTPFragmentationHeader; // TODO(pbos): Expose these through a public (root) header or change these APIs. struct CodecSpecificInfo; @@ -73,16 +72,9 @@ class RTC_EXPORT EncodedImageCallback { }; // Callback function which is called when an image has been encoded. - // Deprecated, use OnEncodedImage below instead, see bugs.webrtc.org/6471 - virtual Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation); - - // Callback function which is called when an image has been encoded. - // TODO(bugs.webrtc.org/6471): Make pure virtual - // when OnEncodedImage above is deleted. - virtual Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info); + virtual Result OnEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) = 0; virtual void OnDroppedFrame(DropReason reason) {} }; @@ -182,6 +174,15 @@ class RTC_EXPORT VideoEncoder { // requirements the encoder has on the incoming video frame buffers. int requested_resolution_alignment; + // Same as above but if true, each simulcast layer should also be divisible + // by |requested_resolution_alignment|. + // Note that scale factors |scale_resolution_down_by| may be adjusted so a + // common multiple is not too large to avoid largely cropped frames and + // possibly with an aspect ratio far from the original. + // Warning: large values of scale_resolution_down_by could be changed + // considerably, especially if |requested_resolution_alignment| is large. + bool apply_alignment_to_all_simulcast_layers; + // If true, encoder supports working with a native handle (e.g. texture // handle for hw codecs) rather than requiring a raw I420 buffer. bool supports_native_handle; @@ -253,6 +254,12 @@ class RTC_EXPORT VideoEncoder { // in such case the encoder should return // WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED. 
bool supports_simulcast; + + // The list of pixel formats preferred by the encoder. It is assumed that if + // the list is empty and supports_native_handle is false, then {I420} is the + // preferred pixel format. The order of the formats does not matter. + absl::InlinedVector + preferred_pixel_formats; }; struct RTC_EXPORT RateControlParameters { @@ -266,6 +273,9 @@ class RTC_EXPORT VideoEncoder { // Target bitrate, per spatial/temporal layer. // A target bitrate of 0bps indicates a layer should not be encoded at all. + VideoBitrateAllocation target_bitrate; + // Adjusted target bitrate, per spatial/temporal layer. May be lower or + // higher than the target depending on encoder behaviour. VideoBitrateAllocation bitrate; // Target framerate, in fps. A value <= 0.0 is invalid and should be // interpreted as framerate target not available. In this case the encoder @@ -377,7 +387,7 @@ class RTC_EXPORT VideoEncoder { // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise. virtual int32_t Release() = 0; - // Encode an I420 image (as a part of a video stream). The encoded image + // Encode an image (as a part of a video stream). The encoded image // will be returned to the user through the encode complete callback. 
// // Input: diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc index 1185e2ced..45d579503 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc @@ -43,6 +43,7 @@ std::string VideoStream::ToString() const { ss << ", num_temporal_layers: " << num_temporal_layers.value_or(1); ss << ", bitrate_priority: " << bitrate_priority.value_or(0); ss << ", active: " << active; + ss << ", scale_down_by: " << scale_resolution_down_by; return ss.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h index b613218d4..81de62daf 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h @@ -64,6 +64,8 @@ struct VideoStream { // between multiple streams. absl::optional bitrate_priority; + absl::optional scalability_mode; + // If this stream is enabled by the user, or not. bool active; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h index 0a3c1aee6..22430eb19 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h @@ -28,8 +28,6 @@ class VideoEncoderFactory { public: // TODO(magjed): Try to get rid of this struct. struct CodecInfo { - // TODO(nisse): Unused in webrtc, delete as soon as downstream use is fixed. 
- bool is_hardware_accelerated = false; // |has_internal_source| is true if encoders created by this factory of the // given codec will use internal camera sources, meaning that they don't // require/expect frames to be delivered via webrtc::VideoEncoder::Encode. diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc index 354e8c25a..94a18171a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc @@ -56,7 +56,7 @@ struct ForcedFallbackParams { bool SupportsTemporalBasedSwitch(const VideoCodec& codec) const { return enable_temporal_based_switch && - SimulcastUtility::NumberOfTemporalLayers(codec, 0) > 1; + SimulcastUtility::NumberOfTemporalLayers(codec, 0) != 1; } bool enable_temporal_based_switch = false; @@ -162,6 +162,7 @@ class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { case EncoderState::kForcedFallback: return fallback_encoder_.get(); } + RTC_CHECK_NOTREACHED(); } // Updates encoder with last observed parameters, such as callbacks, rates, @@ -345,7 +346,9 @@ int32_t VideoEncoderSoftwareFallbackWrapper::Encode( case EncoderState::kForcedFallback: return fallback_encoder_->Encode(frame, frame_types); } + RTC_CHECK_NOTREACHED(); } + int32_t VideoEncoderSoftwareFallbackWrapper::EncodeWithMainEncoder( const VideoFrame& frame, const std::vector* frame_types) { @@ -367,9 +370,12 @@ int32_t VideoEncoderSoftwareFallbackWrapper::EncodeWithMainEncoder( RTC_LOG(LS_ERROR) << "Failed to convert from to I420"; return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; } - rtc::scoped_refptr dst_buffer = - I420Buffer::Create(codec_settings_.width, codec_settings_.height); - dst_buffer->ScaleFrom(*src_buffer); + rtc::scoped_refptr dst_buffer = + src_buffer->Scale(codec_settings_.width, 
codec_settings_.height); + if (!dst_buffer) { + RTC_LOG(LS_ERROR) << "Failed to scale video frame."; + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; + } VideoFrame scaled_frame = frame; scaled_frame.set_video_frame_buffer(dst_buffer); scaled_frame.set_update_rect(VideoFrame::UpdateRect{ @@ -456,7 +462,7 @@ bool VideoEncoderSoftwareFallbackWrapper::TryInitForcedFallbackEncoder() { } if (encoder_state_ == EncoderState::kMainEncoderUsed && - encoder_->GetEncoderInfo().fps_allocation[0].size() > 1) { + encoder_->GetEncoderInfo().fps_allocation[0].size() != 1) { // Primary encoder already supports temporal layers, use that instead. return true; } @@ -465,7 +471,7 @@ bool VideoEncoderSoftwareFallbackWrapper::TryInitForcedFallbackEncoder() { if (fallback_encoder_->InitEncode(&codec_settings_, encoder_settings_.value()) == WEBRTC_VIDEO_CODEC_OK) { - if (fallback_encoder_->GetEncoderInfo().fps_allocation[0].size() > 1) { + if (fallback_encoder_->GetEncoderInfo().fps_allocation[0].size() != 1) { // Fallback encoder available and supports temporal layers, use it! 
if (encoder_state_ == EncoderState::kMainEncoderUsed) { // Main encoder initialized but does not support temporal layers, diff --git a/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h b/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h index 528b7cf70..692ff6493 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h @@ -23,8 +23,8 @@ namespace webrtc { BEGIN_PROXY_MAP(VideoTrackSource) PROXY_SIGNALING_THREAD_DESTRUCTOR() PROXY_CONSTMETHOD0(SourceState, state) -PROXY_CONSTMETHOD0(bool, remote) -PROXY_CONSTMETHOD0(bool, is_screencast) +BYPASS_PROXY_CONSTMETHOD0(bool, remote) +BYPASS_PROXY_CONSTMETHOD0(bool, is_screencast) PROXY_CONSTMETHOD0(absl::optional, needs_denoising) PROXY_METHOD1(bool, GetStats, Stats*) PROXY_WORKER_METHOD2(void, diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_dtmf.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_dtmf.h new file mode 100644 index 000000000..56817bae5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_dtmf.h @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VOIP_VOIP_DTMF_H_ +#define API_VOIP_VOIP_DTMF_H_ + +#include "api/voip/voip_base.h" + +namespace webrtc { + +// DTMF events and their event codes as defined in +// https://tools.ietf.org/html/rfc4733#section-7 +enum class DtmfEvent : uint8_t { + kDigitZero = 0, + kDigitOne, + kDigitTwo, + kDigitThree, + kDigitFour, + kDigitFive, + kDigitSix, + kDigitSeven, + kDigitEight, + kDigitNine, + kAsterisk, + kHash, + kLetterA, + kLetterB, + kLetterC, + kLetterD +}; + +// VoipDtmf interface provides DTMF related interfaces such +// as sending DTMF events to the remote endpoint. +class VoipDtmf { + public: + // Register the payload type and sample rate for DTMF (RFC 4733) payload. + // Must be called exactly once prior to calling SendDtmfEvent after payload + // type has been negotiated with remote. + virtual void RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) = 0; + + // Send DTMF named event as specified by + // https://tools.ietf.org/html/rfc4733#section-3.2 + // |duration_ms| specifies the duration of DTMF packets that will be emitted + // in place of real RTP packets instead. + // Must be called after RegisterTelephoneEventType and VoipBase::StartSend + // have been called. + // Returns true if the requested DTMF event is successfully scheduled. 
+ virtual bool SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) = 0; + + protected: + virtual ~VoipDtmf() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_DTMF_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h index 81c97c02e..69c0a8504 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h @@ -16,6 +16,9 @@ namespace webrtc { class VoipBase; class VoipCodec; class VoipNetwork; +class VoipDtmf; +class VoipStatistics; +class VoipVolumeControl; // VoipEngine is the main interface serving as the entry point for all VoIP // APIs. A single instance of VoipEngine should suffice the most of the need for @@ -80,6 +83,16 @@ class VoipEngine { // VoipCodec provides codec configuration APIs for encoder and decoders. virtual VoipCodec& Codec() = 0; + + // VoipDtmf provides DTMF event APIs to register and send DTMF events. + virtual VoipDtmf& Dtmf() = 0; + + // VoipStatistics provides performance metrics around audio decoding module + // and jitter buffer (NetEq). + virtual VoipStatistics& Statistics() = 0; + + // VoipVolumeControl provides various input/output volume control. 
+ virtual VoipVolumeControl& VolumeControl() = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.cc b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.cc index 6ac3c8621..88f63f9c9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.cc @@ -27,18 +27,11 @@ std::unique_ptr CreateVoipEngine(VoipEngineConfig config) { RTC_DLOG(INFO) << "No audio processing functionality provided."; } - auto voip_core = std::make_unique(); - - if (!voip_core->Init(std::move(config.encoder_factory), - std::move(config.decoder_factory), - std::move(config.task_queue_factory), - std::move(config.audio_device_module), - std::move(config.audio_processing))) { - RTC_DLOG(LS_ERROR) << "Failed to initialize VoIP core."; - return nullptr; - } - - return voip_core; + return std::make_unique(std::move(config.encoder_factory), + std::move(config.decoder_factory), + std::move(config.task_queue_factory), + std::move(config.audio_device_module), + std::move(config.audio_processing)); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.h index 658ebfac8..62fe8011a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine_factory.h @@ -61,9 +61,6 @@ struct VoipEngineConfig { }; // Creates a VoipEngine instance with provided VoipEngineConfig. -// This could return nullptr if AudioDeviceModule (ADM) initialization fails -// during construction of VoipEngine which would render VoipEngine -// nonfunctional. 
std::unique_ptr CreateVoipEngine(VoipEngineConfig config); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_statistics.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_statistics.h new file mode 100644 index 000000000..cf01e95e9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_statistics.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_STATISTICS_H_ +#define API_VOIP_VOIP_STATISTICS_H_ + +#include "api/neteq/neteq.h" +#include "api/voip/voip_base.h" + +namespace webrtc { + +struct IngressStatistics { + // Stats included from api/neteq/neteq.h. + NetEqLifetimeStatistics neteq_stats; + + // Represents the total duration in seconds of all samples that have been + // received. + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalsamplesduration + double total_duration = 0.0; +}; + +// VoipStatistics interface provides the interfaces for querying metrics around +// the jitter buffer (NetEq) performance. +class VoipStatistics { + public: + // Gets the audio ingress statistics. Returns absl::nullopt when channel_id is + // invalid. 
+ virtual absl::optional GetIngressStatistics( + ChannelId channel_id) = 0; + + protected: + virtual ~VoipStatistics() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_STATISTICS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_volume_control.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_volume_control.h new file mode 100644 index 000000000..54e446715 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_volume_control.h @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_VOLUME_CONTROL_H_ +#define API_VOIP_VOIP_VOLUME_CONTROL_H_ + +#include "api/voip/voip_base.h" + +namespace webrtc { + +struct VolumeInfo { + // https://w3c.github.io/webrtc-stats/#dom-rtcaudiosourcestats-audiolevel + double audio_level = 0; + // https://w3c.github.io/webrtc-stats/#dom-rtcaudiosourcestats-totalaudioenergy + double total_energy = 0.0; + // https://w3c.github.io/webrtc-stats/#dom-rtcaudiosourcestats-totalsamplesduration + double total_duration = 0.0; +}; + +// VoipVolumeControl interface. +// +// This sub-API supports functions related to the input (microphone) and output +// (speaker) device. +// +// Caller must ensure that ChannelId is valid otherwise it will result in no-op +// with error logging. +class VoipVolumeControl { + public: + // Mute/unmutes the microphone input sample before encoding process. Note that + // mute doesn't affect audio input level and energy values as input sample is + // silenced after the measurement. + virtual void SetInputMuted(ChannelId channel_id, bool enable) = 0; + + // Gets the microphone volume info. 
+ // Returns absl::nullopt if |channel_id| is invalid. + virtual absl::optional GetInputVolumeInfo( + ChannelId channel_id) = 0; + + // Gets the speaker volume info. + // Returns absl::nullopt if |channel_id| is invalid. + virtual absl::optional GetOutputVolumeInfo( + ChannelId channel_id) = 0; + + protected: + virtual ~VoipVolumeControl() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_VOLUME_CONTROL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc index 6bc0d4137..54c8a0297 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc @@ -173,7 +173,8 @@ void AudioReceiveStream::Stop() { audio_state()->RemoveReceivingStream(this); } -webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const { +webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats( + bool get_and_clear_legacy_stats) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); webrtc::AudioReceiveStream::Stats stats; stats.remote_ssrc = config_.rtp.remote_ssrc; @@ -210,7 +211,7 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const { rtc::TimeMillis()); // Get jitter buffer and total delay (alg + jitter + playout) stats. 
- auto ns = channel_receive_->GetNetworkStatistics(); + auto ns = channel_receive_->GetNetworkStatistics(get_and_clear_legacy_stats); stats.fec_packets_received = ns.fecPacketsReceived; stats.fec_packets_discarded = ns.fecPacketsDiscarded; stats.jitter_buffer_ms = ns.currentBufferSize; @@ -329,7 +330,7 @@ void AudioReceiveStream::SetEstimatedPlayoutNtpTimestampMs( time_ms); } -void AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { +bool AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK_RUN_ON(&module_process_thread_checker_); return channel_receive_->SetMinimumPlayoutDelay(delay_ms); } @@ -349,14 +350,6 @@ void AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { channel_receive_->ReceivedRTCPPacket(packet, length); } -void AudioReceiveStream::OnRtpPacket(const RtpPacketReceived& packet) { - // TODO(solenberg): Tests call this function on a network thread, libjingle - // calls on the worker thread. We should move towards always using a network - // thread. Then this check can be enabled. 
- // RTC_DCHECK(!thread_checker_.IsCurrent()); - channel_receive_->OnRtpPacket(packet); -} - const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return config_; diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h index c197aa883..32f8b60d5 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h @@ -21,7 +21,6 @@ #include "call/audio_receive_stream.h" #include "call/syncable.h" #include "modules/rtp_rtcp/source/source_tracker.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" @@ -61,25 +60,25 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, const rtc::scoped_refptr& audio_state, webrtc::RtcEventLog* event_log, std::unique_ptr channel_receive); + + AudioReceiveStream() = delete; + AudioReceiveStream(const AudioReceiveStream&) = delete; + AudioReceiveStream& operator=(const AudioReceiveStream&) = delete; + ~AudioReceiveStream() override; // webrtc::AudioReceiveStream implementation. void Reconfigure(const webrtc::AudioReceiveStream::Config& config) override; void Start() override; void Stop() override; - webrtc::AudioReceiveStream::Stats GetStats() const override; + webrtc::AudioReceiveStream::Stats GetStats( + bool get_and_clear_legacy_stats) const override; void SetSink(AudioSinkInterface* sink) override; void SetGain(float gain) override; bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override; int GetBaseMinimumPlayoutDelayMs() const override; std::vector GetSources() const override; - // TODO(nisse): We don't formally implement RtpPacketSinkInterface, and this - // method shouldn't be needed. But it's currently used by the - // AudioReceiveStreamTest.ReceiveRtpPacket unittest. 
Figure out if that test - // shuld be refactored or deleted, and then delete this method. - void OnRtpPacket(const RtpPacketReceived& packet); - // AudioMixer::Source AudioFrameInfo GetAudioFrameWithInfo(int sample_rate_hz, AudioFrame* audio_frame) override; @@ -93,7 +92,7 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, int64_t* time_ms) const override; void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) override; - void SetMinimumPlayoutDelay(int delay_ms) override; + bool SetMinimumPlayoutDelay(int delay_ms) override; void AssociateSendStream(AudioSendStream* send_stream); void DeliverRtcp(const uint8_t* packet, size_t length); @@ -118,8 +117,6 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, bool playing_ RTC_GUARDED_BY(worker_thread_checker_) = false; std::unique_ptr rtp_stream_receiver_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioReceiveStream); }; } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc index 1856902d5..1c0a32f86 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc @@ -149,7 +149,7 @@ AudioSendStream::AudioSendStream( enable_audio_alr_probing_( !field_trial::IsDisabled("WebRTC-Audio-AlrProbing")), send_side_bwe_with_overhead_( - field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")), + !field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead")), config_(Config(/*send_transport=*/nullptr)), audio_state_(audio_state), channel_send_(std::move(channel_send)), @@ -638,11 +638,11 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { if (new_config.audio_network_adaptor_config) { if (encoder->EnableAudioNetworkAdaptor( *new_config.audio_network_adaptor_config, event_log_)) { - RTC_DLOG(LS_INFO) << "Audio network adaptor enabled on SSRC " - << 
new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Audio network adaptor enabled on SSRC " + << new_config.rtp.ssrc; } else { - RTC_DLOG(LS_INFO) << "Failed to enable Audio network adaptor on SSRC " - << new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Failed to enable Audio network adaptor on SSRC " + << new_config.rtp.ssrc; } } @@ -734,21 +734,29 @@ void AudioSendStream::ReconfigureANA(const Config& new_config) { return; } if (new_config.audio_network_adaptor_config) { + // This lock needs to be acquired before CallEncoder, since it aquires + // another lock and we need to maintain the same order at all call sites to + // avoid deadlock. + MutexLock lock(&overhead_per_packet_lock_); + size_t overhead = GetPerPacketOverheadBytes(); channel_send_->CallEncoder([&](AudioEncoder* encoder) { if (encoder->EnableAudioNetworkAdaptor( *new_config.audio_network_adaptor_config, event_log_)) { - RTC_DLOG(LS_INFO) << "Audio network adaptor enabled on SSRC " - << new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Audio network adaptor enabled on SSRC " + << new_config.rtp.ssrc; + if (overhead > 0) { + encoder->OnReceivedOverhead(overhead); + } } else { - RTC_DLOG(LS_INFO) << "Failed to enable Audio network adaptor on SSRC " - << new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Failed to enable Audio network adaptor on SSRC " + << new_config.rtp.ssrc; } }); } else { channel_send_->CallEncoder( [&](AudioEncoder* encoder) { encoder->DisableAudioNetworkAdaptor(); }); - RTC_DLOG(LS_INFO) << "Audio network adaptor disabled on SSRC " - << new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Audio network adaptor disabled on SSRC " + << new_config.rtp.ssrc; } } diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h index 7bc318312..12fcb9f21 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h @@ -21,7 +21,6 @@ #include "call/audio_state.h" #include 
"call/bitrate_allocator.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" @@ -75,6 +74,11 @@ class AudioSendStream final : public webrtc::AudioSendStream, RtcEventLog* event_log, const absl::optional& suspended_rtp_state, std::unique_ptr channel_send); + + AudioSendStream() = delete; + AudioSendStream(const AudioSendStream&) = delete; + AudioSendStream& operator=(const AudioSendStream&) = delete; + ~AudioSendStream() override; // webrtc::AudioSendStream implementation. @@ -206,8 +210,6 @@ class AudioSendStream final : public webrtc::AudioSendStream, size_t total_packet_overhead_bytes_ RTC_GUARDED_BY(worker_queue_) = 0; absl::optional> frame_length_range_ RTC_GUARDED_BY(worker_queue_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioSendStream); }; } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc index 73366e20a..566bae131 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc @@ -28,7 +28,9 @@ namespace internal { AudioState::AudioState(const AudioState::Config& config) : config_(config), - audio_transport_(config_.audio_mixer, config_.audio_processing.get()) { + audio_transport_(config_.audio_mixer, + config_.audio_processing.get(), + config_.async_audio_processing_factory.get()) { process_thread_checker_.Detach(); RTC_DCHECK(config_.audio_mixer); RTC_DCHECK(config_.audio_device_module); diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_state.h b/TMessagesProj/jni/voip/webrtc/audio/audio_state.h index 70c720832..5e766428d 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_state.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_state.h @@ -18,7 +18,6 @@ #include "audio/audio_transport_impl.h" 
#include "audio/null_audio_poller.h" #include "call/audio_state.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/thread_checker.h" @@ -32,6 +31,11 @@ namespace internal { class AudioState : public webrtc::AudioState { public: explicit AudioState(const AudioState::Config& config); + + AudioState() = delete; + AudioState(const AudioState&) = delete; + AudioState& operator=(const AudioState&) = delete; + ~AudioState() override; AudioProcessing* audio_processing() override; @@ -82,8 +86,6 @@ class AudioState : public webrtc::AudioState { size_t num_channels = 0; }; std::map sending_streams_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioState); }; } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc index 11b37ffcf..8710ced9b 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc @@ -17,6 +17,7 @@ #include "audio/remix_resample.h" #include "audio/utility/audio_frame_operations.h" #include "call/audio_sender.h" +#include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_processing/include/audio_frame_proxies.h" #include "rtc_base/checks.h" @@ -83,9 +84,19 @@ int Resample(const AudioFrame& frame, } } // namespace -AudioTransportImpl::AudioTransportImpl(AudioMixer* mixer, - AudioProcessing* audio_processing) - : audio_processing_(audio_processing), mixer_(mixer) { +AudioTransportImpl::AudioTransportImpl( + AudioMixer* mixer, + AudioProcessing* audio_processing, + AsyncAudioProcessing::Factory* async_audio_processing_factory) + : audio_processing_(audio_processing), + async_audio_processing_( + async_audio_processing_factory + ? 
async_audio_processing_factory->CreateAsyncAudioProcessing( + [this](std::unique_ptr frame) { + this->SendProcessedData(std::move(frame)); + }) + : nullptr), + mixer_(mixer) { RTC_DCHECK(mixer); } @@ -151,23 +162,34 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( { MutexLock lock(&capture_lock_); typing_noise_detected_ = typing_detected; - - RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); - if (!audio_senders_.empty()) { - auto it = audio_senders_.begin(); - while (++it != audio_senders_.end()) { - std::unique_ptr audio_frame_copy(new AudioFrame()); - audio_frame_copy->CopyFrom(*audio_frame); - (*it)->SendAudioData(std::move(audio_frame_copy)); - } - // Send the original frame to the first stream w/o copying. - (*audio_senders_.begin())->SendAudioData(std::move(audio_frame)); - } } + RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); + if (async_audio_processing_) + async_audio_processing_->Process(std::move(audio_frame)); + else + SendProcessedData(std::move(audio_frame)); + return 0; } +void AudioTransportImpl::SendProcessedData( + std::unique_ptr audio_frame) { + RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); + MutexLock lock(&capture_lock_); + if (audio_senders_.empty()) + return; + + auto it = audio_senders_.begin(); + while (++it != audio_senders_.end()) { + auto audio_frame_copy = std::make_unique(); + audio_frame_copy->CopyFrom(*audio_frame); + (*it)->SendAudioData(std::move(audio_frame_copy)); + } + // Send the original frame to the first stream w/o copying. + (*audio_senders_.begin())->SendAudioData(std::move(audio_frame)); +} + // Mix all received streams, feed the result to the AudioProcessing module, then // resample the result to the requested output rate. 
int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples, diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h index 1643a2997..f3ca2fa84 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h @@ -11,15 +11,16 @@ #ifndef AUDIO_AUDIO_TRANSPORT_IMPL_H_ #define AUDIO_AUDIO_TRANSPORT_IMPL_H_ +#include #include #include "api/audio/audio_mixer.h" #include "api/scoped_refptr.h" #include "common_audio/resampler/include/push_resampler.h" +#include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/typing_detection.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -29,7 +30,15 @@ class AudioSender; class AudioTransportImpl : public AudioTransport { public: - AudioTransportImpl(AudioMixer* mixer, AudioProcessing* audio_processing); + AudioTransportImpl( + AudioMixer* mixer, + AudioProcessing* audio_processing, + AsyncAudioProcessing::Factory* async_audio_processing_factory); + + AudioTransportImpl() = delete; + AudioTransportImpl(const AudioTransportImpl&) = delete; + AudioTransportImpl& operator=(const AudioTransportImpl&) = delete; + ~AudioTransportImpl() override; int32_t RecordedDataIsAvailable(const void* audioSamples, @@ -67,10 +76,16 @@ class AudioTransportImpl : public AudioTransport { bool typing_noise_detected() const; private: + void SendProcessedData(std::unique_ptr audio_frame); + // Shared. AudioProcessing* audio_processing_ = nullptr; // Capture side. + + // Thread-safe. 
+ const std::unique_ptr async_audio_processing_; + mutable Mutex capture_lock_; std::vector audio_senders_ RTC_GUARDED_BY(capture_lock_); int send_sample_rate_hz_ RTC_GUARDED_BY(capture_lock_) = 8000; @@ -81,12 +96,11 @@ class AudioTransportImpl : public AudioTransport { TypingDetection typing_detection_; // Render side. + rtc::scoped_refptr mixer_; AudioFrame mixed_frame_; // Converts mixed audio to the audio device output rate. PushResampler render_resampler_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioTransportImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc index 9cbaabbbb..2788dacf7 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc @@ -128,12 +128,13 @@ class ChannelReceive : public ChannelReceiveInterface { double GetTotalOutputDuration() const override; // Stats. - NetworkStatistics GetNetworkStatistics() const override; + NetworkStatistics GetNetworkStatistics( + bool get_and_clear_legacy_stats) const override; AudioDecodingCallStats GetDecodingCallStatistics() const override; // Audio+Video Sync. 
uint32_t GetDelayEstimate() const override; - void SetMinimumPlayoutDelay(int delayMs) override; + bool SetMinimumPlayoutDelay(int delayMs) override; bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const override; void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, @@ -801,10 +802,11 @@ void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( InitFrameTransformerDelegate(std::move(frame_transformer)); } -NetworkStatistics ChannelReceive::GetNetworkStatistics() const { +NetworkStatistics ChannelReceive::GetNetworkStatistics( + bool get_and_clear_legacy_stats) const { RTC_DCHECK(worker_thread_checker_.IsCurrent()); NetworkStatistics stats; - acm_receiver_.GetNetworkStatistics(&stats); + acm_receiver_.GetNetworkStatistics(&stats, get_and_clear_legacy_stats); return stats; } @@ -822,7 +824,7 @@ uint32_t ChannelReceive::GetDelayEstimate() const { return acm_receiver_.FilteredCurrentDelayMs() + playout_delay_ms_; } -void ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) { +bool ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK(module_process_thread_checker_.IsCurrent()); // Limit to range accepted by both VoE and ACM, so we're at least getting as // close as possible, instead of failing. 
@@ -831,7 +833,9 @@ void ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) { if (acm_receiver_.SetMinimumDelay(delay_ms) != 0) { RTC_DLOG(LS_ERROR) << "SetMinimumPlayoutDelay() failed to set min playout delay"; + return false; } + return true; } bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h index bc02ff302..eef2db425 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h @@ -99,12 +99,13 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { virtual double GetTotalOutputDuration() const = 0; // Stats. - virtual NetworkStatistics GetNetworkStatistics() const = 0; + virtual NetworkStatistics GetNetworkStatistics( + bool get_and_clear_legacy_stats) const = 0; virtual AudioDecodingCallStats GetDecodingCallStatistics() const = 0; // Audio+Video Sync. virtual uint32_t GetDelayEstimate() const = 0; - virtual void SetMinimumPlayoutDelay(int delay_ms) = 0; + virtual bool SetMinimumPlayoutDelay(int delay_ms) = 0; virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const = 0; virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, diff --git a/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h b/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h index 542358f68..52e5b2fc8 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h @@ -35,7 +35,10 @@ class MockChannelReceive : public voe::ChannelReceiveInterface { (override)); MOCK_METHOD(void, ResetReceiverCongestionControlObjects, (), (override)); MOCK_METHOD(CallReceiveStatistics, GetRTCPStatistics, (), (const, override)); - MOCK_METHOD(NetworkStatistics, GetNetworkStatistics, (), (const, override)); + MOCK_METHOD(NetworkStatistics, + 
GetNetworkStatistics, + (bool), + (const, override)); MOCK_METHOD(AudioDecodingCallStats, GetDecodingCallStatistics, (), @@ -76,7 +79,7 @@ class MockChannelReceive : public voe::ChannelReceiveInterface { GetSyncInfo, (), (const, override)); - MOCK_METHOD(void, SetMinimumPlayoutDelay, (int delay_ms), (override)); + MOCK_METHOD(bool, SetMinimumPlayoutDelay, (int delay_ms), (override)); MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (int delay_ms), (override)); MOCK_METHOD(int, GetBaseMinimumPlayoutDelayMs, (), (const, override)); MOCK_METHOD((absl::optional>), diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc index 43d4d0f15..dc53acf3a 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc @@ -129,4 +129,34 @@ void AudioChannel::StopPlay() { } } +IngressStatistics AudioChannel::GetIngressStatistics() { + IngressStatistics ingress_stats; + NetworkStatistics stats = ingress_->GetNetworkStatistics(); + ingress_stats.neteq_stats.total_samples_received = stats.totalSamplesReceived; + ingress_stats.neteq_stats.concealed_samples = stats.concealedSamples; + ingress_stats.neteq_stats.concealment_events = stats.concealmentEvents; + ingress_stats.neteq_stats.jitter_buffer_delay_ms = stats.jitterBufferDelayMs; + ingress_stats.neteq_stats.jitter_buffer_emitted_count = + stats.jitterBufferEmittedCount; + ingress_stats.neteq_stats.jitter_buffer_target_delay_ms = + stats.jitterBufferTargetDelayMs; + ingress_stats.neteq_stats.inserted_samples_for_deceleration = + stats.insertedSamplesForDeceleration; + ingress_stats.neteq_stats.removed_samples_for_acceleration = + stats.removedSamplesForAcceleration; + ingress_stats.neteq_stats.silent_concealed_samples = + stats.silentConcealedSamples; + ingress_stats.neteq_stats.fec_packets_received = stats.fecPacketsReceived; + ingress_stats.neteq_stats.fec_packets_discarded = 
stats.fecPacketsDiscarded; + ingress_stats.neteq_stats.delayed_packet_outage_samples = + stats.delayedPacketOutageSamples; + ingress_stats.neteq_stats.relative_packet_arrival_delay_ms = + stats.relativePacketArrivalDelayMs; + ingress_stats.neteq_stats.interruption_count = stats.interruptionCount; + ingress_stats.neteq_stats.total_interruption_duration_ms = + stats.totalInterruptionDurationMs; + ingress_stats.total_duration = ingress_->GetOutputTotalDuration(); + return ingress_stats; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h index 12138ee67..5bc748359 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h @@ -18,6 +18,7 @@ #include "api/task_queue/task_queue_factory.h" #include "api/voip/voip_base.h" +#include "api/voip/voip_statistics.h" #include "audio/voip/audio_egress.h" #include "audio/voip/audio_ingress.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" @@ -63,6 +64,13 @@ class AudioChannel : public rtc::RefCountInterface { absl::optional GetEncoderFormat() const { return egress_->GetEncoderFormat(); } + void RegisterTelephoneEventType(int rtp_payload_type, int sample_rate_hz) { + egress_->RegisterTelephoneEventType(rtp_payload_type, sample_rate_hz); + } + bool SendTelephoneEvent(int dtmf_event, int duration_ms) { + return egress_->SendTelephoneEvent(dtmf_event, duration_ms); + } + void SetMute(bool enable) { egress_->SetMute(enable); } // APIs relayed to AudioIngress. bool IsPlaying() const { return ingress_->IsPlaying(); } @@ -75,6 +83,28 @@ class AudioChannel : public rtc::RefCountInterface { void SetReceiveCodecs(const std::map& codecs) { ingress_->SetReceiveCodecs(codecs); } + IngressStatistics GetIngressStatistics(); + + // See comments on the methods used from AudioEgress and AudioIngress. 
+ // Conversion to double is following what is done in + // DoubleAudioLevelFromIntAudioLevel method in rtc_stats_collector.cc to be + // consistent. + double GetInputAudioLevel() const { + return egress_->GetInputAudioLevel() / 32767.0; + } + double GetInputTotalEnergy() const { return egress_->GetInputTotalEnergy(); } + double GetInputTotalDuration() const { + return egress_->GetInputTotalDuration(); + } + double GetOutputAudioLevel() const { + return ingress_->GetOutputAudioLevel() / 32767.0; + } + double GetOutputTotalEnergy() const { + return ingress_->GetOutputTotalEnergy(); + } + double GetOutputTotalDuration() const { + return ingress_->GetOutputTotalDuration(); + } private: // ChannelId that this audio channel belongs for logging purpose. diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.cc index 90e069e1c..1162824c9 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.cc @@ -80,6 +80,12 @@ void AudioEgress::SendAudioData(std::unique_ptr audio_frame) { return; } + double duration_seconds = + static_cast(audio_frame->samples_per_channel_) / + audio_frame->sample_rate_hz_; + + input_audio_level_.ComputeLevel(*audio_frame, duration_seconds); + AudioFrameOperations::Mute(audio_frame.get(), encoder_context_.previously_muted_, encoder_context_.mute_); diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h index 6b2d37471..fcd9ed0f2 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h @@ -16,6 +16,7 @@ #include "api/audio_codecs/audio_format.h" #include "api/task_queue/task_queue_factory.h" +#include "audio/audio_level.h" #include "audio/utility/audio_frame_operations.h" #include "call/audio_sender.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ 
-89,6 +90,16 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback { // otherwise false when the dtmf queue reached maximum of 20 events. bool SendTelephoneEvent(int dtmf_event, int duration_ms); + // See comments on LevelFullRange, TotalEnergy, TotalDuration from + // audio/audio_level.h. + int GetInputAudioLevel() const { return input_audio_level_.LevelFullRange(); } + double GetInputTotalEnergy() const { + return input_audio_level_.TotalEnergy(); + } + double GetInputTotalDuration() const { + return input_audio_level_.TotalDuration(); + } + // Implementation of AudioSender interface. void SendAudioData(std::unique_ptr audio_frame) override; @@ -119,6 +130,9 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback { // Synchronization is handled internally by AudioCodingModule. const std::unique_ptr audio_coding_; + // Synchronization is handled internally by voe::AudioLevel. + voe::AudioLevel input_audio_level_; + // Struct that holds all variables used by encoder task queue. struct EncoderContext { // Offset used to mark rtp timestamp in sample rate unit in diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc index 0bddb4280..07def9955 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc @@ -73,6 +73,12 @@ AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo( constexpr double kAudioSampleDurationSeconds = 0.01; output_audio_level_.ComputeLevel(*audio_frame, kAudioSampleDurationSeconds); + // If caller invoked StopPlay(), then mute the frame. + if (!playing_) { + AudioFrameOperations::Mute(audio_frame); + muted = true; + } + // Set first rtp timestamp with first audio frame with valid timestamp. 
if (first_rtp_timestamp_ < 0 && audio_frame->timestamp_ != 0) { first_rtp_timestamp_ = audio_frame->timestamp_; @@ -127,10 +133,6 @@ void AudioIngress::SetReceiveCodecs( } void AudioIngress::ReceivedRTPPacket(rtc::ArrayView rtp_packet) { - if (!IsPlaying()) { - return; - } - RtpPacketReceived rtp_packet_received; rtp_packet_received.Parse(rtp_packet.data(), rtp_packet.size()); diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h index d09de606d..d3680e0f0 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h @@ -68,13 +68,15 @@ class AudioIngress : public AudioMixer::Source { void ReceivedRTPPacket(rtc::ArrayView rtp_packet); void ReceivedRTCPPacket(rtc::ArrayView rtcp_packet); - // Retrieve highest speech output level in last 100 ms. Note that - // this isn't RMS but absolute raw audio level on int16_t sample unit. - // Therefore, the return value will vary between 0 ~ 0xFFFF. This type of - // value may be useful to be used for measuring active speaker gauge. - int GetSpeechOutputLevelFullRange() const { + // See comments on LevelFullRange, TotalEnergy, TotalDuration from + // audio/audio_level.h. + int GetOutputAudioLevel() const { return output_audio_level_.LevelFullRange(); } + double GetOutputTotalEnergy() { return output_audio_level_.TotalEnergy(); } + double GetOutputTotalDuration() { + return output_audio_level_.TotalDuration(); + } // Returns network round trip time (RTT) measued by RTCP exchange with // remote media endpoint. RTT value -1 indicates that it's not initialized. 
@@ -82,12 +84,8 @@ class AudioIngress : public AudioMixer::Source { NetworkStatistics GetNetworkStatistics() const { NetworkStatistics stats; - acm_receiver_.GetNetworkStatistics(&stats); - return stats; - } - AudioDecodingCallStats GetDecodingStatistics() const { - AudioDecodingCallStats stats; - acm_receiver_.GetDecodingCallStatistics(&stats); + acm_receiver_.GetNetworkStatistics(&stats, + /*get_and_clear_legacy_stats=*/false); return stats; } diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc index 639022363..ac29fbf6d 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc @@ -37,29 +37,39 @@ static constexpr int kMaxChannelId = 100000; } // namespace -bool VoipCore::Init(rtc::scoped_refptr encoder_factory, - rtc::scoped_refptr decoder_factory, - std::unique_ptr task_queue_factory, - rtc::scoped_refptr audio_device_module, - rtc::scoped_refptr audio_processing) { +VoipCore::VoipCore(rtc::scoped_refptr encoder_factory, + rtc::scoped_refptr decoder_factory, + std::unique_ptr task_queue_factory, + rtc::scoped_refptr audio_device_module, + rtc::scoped_refptr audio_processing, + std::unique_ptr process_thread) { encoder_factory_ = std::move(encoder_factory); decoder_factory_ = std::move(decoder_factory); task_queue_factory_ = std::move(task_queue_factory); audio_device_module_ = std::move(audio_device_module); + audio_processing_ = std::move(audio_processing); + process_thread_ = std::move(process_thread); - process_thread_ = ProcessThread::Create("ModuleProcessThread"); - audio_mixer_ = AudioMixerImpl::Create(); - - if (audio_processing) { - audio_processing_ = std::move(audio_processing); - AudioProcessing::Config apm_config = audio_processing_->GetConfig(); - apm_config.echo_canceller.enabled = true; - audio_processing_->ApplyConfig(apm_config); + if (!process_thread_) { + process_thread_ = 
ProcessThread::Create("ModuleProcessThread"); } + audio_mixer_ = AudioMixerImpl::Create(); // AudioTransportImpl depends on audio mixer and audio processing instances. audio_transport_ = std::make_unique( - audio_mixer_.get(), audio_processing_.get()); + audio_mixer_.get(), audio_processing_.get(), nullptr); +} + +bool VoipCore::InitializeIfNeeded() { + // |audio_device_module_| internally owns a lock and the whole logic here + // needs to be executed atomically once using another lock in VoipCore. + // Further changes in this method will need to make sure that no deadlock is + // introduced in the future. + MutexLock lock(&lock_); + + if (initialized_) { + return true; + } // Initialize ADM. if (audio_device_module_->Init() != 0) { @@ -72,7 +82,6 @@ bool VoipCore::Init(rtc::scoped_refptr encoder_factory, // recording device functioning (e.g webinar where only speaker is available). // It's also possible that there are other audio devices available that may // work. - // TODO(natim@webrtc.org): consider moving this part out of initialization. // Initialize default speaker device. if (audio_device_module_->SetPlayoutDevice(kAudioDeviceId) != 0) { @@ -113,13 +122,15 @@ bool VoipCore::Init(rtc::scoped_refptr encoder_factory, RTC_LOG(LS_WARNING) << "Unable to register audio callback."; } + initialized_ = true; + return true; } absl::optional VoipCore::CreateChannel( Transport* transport, absl::optional local_ssrc) { - absl::optional channel; + absl::optional channel_id; // Set local ssrc to random if not set by caller. if (!local_ssrc) { @@ -127,16 +138,22 @@ absl::optional VoipCore::CreateChannel( local_ssrc = random.Rand(); } - rtc::scoped_refptr audio_channel = + rtc::scoped_refptr channel = new rtc::RefCountedObject( transport, local_ssrc.value(), task_queue_factory_.get(), process_thread_.get(), audio_mixer_.get(), decoder_factory_); + // Check if we need to start the process thread. 
+ bool start_process_thread = false; + { MutexLock lock(&lock_); - channel = static_cast(next_channel_id_); - channels_[*channel] = audio_channel; + // Start process thread if the channel is the first one. + start_process_thread = channels_.empty(); + + channel_id = static_cast(next_channel_id_); + channels_[*channel_id] = channel; next_channel_id_++; if (next_channel_id_ >= kMaxChannelId) { next_channel_id_ = 0; @@ -144,41 +161,64 @@ absl::optional VoipCore::CreateChannel( } // Set ChannelId in audio channel for logging/debugging purpose. - audio_channel->SetId(*channel); + channel->SetId(*channel_id); - return channel; + if (start_process_thread) { + process_thread_->Start(); + } + + return channel_id; } -void VoipCore::ReleaseChannel(ChannelId channel) { +void VoipCore::ReleaseChannel(ChannelId channel_id) { // Destroy channel outside of the lock. - rtc::scoped_refptr audio_channel; + rtc::scoped_refptr channel; + + bool no_channels_after_release = false; + { MutexLock lock(&lock_); - auto iter = channels_.find(channel); + auto iter = channels_.find(channel_id); if (iter != channels_.end()) { - audio_channel = std::move(iter->second); + channel = std::move(iter->second); channels_.erase(iter); } + + no_channels_after_release = channels_.empty(); } - if (!audio_channel) { - RTC_LOG(LS_WARNING) << "Channel " << channel << " not found"; + + if (!channel) { + RTC_LOG(LS_WARNING) << "Channel " << channel_id << " not found"; + } + + if (no_channels_after_release) { + // Release audio channel first to have it DeRegisterModule first. + channel = nullptr; + process_thread_->Stop(); + + // Make sure to stop playout on ADM if it is playing. 
+ if (audio_device_module_->Playing()) { + if (audio_device_module_->StopPlayout() != 0) { + RTC_LOG(LS_WARNING) << "StopPlayout failed"; + } + } } } -rtc::scoped_refptr VoipCore::GetChannel(ChannelId channel) { - rtc::scoped_refptr audio_channel; +rtc::scoped_refptr VoipCore::GetChannel(ChannelId channel_id) { + rtc::scoped_refptr channel; { MutexLock lock(&lock_); - auto iter = channels_.find(channel); + auto iter = channels_.find(channel_id); if (iter != channels_.end()) { - audio_channel = iter->second; + channel = iter->second; } } - if (!audio_channel) { - RTC_LOG(LS_ERROR) << "Channel " << channel << " not found"; + if (!channel) { + RTC_LOG(LS_ERROR) << "Channel " << channel_id << " not found"; } - return audio_channel; + return channel; } bool VoipCore::UpdateAudioTransportWithSenders() { @@ -216,6 +256,11 @@ bool VoipCore::UpdateAudioTransportWithSenders() { // Depending on availability of senders, turn on or off ADM recording. if (!audio_senders.empty()) { + // Initialize audio device module and default device if needed. 
+ if (!InitializeIfNeeded()) { + return false; + } + if (!audio_device_module_->Recording()) { if (audio_device_module_->InitRecording() != 0) { RTC_LOG(LS_ERROR) << "InitRecording failed"; @@ -236,29 +281,45 @@ bool VoipCore::UpdateAudioTransportWithSenders() { return true; } -bool VoipCore::StartSend(ChannelId channel) { - auto audio_channel = GetChannel(channel); - if (!audio_channel || !audio_channel->StartSend()) { +bool VoipCore::StartSend(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel || !channel->StartSend()) { return false; } return UpdateAudioTransportWithSenders(); } -bool VoipCore::StopSend(ChannelId channel) { - auto audio_channel = GetChannel(channel); - if (!audio_channel) { +bool VoipCore::StopSend(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { return false; } - audio_channel->StopSend(); + channel->StopSend(); return UpdateAudioTransportWithSenders(); } -bool VoipCore::StartPlayout(ChannelId channel) { - auto audio_channel = GetChannel(channel); - if (!audio_channel || !audio_channel->StartPlay()) { +bool VoipCore::StartPlayout(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { + return false; + } + + if (channel->IsPlaying()) { + return true; + } + + if (!channel->StartPlay()) { + return false; + } + + // Initialize audio device module and default device if needed. 
+ if (!InitializeIfNeeded()) { return false; } @@ -275,69 +336,119 @@ bool VoipCore::StartPlayout(ChannelId channel) { return true; } -bool VoipCore::StopPlayout(ChannelId channel) { - auto audio_channel = GetChannel(channel); - if (!audio_channel) { +bool VoipCore::StopPlayout(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { return false; } - audio_channel->StopPlay(); + channel->StopPlay(); - bool stop_device = true; - { - MutexLock lock(&lock_); - for (auto kv : channels_) { - rtc::scoped_refptr& channel = kv.second; - if (channel->IsPlaying()) { - stop_device = false; - break; - } - } - } - - if (stop_device && audio_device_module_->Playing()) { - if (audio_device_module_->StopPlayout() != 0) { - RTC_LOG(LS_ERROR) << "StopPlayout failed"; - return false; - } - } return true; } -void VoipCore::ReceivedRTPPacket(ChannelId channel, +void VoipCore::ReceivedRTPPacket(ChannelId channel_id, rtc::ArrayView rtp_packet) { - // Failure to locate channel is logged internally in GetChannel. - if (auto audio_channel = GetChannel(channel)) { - audio_channel->ReceivedRTPPacket(rtp_packet); + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->ReceivedRTPPacket(rtp_packet); } } -void VoipCore::ReceivedRTCPPacket(ChannelId channel, +void VoipCore::ReceivedRTCPPacket(ChannelId channel_id, rtc::ArrayView rtcp_packet) { - // Failure to locate channel is logged internally in GetChannel. - if (auto audio_channel = GetChannel(channel)) { - audio_channel->ReceivedRTCPPacket(rtcp_packet); + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->ReceivedRTCPPacket(rtcp_packet); } } -void VoipCore::SetSendCodec(ChannelId channel, +void VoipCore::SetSendCodec(ChannelId channel_id, int payload_type, const SdpAudioFormat& encoder_format) { - // Failure to locate channel is logged internally in GetChannel. 
- if (auto audio_channel = GetChannel(channel)) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { auto encoder = encoder_factory_->MakeAudioEncoder( payload_type, encoder_format, absl::nullopt); - audio_channel->SetEncoder(payload_type, encoder_format, std::move(encoder)); + channel->SetEncoder(payload_type, encoder_format, std::move(encoder)); } } void VoipCore::SetReceiveCodecs( - ChannelId channel, + ChannelId channel_id, const std::map& decoder_specs) { - // Failure to locate channel is logged internally in GetChannel. - if (auto audio_channel = GetChannel(channel)) { - audio_channel->SetReceiveCodecs(decoder_specs); + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->SetReceiveCodecs(decoder_specs); } } +void VoipCore::RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->RegisterTelephoneEventType(rtp_payload_type, sample_rate_hz); + } +} + +bool VoipCore::SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + return channel->SendTelephoneEvent(static_cast(dtmf_event), + duration_ms); + } + return false; +} + +absl::optional VoipCore::GetIngressStatistics( + ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + return channel->GetIngressStatistics(); + } + return absl::nullopt; +} + +void VoipCore::SetInputMuted(ChannelId channel_id, bool enable) { + rtc::scoped_refptr channel = GetChannel(channel_id); + if (channel) { + channel->SetMute(enable); + } +} + +absl::optional VoipCore::GetInputVolumeInfo(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + if (channel) { + VolumeInfo input_volume; + input_volume.audio_level = channel->GetInputAudioLevel(); + input_volume.total_energy = 
channel->GetInputTotalEnergy(); + input_volume.total_duration = channel->GetInputTotalDuration(); + return input_volume; + } + return absl::nullopt; +} + +absl::optional VoipCore::GetOutputVolumeInfo(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + if (channel) { + VolumeInfo output_volume; + output_volume.audio_level = channel->GetOutputAudioLevel(); + output_volume.total_energy = channel->GetOutputTotalEnergy(); + output_volume.total_duration = channel->GetOutputTotalDuration(); + return output_volume; + } + return absl::nullopt; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h index 22a655998..5ebf4381c 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h @@ -23,8 +23,11 @@ #include "api/task_queue/task_queue_factory.h" #include "api/voip/voip_base.h" #include "api/voip/voip_codec.h" +#include "api/voip/voip_dtmf.h" #include "api/voip/voip_engine.h" #include "api/voip/voip_network.h" +#include "api/voip/voip_statistics.h" +#include "api/voip/voip_volume_control.h" #include "audio/audio_transport_impl.h" #include "audio/voip/audio_channel.h" #include "modules/audio_device/include/audio_device.h" @@ -45,53 +48,86 @@ namespace webrtc { class VoipCore : public VoipEngine, public VoipBase, public VoipNetwork, - public VoipCodec { + public VoipCodec, + public VoipDtmf, + public VoipStatistics, + public VoipVolumeControl { public: + // Construct VoipCore with provided arguments. + // ProcessThread implementation can be injected by |process_thread| + // (mainly for testing purpose) and when set to nullptr, default + // implementation will be used. 
+ VoipCore(rtc::scoped_refptr encoder_factory, + rtc::scoped_refptr decoder_factory, + std::unique_ptr task_queue_factory, + rtc::scoped_refptr audio_device_module, + rtc::scoped_refptr audio_processing, + std::unique_ptr process_thread = nullptr); ~VoipCore() override = default; - // Initialize VoipCore components with provided arguments. - // Returns false only when |audio_device_module| fails to initialize which - // would presumably render further processing useless. - // TODO(natim@webrtc.org): Need to report audio device errors to user layer. - bool Init(rtc::scoped_refptr encoder_factory, - rtc::scoped_refptr decoder_factory, - std::unique_ptr task_queue_factory, - rtc::scoped_refptr audio_device_module, - rtc::scoped_refptr audio_processing); - // Implements VoipEngine interfaces. VoipBase& Base() override { return *this; } VoipNetwork& Network() override { return *this; } VoipCodec& Codec() override { return *this; } + VoipDtmf& Dtmf() override { return *this; } + VoipStatistics& Statistics() override { return *this; } + VoipVolumeControl& VolumeControl() override { return *this; } // Implements VoipBase interfaces. absl::optional CreateChannel( Transport* transport, absl::optional local_ssrc) override; - void ReleaseChannel(ChannelId channel) override; - bool StartSend(ChannelId channel) override; - bool StopSend(ChannelId channel) override; - bool StartPlayout(ChannelId channel) override; - bool StopPlayout(ChannelId channel) override; + void ReleaseChannel(ChannelId channel_id) override; + bool StartSend(ChannelId channel_id) override; + bool StopSend(ChannelId channel_id) override; + bool StartPlayout(ChannelId channel_id) override; + bool StopPlayout(ChannelId channel_id) override; // Implements VoipNetwork interfaces. 
- void ReceivedRTPPacket(ChannelId channel, + void ReceivedRTPPacket(ChannelId channel_id, rtc::ArrayView rtp_packet) override; - void ReceivedRTCPPacket(ChannelId channel, + void ReceivedRTCPPacket(ChannelId channel_id, rtc::ArrayView rtcp_packet) override; // Implements VoipCodec interfaces. - void SetSendCodec(ChannelId channel, + void SetSendCodec(ChannelId channel_id, int payload_type, const SdpAudioFormat& encoder_format) override; void SetReceiveCodecs( - ChannelId channel, + ChannelId channel_id, const std::map& decoder_specs) override; + // Implements VoipDtmf interfaces. + void RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) override; + bool SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) override; + + // Implements VoipStatistics interfaces. + absl::optional GetIngressStatistics( + ChannelId channel_id) override; + + // Implements VoipVolumeControl interfaces. + void SetInputMuted(ChannelId channel_id, bool enable) override; + absl::optional GetInputVolumeInfo(ChannelId channel_id) override; + absl::optional GetOutputVolumeInfo(ChannelId channel_id) override; + private: + // Initialize ADM and default audio device if needed. + // Returns true if ADM is successfully initialized or already in such state + // (e.g called more than once). Returns false when ADM fails to initialize + // which would presumably render further processing useless. Note that such + // failure won't necessarily succeed in next initialization attempt as it + // would mean changing the ADM implementation. From Android N and onwards, the + // mobile app may not be able to gain microphone access when in background + // mode. Therefore it would be better to delay the logic as late as possible. + bool InitializeIfNeeded(); + // Fetches the corresponding AudioChannel assigned with given |channel|. // Returns nullptr if not found. 
- rtc::scoped_refptr GetChannel(ChannelId channel); + rtc::scoped_refptr GetChannel(ChannelId channel_id); // Updates AudioTransportImpl with a new set of actively sending AudioSender // (AudioEgress). This needs to be invoked whenever StartSend/StopSend is @@ -104,7 +140,7 @@ class VoipCore : public VoipEngine, rtc::scoped_refptr decoder_factory_; std::unique_ptr task_queue_factory_; - // Synchronization is handled internally by AudioProessing. + // Synchronization is handled internally by AudioProcessing. // Must be placed before |audio_device_module_| for proper destruction. rtc::scoped_refptr audio_processing_; @@ -132,6 +168,9 @@ class VoipCore : public VoipEngine, // ChannelId. std::unordered_map> channels_ RTC_GUARDED_BY(lock_); + + // Boolean flag to ensure initialization only occurs once. + bool initialized_ RTC_GUARDED_BY(lock_) = false; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/fake_task.cc b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/fake_task.cc deleted file mode 100644 index 1199bb955..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/fake_task.cc +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/task/sequence_manager/test/fake_task.h" - -namespace base { -namespace sequence_manager { - -FakeTask::FakeTask() : FakeTask(0 /* task_type */) {} - -FakeTask::FakeTask(TaskType task_type) - : Task(internal::PostedTask(nullptr, - OnceClosure(), - FROM_HERE, - TimeDelta(), - Nestable::kNestable, - task_type), - TimeTicks(), - EnqueueOrder(), - EnqueueOrder(), - internal::WakeUpResolution::kLow) {} - -FakeTaskTiming::FakeTaskTiming() - : TaskTiming(false /* has_wall_time */, false /* has_thread_time */) {} - -FakeTaskTiming::FakeTaskTiming(TimeTicks start, TimeTicks end) - : FakeTaskTiming() { - has_wall_time_ = true; - start_time_ = start; - end_time_ = end; - state_ = State::Finished; -} - -FakeTaskTiming::FakeTaskTiming(TimeTicks start, - TimeTicks end, - ThreadTicks thread_start, - ThreadTicks thread_end) - : FakeTaskTiming(start, end) { - has_thread_time_ = true; - start_thread_time_ = thread_start; - end_thread_time_ = thread_end; - state_ = State::Finished; -} - -} // namespace sequence_manager -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/fake_task.h b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/fake_task.h deleted file mode 100644 index 134e2da7a..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/fake_task.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#ifndef BASE_TASK_SEQUENCE_MANAGER_TEST_FAKE_TASK_H_ -#define BASE_TASK_SEQUENCE_MANAGER_TEST_FAKE_TASK_H_ - -#include "base/task/sequence_manager/task_queue.h" -#include "base/task/sequence_manager/tasks.h" - -namespace base { -namespace sequence_manager { - -class FakeTask : public Task { - public: - FakeTask(); - explicit FakeTask(TaskType task_type); -}; - -class FakeTaskTiming : public TaskQueue::TaskTiming { - public: - FakeTaskTiming(); - FakeTaskTiming(TimeTicks start, TimeTicks end); - FakeTaskTiming(TimeTicks start, - TimeTicks end, - ThreadTicks thread_start, - ThreadTicks thread_end); -}; - -} // namespace sequence_manager -} // namespace base - -#endif // BASE_TASK_SEQUENCE_MANAGER_TEST_FAKE_TASK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_domain.cc b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_domain.cc deleted file mode 100644 index b6b37332c..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_domain.cc +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/task/sequence_manager/test/mock_time_domain.h" - -namespace base { -namespace sequence_manager { - -MockTimeDomain::MockTimeDomain(TimeTicks initial_now_ticks) - : now_ticks_(initial_now_ticks) {} - -MockTimeDomain::~MockTimeDomain() = default; - -LazyNow MockTimeDomain::CreateLazyNow() const { - return LazyNow(now_ticks_); -} - -TimeTicks MockTimeDomain::Now() const { - return now_ticks_; -} - -void MockTimeDomain::SetNowTicks(TimeTicks now_ticks) { - now_ticks_ = now_ticks; -} - -Optional MockTimeDomain::DelayTillNextTask(LazyNow* lazy_now) { - return nullopt; -} - -bool MockTimeDomain::MaybeFastForwardToNextTask(bool quit_when_idle_requested) { - return false; -} - -void MockTimeDomain::SetNextDelayedDoWork(LazyNow* lazy_now, - TimeTicks run_time) {} - -const char* MockTimeDomain::GetName() const { - return "MockTimeDomain"; -} - -} // namespace sequence_manager -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_domain.h b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_domain.h deleted file mode 100644 index c9070edd7..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_domain.h +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_TASK_SEQUENCE_MANAGER_TEST_MOCK_TIME_DOMAIN_H_ -#define BASE_TASK_SEQUENCE_MANAGER_TEST_MOCK_TIME_DOMAIN_H_ - -#include "base/task/sequence_manager/time_domain.h" - -namespace base { -namespace sequence_manager { - -// TimeDomain with a mock clock and not invoking SequenceManager. -// NOTE: All methods are main thread only. 
-class MockTimeDomain : public TimeDomain { - public: - explicit MockTimeDomain(TimeTicks initial_now_ticks); - ~MockTimeDomain() override; - - void SetNowTicks(TimeTicks now_ticks); - - // TimeDomain implementation: - LazyNow CreateLazyNow() const override; - TimeTicks Now() const override; - Optional DelayTillNextTask(LazyNow* lazy_now) override; - void SetNextDelayedDoWork(LazyNow* lazy_now, TimeTicks run_time) override; - bool MaybeFastForwardToNextTask(bool quit_when_idle_requested) override; - const char* GetName() const override; - - private: - TimeTicks now_ticks_; - - DISALLOW_COPY_AND_ASSIGN(MockTimeDomain); -}; - -} // namespace sequence_manager -} // namespace base - -#endif // BASE_TASK_SEQUENCE_MANAGER_TEST_MOCK_TIME_DOMAIN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_message_pump.cc b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_message_pump.cc deleted file mode 100644 index 66b6f2cb6..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_message_pump.cc +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/task/sequence_manager/test/mock_time_message_pump.h" - -#include - -#include "base/auto_reset.h" -#include "base/test/simple_test_tick_clock.h" - -namespace base { -namespace sequence_manager { - -MockTimeMessagePump::MockTimeMessagePump(SimpleTestTickClock* clock) - : clock_(clock) {} - -MockTimeMessagePump::~MockTimeMessagePump() {} - -bool MockTimeMessagePump::MaybeAdvanceTime(TimeTicks target_time) { - auto now = clock_->NowTicks(); - - if (target_time <= now) - return true; - - TimeTicks next_now; - - if (!target_time.is_max()) { - next_now = std::min(allow_advance_until_, target_time); - } else if (allow_advance_until_ == TimeTicks::Max()) { - next_now = now; - } else { - next_now = allow_advance_until_; - } - - if (now < next_now) { - clock_->SetNowTicks(next_now); - return true; - } - return false; -} - -void MockTimeMessagePump::Run(Delegate* delegate) { - AutoReset auto_reset_keep_running(&keep_running_, true); - - for (;;) { - Delegate::NextWorkInfo info = delegate->DoSomeWork(); - - if (!keep_running_ || quit_after_do_some_work_) - break; - - if (info.is_immediate()) - continue; - - bool have_immediate_work = delegate->DoIdleWork(); - - if (!keep_running_) - break; - - if (have_immediate_work) - continue; - - if (MaybeAdvanceTime(info.delayed_run_time)) - continue; - - next_wake_up_time_ = info.delayed_run_time; - - if (stop_when_message_pump_is_idle_) - return; - - NOTREACHED() << "Pump would go to sleep. 
Probably not what you wanted, " - "consider rewriting your test."; - } -} - -void MockTimeMessagePump::Quit() { - keep_running_ = false; -} - -void MockTimeMessagePump::ScheduleWork() {} - -void MockTimeMessagePump::ScheduleDelayedWork( - const TimeTicks& delayed_work_time) { - next_wake_up_time_ = delayed_work_time; -} - -} // namespace sequence_manager -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_message_pump.h b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_message_pump.h deleted file mode 100644 index 223dfa569..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/mock_time_message_pump.h +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_TASK_SEQUENCE_MANAGER_TEST_MOCK_TIME_MESSAGE_PUMP_H_ -#define BASE_TASK_SEQUENCE_MANAGER_TEST_MOCK_TIME_MESSAGE_PUMP_H_ - -#include "base/callback.h" -#include "base/message_loop/message_pump.h" -#include "base/optional.h" -#include "base/synchronization/waitable_event.h" -#include "base/time/time.h" - -namespace base { - -class SimpleTestTickClock; - -namespace sequence_manager { - -// MessagePump implementation that uses a SimpleTestTickClock to keep track of -// time and will advance it as needed to keep running tasks. -// -// This pump will actually check fail if it ever has to go to sleep as this -// would indicate that the unit test might block indefinitely. -// TODO(carlscab): In the future we could consider sleeping if there is no -// outstanding |delayed_work_time_|, because we could be woken up by concurrent -// ScheduleWork() calls. 
-class MockTimeMessagePump : public MessagePump { - public: - explicit MockTimeMessagePump(SimpleTestTickClock* clock); - ~MockTimeMessagePump() override; - - // MessagePump implementation - void Run(Delegate* delegate) override; - void Quit() override; - void ScheduleWork() override; - void ScheduleDelayedWork(const TimeTicks& delayed_work_time) override; - - // Returns the time at which the pump would have to wake up to be perform - // work. - TimeTicks next_wake_up_time() const { return next_wake_up_time_; } - - // Quits after the first call to Delegate::DoSomeWork(). Useful - // for tests that want to make sure certain things happen during a DoSomeWork - // call. - void SetQuitAfterDoSomeWork(bool quit_after_do_some_work) { - quit_after_do_some_work_ = quit_after_do_some_work; - } - - // Allows this instance to advance the SimpleTestTickClock up to but not over - // |advance_until| when idle (i.e. when a regular pump would go to sleep). - // The clock will allways be advanced to |advance_until|, even if there are no - // tasks requiring it (i.e. delayed tasks to be run after - // |advance_until|) except for a value of TimeTicks::Max() which will advance - // the clock as long as there is pending delayed work. - void SetAllowTimeToAutoAdvanceUntil(TimeTicks advance_until) { - allow_advance_until_ = advance_until; - } - - // Quit when this pump's Delegate is out of work (i.e. when a regular pump - // would go to sleep) and we are not allowed to advance the clock anymore. - void SetStopWhenMessagePumpIsIdle(bool stop_when_message_pump_is_idle) { - stop_when_message_pump_is_idle_ = stop_when_message_pump_is_idle; - } - - private: - // Returns true if the clock was indeed advanced and thus we should attempt - // another iteration of the DoSomeWork-DoIdleWork-loop. - bool MaybeAdvanceTime(TimeTicks target_time); - - SimpleTestTickClock* const clock_; - // This flag is set to false when Run should return. 
- bool keep_running_ = true; - - bool stop_when_message_pump_is_idle_ = false; - bool quit_after_do_some_work_ = false; - - TimeTicks next_wake_up_time_{TimeTicks::Max()}; - - TimeTicks allow_advance_until_ = TimeTicks::Min(); -}; - -} // namespace sequence_manager -} // namespace base - -#endif // BASE_TASK_SEQUENCE_MANAGER_TEST_MOCK_TIME_MESSAGE_PUMP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/sequence_manager_for_test.cc b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/sequence_manager_for_test.cc deleted file mode 100644 index 4bb4b3cee..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/sequence_manager_for_test.cc +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "base/task/sequence_manager/test/sequence_manager_for_test.h" - -#include "base/task/sequence_manager/thread_controller_impl.h" - -namespace base { -namespace sequence_manager { - -namespace { - -class ThreadControllerForTest : public internal::ThreadControllerImpl { - public: - ThreadControllerForTest( - internal::SequenceManagerImpl* funneled_sequence_manager, - scoped_refptr task_runner, - const TickClock* time_source) - : ThreadControllerImpl(funneled_sequence_manager, - std::move(task_runner), - time_source) {} - - void AddNestingObserver(RunLoop::NestingObserver* observer) override { - if (!funneled_sequence_manager_) - return; - ThreadControllerImpl::AddNestingObserver(observer); - } - - void RemoveNestingObserver(RunLoop::NestingObserver* observer) override { - if (!funneled_sequence_manager_) - return; - ThreadControllerImpl::RemoveNestingObserver(observer); - } - - ~ThreadControllerForTest() override = default; -}; - -} // namespace - -SequenceManagerForTest::SequenceManagerForTest( - std::unique_ptr thread_controller, - SequenceManager::Settings settings) 
- : SequenceManagerImpl(std::move(thread_controller), std::move(settings)) {} - -// static -std::unique_ptr SequenceManagerForTest::Create( - SequenceManagerImpl* funneled_sequence_manager, - scoped_refptr task_runner, - const TickClock* clock, - SequenceManager::Settings settings) { - std::unique_ptr manager(new SequenceManagerForTest( - std::make_unique(funneled_sequence_manager, - std::move(task_runner), clock), - std::move(settings))); - manager->BindToCurrentThread(); - return manager; -} - -// static -std::unique_ptr SequenceManagerForTest::Create( - std::unique_ptr thread_controller, - SequenceManager::Settings settings) { - std::unique_ptr manager(new SequenceManagerForTest( - std::move(thread_controller), std::move(settings))); - manager->BindToCurrentThread(); - return manager; -} - -// static -std::unique_ptr -SequenceManagerForTest::CreateOnCurrentThread( - SequenceManager::Settings settings) { - return Create(CreateThreadControllerImplForCurrentThread(settings.clock), - std::move(settings)); -} - -size_t SequenceManagerForTest::ActiveQueuesCount() const { - return main_thread_only().active_queues.size(); -} - -bool SequenceManagerForTest::HasImmediateWork() const { - return main_thread_only().selector.GetHighestPendingPriority().has_value(); -} - -size_t SequenceManagerForTest::PendingTasksCount() const { - size_t task_count = 0; - for (auto* const queue : main_thread_only().active_queues) - task_count += queue->GetNumberOfPendingTasks(); - return task_count; -} - -size_t SequenceManagerForTest::QueuesToDeleteCount() const { - return main_thread_only().queues_to_delete.size(); -} - -size_t SequenceManagerForTest::QueuesToShutdownCount() { - return main_thread_only().queues_to_gracefully_shutdown.size(); -} - -} // namespace sequence_manager -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/sequence_manager_for_test.h b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/sequence_manager_for_test.h 
deleted file mode 100644 index bb4cf3f6e..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/sequence_manager_for_test.h +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_TASK_SEQUENCE_MANAGER_TEST_SEQUENCE_MANAGER_FOR_TEST_H_ -#define BASE_TASK_SEQUENCE_MANAGER_TEST_SEQUENCE_MANAGER_FOR_TEST_H_ - -#include - -#include "base/single_thread_task_runner.h" -#include "base/task/sequence_manager/sequence_manager.h" -#include "base/task/sequence_manager/sequence_manager_impl.h" -#include "base/time/tick_clock.h" - -namespace base { - -namespace sequence_manager { - -class SequenceManagerForTest : public internal::SequenceManagerImpl { - public: - ~SequenceManagerForTest() override = default; - - // Creates SequenceManagerForTest using ThreadControllerImpl constructed with - // the given arguments. ThreadControllerImpl is slightly overridden to skip - // nesting observers registration if message loop is absent. - static std::unique_ptr Create( - SequenceManagerImpl* funneled_sequence_manager, - scoped_refptr task_runner, - const TickClock* clock, - // Since most test calls are in Blink, randomised sampling is enabled - // by default in the test SequenceManager, as opposed to production code. - SequenceManager::Settings settings = - SequenceManager::Settings::Builder() - .SetRandomisedSamplingEnabled(true) - .Build()); - - // Creates SequenceManagerForTest using the provided ThreadController. 
- static std::unique_ptr Create( - std::unique_ptr thread_controller, - SequenceManager::Settings settings = - SequenceManager::Settings::Builder() - .SetRandomisedSamplingEnabled(true) - .Build()); - - static std::unique_ptr CreateOnCurrentThread( - SequenceManager::Settings); - - size_t ActiveQueuesCount() const; - bool HasImmediateWork() const; - size_t PendingTasksCount() const; - size_t QueuesToDeleteCount() const; - size_t QueuesToShutdownCount(); - - using internal::SequenceManagerImpl:: - CreateThreadControllerImplForCurrentThread; - using internal::SequenceManagerImpl::GetNextSequenceNumber; - using internal::SequenceManagerImpl::MoveReadyDelayedTasksToWorkQueues; - using internal::SequenceManagerImpl::ReloadEmptyWorkQueues; - - private: - explicit SequenceManagerForTest( - std::unique_ptr thread_controller, - SequenceManager::Settings settings); -}; - -} // namespace sequence_manager -} // namespace base - -#endif // BASE_TASK_SEQUENCE_MANAGER_TEST_SEQUENCE_MANAGER_FOR_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_queue.cc b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_queue.cc deleted file mode 100644 index c67585254..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_queue.cc +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/task/sequence_manager/test/test_task_queue.h" - -#include "base/task/sequence_manager/task_queue_impl.h" - -namespace base { -namespace sequence_manager { - -TestTaskQueue::TestTaskQueue(std::unique_ptr impl, - const TaskQueue::Spec& spec) - : TaskQueue(std::move(impl), spec) {} - -TestTaskQueue::~TestTaskQueue() = default; - -WeakPtr TestTaskQueue::GetWeakPtr() { - return weak_factory_.GetWeakPtr(); -} - -} // namespace sequence_manager -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_queue.h b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_queue.h deleted file mode 100644 index 6749f8c72..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_queue.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_TASK_SEQUENCE_MANAGER_TEST_TEST_TASK_QUEUE_H_ -#define BASE_TASK_SEQUENCE_MANAGER_TEST_TEST_TASK_QUEUE_H_ - -#include "base/memory/weak_ptr.h" -#include "base/task/sequence_manager/task_queue.h" - -namespace base { -namespace sequence_manager { - -class TestTaskQueue : public TaskQueue { - public: - explicit TestTaskQueue(std::unique_ptr impl, - const TaskQueue::Spec& spec); - - using TaskQueue::GetTaskQueueImpl; - - WeakPtr GetWeakPtr(); - - private: - ~TestTaskQueue() override; // Ref-counted. - - // Used to ensure that task queue is deleted in tests. 
- WeakPtrFactory weak_factory_{this}; -}; - -} // namespace sequence_manager -} // namespace base - -#endif // BASE_TASK_SEQUENCE_MANAGER_TEST_TEST_TASK_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_time_observer.h b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_time_observer.h deleted file mode 100644 index 54e4ff45f..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/test/test_task_time_observer.h +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_TASK_SEQUENCE_MANAGER_TEST_TEST_TASK_TIME_OBSERVER_H_ -#define BASE_TASK_SEQUENCE_MANAGER_TEST_TEST_TASK_TIME_OBSERVER_H_ - -#include "base/task/sequence_manager/task_time_observer.h" -#include "base/time/time.h" - -namespace base { -namespace sequence_manager { - -class TestTaskTimeObserver : public TaskTimeObserver { - public: - void WillProcessTask(TimeTicks start_time) override {} - void DidProcessTask(TimeTicks start_time, TimeTicks end_time) override {} -}; - -} // namespace sequence_manager -} // namespace base - -#endif // BASE_TASK_SEQUENCE_MANAGER_TEST_TEST_TASK_TIME_OBSERVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/OWNERS b/TMessagesProj/jni/voip/webrtc/call/OWNERS index 1be02c2e4..f863b939b 100644 --- a/TMessagesProj/jni/voip/webrtc/call/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/call/OWNERS @@ -2,3 +2,4 @@ mflodman@webrtc.org stefan@webrtc.org srte@webrtc.org terelius@webrtc.org +sprang@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/adaptation_constraint.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/adaptation_constraint.h index 9ff15d6b8..9ad6414cd 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/adaptation_constraint.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/adaptation_constraint.h @@ -14,7 
+14,6 @@ #include #include "api/adaptation/resource.h" -#include "api/scoped_refptr.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" @@ -34,8 +33,7 @@ class AdaptationConstraint { virtual bool IsAdaptationUpAllowed( const VideoStreamInputState& input_state, const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - rtc::scoped_refptr reason_resource) const = 0; + const VideoSourceRestrictions& restrictions_after) const = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/degradation_preference_provider.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/degradation_preference_provider.h index 035fed1e5..1f75901cc 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/degradation_preference_provider.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/degradation_preference_provider.h @@ -15,7 +15,6 @@ namespace webrtc { -// Thread-safe retrieval of degradation preferences. 
class DegradationPreferenceProvider { public: virtual ~DegradationPreferenceProvider(); diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc index b988479d6..ac1b1db17 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc @@ -27,34 +27,34 @@ namespace webrtc { ResourceAdaptationProcessor::ResourceListenerDelegate::ResourceListenerDelegate( ResourceAdaptationProcessor* processor) - : resource_adaptation_queue_(nullptr), processor_(processor) {} + : task_queue_(nullptr), processor_(processor) {} -void ResourceAdaptationProcessor::ResourceListenerDelegate:: - SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue) { - RTC_DCHECK(!resource_adaptation_queue_); - RTC_DCHECK(resource_adaptation_queue); - resource_adaptation_queue_ = resource_adaptation_queue; - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); +void ResourceAdaptationProcessor::ResourceListenerDelegate::SetTaskQueue( + TaskQueueBase* task_queue) { + RTC_DCHECK(!task_queue_); + RTC_DCHECK(task_queue); + task_queue_ = task_queue; + RTC_DCHECK_RUN_ON(task_queue_); } void ResourceAdaptationProcessor::ResourceListenerDelegate:: OnProcessorDestroyed() { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); processor_ = nullptr; } void ResourceAdaptationProcessor::ResourceListenerDelegate:: OnResourceUsageStateMeasured(rtc::scoped_refptr resource, ResourceUsageState usage_state) { - if (!resource_adaptation_queue_->IsCurrent()) { - resource_adaptation_queue_->PostTask(ToQueuedTask( + if (!task_queue_->IsCurrent()) { + task_queue_->PostTask(ToQueuedTask( [this_ref = rtc::scoped_refptr(this), resource, usage_state] { this_ref->OnResourceUsageStateMeasured(resource, usage_state); })); return; } - 
RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); if (processor_) { processor_->OnResourceUsageStateMeasured(resource, usage_state); } @@ -69,12 +69,10 @@ ResourceAdaptationProcessor::MitigationResultAndLogMessage:: : result(result), message(std::move(message)) {} ResourceAdaptationProcessor::ResourceAdaptationProcessor( - VideoStreamEncoderObserver* encoder_stats_observer, VideoStreamAdapter* stream_adapter) - : resource_adaptation_queue_(nullptr), + : task_queue_(nullptr), resource_listener_delegate_( new rtc::RefCountedObject(this)), - encoder_stats_observer_(encoder_stats_observer), resources_(), stream_adapter_(stream_adapter), last_reported_source_restrictions_(), @@ -83,7 +81,7 @@ ResourceAdaptationProcessor::ResourceAdaptationProcessor( } ResourceAdaptationProcessor::~ResourceAdaptationProcessor() { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); RTC_DCHECK(resources_.empty()) << "There are resource(s) attached to a ResourceAdaptationProcessor " << "being destroyed."; @@ -91,30 +89,29 @@ ResourceAdaptationProcessor::~ResourceAdaptationProcessor() { resource_listener_delegate_->OnProcessorDestroyed(); } -void ResourceAdaptationProcessor::SetResourceAdaptationQueue( - TaskQueueBase* resource_adaptation_queue) { - RTC_DCHECK(!resource_adaptation_queue_); - RTC_DCHECK(resource_adaptation_queue); - resource_adaptation_queue_ = resource_adaptation_queue; - resource_listener_delegate_->SetResourceAdaptationQueue( - resource_adaptation_queue); - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); - // Now that we have the adaptation queue we can attach as adaptation listener. +void ResourceAdaptationProcessor::SetTaskQueue(TaskQueueBase* task_queue) { + RTC_DCHECK(!task_queue_); + RTC_DCHECK(task_queue); + task_queue_ = task_queue; + resource_listener_delegate_->SetTaskQueue(task_queue); + RTC_DCHECK_RUN_ON(task_queue_); + // Now that we have the queue we can attach as adaptation listener. 
stream_adapter_->AddRestrictionsListener(this); } void ResourceAdaptationProcessor::AddResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); RTC_DCHECK(std::find(resource_limitations_listeners_.begin(), resource_limitations_listeners_.end(), limitations_listener) == resource_limitations_listeners_.end()); resource_limitations_listeners_.push_back(limitations_listener); } + void ResourceAdaptationProcessor::RemoveResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); auto it = std::find(resource_limitations_listeners_.begin(), resource_limitations_listeners_.end(), limitations_listener); @@ -132,6 +129,7 @@ void ResourceAdaptationProcessor::AddResource( resources_.push_back(resource); } resource->SetResourceListener(resource_listener_delegate_); + RTC_LOG(INFO) << "Registered resource \"" << resource->Name() << "\"."; } std::vector> @@ -157,12 +155,12 @@ void ResourceAdaptationProcessor::RemoveResource( void ResourceAdaptationProcessor::RemoveLimitationsImposedByResource( rtc::scoped_refptr resource) { - if (!resource_adaptation_queue_->IsCurrent()) { - resource_adaptation_queue_->PostTask(ToQueuedTask( + if (!task_queue_->IsCurrent()) { + task_queue_->PostTask(ToQueuedTask( [this, resource]() { RemoveLimitationsImposedByResource(resource); })); return; } - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); auto resource_adaptation_limits = adaptation_limits_by_resources_.find(resource); if (resource_adaptation_limits != adaptation_limits_by_resources_.end()) { @@ -200,7 +198,7 @@ void ResourceAdaptationProcessor::RemoveLimitationsImposedByResource( void ResourceAdaptationProcessor::OnResourceUsageStateMeasured( rtc::scoped_refptr resource, ResourceUsageState usage_state) { - 
RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); RTC_DCHECK(resource); // |resource| could have been removed after signalling. { @@ -242,9 +240,9 @@ void ResourceAdaptationProcessor::OnResourceUsageStateMeasured( ResourceAdaptationProcessor::MitigationResultAndLogMessage ResourceAdaptationProcessor::OnResourceUnderuse( rtc::scoped_refptr reason_resource) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); // How can this stream be adapted up? - Adaptation adaptation = stream_adapter_->GetAdaptationUp(reason_resource); + Adaptation adaptation = stream_adapter_->GetAdaptationUp(); if (adaptation.status() != Adaptation::Status::kValid) { rtc::StringBuilder message; message << "Not adapting up because VideoStreamAdapter returned " @@ -298,11 +296,15 @@ ResourceAdaptationProcessor::OnResourceUnderuse( ResourceAdaptationProcessor::MitigationResultAndLogMessage ResourceAdaptationProcessor::OnResourceOveruse( rtc::scoped_refptr reason_resource) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); // How can this stream be adapted up? Adaptation adaptation = stream_adapter_->GetAdaptationDown(); - if (adaptation.min_pixel_limit_reached()) { - encoder_stats_observer_->OnMinPixelLimitReached(); + if (adaptation.status() == Adaptation::Status::kLimitReached) { + // Add resource as most limited. 
+ VideoStreamAdapter::RestrictionsWithCounters restrictions; + std::tie(std::ignore, restrictions) = FindMostLimitedResources(); + UpdateResourceLimitations(reason_resource, restrictions.restrictions, + restrictions.counters); } if (adaptation.status() != Adaptation::Status::kValid) { rtc::StringBuilder message; @@ -373,7 +375,7 @@ void ResourceAdaptationProcessor::OnVideoSourceRestrictionsUpdated( const VideoAdaptationCounters& adaptation_counters, rtc::scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(task_queue_); if (reason) { UpdateResourceLimitations(reason, unfiltered_restrictions, adaptation_counters); diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h index 9e5dd6c64..c84d359fe 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h @@ -54,13 +54,11 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, public VideoSourceRestrictionsListener, public ResourceListener { public: - ResourceAdaptationProcessor( - VideoStreamEncoderObserver* encoder_stats_observer, + explicit ResourceAdaptationProcessor( VideoStreamAdapter* video_stream_adapter); ~ResourceAdaptationProcessor() override; - void SetResourceAdaptationQueue( - TaskQueueBase* resource_adaptation_queue) override; + void SetTaskQueue(TaskQueueBase* task_queue) override; // ResourceAdaptationProcessorInterface implementation. 
void AddResourceLimitationsListener( @@ -92,7 +90,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, public: explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor); - void SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue); + void SetTaskQueue(TaskQueueBase* task_queue); void OnProcessorDestroyed(); // ResourceListener implementation. @@ -100,9 +98,8 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, ResourceUsageState usage_state) override; private: - TaskQueueBase* resource_adaptation_queue_; - ResourceAdaptationProcessor* processor_ - RTC_GUARDED_BY(resource_adaptation_queue_); + TaskQueueBase* task_queue_; + ResourceAdaptationProcessor* processor_ RTC_GUARDED_BY(task_queue_); }; enum class MitigationResult { @@ -130,7 +127,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, void UpdateResourceLimitations(rtc::scoped_refptr reason_resource, const VideoSourceRestrictions& restrictions, const VideoAdaptationCounters& counters) - RTC_RUN_ON(resource_adaptation_queue_); + RTC_RUN_ON(task_queue_); // Searches |adaptation_limits_by_resources_| for each resource with the // highest total adaptation counts. Adaptation up may only occur if the @@ -139,35 +136,31 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, // corresponding adaptation of that resource. std::pair>, VideoStreamAdapter::RestrictionsWithCounters> - FindMostLimitedResources() const RTC_RUN_ON(resource_adaptation_queue_); + FindMostLimitedResources() const RTC_RUN_ON(task_queue_); void RemoveLimitationsImposedByResource( rtc::scoped_refptr resource); - TaskQueueBase* resource_adaptation_queue_; + TaskQueueBase* task_queue_; rtc::scoped_refptr resource_listener_delegate_; // Input and output. 
- VideoStreamEncoderObserver* const encoder_stats_observer_ - RTC_GUARDED_BY(resource_adaptation_queue_); mutable Mutex resources_lock_; std::vector> resources_ RTC_GUARDED_BY(resources_lock_); std::vector resource_limitations_listeners_ - RTC_GUARDED_BY(resource_adaptation_queue_); + RTC_GUARDED_BY(task_queue_); // Purely used for statistics, does not ensure mapped resources stay alive. std::map, VideoStreamAdapter::RestrictionsWithCounters> - adaptation_limits_by_resources_ - RTC_GUARDED_BY(resource_adaptation_queue_); + adaptation_limits_by_resources_ RTC_GUARDED_BY(task_queue_); // Responsible for generating and applying possible adaptations. - VideoStreamAdapter* const stream_adapter_ - RTC_GUARDED_BY(resource_adaptation_queue_); + VideoStreamAdapter* const stream_adapter_ RTC_GUARDED_BY(task_queue_); VideoSourceRestrictions last_reported_source_restrictions_ - RTC_GUARDED_BY(resource_adaptation_queue_); + RTC_GUARDED_BY(task_queue_); // Keeps track of previous mitigation results per resource since the last // successful adaptation. Used to avoid RTC_LOG spam. 
std::map previous_mitigation_results_ - RTC_GUARDED_BY(resource_adaptation_queue_); + RTC_GUARDED_BY(task_queue_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h index de940c859..8b1f94b73 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h @@ -47,8 +47,7 @@ class ResourceAdaptationProcessorInterface { public: virtual ~ResourceAdaptationProcessorInterface(); - virtual void SetResourceAdaptationQueue( - TaskQueueBase* resource_adaptation_queue) = 0; + virtual void SetTaskQueue(TaskQueueBase* task_queue) = 0; virtual void AddResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc index 6209c0584..4fc4743a3 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc @@ -108,6 +108,12 @@ bool CanIncreaseFrameRateTo(int max_frame_rate, std::numeric_limits::max())); } +bool MinPixelLimitReached(const VideoStreamInputState& input_state) { + return input_state.frame_size_pixels().has_value() && + GetLowerResolutionThan(input_state.frame_size_pixels().value()) < + input_state.min_pixels_per_frame(); +} + } // namespace VideoSourceRestrictionsListener::~VideoSourceRestrictionsListener() = default; @@ -156,28 +162,21 @@ const char* Adaptation::StatusToString(Adaptation::Status status) { case Status::kRejectedByConstraint: return "kRejectedByConstraint"; } + RTC_CHECK_NOTREACHED(); } Adaptation::Adaptation(int validation_id, VideoSourceRestrictions restrictions, VideoAdaptationCounters 
counters, - VideoStreamInputState input_state, - bool min_pixel_limit_reached) + VideoStreamInputState input_state) : validation_id_(validation_id), status_(Status::kValid), - min_pixel_limit_reached_(min_pixel_limit_reached), input_state_(std::move(input_state)), restrictions_(std::move(restrictions)), counters_(std::move(counters)) {} -Adaptation::Adaptation(int validation_id, - Status invalid_status, - VideoStreamInputState input_state, - bool min_pixel_limit_reached) - : validation_id_(validation_id), - status_(invalid_status), - min_pixel_limit_reached_(min_pixel_limit_reached), - input_state_(std::move(input_state)) { +Adaptation::Adaptation(int validation_id, Status invalid_status) + : validation_id_(validation_id), status_(invalid_status) { RTC_DCHECK_NE(status_, Status::kValid); } @@ -185,10 +184,6 @@ Adaptation::Status Adaptation::status() const { return status_; } -bool Adaptation::min_pixel_limit_reached() const { - return min_pixel_limit_reached_; -} - const VideoStreamInputState& Adaptation::input_state() const { return input_state_; } @@ -202,14 +197,16 @@ const VideoAdaptationCounters& Adaptation::counters() const { } VideoStreamAdapter::VideoStreamAdapter( - VideoStreamInputStateProvider* input_state_provider) + VideoStreamInputStateProvider* input_state_provider, + VideoStreamEncoderObserver* encoder_stats_observer) : input_state_provider_(input_state_provider), - balanced_settings_(), + encoder_stats_observer_(encoder_stats_observer), adaptation_validation_id_(0), degradation_preference_(DegradationPreference::DISABLED), - awaiting_frame_size_change_(absl::nullopt), - last_video_source_restrictions_() { + awaiting_frame_size_change_(absl::nullopt) { sequence_checker_.Detach(); + RTC_DCHECK(input_state_provider_); + RTC_DCHECK(encoder_stats_observer_); } VideoStreamAdapter::~VideoStreamAdapter() { @@ -299,17 +296,11 @@ void VideoStreamAdapter::SetDegradationPreference( struct VideoStreamAdapter::RestrictionsOrStateVisitor { Adaptation 
operator()(const RestrictionsWithCounters& r) const { return Adaptation(adaptation_validation_id, r.restrictions, r.counters, - input_state, min_pixel_limit_reached()); + input_state); } Adaptation operator()(const Adaptation::Status& status) const { RTC_DCHECK_NE(status, Adaptation::Status::kValid); - return Adaptation(adaptation_validation_id, status, input_state, - min_pixel_limit_reached()); - } - bool min_pixel_limit_reached() const { - return input_state.frame_size_pixels().has_value() && - GetLowerResolutionThan(input_state.frame_size_pixels().value()) < - input_state.min_pixels_per_frame(); + return Adaptation(adaptation_validation_id, status); } const int adaptation_validation_id; @@ -326,17 +317,16 @@ Adaptation VideoStreamAdapter::RestrictionsOrStateToAdaptation( } Adaptation VideoStreamAdapter::GetAdaptationUp( - const VideoStreamInputState& input_state, - rtc::scoped_refptr resource) const { + const VideoStreamInputState& input_state) const { RestrictionsOrState step = GetAdaptationUpStep(input_state); // If an adaptation proposed, check with the constraints that it is ok. 
if (absl::holds_alternative(step)) { RestrictionsWithCounters restrictions = absl::get(step); for (const auto* constraint : adaptation_constraints_) { - if (!constraint->IsAdaptationUpAllowed( - input_state, current_restrictions_.restrictions, - restrictions.restrictions, resource)) { + if (!constraint->IsAdaptationUpAllowed(input_state, + current_restrictions_.restrictions, + restrictions.restrictions)) { RTC_LOG(INFO) << "Not adapting up because constraint \"" << constraint->Name() << "\" disallowed it"; step = Adaptation::Status::kRejectedByConstraint; @@ -346,13 +336,11 @@ Adaptation VideoStreamAdapter::GetAdaptationUp( return RestrictionsOrStateToAdaptation(step, input_state); } -Adaptation VideoStreamAdapter::GetAdaptationUp( - rtc::scoped_refptr resource) { +Adaptation VideoStreamAdapter::GetAdaptationUp() { RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(resource); VideoStreamInputState input_state = input_state_provider_->InputState(); ++adaptation_validation_id_; - Adaptation adaptation = GetAdaptationUp(input_state, resource); + Adaptation adaptation = GetAdaptationUp(input_state); return adaptation; } @@ -394,6 +382,7 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::GetAdaptationUpStep( case DegradationPreference::DISABLED: return Adaptation::Status::kAdaptationDisabled; } + RTC_CHECK_NOTREACHED(); } Adaptation VideoStreamAdapter::GetAdaptationDown() { @@ -402,7 +391,9 @@ Adaptation VideoStreamAdapter::GetAdaptationDown() { ++adaptation_validation_id_; RestrictionsOrState restrictions_or_state = GetAdaptationDownStep(input_state, current_restrictions_); - + if (MinPixelLimitReached(input_state)) { + encoder_stats_observer_->OnMinPixelLimitReached(); + } // Check for min_fps if (degradation_preference_ == DegradationPreference::BALANCED && absl::holds_alternative( @@ -471,6 +462,7 @@ VideoStreamAdapter::GetAdaptationDownStep( case DegradationPreference::DISABLED: return Adaptation::Status::kAdaptationDisabled; } + 
RTC_CHECK_NOTREACHED(); } VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution( @@ -608,9 +600,8 @@ Adaptation VideoStreamAdapter::GetAdaptDownResolution() { return RestrictionsOrStateToAdaptation( GetAdaptDownResolutionStepForBalanced(input_state), input_state); } - default: - RTC_NOTREACHED(); } + RTC_CHECK_NOTREACHED(); } VideoStreamAdapter::RestrictionsOrState @@ -667,7 +658,7 @@ Adaptation VideoStreamAdapter::GetAdaptationTo( RTC_DCHECK_RUN_ON(&sequence_checker_); VideoStreamInputState input_state = input_state_provider_->InputState(); return Adaptation(adaptation_validation_id_, restrictions, counters, - input_state, false); + input_state); } void VideoStreamAdapter::BroadcastVideoRestrictionsUpdate( diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h index 1f6f252b3..3c3595759 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h @@ -20,6 +20,7 @@ #include "api/adaptation/resource.h" #include "api/rtp_parameters.h" #include "api/video/video_adaptation_counters.h" +#include "api/video/video_stream_encoder_observer.h" #include "call/adaptation/adaptation_constraint.h" #include "call/adaptation/degradation_preference_provider.h" #include "call/adaptation/video_source_restrictions.h" @@ -87,8 +88,6 @@ class Adaptation final { const VideoStreamInputState& input_state() const; const VideoSourceRestrictions& restrictions() const; const VideoAdaptationCounters& counters() const; - // Used for stats reporting. 
- bool min_pixel_limit_reached() const; private: friend class VideoStreamAdapter; @@ -97,13 +96,9 @@ class Adaptation final { Adaptation(int validation_id, VideoSourceRestrictions restrictions, VideoAdaptationCounters counters, - VideoStreamInputState input_state, - bool min_pixel_limit_reached); + VideoStreamInputState input_state); // Constructor when adaptation is not valid. Status MUST NOT be kValid. - Adaptation(int validation_id, - Status invalid_status, - VideoStreamInputState input_state, - bool min_pixel_limit_reached); + Adaptation(int validation_id, Status invalid_status); // An Adaptation can become invalidated if the state of VideoStreamAdapter is // modified before the Adaptation is applied. To guard against this, this ID @@ -111,7 +106,6 @@ class Adaptation final { // TODO(https://crbug.com/webrtc/11700): Remove the validation_id_. const int validation_id_; const Status status_; - const bool min_pixel_limit_reached_; // Input state when adaptation was made. const VideoStreamInputState input_state_; const VideoSourceRestrictions restrictions_; @@ -126,8 +120,8 @@ class Adaptation final { // 3. Modify the stream's restrictions in one of the valid ways. class VideoStreamAdapter { public: - explicit VideoStreamAdapter( - VideoStreamInputStateProvider* input_state_provider); + VideoStreamAdapter(VideoStreamInputStateProvider* input_state_provider, + VideoStreamEncoderObserver* encoder_stats_observer); ~VideoStreamAdapter(); VideoSourceRestrictions source_restrictions() const; @@ -148,9 +142,7 @@ class VideoStreamAdapter { // Returns an adaptation that we are guaranteed to be able to apply, or a // status code indicating the reason why we cannot adapt. - // TODO(https://crbug.com/webrtc/11771) |resource| is needed by the - // AdaptationConstraint resources. Remove this parameter when it's removed. 
- Adaptation GetAdaptationUp(rtc::scoped_refptr resource); + Adaptation GetAdaptationUp(); Adaptation GetAdaptationDown(); Adaptation GetAdaptationTo(const VideoAdaptationCounters& counters, const VideoSourceRestrictions& restrictions); @@ -194,10 +186,7 @@ class VideoStreamAdapter { const RestrictionsWithCounters& restrictions) const RTC_RUN_ON(&sequence_checker_); - // TODO(https://crbug.com/webrtc/11771) |resource| is needed by the - // AdaptationConstraint resources. Remove this parameter when it's removed. - Adaptation GetAdaptationUp(const VideoStreamInputState& input_state, - rtc::scoped_refptr resource) const + Adaptation GetAdaptationUp(const VideoStreamInputState& input_state) const RTC_RUN_ON(&sequence_checker_); Adaptation GetAdaptationDown(const VideoStreamInputState& input_state) const RTC_RUN_ON(&sequence_checker_); @@ -229,6 +218,8 @@ class VideoStreamAdapter { // Gets the input state which is the basis of all adaptations. // Thread safe. VideoStreamInputStateProvider* input_state_provider_; + // Used to signal when min pixel limit has been reached. + VideoStreamEncoderObserver* const encoder_stats_observer_; // Decides the next adaptation target in DegradationPreference::BALANCED. const BalancedDegradationSettings balanced_settings_; // To guard against applying adaptations that have become invalidated, an diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h index d4012bf7e..eee62e9a8 100644 --- a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h @@ -167,7 +167,8 @@ class AudioReceiveStream { // When a stream is stopped, it can't receive, process or deliver packets. 
virtual void Stop() = 0; - virtual Stats GetStats() const = 0; + virtual Stats GetStats(bool get_and_clear_legacy_stats) const = 0; + Stats GetStats() { return GetStats(/*get_and_clear_legacy_stats=*/true); } // Sets an audio sink that receives unmixed audio from the receive stream. // Ownership of the sink is managed by the caller. diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc b/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc index 1f5bf0518..5acdc9618 100644 --- a/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc @@ -34,6 +34,9 @@ std::string AudioSendStream::Config::ToString() const { ss << ", send_transport: " << (send_transport ? "(Transport)" : "null"); ss << ", min_bitrate_bps: " << min_bitrate_bps; ss << ", max_bitrate_bps: " << max_bitrate_bps; + ss << ", has audio_network_adaptor_config: " + << (audio_network_adaptor_config ? "true" : "false"); + ss << ", has_dscp: " << (has_dscp ? "true" : "false"); ss << ", send_codec_spec: " << (send_codec_spec ? send_codec_spec->ToString() : ""); ss << '}'; diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_state.h b/TMessagesProj/jni/voip/webrtc/call/audio_state.h index 89267c5ab..79fb5cf98 100644 --- a/TMessagesProj/jni/voip/webrtc/call/audio_state.h +++ b/TMessagesProj/jni/voip/webrtc/call/audio_state.h @@ -12,6 +12,7 @@ #include "api/audio/audio_mixer.h" #include "api/scoped_refptr.h" +#include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/ref_count.h" @@ -37,6 +38,9 @@ class AudioState : public rtc::RefCountInterface { // TODO(solenberg): Temporary: audio device module. 
rtc::scoped_refptr audio_device_module; + + rtc::scoped_refptr + async_audio_processing_factory; }; virtual AudioProcessing* audio_processing() = 0; diff --git a/TMessagesProj/jni/voip/webrtc/call/call.cc b/TMessagesProj/jni/voip/webrtc/call/call.cc index ace83bee9..e814cff5b 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call.cc @@ -260,6 +260,8 @@ class Call final : public webrtc::Call, Stats GetStats() const override; + const WebRtcKeyValueConfig& trials() const override; + // Implements PacketReceiver. DeliveryStatus DeliverPacket(MediaType media_type, rtc::CopyOnWriteBuffer packet, @@ -306,7 +308,9 @@ class Call final : public webrtc::Call, void UpdateHistograms(); void UpdateAggregateNetworkState(); - void RegisterRateObserver(); + // Ensure that necessary process threads are started, and any required + // callbacks have been registered. + void EnsureStarted() RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); rtc::TaskQueue* send_transport_queue() const { return transport_send_ptr_->GetWorkerQueue(); @@ -433,8 +437,7 @@ class Call final : public webrtc::Call, // last ensures that it is destroyed first and any running tasks are finished. std::unique_ptr transport_send_; - bool is_target_rate_observer_registered_ RTC_GUARDED_BY(worker_thread_) = - false; + bool is_started_ RTC_GUARDED_BY(worker_thread_) = false; RTC_DISALLOW_COPY_AND_ASSIGN(Call); }; @@ -655,19 +658,18 @@ Call::~Call() { UpdateHistograms(); } -void Call::RegisterRateObserver() { - RTC_DCHECK_RUN_ON(worker_thread_); - - if (is_target_rate_observer_registered_) +void Call::EnsureStarted() { + if (is_started_) { return; - - is_target_rate_observer_registered_ = true; + } + is_started_ = true; // This call seems to kick off a number of things, so probably better left // off being kicked off on request rather than in the ctor. 
transport_send_ptr_->RegisterTargetTransferRateObserver(this); module_process_thread_->EnsureStarted(); + transport_send_ptr_->EnsureStarted(); } void Call::SetClientBitratePreferences(const BitrateSettings& preferences) { @@ -762,7 +764,7 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream"); RTC_DCHECK_RUN_ON(worker_thread_); - RegisterRateObserver(); + EnsureStarted(); // Stream config is logged in AudioSendStream::ConfigureStream, as it may // change during the stream's lifetime. @@ -822,7 +824,7 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( const webrtc::AudioReceiveStream::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream"); RTC_DCHECK_RUN_ON(worker_thread_); - RegisterRateObserver(); + EnsureStarted(); event_log_->Log(std::make_unique( CreateRtcLogStreamConfig(config))); AudioReceiveStream* receive_stream = new AudioReceiveStream( @@ -877,7 +879,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream"); RTC_DCHECK_RUN_ON(worker_thread_); - RegisterRateObserver(); + EnsureStarted(); video_send_delay_stats_->AddSsrcs(config); for (size_t ssrc_index = 0; ssrc_index < config.rtp.ssrcs.size(); @@ -976,7 +978,7 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( receive_side_cc_.SetSendPeriodicFeedback( SendPeriodicFeedback(configuration.rtp.extensions)); - RegisterRateObserver(); + EnsureStarted(); TaskQueueBase* current = GetCurrentTaskQueueOrThread(); RTC_CHECK(current); @@ -1112,6 +1114,10 @@ Call::Stats Call::GetStats() const { return stats; } +const WebRtcKeyValueConfig& Call::trials() const { + return *config_.trials; +} + void Call::SignalChannelNetworkState(MediaType media, NetworkState state) { RTC_DCHECK_RUN_ON(worker_thread_); switch (media) { diff --git a/TMessagesProj/jni/voip/webrtc/call/call.h b/TMessagesProj/jni/voip/webrtc/call/call.h index 75272248c..a2b3b8959 100644 --- 
a/TMessagesProj/jni/voip/webrtc/call/call.h +++ b/TMessagesProj/jni/voip/webrtc/call/call.h @@ -156,6 +156,8 @@ class Call { virtual void SetClientBitratePreferences( const BitrateSettings& preferences) = 0; + virtual const WebRtcKeyValueConfig& trials() const = 0; + virtual ~Call() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc b/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc index 9214ae5d1..aa8894e9a 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc @@ -182,7 +182,6 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, std::unique_ptr audio_send_transport; std::unique_ptr video_send_transport; std::unique_ptr receive_transport; - test::NullTransport rtcp_send_transport; AudioSendStream* audio_send_stream; AudioReceiveStream* audio_receive_stream; @@ -271,7 +270,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, AudioReceiveStream::Config audio_recv_config; audio_recv_config.rtp.remote_ssrc = kAudioSendSsrc; audio_recv_config.rtp.local_ssrc = kAudioRecvSsrc; - audio_recv_config.rtcp_send_transport = &rtcp_send_transport; + audio_recv_config.rtcp_send_transport = receive_transport.get(); audio_recv_config.sync_group = kSyncGroup; audio_recv_config.decoder_factory = audio_decoder_factory_; audio_recv_config.decoder_map = { @@ -337,27 +336,29 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, ToQueuedTask([to_delete = observer.release()]() { delete to_delete; })); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithoutClockDrift) { +TEST_F(CallPerfTest, Synchronization_PlaysOutAudioAndVideoWithoutClockDrift) { TestAudioVideoSync(FecMode::kOff, CreateOrder::kAudioFirst, DriftingClock::kNoDrift, DriftingClock::kNoDrift, DriftingClock::kNoDrift, "_video_no_drift"); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithVideoNtpDrift) { +TEST_F(CallPerfTest, Synchronization_PlaysOutAudioAndVideoWithVideoNtpDrift) { TestAudioVideoSync(FecMode::kOff, 
CreateOrder::kAudioFirst, DriftingClock::PercentsFaster(10.0f), DriftingClock::kNoDrift, DriftingClock::kNoDrift, "_video_ntp_drift"); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithAudioFasterThanVideoDrift) { +TEST_F(CallPerfTest, + Synchronization_PlaysOutAudioAndVideoWithAudioFasterThanVideoDrift) { TestAudioVideoSync(FecMode::kOff, CreateOrder::kAudioFirst, DriftingClock::kNoDrift, DriftingClock::PercentsSlower(30.0f), DriftingClock::PercentsFaster(30.0f), "_audio_faster"); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithVideoFasterThanAudioDrift) { +TEST_F(CallPerfTest, + Synchronization_PlaysOutAudioAndVideoWithVideoFasterThanAudioDrift) { TestAudioVideoSync(FecMode::kOn, CreateOrder::kVideoFirst, DriftingClock::kNoDrift, DriftingClock::PercentsFaster(30.0f), @@ -509,7 +510,7 @@ void CallPerfTest::TestCaptureNtpTime( // Flaky tests, disabled on Mac and Windows due to webrtc:8291. #if !(defined(WEBRTC_MAC) || defined(WEBRTC_WIN)) -TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkDelay) { +TEST_F(CallPerfTest, Real_Estimated_CaptureNtpTimeWithNetworkDelay) { BuiltInNetworkBehaviorConfig net_config; net_config.queue_delay_ms = 100; // TODO(wu): lower the threshold as the calculation/estimatation becomes more @@ -520,7 +521,7 @@ TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkDelay) { TestCaptureNtpTime(net_config, kThresholdMs, kStartTimeMs, kRunTimeMs); } -TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkJitter) { +TEST_F(CallPerfTest, Real_Estimated_CaptureNtpTimeWithNetworkJitter) { BuiltInNetworkBehaviorConfig net_config; net_config.queue_delay_ms = 100; net_config.delay_standard_deviation_ms = 10; @@ -710,11 +711,11 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { RunBaseTest(&test); } -TEST_F(CallPerfTest, PadsToMinTransmitBitrate) { +TEST_F(CallPerfTest, Bitrate_Kbps_PadsToMinTransmitBitrate) { TestMinTransmitBitrate(true); } -TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) { +TEST_F(CallPerfTest, 
Bitrate_Kbps_NoPadWithoutMinTransmitBitrate) { TestMinTransmitBitrate(false); } @@ -730,6 +731,11 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { static const uint32_t kInitialBitrateKbps = 400; static const uint32_t kReconfigureThresholdKbps = 600; + // We get lower bitrate than expected by this test if the following field + // trial is enabled. + test::ScopedFieldTrials field_trials( + "WebRTC-SendSideBwe-WithOverhead/Disabled/"); + class VideoStreamFactory : public VideoEncoderConfig::VideoStreamFactoryInterface { public: @@ -1003,11 +1009,11 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, // TODO(bugs.webrtc.org/8878) #if defined(WEBRTC_MAC) -#define MAYBE_MinVideoAndAudioBitrate DISABLED_MinVideoAndAudioBitrate +#define MAYBE_Min_Bitrate_VideoAndAudio DISABLED_Min_Bitrate_VideoAndAudio #else -#define MAYBE_MinVideoAndAudioBitrate MinVideoAndAudioBitrate +#define MAYBE_Min_Bitrate_VideoAndAudio Min_Bitrate_VideoAndAudio #endif -TEST_F(CallPerfTest, MAYBE_MinVideoAndAudioBitrate) { +TEST_F(CallPerfTest, MAYBE_Min_Bitrate_VideoAndAudio) { TestMinAudioVideoBitrate(110, 40, -10, 10000, 70000, 200000); } diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc index 007e0af36..0cd43018a 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc @@ -266,6 +266,10 @@ Call::Stats DegradedCall::GetStats() const { return call_->GetStats(); } +const WebRtcKeyValueConfig& DegradedCall::trials() const { + return call_->trials(); +} + void DegradedCall::SignalChannelNetworkState(MediaType media, NetworkState state) { call_->SignalChannelNetworkState(media, state); diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h index ac072b715..d81c65c57 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h +++ 
b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h @@ -85,6 +85,8 @@ class DegradedCall : public Call, private PacketReceiver { Stats GetStats() const override; + const WebRtcKeyValueConfig& trials() const override; + void SignalChannelNetworkState(MediaType media, NetworkState state) override; void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc index a9ab77b62..1f0815547 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc @@ -36,6 +36,7 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, absl::optional spatial_index, RTPVideoHeader* rtp) { rtp->codec = info.codecType; + rtp->is_last_frame_in_picture = info.end_of_picture; switch (info.codecType) { case kVideoCodecVP8: { auto& vp8_header = rtp->video_type_header.emplace(); @@ -85,7 +86,7 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, for (int i = 0; i < info.codecSpecific.VP9.num_ref_pics; ++i) { vp9_header.pid_diff[i] = info.codecSpecific.VP9.p_diff[i]; } - vp9_header.end_of_picture = info.codecSpecific.VP9.end_of_picture; + vp9_header.end_of_picture = info.end_of_picture; return; } case kVideoCodecH264: { diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc index 9baf164a6..f5adae68a 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc @@ -64,6 +64,11 @@ bool IsEnabled(const WebRtcKeyValueConfig* trials, absl::string_view key) { return absl::StartsWith(trials->Lookup(key), "Enabled"); } +bool IsDisabled(const WebRtcKeyValueConfig* trials, absl::string_view key) { + RTC_DCHECK(trials != nullptr); + return 
absl::StartsWith(trials->Lookup(key), "Disabled"); +} + bool IsRelayed(const rtc::NetworkRoute& route) { return route.local.uses_turn() || route.remote.uses_turn(); } @@ -82,6 +87,7 @@ RtpTransportControllerSend::RtpTransportControllerSend( : clock_(clock), event_log_(event_log), bitrate_configurator_(bitrate_config), + process_thread_started_(false), process_thread_(std::move(process_thread)), use_task_queue_pacer_(IsEnabled(trials, "WebRTC-TaskQueuePacer")), process_thread_pacer_(use_task_queue_pacer_ @@ -110,7 +116,7 @@ RtpTransportControllerSend::RtpTransportControllerSend( reset_feedback_on_route_change_( !IsEnabled(trials, "WebRTC-Bwe-NoFeedbackReset")), send_side_bwe_with_overhead_( - IsEnabled(trials, "WebRTC-SendSideBwe-WithOverhead")), + !IsDisabled(trials, "WebRTC-SendSideBwe-WithOverhead")), add_pacing_to_cwin_( IsEnabled(trials, "WebRTC-AddPacingToCongestionWindowPushback")), relay_bandwidth_cap_("relay_cap", DataRate::PlusInfinity()), @@ -130,15 +136,13 @@ RtpTransportControllerSend::RtpTransportControllerSend( pacer()->SetPacingRates( DataRate::BitsPerSec(bitrate_config.start_bitrate_bps), DataRate::Zero()); - if (!use_task_queue_pacer_) { - process_thread_->Start(); + if (absl::StartsWith(trials->Lookup("WebRTC-LazyPacerStart"), "Disabled")) { + EnsureStarted(); } } RtpTransportControllerSend::~RtpTransportControllerSend() { - if (!use_task_queue_pacer_) { - process_thread_->Stop(); - } + process_thread_->Stop(); } RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender( @@ -491,6 +495,13 @@ void RtpTransportControllerSend::IncludeOverheadInPacedSender() { pacer()->SetIncludeOverhead(); } +void RtpTransportControllerSend::EnsureStarted() { + if (!use_task_queue_pacer_ && !process_thread_started_) { + process_thread_started_ = true; + process_thread_->Start(); + } +} + void RtpTransportControllerSend::OnReceivedEstimatedBitrate(uint32_t bitrate) { RemoteBitrateReport msg; msg.receive_time = 
Timestamp::Millis(clock_->TimeInMilliseconds()); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h index e7310334c..7025b0331 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h @@ -110,6 +110,7 @@ class RtpTransportControllerSend final void AccountForAudioPacketsInPacedSender(bool account_for_audio) override; void IncludeOverheadInPacedSender() override; + void EnsureStarted() override; // Implements RtcpBandwidthObserver interface void OnReceivedEstimatedBitrate(uint32_t bitrate) override; @@ -151,6 +152,7 @@ class RtpTransportControllerSend final std::vector> video_rtp_senders_; RtpBitrateConfigurator bitrate_configurator_; std::map network_routes_; + bool process_thread_started_; const std::unique_ptr process_thread_; const bool use_task_queue_pacer_; std::unique_ptr process_thread_pacer_; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h index f07342496..602908e2a 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h @@ -26,6 +26,7 @@ #include "api/transport/bitrate_settings.h" #include "api/units/timestamp.h" #include "call/rtp_config.h" +#include "common_video/frame_counts.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_packet_sender.h" @@ -153,6 +154,8 @@ class RtpTransportControllerSendInterface { virtual void AccountForAudioPacketsInPacedSender(bool account_for_audio) = 0; virtual void IncludeOverheadInPacedSender() = 0; + + virtual void EnsureStarted() = 0; }; } // namespace webrtc diff --git 
a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc index b2ae0352d..9dad424c8 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc @@ -197,7 +197,6 @@ std::vector CreateRtpStreamSenders( FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, rtc::scoped_refptr frame_transformer, - bool use_deferred_fec, const WebRtcKeyValueConfig& trials) { RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0); @@ -245,9 +244,6 @@ std::vector CreateRtpStreamSenders( std::unique_ptr fec_generator = MaybeCreateFecGenerator(clock, rtp_config, suspended_ssrcs, i, trials); configuration.fec_generator = fec_generator.get(); - if (!use_deferred_fec) { - video_config.fec_generator = fec_generator.get(); - } configuration.rtx_send_ssrc = rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(rtp_config.ssrcs[i]); @@ -331,19 +327,10 @@ RtpVideoSender::RtpVideoSender( FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, rtc::scoped_refptr frame_transformer) - : send_side_bwe_with_overhead_(absl::StartsWith( + : send_side_bwe_with_overhead_(!absl::StartsWith( field_trials_.Lookup("WebRTC-SendSideBwe-WithOverhead"), - "Enabled")), - account_for_packetization_overhead_(!absl::StartsWith( - field_trials_.Lookup("WebRTC-SubtractPacketizationOverhead"), - "Disabled")), - use_early_loss_detection_(!absl::StartsWith( - field_trials_.Lookup("WebRTC-UseEarlyLossDetection"), "Disabled")), has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)), - use_deferred_fec_( - absl::StartsWith(field_trials_.Lookup("WebRTC-DeferredFecGeneration"), - "Enabled")), active_(false), module_process_thread_(nullptr), suspended_ssrcs_(std::move(suspended_ssrcs)), @@ -362,7 +349,6 @@ RtpVideoSender::RtpVideoSender( frame_encryptor, crypto_options, std::move(frame_transformer), - use_deferred_fec_, field_trials_)), rtp_config_(rtp_config), 
codec_type_(GetVideoCodecType(rtp_config)), @@ -606,6 +592,18 @@ void RtpVideoSender::OnBitrateAllocationUpdated( } } } +void RtpVideoSender::OnVideoLayersAllocationUpdated( + const VideoLayersAllocation& allocation) { + MutexLock lock(&mutex_); + if (IsActiveLocked()) { + for (size_t i = 0; i < rtp_streams_.size(); ++i) { + VideoLayersAllocation stream_allocation = allocation; + stream_allocation.rtp_stream_index = i; + rtp_streams_[i].sender_video->SetVideoLayersAllocation( + std::move(stream_allocation)); + } + } +} bool RtpVideoSender::NackEnabled() const { const bool nack_enabled = rtp_config_.nack.rtp_history_ms > 0; @@ -789,16 +787,13 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, // since |fec_allowed_| may be toggled back on at any moment. } - uint32_t packetization_rate_bps = 0; - if (account_for_packetization_overhead_) { // Subtract packetization overhead from the encoder target. If target rate // is really low, cap the overhead at 50%. This also avoids the case where // |encoder_target_rate_bps_| is 0 due to encoder pause event while the // packetization rate is positive since packets are still flowing. 
- packetization_rate_bps = - std::min(GetPacketizationOverheadRate(), encoder_target_rate_bps_ / 2); - encoder_target_rate_bps_ -= packetization_rate_bps; - } + uint32_t packetization_rate_bps = + std::min(GetPacketizationOverheadRate(), encoder_target_rate_bps_ / 2); + encoder_target_rate_bps_ -= packetization_rate_bps; loss_mask_vector_.clear(); @@ -853,7 +848,6 @@ int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params, *sent_nack_rate_bps = 0; *sent_fec_rate_bps = 0; for (const RtpStreamSender& stream : rtp_streams_) { - if (use_deferred_fec_) { stream.rtp_rtcp->SetFecProtectionParams(*delta_params, *key_params); auto send_bitrate = stream.rtp_rtcp->GetSendRates(); @@ -862,17 +856,6 @@ int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params, send_bitrate[RtpPacketMediaType::kForwardErrorCorrection].bps(); *sent_nack_rate_bps += send_bitrate[RtpPacketMediaType::kRetransmission].bps(); - } else { - if (stream.fec_generator) { - stream.fec_generator->SetProtectionParameters(*delta_params, - *key_params); - *sent_fec_rate_bps += stream.fec_generator->CurrentFecRate().bps(); - } - *sent_video_rate_bps += stream.sender_video->VideoBitrateSent(); - *sent_nack_rate_bps += - stream.rtp_rtcp->GetSendRates()[RtpPacketMediaType::kRetransmission] - .bps(); - } } return 0; } @@ -899,7 +882,6 @@ void RtpVideoSender::OnPacketFeedbackVector( } } - if (use_early_loss_detection_) { // Map from SSRC to vector of RTP sequence numbers that are indicated as // lost by feedback, without being trailed by any received packets. 
std::map> early_loss_detected_per_ssrc; @@ -925,7 +907,6 @@ void RtpVideoSender::OnPacketFeedbackVector( rtp_sender->ReSendPacket(sequence_number); } } - } for (const auto& kv : acked_packets_per_ssrc) { const uint32_t ssrc = kv.first; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h index 9ec10c460..49fd3cc0d 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h @@ -138,7 +138,8 @@ class RtpVideoSender : public RtpVideoSenderInterface, void OnBitrateAllocationUpdated(const VideoBitrateAllocation& bitrate) RTC_LOCKS_EXCLUDED(mutex_) override; - + void OnVideoLayersAllocationUpdated( + const VideoLayersAllocation& layers) override; void OnTransportOverheadChanged(size_t transport_overhead_bytes_per_packet) RTC_LOCKS_EXCLUDED(mutex_) override; void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) @@ -171,10 +172,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, const FieldTrialBasedConfig field_trials_; const bool send_side_bwe_with_overhead_; - const bool account_for_packetization_overhead_; - const bool use_early_loss_detection_; const bool has_packet_feedback_; - const bool use_deferred_fec_; // TODO(holmer): Remove mutex_ once RtpVideoSender runs on the // transport task queue. 
diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h index bb72eb599..632c9e835 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h @@ -18,6 +18,7 @@ #include "api/array_view.h" #include "api/call/bitrate_allocation.h" #include "api/fec_controller_override.h" +#include "api/video/video_layers_allocation.h" #include "call/rtp_config.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" @@ -50,6 +51,8 @@ class RtpVideoSenderInterface : public EncodedImageCallback, virtual void OnBitrateAllocationUpdated( const VideoBitrateAllocation& bitrate) = 0; + virtual void OnVideoLayersAllocationUpdated( + const VideoLayersAllocation& allocation) = 0; virtual void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) = 0; virtual void OnTransportOverheadChanged( diff --git a/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc b/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc index 2ed9140fa..f8a5bd893 100644 --- a/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc +++ b/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc @@ -77,6 +77,7 @@ bool CoDelSimulation::DropDequeuedPacket(Timestamp now, } return false; } + RTC_CHECK_NOTREACHED(); } SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed) diff --git a/TMessagesProj/jni/voip/webrtc/call/syncable.h b/TMessagesProj/jni/voip/webrtc/call/syncable.h index 3bbe50c8d..43b16a072 100644 --- a/TMessagesProj/jni/voip/webrtc/call/syncable.h +++ b/TMessagesProj/jni/voip/webrtc/call/syncable.h @@ -37,7 +37,7 @@ class Syncable { virtual absl::optional GetInfo() const = 0; virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const = 0; - virtual void SetMinimumPlayoutDelay(int delay_ms) = 0; + virtual bool 
SetMinimumPlayoutDelay(int delay_ms) = 0; virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h index 91c637160..7a6803d9e 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h @@ -32,6 +32,7 @@ #include "api/video/video_timing.h" #include "api/video_codecs/sdp_video_format.h" #include "call/rtp_config.h" +#include "common_video/frame_counts.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" diff --git a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc index a4b674491..244d78089 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc @@ -28,6 +28,7 @@ const char* StreamTypeToString(VideoSendStream::StreamStats::StreamType type) { case VideoSendStream::StreamStats::StreamType::kFlexfec: return "flexfec"; } + RTC_CHECK_NOTREACHED(); } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h index 715d5d73e..0df9e6ce0 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h @@ -31,6 +31,7 @@ #include "api/video/video_stream_encoder_settings.h" #include "api/video_codecs/video_encoder_config.h" #include "call/rtp_config.h" +#include "common_video/frame_counts.h" #include "common_video/include/quality_limitation_reason.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_avx2.cc b/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_avx2.cc new 
file mode 100644 index 000000000..26468e298 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_avx2.cc @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "common_audio/fir_filter_avx2.h" + +#include +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" + +namespace webrtc { + +FIRFilterAVX2::FIRFilterAVX2(const float* unaligned_coefficients, + size_t unaligned_coefficients_length, + size_t max_input_length) + : // Closest higher multiple of eight. + coefficients_length_((unaligned_coefficients_length + 7) & ~0x07), + state_length_(coefficients_length_ - 1), + coefficients_(static_cast( + AlignedMalloc(sizeof(float) * coefficients_length_, 32))), + state_(static_cast( + AlignedMalloc(sizeof(float) * (max_input_length + state_length_), + 32))) { + // Add zeros at the end of the coefficients. + RTC_DCHECK_GE(coefficients_length_, unaligned_coefficients_length); + size_t padding = coefficients_length_ - unaligned_coefficients_length; + memset(coefficients_.get(), 0, padding * sizeof(coefficients_[0])); + // The coefficients are reversed to compensate for the order in which the + // input samples are acquired (most recent last). 
+ for (size_t i = 0; i < unaligned_coefficients_length; ++i) { + coefficients_[i + padding] = + unaligned_coefficients[unaligned_coefficients_length - i - 1]; + } + memset(state_.get(), 0, + (max_input_length + state_length_) * sizeof(state_[0])); +} + +FIRFilterAVX2::~FIRFilterAVX2() = default; + +void FIRFilterAVX2::Filter(const float* in, size_t length, float* out) { + RTC_DCHECK_GT(length, 0); + + memcpy(&state_[state_length_], in, length * sizeof(*in)); + + // Convolves the input signal |in| with the filter kernel |coefficients_| + // taking into account the previous state. + for (size_t i = 0; i < length; ++i) { + float* in_ptr = &state_[i]; + float* coef_ptr = coefficients_.get(); + + __m256 m_sum = _mm256_setzero_ps(); + __m256 m_in; + + // Depending on if the pointer is aligned with 32 bytes or not it is loaded + // differently. + if (reinterpret_cast(in_ptr) & 0x1F) { + for (size_t j = 0; j < coefficients_length_; j += 8) { + m_in = _mm256_loadu_ps(in_ptr + j); + m_sum = _mm256_fmadd_ps(m_in, _mm256_load_ps(coef_ptr + j), m_sum); + } + } else { + for (size_t j = 0; j < coefficients_length_; j += 8) { + m_in = _mm256_load_ps(in_ptr + j); + m_sum = _mm256_fmadd_ps(m_in, _mm256_load_ps(coef_ptr + j), m_sum); + } + } + __m128 m128_sum = _mm_add_ps(_mm256_extractf128_ps(m_sum, 0), + _mm256_extractf128_ps(m_sum, 1)); + m128_sum = _mm_add_ps(_mm_movehl_ps(m128_sum, m128_sum), m128_sum); + _mm_store_ss(out + i, + _mm_add_ss(m128_sum, _mm_shuffle_ps(m128_sum, m128_sum, 1))); + } + + // Update current state. + memmove(state_.get(), &state_[length], state_length_ * sizeof(state_[0])); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_avx2.h b/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_avx2.h new file mode 100644 index 000000000..893b60bf6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_avx2.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_AUDIO_FIR_FILTER_AVX2_H_ +#define COMMON_AUDIO_FIR_FILTER_AVX2_H_ + +#include + +#include + +#include "common_audio/fir_filter.h" +#include "rtc_base/memory/aligned_malloc.h" + +namespace webrtc { + +class FIRFilterAVX2 : public FIRFilter { + public: + FIRFilterAVX2(const float* coefficients, + size_t coefficients_length, + size_t max_input_length); + ~FIRFilterAVX2() override; + + void Filter(const float* in, size_t length, float* out) override; + + private: + const size_t coefficients_length_; + const size_t state_length_; + std::unique_ptr coefficients_; + std::unique_ptr state_; +}; + +} // namespace webrtc + +#endif // COMMON_AUDIO_FIR_FILTER_AVX2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_factory.cc b/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_factory.cc index 19528e312..4ba53e2c9 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/common_audio/fir_filter_factory.cc @@ -17,6 +17,7 @@ #if defined(WEBRTC_HAS_NEON) #include "common_audio/fir_filter_neon.h" #elif defined(WEBRTC_ARCH_X86_FAMILY) +#include "common_audio/fir_filter_avx2.h" #include "common_audio/fir_filter_sse.h" #include "system_wrappers/include/cpu_features_wrapper.h" // kSSE2, WebRtc_G... #endif @@ -34,18 +35,13 @@ FIRFilter* CreateFirFilter(const float* coefficients, FIRFilter* filter = nullptr; // If we know the minimum architecture at compile time, avoid CPU detection. 
#if defined(WEBRTC_ARCH_X86_FAMILY) -#if defined(__SSE2__) - filter = - new FIRFilterSSE2(coefficients, coefficients_length, max_input_length); -#else // x86 CPU detection required. - if (WebRtc_GetCPUInfo(kSSE2)) { + if (GetCPUInfo(kSSE2)) { filter = new FIRFilterSSE2(coefficients, coefficients_length, max_input_length); } else { filter = new FIRFilterC(coefficients, coefficients_length); } -#endif #elif defined(WEBRTC_HAS_NEON) filter = new FIRFilterNEON(coefficients, coefficients_length, max_input_length); diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.cc b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.cc index 21707e9e4..154cf5b3b 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.cc +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.cc @@ -122,28 +122,20 @@ double SincScaleFactor(double io_ratio) { const size_t SincResampler::kKernelSize; // If we know the minimum architecture at compile time, avoid CPU detection. -#if defined(WEBRTC_ARCH_X86_FAMILY) -#if defined(__SSE2__) -#define CONVOLVE_FUNC Convolve_SSE -void SincResampler::InitializeCPUSpecificFeatures() {} -#else -// x86 CPU detection required. Function will be set by -// InitializeCPUSpecificFeatures(). -// TODO(dalecurtis): Once Chrome moves to an SSE baseline this can be removed. -#define CONVOLVE_FUNC convolve_proc_ - void SincResampler::InitializeCPUSpecificFeatures() { - convolve_proc_ = WebRtc_GetCPUInfo(kSSE2) ? Convolve_SSE : Convolve_C; -} -#endif -#elif defined(WEBRTC_HAS_NEON) -#define CONVOLVE_FUNC Convolve_NEON -void SincResampler::InitializeCPUSpecificFeatures() {} +#if defined(WEBRTC_HAS_NEON) + convolve_proc_ = Convolve_NEON; +#elif defined(WEBRTC_ARCH_X86_FAMILY) + // Using AVX2 instead of SSE2 when AVX2 supported. + if (GetCPUInfo(kSSE2)) + convolve_proc_ = Convolve_SSE; + else + convolve_proc_ = Convolve_C; #else -// Unknown architecture. 
-#define CONVOLVE_FUNC Convolve_C -void SincResampler::InitializeCPUSpecificFeatures() {} + // Unknown architecture. + convolve_proc_ = Convolve_C; #endif +} SincResampler::SincResampler(double io_sample_rate_ratio, size_t request_frames, @@ -152,24 +144,20 @@ SincResampler::SincResampler(double io_sample_rate_ratio, read_cb_(read_cb), request_frames_(request_frames), input_buffer_size_(request_frames_ + kKernelSize), - // Create input buffers with a 16-byte alignment for SSE optimizations. + // Create input buffers with a 32-byte alignment for SIMD optimizations. kernel_storage_(static_cast( - AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))), + AlignedMalloc(sizeof(float) * kKernelStorageSize, 32))), kernel_pre_sinc_storage_(static_cast( - AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))), + AlignedMalloc(sizeof(float) * kKernelStorageSize, 32))), kernel_window_storage_(static_cast( - AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))), + AlignedMalloc(sizeof(float) * kKernelStorageSize, 32))), input_buffer_(static_cast( - AlignedMalloc(sizeof(float) * input_buffer_size_, 16))), -#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__) + AlignedMalloc(sizeof(float) * input_buffer_size_, 32))), convolve_proc_(nullptr), -#endif r1_(input_buffer_.get()), r2_(input_buffer_.get() + kKernelSize / 2) { -#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__) InitializeCPUSpecificFeatures(); RTC_DCHECK(convolve_proc_); -#endif RTC_DCHECK_GT(request_frames_, 0); Flush(); RTC_DCHECK_GT(block_size_, kKernelSize); @@ -302,10 +290,10 @@ void SincResampler::Resample(size_t frames, float* destination) { const float* const k1 = kernel_ptr + offset_idx * kKernelSize; const float* const k2 = k1 + kKernelSize; - // Ensure |k1|, |k2| are 16-byte aligned for SIMD usage. Should always be - // true so long as kKernelSize is a multiple of 16. 
- RTC_DCHECK_EQ(0, reinterpret_cast(k1) % 16); - RTC_DCHECK_EQ(0, reinterpret_cast(k2) % 16); + // Ensure |k1|, |k2| are 32-byte aligned for SIMD usage. Should always be + // true so long as kKernelSize is a multiple of 32. + RTC_DCHECK_EQ(0, reinterpret_cast(k1) % 32); + RTC_DCHECK_EQ(0, reinterpret_cast(k2) % 32); // Initialize input pointer based on quantized |virtual_source_idx_|. const float* const input_ptr = r1_ + source_idx; @@ -314,7 +302,7 @@ void SincResampler::Resample(size_t frames, float* destination) { const double kernel_interpolation_factor = virtual_offset_idx - offset_idx; *destination++ = - CONVOLVE_FUNC(input_ptr, k1, k2, kernel_interpolation_factor); + convolve_proc_(input_ptr, k1, k2, kernel_interpolation_factor); // Advance the virtual index. virtual_source_idx_ += current_io_ratio; diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h index 5181c18da..a72a0c62c 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h @@ -112,6 +112,10 @@ class SincResampler { const float* k1, const float* k2, double kernel_interpolation_factor); + static float Convolve_AVX2(const float* input_ptr, + const float* k1, + const float* k2, + double kernel_interpolation_factor); #elif defined(WEBRTC_HAS_NEON) static float Convolve_NEON(const float* input_ptr, const float* k1, @@ -155,13 +159,11 @@ class SincResampler { // TODO(ajm): Move to using a global static which must only be initialized // once by the user. We're not doing this initially, because we don't have // e.g. a LazyInstance helper in webrtc. -#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__) typedef float (*ConvolveProc)(const float*, const float*, const float*, double); ConvolveProc convolve_proc_; -#endif // Pointers to the various regions inside |input_buffer_|. 
See the diagram at // the top of the .cc file for more information. diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler_avx2.cc b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler_avx2.cc new file mode 100644 index 000000000..3eb5d4a1b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler_avx2.cc @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include +#include + +#include "common_audio/resampler/sinc_resampler.h" + +namespace webrtc { + +float SincResampler::Convolve_AVX2(const float* input_ptr, + const float* k1, + const float* k2, + double kernel_interpolation_factor) { + __m256 m_input; + __m256 m_sums1 = _mm256_setzero_ps(); + __m256 m_sums2 = _mm256_setzero_ps(); + + // Based on |input_ptr| alignment, we need to use loadu or load. Unrolling + // these loops has not been tested or benchmarked. + bool aligned_input = (reinterpret_cast(input_ptr) & 0x1F) == 0; + if (!aligned_input) { + for (size_t i = 0; i < kKernelSize; i += 8) { + m_input = _mm256_loadu_ps(input_ptr + i); + m_sums1 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k1 + i), m_sums1); + m_sums2 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k2 + i), m_sums2); + } + } else { + for (size_t i = 0; i < kKernelSize; i += 8) { + m_input = _mm256_load_ps(input_ptr + i); + m_sums1 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k1 + i), m_sums1); + m_sums2 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k2 + i), m_sums2); + } + } + + // Linearly interpolate the two "convolutions". 
+ __m128 m128_sums1 = _mm_add_ps(_mm256_extractf128_ps(m_sums1, 0), + _mm256_extractf128_ps(m_sums1, 1)); + __m128 m128_sums2 = _mm_add_ps(_mm256_extractf128_ps(m_sums2, 0), + _mm256_extractf128_ps(m_sums2, 1)); + m128_sums1 = _mm_mul_ps( + m128_sums1, + _mm_set_ps1(static_cast(1.0 - kernel_interpolation_factor))); + m128_sums2 = _mm_mul_ps( + m128_sums2, _mm_set_ps1(static_cast(kernel_interpolation_factor))); + m128_sums1 = _mm_add_ps(m128_sums1, m128_sums2); + + // Sum components together. + float result; + m128_sums2 = _mm_add_ps(_mm_movehl_ps(m128_sums1, m128_sums1), m128_sums1); + _mm_store_ss(&result, _mm_add_ss(m128_sums2, + _mm_shuffle_ps(m128_sums2, m128_sums2, 1))); + + return result; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/smoothing_filter.h b/TMessagesProj/jni/voip/webrtc/common_audio/smoothing_filter.h index e5f561ecf..e96d52a6f 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/smoothing_filter.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/smoothing_filter.h @@ -14,7 +14,6 @@ #include #include "absl/types/optional.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -42,6 +41,11 @@ class SmoothingFilterImpl final : public SmoothingFilter { // will be set to |init_time_ms| first and can be changed through // |SetTimeConstantMs|. 
explicit SmoothingFilterImpl(int init_time_ms); + + SmoothingFilterImpl() = delete; + SmoothingFilterImpl(const SmoothingFilterImpl&) = delete; + SmoothingFilterImpl& operator=(const SmoothingFilterImpl&) = delete; + ~SmoothingFilterImpl() override; void AddSample(float sample) override; @@ -64,8 +68,6 @@ class SmoothingFilterImpl final : public SmoothingFilter { float alpha_; float state_; int64_t last_state_time_ms_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(SmoothingFilterImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc b/TMessagesProj/jni/voip/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc index 6b6d6f1fd..693312012 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc +++ b/TMessagesProj/jni/voip/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc @@ -323,7 +323,7 @@ OouraFft::OouraFft(bool sse2_available) { OouraFft::OouraFft() { #if defined(WEBRTC_ARCH_X86_FAMILY) - use_sse2_ = (WebRtc_GetCPUInfo(kSSE2) != 0); + use_sse2_ = (GetCPUInfo(kSSE2) != 0); #else use_sse2_ = false; #endif diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_unittest.h b/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_unittest.h new file mode 100644 index 000000000..ee642063a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_unittest.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef COMMON_AUDIO_VAD_VAD_UNITTEST_H_ +#define COMMON_AUDIO_VAD_VAD_UNITTEST_H_ + +#include // size_t + +#include "test/gtest.h" + +namespace webrtc { +namespace test { + +// Modes we support +const int kModes[] = {0, 1, 2, 3}; +const size_t kModesSize = sizeof(kModes) / sizeof(*kModes); + +// Rates we support. +const int kRates[] = {8000, 12000, 16000, 24000, 32000, 48000}; +const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates); + +// Frame lengths we support. +const size_t kMaxFrameLength = 1440; +const size_t kFrameLengths[] = { + 80, 120, 160, 240, 320, 480, 640, 960, kMaxFrameLength}; +const size_t kFrameLengthsSize = sizeof(kFrameLengths) / sizeof(*kFrameLengths); + +} // namespace test +} // namespace webrtc + +class VadTest : public ::testing::Test { + protected: + VadTest(); + void SetUp() override; + void TearDown() override; + + // Returns true if the rate and frame length combination is valid. + bool ValidRatesAndFrameLengths(int rate, size_t frame_length); +}; + +#endif // COMMON_AUDIO_VAD_VAD_UNITTEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc b/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc index d3dca9055..ce119f109 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc +++ b/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc @@ -132,7 +132,7 @@ uint16_t MapWavFormatToHeaderField(WavFormat format) { case WavFormat::kWavFormatMuLaw: return 7; } - RTC_CHECK(false); + RTC_CHECK_NOTREACHED(); } WavFormat MapHeaderFieldToWavFormat(uint16_t format_header_value) { @@ -278,10 +278,8 @@ size_t GetFormatBytesPerSample(WavFormat format) { return 1; case WavFormat::kWavFormatIeeeFloat: return 4; - default: - RTC_CHECK(false); - return 2; } + RTC_CHECK_NOTREACHED(); } bool CheckWavParameters(size_t num_channels, diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/window_generator.h b/TMessagesProj/jni/voip/webrtc/common_audio/window_generator.h index 0cbe24a40..c0a89c4f9 100644 --- 
a/TMessagesProj/jni/voip/webrtc/common_audio/window_generator.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/window_generator.h @@ -13,18 +13,17 @@ #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { // Helper class with generators for various signal transform windows. class WindowGenerator { public: + WindowGenerator() = delete; + WindowGenerator(const WindowGenerator&) = delete; + WindowGenerator& operator=(const WindowGenerator&) = delete; + static void Hanning(int length, float* window); static void KaiserBesselDerived(float alpha, size_t length, float* window); - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WindowGenerator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_video/frame_counts.h b/TMessagesProj/jni/voip/webrtc/common_video/frame_counts.h new file mode 100644 index 000000000..663fda4a2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/common_video/frame_counts.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_FRAME_COUNTS_H_ +#define COMMON_VIDEO_FRAME_COUNTS_H_ + +namespace webrtc { + +struct FrameCounts { + FrameCounts() : key_frames(0), delta_frames(0) {} + int key_frames; + int delta_frames; +}; + +// Callback, used to notify an observer whenever frame counts have been updated. 
+class FrameCountObserver { + public: + virtual ~FrameCountObserver() {} + virtual void FrameCountUpdated(const FrameCounts& frame_counts, + uint32_t ssrc) = 0; +}; + +} // namespace webrtc + +#endif // COMMON_VIDEO_FRAME_COUNTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/OWNERS b/TMessagesProj/jni/voip/webrtc/common_video/h264/OWNERS new file mode 100644 index 000000000..361ed7e84 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/OWNERS @@ -0,0 +1 @@ +ssilkin@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc index 3fc794fb2..0d16be825 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc @@ -210,7 +210,7 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( return result; } -rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( +rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( rtc::ArrayView buffer, const webrtc::ColorSpace* color_space) { std::vector nalus = @@ -225,8 +225,6 @@ rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( const uint8_t* start_code_ptr = buffer.data() + nalu.start_offset; const size_t start_code_length = nalu.payload_start_offset - nalu.start_offset; - output_buffer.AppendData(start_code_ptr, start_code_length); - const uint8_t* nalu_ptr = buffer.data() + nalu.payload_start_offset; const size_t nalu_length = nalu.payload_size; @@ -253,12 +251,17 @@ rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( nalu_ptr + H264::kNaluTypeSize, nalu_length - H264::kNaluTypeSize, &sps, color_space, &output_nalu, Direction::kOutgoing); if (result == ParseResult::kVuiRewritten) { + output_buffer.AppendData(start_code_ptr, start_code_length); output_buffer.AppendData(output_nalu.data(), output_nalu.size()); continue; } + } else if 
(H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kAud) { + // Skip the access unit delimiter copy. + continue; } - // vui wasn't rewritten, copy the nal unit as is. + // vui wasn't rewritten and it is not aud, copy the nal unit as is. + output_buffer.AppendData(start_code_ptr, start_code_length); output_buffer.AppendData(nalu_ptr, nalu_length); } return output_buffer; diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.h b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.h index 9e79c3f31..311db30d5 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.h @@ -50,8 +50,9 @@ class SpsVuiRewriter : private SpsParser { rtc::Buffer* destination, Direction Direction); - // Parses NAL units from |buffer| and rewrites VUI in SPS blocks if necessary. - static rtc::Buffer ParseOutgoingBitstreamAndRewriteSps( + // Parses NAL units from |buffer|, strips AUD blocks and rewrites VUI in SPS + // blocks if necessary. + static rtc::Buffer ParseOutgoingBitstreamAndRewrite( rtc::ArrayView buffer, const ColorSpace* color_space); diff --git a/TMessagesProj/jni/voip/webrtc/common_video/i420_buffer_pool.cc b/TMessagesProj/jni/voip/webrtc/common_video/i420_buffer_pool.cc deleted file mode 100644 index d13da6a17..000000000 --- a/TMessagesProj/jni/voip/webrtc/common_video/i420_buffer_pool.cc +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "common_video/include/i420_buffer_pool.h" - -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -I420BufferPool::I420BufferPool() : I420BufferPool(false) {} -I420BufferPool::I420BufferPool(bool zero_initialize) - : I420BufferPool(zero_initialize, std::numeric_limits::max()) {} -I420BufferPool::I420BufferPool(bool zero_initialize, - size_t max_number_of_buffers) - : zero_initialize_(zero_initialize), - max_number_of_buffers_(max_number_of_buffers) {} -I420BufferPool::~I420BufferPool() = default; - -void I420BufferPool::Release() { - buffers_.clear(); -} - -bool I420BufferPool::Resize(size_t max_number_of_buffers) { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - size_t used_buffers_count = 0; - for (const rtc::scoped_refptr& buffer : buffers_) { - // If the buffer is in use, the ref count will be >= 2, one from the list we - // are looping over and one from the application. If the ref count is 1, - // then the list we are looping over holds the only reference and it's safe - // to reuse. - if (!buffer->HasOneRef()) { - used_buffers_count++; - } - } - if (used_buffers_count > max_number_of_buffers) { - return false; - } - max_number_of_buffers_ = max_number_of_buffers; - - size_t buffers_to_purge = buffers_.size() - max_number_of_buffers_; - auto iter = buffers_.begin(); - while (iter != buffers_.end() && buffers_to_purge > 0) { - if ((*iter)->HasOneRef()) { - iter = buffers_.erase(iter); - buffers_to_purge--; - } else { - ++iter; - } - } - return true; -} - -rtc::scoped_refptr I420BufferPool::CreateBuffer(int width, - int height) { - // Default stride_y is width, default uv stride is width / 2 (rounding up). - return CreateBuffer(width, height, width, (width + 1) / 2, (width + 1) / 2); -} - -rtc::scoped_refptr I420BufferPool::CreateBuffer(int width, - int height, - int stride_y, - int stride_u, - int stride_v) { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - // Release buffers with wrong resolution. 
- for (auto it = buffers_.begin(); it != buffers_.end();) { - const auto& buffer = *it; - if (buffer->width() != width || buffer->height() != height || - buffer->StrideY() != stride_y || buffer->StrideU() != stride_u || - buffer->StrideV() != stride_v) { - it = buffers_.erase(it); - } else { - ++it; - } - } - // Look for a free buffer. - for (const rtc::scoped_refptr& buffer : buffers_) { - // If the buffer is in use, the ref count will be >= 2, one from the list we - // are looping over and one from the application. If the ref count is 1, - // then the list we are looping over holds the only reference and it's safe - // to reuse. - if (buffer->HasOneRef()) - return buffer; - } - - if (buffers_.size() >= max_number_of_buffers_) - return nullptr; - // Allocate new buffer. - rtc::scoped_refptr buffer = - new PooledI420Buffer(width, height, stride_y, stride_u, stride_v); - if (zero_initialize_) - buffer->InitializeData(); - buffers_.push_back(buffer); - return buffer; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_video/include/i420_buffer_pool.h b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h similarity index 53% rename from TMessagesProj/jni/voip/webrtc/common_video/include/i420_buffer_pool.h rename to TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h index 44f482179..6af117577 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/include/i420_buffer_pool.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_ -#define COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_ +#ifndef COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_ +#define COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_ #include @@ -17,36 +17,33 @@ #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/nv12_buffer.h" #include "rtc_base/race_checker.h" #include "rtc_base/ref_counted_object.h" namespace webrtc { -// Simple buffer pool to avoid unnecessary allocations of I420Buffer objects. -// The pool manages the memory of the I420Buffer returned from CreateBuffer. -// When the I420Buffer is destructed, the memory is returned to the pool for use -// by subsequent calls to CreateBuffer. If the resolution passed to CreateBuffer -// changes, old buffers will be purged from the pool. -// Note that CreateBuffer will crash if more than kMaxNumberOfFramesBeforeCrash -// are created. This is to prevent memory leaks where frames are not returned. -class I420BufferPool { +// Simple buffer pool to avoid unnecessary allocations of video frame buffers. +// The pool manages the memory of the I420Buffer/NV12Buffer returned from +// Create(I420|NV12)Buffer. When the buffer is destructed, the memory is +// returned to the pool for use by subsequent calls to Create(I420|NV12)Buffer. +// If the resolution passed to Create(I420|NV12)Buffer changes or requested +// pixel format changes, old buffers will be purged from the pool. +// Note that Create(I420|NV12)Buffer will crash if more than +// kMaxNumberOfFramesBeforeCrash are created. This is to prevent memory leaks +// where frames are not returned. 
+class VideoFrameBufferPool { public: - I420BufferPool(); - explicit I420BufferPool(bool zero_initialize); - I420BufferPool(bool zero_initialze, size_t max_number_of_buffers); - ~I420BufferPool(); + VideoFrameBufferPool(); + explicit VideoFrameBufferPool(bool zero_initialize); + VideoFrameBufferPool(bool zero_initialize, size_t max_number_of_buffers); + ~VideoFrameBufferPool(); // Returns a buffer from the pool. If no suitable buffer exist in the pool // and there are less than |max_number_of_buffers| pending, a buffer is // created. Returns null otherwise. - rtc::scoped_refptr CreateBuffer(int width, int height); - - // Returns a buffer from the pool with the explicitly specified stride. - rtc::scoped_refptr CreateBuffer(int width, - int height, - int stride_y, - int stride_u, - int stride_v); + rtc::scoped_refptr CreateI420Buffer(int width, int height); + rtc::scoped_refptr CreateNV12Buffer(int width, int height); // Changes the max amount of buffers in the pool to the new value. // Returns true if change was successful and false if the amount of already @@ -58,12 +55,11 @@ class I420BufferPool { void Release(); private: - // Explicitly use a RefCountedObject to get access to HasOneRef, - // needed by the pool to check exclusive access. - using PooledI420Buffer = rtc::RefCountedObject; + rtc::scoped_refptr + GetExistingBuffer(int width, int height, VideoFrameBuffer::Type type); rtc::RaceChecker race_checker_; - std::list> buffers_; + std::list> buffers_; // If true, newly allocated buffers are zero-initialized. Note that recycled // buffers are not zero'd before reuse. 
This is required of buffers used by // FFmpeg according to http://crbug.com/390941, which only requires it for the @@ -76,4 +72,4 @@ class I420BufferPool { } // namespace webrtc -#endif // COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_ +#endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc new file mode 100644 index 000000000..6df240d9f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "common_video/include/video_frame_buffer_pool.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { +bool HasOneRef(const rtc::scoped_refptr& buffer) { + // Cast to rtc::RefCountedObject is safe because this function is only called + // on locally created VideoFrameBuffers, which are either + // |rtc::RefCountedObject| or |rtc::RefCountedObject|. 
+ switch (buffer->type()) { + case VideoFrameBuffer::Type::kI420: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } + case VideoFrameBuffer::Type::kNV12: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } + default: + RTC_NOTREACHED(); + } + return false; +} + +} // namespace + +VideoFrameBufferPool::VideoFrameBufferPool() : VideoFrameBufferPool(false) {} + +VideoFrameBufferPool::VideoFrameBufferPool(bool zero_initialize) + : VideoFrameBufferPool(zero_initialize, + std::numeric_limits::max()) {} + +VideoFrameBufferPool::VideoFrameBufferPool(bool zero_initialize, + size_t max_number_of_buffers) + : zero_initialize_(zero_initialize), + max_number_of_buffers_(max_number_of_buffers) {} + +VideoFrameBufferPool::~VideoFrameBufferPool() = default; + +void VideoFrameBufferPool::Release() { + buffers_.clear(); +} + +bool VideoFrameBufferPool::Resize(size_t max_number_of_buffers) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + size_t used_buffers_count = 0; + for (const rtc::scoped_refptr& buffer : buffers_) { + // If the buffer is in use, the ref count will be >= 2, one from the list we + // are looping over and one from the application. If the ref count is 1, + // then the list we are looping over holds the only reference and it's safe + // to reuse. 
+ if (!HasOneRef(buffer)) { + used_buffers_count++; + } + } + if (used_buffers_count > max_number_of_buffers) { + return false; + } + max_number_of_buffers_ = max_number_of_buffers; + + size_t buffers_to_purge = buffers_.size() - max_number_of_buffers_; + auto iter = buffers_.begin(); + while (iter != buffers_.end() && buffers_to_purge > 0) { + if (HasOneRef(*iter)) { + iter = buffers_.erase(iter); + buffers_to_purge--; + } else { + ++iter; + } + } + return true; +} + +rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI420); + if (existing_buffer) { + // Cast is safe because the only way kI420 buffer is created is + // in the same function below, where |RefCountedObject| is + // created. + rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. + rtc::scoped_refptr buffer = + new rtc::RefCountedObject(width, height); + + if (zero_initialize_) + buffer->InitializeData(); + + buffers_.push_back(buffer); + return buffer; +} + +rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kNV12); + if (existing_buffer) { + // Cast is safe because the only way kI420 buffer is created is + // in the same function below, where |RefCountedObject| is + // created. 
+ rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. + rtc::scoped_refptr buffer = + new rtc::RefCountedObject(width, height); + + if (zero_initialize_) + buffer->InitializeData(); + + buffers_.push_back(buffer); + return buffer; +} + +rtc::scoped_refptr VideoFrameBufferPool::GetExistingBuffer( + int width, + int height, + VideoFrameBuffer::Type type) { + // Release buffers with wrong resolution or different type. + for (auto it = buffers_.begin(); it != buffers_.end();) { + const auto& buffer = *it; + if (buffer->width() != width || buffer->height() != height || + buffer->type() != type) { + it = buffers_.erase(it); + } else { + ++it; + } + } + // Look for a free buffer. + for (const rtc::scoped_refptr& buffer : buffers_) { + // If the buffer is in use, the ref count will be >= 2, one from the list we + // are looping over and one from the application. If the ref count is 1, + // then the list we are looping over holds the only reference and it's safe + // to reuse. 
+ if (HasOneRef(buffer)) { + RTC_CHECK(buffer->type() == type); + return buffer; + } + } + return nullptr; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc index d16badaad..143df8821 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc @@ -696,8 +696,7 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket( rtcp::CommonHeader header; const uint8_t* block_begin = packet.data(); const uint8_t* packet_end = packet.data() + packet.size(); - RTC_DCHECK(packet.size() <= IP_PACKET_SIZE); - uint8_t buffer[IP_PACKET_SIZE]; + std::vector buffer(packet.size()); uint32_t buffer_length = 0; while (block_begin < packet_end) { if (!header.Parse(block_begin, packet_end - block_begin)) { @@ -716,7 +715,7 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket( // We log sender reports, receiver reports, bye messages // inter-arrival jitter, third-party loss reports, payload-specific // feedback and extended reports. 
- memcpy(buffer + buffer_length, block_begin, block_size); + memcpy(buffer.data() + buffer_length, block_begin, block_size); buffer_length += block_size; break; case rtcp::App::kPacketType: @@ -729,7 +728,8 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket( block_begin += block_size; } - rtclog_event.mutable_rtcp_packet()->set_packet_data(buffer, buffer_length); + rtclog_event.mutable_rtcp_packet()->set_packet_data(buffer.data(), + buffer_length); return Serialize(&rtclog_event); } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc index bc68af29d..7f1b0c7d6 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc @@ -290,11 +290,9 @@ rtclog2::IceCandidatePairEvent::IceCandidatePairEventType ConvertToProtoFormat( } // Copies all RTCP blocks except APP, SDES and unknown from |packet| to -// |buffer|. |buffer| must have space for |IP_PACKET_SIZE| bytes. |packet| must -// be at most |IP_PACKET_SIZE| bytes long. +// |buffer|. |buffer| must have space for at least |packet.size()| bytes. 
size_t RemoveNonWhitelistedRtcpBlocks(const rtc::Buffer& packet, uint8_t* buffer) { - RTC_DCHECK(packet.size() <= IP_PACKET_SIZE); RTC_DCHECK(buffer != nullptr); rtcp::CommonHeader header; const uint8_t* block_begin = packet.data(); @@ -348,10 +346,10 @@ void EncodeRtcpPacket(rtc::ArrayView batch, const EventType* const base_event = batch[0]; proto_batch->set_timestamp_ms(base_event->timestamp_ms()); { - uint8_t buffer[IP_PACKET_SIZE]; + std::vector buffer(base_event->packet().size()); size_t buffer_length = - RemoveNonWhitelistedRtcpBlocks(base_event->packet(), buffer); - proto_batch->set_raw_packet(buffer, buffer_length); + RemoveNonWhitelistedRtcpBlocks(base_event->packet(), buffer.data()); + proto_batch->set_raw_packet(buffer.data(), buffer_length); } if (batch.size() == 1) { @@ -700,7 +698,8 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch( std::vector bwe_loss_based_updates; std::vector dtls_transport_states; std::vector dtls_writable_states; - std::vector frames_decoded; + std::map> + frames_decoded; std::vector generic_acks_received; std::vector generic_packets_received; std::vector generic_packets_sent; @@ -886,8 +885,7 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch( case RtcEvent::Type::FrameDecoded: { auto* rtc_event = static_cast(it->get()); - // TODO(terelius): Group by SSRC - frames_decoded.push_back(rtc_event); + frames_decoded[rtc_event->ssrc()].emplace_back(rtc_event); break; } } @@ -903,7 +901,9 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch( EncodeBweUpdateLossBased(bwe_loss_based_updates, &event_stream); EncodeDtlsTransportState(dtls_transport_states, &event_stream); EncodeDtlsWritableState(dtls_writable_states, &event_stream); - EncodeFramesDecoded(frames_decoded, &event_stream); + for (const auto& kv : frames_decoded) { + EncodeFramesDecoded(kv.second, &event_stream); + } EncodeGenericAcksReceived(generic_acks_received, &event_stream); EncodeGenericPacketsReceived(generic_packets_received, &event_stream); 
EncodeGenericPacketsSent(generic_packets_sent, &event_stream); @@ -1409,7 +1409,7 @@ void RtcEventLogEncoderNewFormat::EncodeRemoteEstimate( // link_capacity_lower_kbps for (size_t i = 0; i < values.size(); ++i) { const auto* event = batch[i + 1]; - if (base_event->link_capacity_lower_.IsFinite()) { + if (event->link_capacity_lower_.IsFinite()) { values[i] = event->link_capacity_lower_.kbps(); } else { values[i].reset(); @@ -1423,7 +1423,7 @@ void RtcEventLogEncoderNewFormat::EncodeRemoteEstimate( // link_capacity_upper_kbps for (size_t i = 0; i < values.size(); ++i) { const auto* event = batch[i + 1]; - if (base_event->link_capacity_upper_.IsFinite()) { + if (event->link_capacity_upper_.IsFinite()) { values[i] = event->link_capacity_upper_.kbps(); } else { values[i].reset(); @@ -1464,7 +1464,7 @@ void RtcEventLogEncoderNewFormat::EncodeRtpPacketIncoming( } void RtcEventLogEncoderNewFormat::EncodeFramesDecoded( - rtc::ArrayView batch, + rtc::ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) { return; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h index fdbe8fe85..6af34bc6c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h @@ -95,8 +95,9 @@ class RtcEventLogEncoderNewFormat final : public RtcEventLogEncoder { void EncodeDtlsWritableState( rtc::ArrayView batch, rtclog2::EventStream* event_stream); - void EncodeFramesDecoded(rtc::ArrayView batch, - rtclog2::EventStream* event_stream); + void EncodeFramesDecoded( + rtc::ArrayView batch, + rtclog2::EventStream* event_stream); void EncodeGenericAcksReceived( rtc::ArrayView batch, rtclog2::EventStream* event_stream); diff --git 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc index 3cba8bab2..dd0a8aae2 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc @@ -41,15 +41,17 @@ LoggedPacketInfo::LoggedPacketInfo(const LoggedPacketInfo&) = default; LoggedPacketInfo::~LoggedPacketInfo() {} LoggedRtcpPacket::LoggedRtcpPacket(int64_t timestamp_us, - const uint8_t* packet, - size_t total_length) - : timestamp_us(timestamp_us), raw_data(packet, packet + total_length) {} + const std::vector& packet) + : timestamp_us(timestamp_us), raw_data(packet) {} + LoggedRtcpPacket::LoggedRtcpPacket(int64_t timestamp_us, const std::string& packet) : timestamp_us(timestamp_us), raw_data(packet.size()) { memcpy(raw_data.data(), packet.data(), packet.size()); } + LoggedRtcpPacket::LoggedRtcpPacket(const LoggedRtcpPacket& rhs) = default; + LoggedRtcpPacket::~LoggedRtcpPacket() = default; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h index 4bd33f62b..192f7cf81 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h @@ -309,9 +309,7 @@ struct LoggedRtpPacketOutgoing { }; struct LoggedRtcpPacket { - LoggedRtcpPacket(int64_t timestamp_us, - const uint8_t* packet, - size_t total_length); + LoggedRtcpPacket(int64_t timestamp_us, const std::vector& packet); LoggedRtcpPacket(int64_t timestamp_us, const std::string& packet); LoggedRtcpPacket(const LoggedRtcpPacket&); ~LoggedRtcpPacket(); @@ -325,9 +323,8 @@ struct LoggedRtcpPacket { struct LoggedRtcpPacketIncoming { LoggedRtcpPacketIncoming(int64_t timestamp_us, - const uint8_t* packet, - size_t total_length) - : rtcp(timestamp_us, packet, total_length) {} + 
const std::vector& packet) + : rtcp(timestamp_us, packet) {} LoggedRtcpPacketIncoming(uint64_t timestamp_us, const std::string& packet) : rtcp(timestamp_us, packet) {} @@ -339,9 +336,8 @@ struct LoggedRtcpPacketIncoming { struct LoggedRtcpPacketOutgoing { LoggedRtcpPacketOutgoing(int64_t timestamp_us, - const uint8_t* packet, - size_t total_length) - : rtcp(timestamp_us, packet, total_length) {} + const std::vector& packet) + : rtcp(timestamp_us, packet) {} LoggedRtcpPacketOutgoing(uint64_t timestamp_us, const std::string& packet) : rtcp(timestamp_us, packet) {} diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log.pb.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log.pb.cc deleted file mode 100644 index 4e459c144..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log.pb.cc +++ /dev/null @@ -1,9708 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: rtc_event_log.proto - -#include "rtc_event_log.pb.h" - -#include - -#include -#include -#include -#include -#include -// @@protoc_insertion_point(includes) -#include -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AlrState_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioNetworkAdaptation_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioPlayoutEvent_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioReceiveConfig_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioSendConfig_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto 
::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeCluster_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeResult_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DecoderConfig_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DelayBasedBweUpdate_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_EncoderConfig_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<15> scc_info_Event_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairConfig_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairEvent_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_LossBasedBweUpdate_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtcpPacket_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtpPacket_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtxConfig_rtc_5fevent_5flog_2eproto; -extern 
PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_RtxMap_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<3> scc_info_VideoReceiveConfig_rtc_5fevent_5flog_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<2> scc_info_VideoSendConfig_rtc_5fevent_5flog_2eproto; -namespace webrtc { -namespace rtclog { -class EventStreamDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _EventStream_default_instance_; -class EventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; - const ::webrtc::rtclog::RtpPacket* rtp_packet_; - const ::webrtc::rtclog::RtcpPacket* rtcp_packet_; - const ::webrtc::rtclog::AudioPlayoutEvent* audio_playout_event_; - const ::webrtc::rtclog::LossBasedBweUpdate* loss_based_bwe_update_; - const ::webrtc::rtclog::DelayBasedBweUpdate* delay_based_bwe_update_; - const ::webrtc::rtclog::VideoReceiveConfig* video_receiver_config_; - const ::webrtc::rtclog::VideoSendConfig* video_sender_config_; - const ::webrtc::rtclog::AudioReceiveConfig* audio_receiver_config_; - const ::webrtc::rtclog::AudioSendConfig* audio_sender_config_; - const ::webrtc::rtclog::AudioNetworkAdaptation* audio_network_adaptation_; - const ::webrtc::rtclog::BweProbeCluster* probe_cluster_; - const ::webrtc::rtclog::BweProbeResult* probe_result_; - const ::webrtc::rtclog::AlrState* alr_state_; - const ::webrtc::rtclog::IceCandidatePairConfig* ice_candidate_pair_config_; - const ::webrtc::rtclog::IceCandidatePairEvent* ice_candidate_pair_event_; -} _Event_default_instance_; -class RtpPacketDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RtpPacket_default_instance_; -class RtcpPacketDefaultTypeInternal { - public: - 
::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RtcpPacket_default_instance_; -class AudioPlayoutEventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioPlayoutEvent_default_instance_; -class LossBasedBweUpdateDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _LossBasedBweUpdate_default_instance_; -class DelayBasedBweUpdateDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _DelayBasedBweUpdate_default_instance_; -class VideoReceiveConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _VideoReceiveConfig_default_instance_; -class DecoderConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _DecoderConfig_default_instance_; -class RtpHeaderExtensionDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RtpHeaderExtension_default_instance_; -class RtxConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RtxConfig_default_instance_; -class RtxMapDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RtxMap_default_instance_; -class VideoSendConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _VideoSendConfig_default_instance_; -class EncoderConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _EncoderConfig_default_instance_; -class AudioReceiveConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioReceiveConfig_default_instance_; -class AudioSendConfigDefaultTypeInternal { - public: - 
::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioSendConfig_default_instance_; -class AudioNetworkAdaptationDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioNetworkAdaptation_default_instance_; -class BweProbeClusterDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _BweProbeCluster_default_instance_; -class BweProbeResultDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _BweProbeResult_default_instance_; -class AlrStateDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AlrState_default_instance_; -class IceCandidatePairConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _IceCandidatePairConfig_default_instance_; -class IceCandidatePairEventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _IceCandidatePairEvent_default_instance_; -} // namespace rtclog -} // namespace webrtc -static void InitDefaultsscc_info_AlrState_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_AlrState_default_instance_; - new (ptr) ::webrtc::rtclog::AlrState(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::AlrState::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AlrState_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_AlrState_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_AudioNetworkAdaptation_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_AudioNetworkAdaptation_default_instance_; - new (ptr) 
::webrtc::rtclog::AudioNetworkAdaptation(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::AudioNetworkAdaptation::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioNetworkAdaptation_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_AudioNetworkAdaptation_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_AudioPlayoutEvent_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_AudioPlayoutEvent_default_instance_; - new (ptr) ::webrtc::rtclog::AudioPlayoutEvent(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::AudioPlayoutEvent::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioPlayoutEvent_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_AudioPlayoutEvent_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_AudioReceiveConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_AudioReceiveConfig_default_instance_; - new (ptr) ::webrtc::rtclog::AudioReceiveConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::AudioReceiveConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioReceiveConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_AudioReceiveConfig_rtc_5fevent_5flog_2eproto}, { - &scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto.base,}}; - -static void InitDefaultsscc_info_AudioSendConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = 
&::webrtc::rtclog::_AudioSendConfig_default_instance_; - new (ptr) ::webrtc::rtclog::AudioSendConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::AudioSendConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioSendConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_AudioSendConfig_rtc_5fevent_5flog_2eproto}, { - &scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto.base,}}; - -static void InitDefaultsscc_info_BweProbeCluster_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_BweProbeCluster_default_instance_; - new (ptr) ::webrtc::rtclog::BweProbeCluster(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::BweProbeCluster::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeCluster_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_BweProbeCluster_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_BweProbeResult_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_BweProbeResult_default_instance_; - new (ptr) ::webrtc::rtclog::BweProbeResult(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::BweProbeResult::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeResult_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_BweProbeResult_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_DecoderConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = 
&::webrtc::rtclog::_DecoderConfig_default_instance_; - new (ptr) ::webrtc::rtclog::DecoderConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::DecoderConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DecoderConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_DecoderConfig_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_DelayBasedBweUpdate_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_DelayBasedBweUpdate_default_instance_; - new (ptr) ::webrtc::rtclog::DelayBasedBweUpdate(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::DelayBasedBweUpdate::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DelayBasedBweUpdate_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_DelayBasedBweUpdate_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_EncoderConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_EncoderConfig_default_instance_; - new (ptr) ::webrtc::rtclog::EncoderConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::EncoderConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_EncoderConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_EncoderConfig_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_Event_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_Event_default_instance_; - new (ptr) ::webrtc::rtclog::Event(); - 
::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::Event::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<15> scc_info_Event_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 15, InitDefaultsscc_info_Event_rtc_5fevent_5flog_2eproto}, { - &scc_info_RtpPacket_rtc_5fevent_5flog_2eproto.base, - &scc_info_RtcpPacket_rtc_5fevent_5flog_2eproto.base, - &scc_info_AudioPlayoutEvent_rtc_5fevent_5flog_2eproto.base, - &scc_info_LossBasedBweUpdate_rtc_5fevent_5flog_2eproto.base, - &scc_info_DelayBasedBweUpdate_rtc_5fevent_5flog_2eproto.base, - &scc_info_VideoReceiveConfig_rtc_5fevent_5flog_2eproto.base, - &scc_info_VideoSendConfig_rtc_5fevent_5flog_2eproto.base, - &scc_info_AudioReceiveConfig_rtc_5fevent_5flog_2eproto.base, - &scc_info_AudioSendConfig_rtc_5fevent_5flog_2eproto.base, - &scc_info_AudioNetworkAdaptation_rtc_5fevent_5flog_2eproto.base, - &scc_info_BweProbeCluster_rtc_5fevent_5flog_2eproto.base, - &scc_info_BweProbeResult_rtc_5fevent_5flog_2eproto.base, - &scc_info_AlrState_rtc_5fevent_5flog_2eproto.base, - &scc_info_IceCandidatePairConfig_rtc_5fevent_5flog_2eproto.base, - &scc_info_IceCandidatePairEvent_rtc_5fevent_5flog_2eproto.base,}}; - -static void InitDefaultsscc_info_EventStream_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_EventStream_default_instance_; - new (ptr) ::webrtc::rtclog::EventStream(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::EventStream::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_EventStream_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_EventStream_rtc_5fevent_5flog_2eproto}, { - &scc_info_Event_rtc_5fevent_5flog_2eproto.base,}}; - -static void 
InitDefaultsscc_info_IceCandidatePairConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_IceCandidatePairConfig_default_instance_; - new (ptr) ::webrtc::rtclog::IceCandidatePairConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::IceCandidatePairConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_IceCandidatePairConfig_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_IceCandidatePairEvent_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_IceCandidatePairEvent_default_instance_; - new (ptr) ::webrtc::rtclog::IceCandidatePairEvent(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::IceCandidatePairEvent::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairEvent_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_IceCandidatePairEvent_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_LossBasedBweUpdate_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_LossBasedBweUpdate_default_instance_; - new (ptr) ::webrtc::rtclog::LossBasedBweUpdate(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::LossBasedBweUpdate::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_LossBasedBweUpdate_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_LossBasedBweUpdate_rtc_5fevent_5flog_2eproto}, {}}; - 
-static void InitDefaultsscc_info_RtcpPacket_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_RtcpPacket_default_instance_; - new (ptr) ::webrtc::rtclog::RtcpPacket(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::RtcpPacket::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtcpPacket_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_RtcpPacket_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_RtpHeaderExtension_default_instance_; - new (ptr) ::webrtc::rtclog::RtpHeaderExtension(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::RtpHeaderExtension::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_RtpPacket_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_RtpPacket_default_instance_; - new (ptr) ::webrtc::rtclog::RtpPacket(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::RtpPacket::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtpPacket_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_RtpPacket_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_RtxConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = 
&::webrtc::rtclog::_RtxConfig_default_instance_; - new (ptr) ::webrtc::rtclog::RtxConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::RtxConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtxConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_RtxConfig_rtc_5fevent_5flog_2eproto}, {}}; - -static void InitDefaultsscc_info_RtxMap_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_RtxMap_default_instance_; - new (ptr) ::webrtc::rtclog::RtxMap(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::RtxMap::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_RtxMap_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_RtxMap_rtc_5fevent_5flog_2eproto}, { - &scc_info_RtxConfig_rtc_5fevent_5flog_2eproto.base,}}; - -static void InitDefaultsscc_info_VideoReceiveConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_VideoReceiveConfig_default_instance_; - new (ptr) ::webrtc::rtclog::VideoReceiveConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::VideoReceiveConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<3> scc_info_VideoReceiveConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsscc_info_VideoReceiveConfig_rtc_5fevent_5flog_2eproto}, { - &scc_info_RtxMap_rtc_5fevent_5flog_2eproto.base, - &scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto.base, - &scc_info_DecoderConfig_rtc_5fevent_5flog_2eproto.base,}}; - -static void 
InitDefaultsscc_info_VideoSendConfig_rtc_5fevent_5flog_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog::_VideoSendConfig_default_instance_; - new (ptr) ::webrtc::rtclog::VideoSendConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog::VideoSendConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<2> scc_info_VideoSendConfig_rtc_5fevent_5flog_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsscc_info_VideoSendConfig_rtc_5fevent_5flog_2eproto}, { - &scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto.base, - &scc_info_EncoderConfig_rtc_5fevent_5flog_2eproto.base,}}; - -namespace webrtc { -namespace rtclog { -bool Event_EventType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - case 8: - case 9: - case 10: - case 11: - case 16: - case 17: - case 18: - case 19: - case 20: - case 21: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed Event_EventType_strings[18] = {}; - -static const char Event_EventType_names[] = - "ALR_STATE_EVENT" - "AUDIO_NETWORK_ADAPTATION_EVENT" - "AUDIO_PLAYOUT_EVENT" - "AUDIO_RECEIVER_CONFIG_EVENT" - "AUDIO_SENDER_CONFIG_EVENT" - "BWE_PROBE_CLUSTER_CREATED_EVENT" - "BWE_PROBE_RESULT_EVENT" - "DELAY_BASED_BWE_UPDATE" - "ICE_CANDIDATE_PAIR_CONFIG" - "ICE_CANDIDATE_PAIR_EVENT" - "LOG_END" - "LOG_START" - "LOSS_BASED_BWE_UPDATE" - "RTCP_EVENT" - "RTP_EVENT" - "UNKNOWN_EVENT" - "VIDEO_RECEIVER_CONFIG_EVENT" - "VIDEO_SENDER_CONFIG_EVENT"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry Event_EventType_entries[] = { - { {Event_EventType_names + 0, 15}, 19 }, - { {Event_EventType_names + 15, 30}, 16 }, - { {Event_EventType_names + 45, 19}, 5 }, - { {Event_EventType_names + 64, 27}, 10 }, - { {Event_EventType_names + 91, 25}, 11 }, - { 
{Event_EventType_names + 116, 31}, 17 }, - { {Event_EventType_names + 147, 22}, 18 }, - { {Event_EventType_names + 169, 22}, 7 }, - { {Event_EventType_names + 191, 25}, 20 }, - { {Event_EventType_names + 216, 24}, 21 }, - { {Event_EventType_names + 240, 7}, 2 }, - { {Event_EventType_names + 247, 9}, 1 }, - { {Event_EventType_names + 256, 21}, 6 }, - { {Event_EventType_names + 277, 10}, 4 }, - { {Event_EventType_names + 287, 9}, 3 }, - { {Event_EventType_names + 296, 13}, 0 }, - { {Event_EventType_names + 309, 27}, 8 }, - { {Event_EventType_names + 336, 25}, 9 }, -}; - -static const int Event_EventType_entries_by_number[] = { - 15, // 0 -> UNKNOWN_EVENT - 11, // 1 -> LOG_START - 10, // 2 -> LOG_END - 14, // 3 -> RTP_EVENT - 13, // 4 -> RTCP_EVENT - 2, // 5 -> AUDIO_PLAYOUT_EVENT - 12, // 6 -> LOSS_BASED_BWE_UPDATE - 7, // 7 -> DELAY_BASED_BWE_UPDATE - 16, // 8 -> VIDEO_RECEIVER_CONFIG_EVENT - 17, // 9 -> VIDEO_SENDER_CONFIG_EVENT - 3, // 10 -> AUDIO_RECEIVER_CONFIG_EVENT - 4, // 11 -> AUDIO_SENDER_CONFIG_EVENT - 1, // 16 -> AUDIO_NETWORK_ADAPTATION_EVENT - 5, // 17 -> BWE_PROBE_CLUSTER_CREATED_EVENT - 6, // 18 -> BWE_PROBE_RESULT_EVENT - 0, // 19 -> ALR_STATE_EVENT - 8, // 20 -> ICE_CANDIDATE_PAIR_CONFIG - 9, // 21 -> ICE_CANDIDATE_PAIR_EVENT -}; - -const std::string& Event_EventType_Name( - Event_EventType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - Event_EventType_entries, - Event_EventType_entries_by_number, - 18, Event_EventType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - Event_EventType_entries, - Event_EventType_entries_by_number, - 18, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - Event_EventType_strings[idx].get(); -} -bool Event_EventType_Parse( - const std::string& name, Event_EventType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - Event_EventType_entries, 18, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr Event_EventType Event::UNKNOWN_EVENT; -constexpr Event_EventType Event::LOG_START; -constexpr Event_EventType Event::LOG_END; -constexpr Event_EventType Event::RTP_EVENT; -constexpr Event_EventType Event::RTCP_EVENT; -constexpr Event_EventType Event::AUDIO_PLAYOUT_EVENT; -constexpr Event_EventType Event::LOSS_BASED_BWE_UPDATE; -constexpr Event_EventType Event::DELAY_BASED_BWE_UPDATE; -constexpr Event_EventType Event::VIDEO_RECEIVER_CONFIG_EVENT; -constexpr Event_EventType Event::VIDEO_SENDER_CONFIG_EVENT; -constexpr Event_EventType Event::AUDIO_RECEIVER_CONFIG_EVENT; -constexpr Event_EventType Event::AUDIO_SENDER_CONFIG_EVENT; -constexpr Event_EventType Event::AUDIO_NETWORK_ADAPTATION_EVENT; -constexpr Event_EventType Event::BWE_PROBE_CLUSTER_CREATED_EVENT; -constexpr Event_EventType Event::BWE_PROBE_RESULT_EVENT; -constexpr Event_EventType Event::ALR_STATE_EVENT; -constexpr Event_EventType Event::ICE_CANDIDATE_PAIR_CONFIG; -constexpr Event_EventType Event::ICE_CANDIDATE_PAIR_EVENT; -constexpr Event_EventType Event::EventType_MIN; -constexpr Event_EventType Event::EventType_MAX; -constexpr int Event::EventType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool DelayBasedBweUpdate_DetectorState_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed DelayBasedBweUpdate_DetectorState_strings[3] = {}; - -static const char 
DelayBasedBweUpdate_DetectorState_names[] = - "BWE_NORMAL" - "BWE_OVERUSING" - "BWE_UNDERUSING"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry DelayBasedBweUpdate_DetectorState_entries[] = { - { {DelayBasedBweUpdate_DetectorState_names + 0, 10}, 0 }, - { {DelayBasedBweUpdate_DetectorState_names + 10, 13}, 2 }, - { {DelayBasedBweUpdate_DetectorState_names + 23, 14}, 1 }, -}; - -static const int DelayBasedBweUpdate_DetectorState_entries_by_number[] = { - 0, // 0 -> BWE_NORMAL - 2, // 1 -> BWE_UNDERUSING - 1, // 2 -> BWE_OVERUSING -}; - -const std::string& DelayBasedBweUpdate_DetectorState_Name( - DelayBasedBweUpdate_DetectorState value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - DelayBasedBweUpdate_DetectorState_entries, - DelayBasedBweUpdate_DetectorState_entries_by_number, - 3, DelayBasedBweUpdate_DetectorState_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - DelayBasedBweUpdate_DetectorState_entries, - DelayBasedBweUpdate_DetectorState_entries_by_number, - 3, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - DelayBasedBweUpdate_DetectorState_strings[idx].get(); -} -bool DelayBasedBweUpdate_DetectorState_Parse( - const std::string& name, DelayBasedBweUpdate_DetectorState* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - DelayBasedBweUpdate_DetectorState_entries, 3, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate::BWE_NORMAL; -constexpr DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate::BWE_UNDERUSING; -constexpr DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate::BWE_OVERUSING; -constexpr DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate::DetectorState_MIN; -constexpr DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate::DetectorState_MAX; -constexpr int DelayBasedBweUpdate::DetectorState_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool VideoReceiveConfig_RtcpMode_IsValid(int value) { - switch (value) { - case 1: - case 2: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed VideoReceiveConfig_RtcpMode_strings[2] = {}; - -static const char VideoReceiveConfig_RtcpMode_names[] = - "RTCP_COMPOUND" - "RTCP_REDUCEDSIZE"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry VideoReceiveConfig_RtcpMode_entries[] = { - { {VideoReceiveConfig_RtcpMode_names + 0, 13}, 1 }, - { {VideoReceiveConfig_RtcpMode_names + 13, 16}, 2 }, -}; - -static const int VideoReceiveConfig_RtcpMode_entries_by_number[] = { - 0, // 1 -> RTCP_COMPOUND - 1, // 2 -> RTCP_REDUCEDSIZE -}; - -const std::string& VideoReceiveConfig_RtcpMode_Name( - VideoReceiveConfig_RtcpMode value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - 
VideoReceiveConfig_RtcpMode_entries, - VideoReceiveConfig_RtcpMode_entries_by_number, - 2, VideoReceiveConfig_RtcpMode_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - VideoReceiveConfig_RtcpMode_entries, - VideoReceiveConfig_RtcpMode_entries_by_number, - 2, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - VideoReceiveConfig_RtcpMode_strings[idx].get(); -} -bool VideoReceiveConfig_RtcpMode_Parse( - const std::string& name, VideoReceiveConfig_RtcpMode* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - VideoReceiveConfig_RtcpMode_entries, 2, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr VideoReceiveConfig_RtcpMode VideoReceiveConfig::RTCP_COMPOUND; -constexpr VideoReceiveConfig_RtcpMode VideoReceiveConfig::RTCP_REDUCEDSIZE; -constexpr VideoReceiveConfig_RtcpMode VideoReceiveConfig::RtcpMode_MIN; -constexpr VideoReceiveConfig_RtcpMode VideoReceiveConfig::RtcpMode_MAX; -constexpr int VideoReceiveConfig::RtcpMode_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool BweProbeResult_ResultType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed BweProbeResult_ResultType_strings[4] = {}; - -static const char BweProbeResult_ResultType_names[] = - "INVALID_SEND_RECEIVE_INTERVAL" - "INVALID_SEND_RECEIVE_RATIO" - "SUCCESS" - "TIMEOUT"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry BweProbeResult_ResultType_entries[] = { - { {BweProbeResult_ResultType_names + 0, 29}, 1 }, - { {BweProbeResult_ResultType_names + 29, 26}, 2 }, - { {BweProbeResult_ResultType_names + 55, 7}, 0 }, - { {BweProbeResult_ResultType_names + 62, 7}, 3 }, 
-}; - -static const int BweProbeResult_ResultType_entries_by_number[] = { - 2, // 0 -> SUCCESS - 0, // 1 -> INVALID_SEND_RECEIVE_INTERVAL - 1, // 2 -> INVALID_SEND_RECEIVE_RATIO - 3, // 3 -> TIMEOUT -}; - -const std::string& BweProbeResult_ResultType_Name( - BweProbeResult_ResultType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - BweProbeResult_ResultType_entries, - BweProbeResult_ResultType_entries_by_number, - 4, BweProbeResult_ResultType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - BweProbeResult_ResultType_entries, - BweProbeResult_ResultType_entries_by_number, - 4, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - BweProbeResult_ResultType_strings[idx].get(); -} -bool BweProbeResult_ResultType_Parse( - const std::string& name, BweProbeResult_ResultType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - BweProbeResult_ResultType_entries, 4, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr BweProbeResult_ResultType BweProbeResult::SUCCESS; -constexpr BweProbeResult_ResultType BweProbeResult::INVALID_SEND_RECEIVE_INTERVAL; -constexpr BweProbeResult_ResultType BweProbeResult::INVALID_SEND_RECEIVE_RATIO; -constexpr BweProbeResult_ResultType BweProbeResult::TIMEOUT; -constexpr BweProbeResult_ResultType BweProbeResult::ResultType_MIN; -constexpr BweProbeResult_ResultType BweProbeResult::ResultType_MAX; -constexpr int BweProbeResult::ResultType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - return true; - default: - return false; - } -} - -static 
::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_IceCandidatePairConfigType_strings[4] = {}; - -static const char IceCandidatePairConfig_IceCandidatePairConfigType_names[] = - "ADDED" - "DESTROYED" - "SELECTED" - "UPDATED"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_IceCandidatePairConfigType_entries[] = { - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 0, 5}, 0 }, - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 5, 9}, 2 }, - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 14, 8}, 3 }, - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 22, 7}, 1 }, -}; - -static const int IceCandidatePairConfig_IceCandidatePairConfigType_entries_by_number[] = { - 0, // 0 -> ADDED - 3, // 1 -> UPDATED - 1, // 2 -> DESTROYED - 2, // 3 -> SELECTED -}; - -const std::string& IceCandidatePairConfig_IceCandidatePairConfigType_Name( - IceCandidatePairConfig_IceCandidatePairConfigType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_IceCandidatePairConfigType_entries, - IceCandidatePairConfig_IceCandidatePairConfigType_entries_by_number, - 4, IceCandidatePairConfig_IceCandidatePairConfigType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_IceCandidatePairConfigType_entries, - IceCandidatePairConfig_IceCandidatePairConfigType_entries_by_number, - 4, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_IceCandidatePairConfigType_strings[idx].get(); -} -bool IceCandidatePairConfig_IceCandidatePairConfigType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidatePairConfigType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_IceCandidatePairConfigType_entries, 4, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::ADDED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::UPDATED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::DESTROYED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::SELECTED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::IceCandidatePairConfigType_MIN; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::IceCandidatePairConfigType_MAX; -constexpr int IceCandidatePairConfig::IceCandidatePairConfigType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_IceCandidateType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_IceCandidateType_strings[5] = {}; - -static const char IceCandidatePairConfig_IceCandidateType_names[] = - "LOCAL" - "PRFLX" - "RELAY" - "STUN" - "UNKNOWN_CANDIDATE_TYPE"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_IceCandidateType_entries[] = { - { {IceCandidatePairConfig_IceCandidateType_names + 0, 5}, 0 }, - { 
{IceCandidatePairConfig_IceCandidateType_names + 5, 5}, 2 }, - { {IceCandidatePairConfig_IceCandidateType_names + 10, 5}, 3 }, - { {IceCandidatePairConfig_IceCandidateType_names + 15, 4}, 1 }, - { {IceCandidatePairConfig_IceCandidateType_names + 19, 22}, 4 }, -}; - -static const int IceCandidatePairConfig_IceCandidateType_entries_by_number[] = { - 0, // 0 -> LOCAL - 3, // 1 -> STUN - 1, // 2 -> PRFLX - 2, // 3 -> RELAY - 4, // 4 -> UNKNOWN_CANDIDATE_TYPE -}; - -const std::string& IceCandidatePairConfig_IceCandidateType_Name( - IceCandidatePairConfig_IceCandidateType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_IceCandidateType_entries, - IceCandidatePairConfig_IceCandidateType_entries_by_number, - 5, IceCandidatePairConfig_IceCandidateType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_IceCandidateType_entries, - IceCandidatePairConfig_IceCandidateType_entries_by_number, - 5, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_IceCandidateType_strings[idx].get(); -} -bool IceCandidatePairConfig_IceCandidateType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidateType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_IceCandidateType_entries, 5, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::LOCAL; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::STUN; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::PRFLX; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::RELAY; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::IceCandidateType_MIN; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::IceCandidateType_MAX; -constexpr int IceCandidatePairConfig::IceCandidateType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_Protocol_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_Protocol_strings[5] = {}; - -static const char IceCandidatePairConfig_Protocol_names[] = - "SSLTCP" - "TCP" - "TLS" - "UDP" - "UNKNOWN_PROTOCOL"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_Protocol_entries[] = { - { {IceCandidatePairConfig_Protocol_names + 0, 6}, 2 }, - { {IceCandidatePairConfig_Protocol_names + 6, 3}, 1 }, - { {IceCandidatePairConfig_Protocol_names 
+ 9, 3}, 3 }, - { {IceCandidatePairConfig_Protocol_names + 12, 3}, 0 }, - { {IceCandidatePairConfig_Protocol_names + 15, 16}, 4 }, -}; - -static const int IceCandidatePairConfig_Protocol_entries_by_number[] = { - 3, // 0 -> UDP - 1, // 1 -> TCP - 0, // 2 -> SSLTCP - 2, // 3 -> TLS - 4, // 4 -> UNKNOWN_PROTOCOL -}; - -const std::string& IceCandidatePairConfig_Protocol_Name( - IceCandidatePairConfig_Protocol value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_Protocol_entries, - IceCandidatePairConfig_Protocol_entries_by_number, - 5, IceCandidatePairConfig_Protocol_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_Protocol_entries, - IceCandidatePairConfig_Protocol_entries_by_number, - 5, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_Protocol_strings[idx].get(); -} -bool IceCandidatePairConfig_Protocol_Parse( - const std::string& name, IceCandidatePairConfig_Protocol* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_Protocol_entries, 5, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::UDP; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::TCP; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::SSLTCP; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::TLS; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::UNKNOWN_PROTOCOL; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::Protocol_MIN; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::Protocol_MAX; -constexpr int IceCandidatePairConfig::Protocol_ARRAYSIZE; -#endif // 
(__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_AddressFamily_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_AddressFamily_strings[3] = {}; - -static const char IceCandidatePairConfig_AddressFamily_names[] = - "IPV4" - "IPV6" - "UNKNOWN_ADDRESS_FAMILY"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_AddressFamily_entries[] = { - { {IceCandidatePairConfig_AddressFamily_names + 0, 4}, 0 }, - { {IceCandidatePairConfig_AddressFamily_names + 4, 4}, 1 }, - { {IceCandidatePairConfig_AddressFamily_names + 8, 22}, 2 }, -}; - -static const int IceCandidatePairConfig_AddressFamily_entries_by_number[] = { - 0, // 0 -> IPV4 - 1, // 1 -> IPV6 - 2, // 2 -> UNKNOWN_ADDRESS_FAMILY -}; - -const std::string& IceCandidatePairConfig_AddressFamily_Name( - IceCandidatePairConfig_AddressFamily value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_AddressFamily_entries, - IceCandidatePairConfig_AddressFamily_entries_by_number, - 3, IceCandidatePairConfig_AddressFamily_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_AddressFamily_entries, - IceCandidatePairConfig_AddressFamily_entries_by_number, - 3, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_AddressFamily_strings[idx].get(); -} -bool IceCandidatePairConfig_AddressFamily_Parse( - const std::string& name, IceCandidatePairConfig_AddressFamily* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_AddressFamily_entries, 3, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::IPV4; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::IPV6; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::UNKNOWN_ADDRESS_FAMILY; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::AddressFamily_MIN; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::AddressFamily_MAX; -constexpr int IceCandidatePairConfig::AddressFamily_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_NetworkType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_NetworkType_strings[6] = {}; - -static const char IceCandidatePairConfig_NetworkType_names[] = - "CELLULAR" - "ETHERNET" - "LOOPBACK" - "UNKNOWN_NETWORK_TYPE" - "VPN" - "WIFI"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_NetworkType_entries[] = { - { {IceCandidatePairConfig_NetworkType_names + 0, 8}, 4 }, - { {IceCandidatePairConfig_NetworkType_names + 8, 8}, 0 }, - { {IceCandidatePairConfig_NetworkType_names + 16, 8}, 1 }, - { {IceCandidatePairConfig_NetworkType_names + 24, 20}, 5 }, - { {IceCandidatePairConfig_NetworkType_names + 44, 3}, 3 }, - { 
{IceCandidatePairConfig_NetworkType_names + 47, 4}, 2 }, -}; - -static const int IceCandidatePairConfig_NetworkType_entries_by_number[] = { - 1, // 0 -> ETHERNET - 2, // 1 -> LOOPBACK - 5, // 2 -> WIFI - 4, // 3 -> VPN - 0, // 4 -> CELLULAR - 3, // 5 -> UNKNOWN_NETWORK_TYPE -}; - -const std::string& IceCandidatePairConfig_NetworkType_Name( - IceCandidatePairConfig_NetworkType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_NetworkType_entries, - IceCandidatePairConfig_NetworkType_entries_by_number, - 6, IceCandidatePairConfig_NetworkType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_NetworkType_entries, - IceCandidatePairConfig_NetworkType_entries_by_number, - 6, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_NetworkType_strings[idx].get(); -} -bool IceCandidatePairConfig_NetworkType_Parse( - const std::string& name, IceCandidatePairConfig_NetworkType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_NetworkType_entries, 6, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::ETHERNET; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::LOOPBACK; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::WIFI; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::VPN; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::CELLULAR; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::UNKNOWN_NETWORK_TYPE; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::NetworkType_MIN; -constexpr IceCandidatePairConfig_NetworkType 
IceCandidatePairConfig::NetworkType_MAX; -constexpr int IceCandidatePairConfig::NetworkType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairEvent_IceCandidatePairEventType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairEvent_IceCandidatePairEventType_strings[4] = {}; - -static const char IceCandidatePairEvent_IceCandidatePairEventType_names[] = - "CHECK_RECEIVED" - "CHECK_RESPONSE_RECEIVED" - "CHECK_RESPONSE_SENT" - "CHECK_SENT"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairEvent_IceCandidatePairEventType_entries[] = { - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 0, 14}, 1 }, - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 14, 23}, 3 }, - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 37, 19}, 2 }, - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 56, 10}, 0 }, -}; - -static const int IceCandidatePairEvent_IceCandidatePairEventType_entries_by_number[] = { - 3, // 0 -> CHECK_SENT - 0, // 1 -> CHECK_RECEIVED - 2, // 2 -> CHECK_RESPONSE_SENT - 1, // 3 -> CHECK_RESPONSE_RECEIVED -}; - -const std::string& IceCandidatePairEvent_IceCandidatePairEventType_Name( - IceCandidatePairEvent_IceCandidatePairEventType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairEvent_IceCandidatePairEventType_entries, - IceCandidatePairEvent_IceCandidatePairEventType_entries_by_number, - 4, IceCandidatePairEvent_IceCandidatePairEventType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairEvent_IceCandidatePairEventType_entries, - IceCandidatePairEvent_IceCandidatePairEventType_entries_by_number, - 4, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairEvent_IceCandidatePairEventType_strings[idx].get(); -} -bool IceCandidatePairEvent_IceCandidatePairEventType_Parse( - const std::string& name, IceCandidatePairEvent_IceCandidatePairEventType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairEvent_IceCandidatePairEventType_entries, 4, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_SENT; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_RECEIVED; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_RESPONSE_SENT; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_RESPONSE_RECEIVED; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::IceCandidatePairEventType_MIN; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::IceCandidatePairEventType_MAX; -constexpr int IceCandidatePairEvent::IceCandidatePairEventType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool MediaType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed MediaType_strings[4] = {}; - -static const char MediaType_names[] = - "ANY" - "AUDIO" - "DATA" - "VIDEO"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry MediaType_entries[] = { - { {MediaType_names + 0, 3}, 0 }, - { {MediaType_names + 3, 5}, 1 }, - { {MediaType_names + 8, 4}, 3 }, - { {MediaType_names + 12, 5}, 2 }, -}; - -static const int MediaType_entries_by_number[] = { - 0, // 0 -> ANY - 1, // 1 
-> AUDIO - 3, // 2 -> VIDEO - 2, // 3 -> DATA -}; - -const std::string& MediaType_Name( - MediaType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - MediaType_entries, - MediaType_entries_by_number, - 4, MediaType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - MediaType_entries, - MediaType_entries_by_number, - 4, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - MediaType_strings[idx].get(); -} -bool MediaType_Parse( - const std::string& name, MediaType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - MediaType_entries, 4, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} - -// =================================================================== - -void EventStream::InitAsDefaultInstance() { -} -class EventStream::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); -}; - -EventStream::EventStream() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.EventStream) -} -EventStream::EventStream(const EventStream& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_), - stream_(from.stream_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.EventStream) -} - -void EventStream::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_EventStream_rtc_5fevent_5flog_2eproto.base); -} - -EventStream::~EventStream() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.EventStream) - SharedDtor(); -} - -void EventStream::SharedDtor() { -} - -void EventStream::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const EventStream& EventStream::default_instance() { - 
::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_EventStream_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void EventStream::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.EventStream) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - stream_.Clear(); - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* EventStream::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // repeated .webrtc.rtclog.Event stream = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_stream(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 10); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool EventStream::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - 
::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.EventStream) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // repeated .webrtc.rtclog.Event stream = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_stream())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.EventStream) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.EventStream) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void EventStream::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.EventStream) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .webrtc.rtclog.Event stream = 1; - for (unsigned int i = 0, - n = static_cast(this->stream_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 1, - this->stream(static_cast(i)), - output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.EventStream) -} - 
-size_t EventStream::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.EventStream) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .webrtc.rtclog.Event stream = 1; - { - unsigned int count = static_cast(this->stream_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->stream(static_cast(i))); - } - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void EventStream::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void EventStream::MergeFrom(const EventStream& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.EventStream) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - stream_.MergeFrom(from.stream_); -} - -void EventStream::CopyFrom(const EventStream& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.EventStream) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool EventStream::IsInitialized() const { - return true; -} - -void EventStream::InternalSwap(EventStream* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - CastToBase(&stream_)->InternalSwap(CastToBase(&other->stream_)); -} - -std::string EventStream::GetTypeName() const { - return "webrtc.rtclog.EventStream"; -} - - -// 
=================================================================== - -void Event::InitAsDefaultInstance() { -} -class Event::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_us(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_type(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static const ::webrtc::rtclog::RtpPacket& rtp_packet(const Event* msg); - static const ::webrtc::rtclog::RtcpPacket& rtcp_packet(const Event* msg); - static const ::webrtc::rtclog::AudioPlayoutEvent& audio_playout_event(const Event* msg); - static const ::webrtc::rtclog::LossBasedBweUpdate& loss_based_bwe_update(const Event* msg); - static const ::webrtc::rtclog::DelayBasedBweUpdate& delay_based_bwe_update(const Event* msg); - static const ::webrtc::rtclog::VideoReceiveConfig& video_receiver_config(const Event* msg); - static const ::webrtc::rtclog::VideoSendConfig& video_sender_config(const Event* msg); - static const ::webrtc::rtclog::AudioReceiveConfig& audio_receiver_config(const Event* msg); - static const ::webrtc::rtclog::AudioSendConfig& audio_sender_config(const Event* msg); - static const ::webrtc::rtclog::AudioNetworkAdaptation& audio_network_adaptation(const Event* msg); - static const ::webrtc::rtclog::BweProbeCluster& probe_cluster(const Event* msg); - static const ::webrtc::rtclog::BweProbeResult& probe_result(const Event* msg); - static const ::webrtc::rtclog::AlrState& alr_state(const Event* msg); - static const ::webrtc::rtclog::IceCandidatePairConfig& ice_candidate_pair_config(const Event* msg); - static const ::webrtc::rtclog::IceCandidatePairEvent& ice_candidate_pair_event(const Event* msg); -}; - -const ::webrtc::rtclog::RtpPacket& -Event::_Internal::rtp_packet(const Event* msg) { - return *msg->subtype_.rtp_packet_; -} -const ::webrtc::rtclog::RtcpPacket& -Event::_Internal::rtcp_packet(const Event* msg) { - return *msg->subtype_.rtcp_packet_; -} -const 
::webrtc::rtclog::AudioPlayoutEvent& -Event::_Internal::audio_playout_event(const Event* msg) { - return *msg->subtype_.audio_playout_event_; -} -const ::webrtc::rtclog::LossBasedBweUpdate& -Event::_Internal::loss_based_bwe_update(const Event* msg) { - return *msg->subtype_.loss_based_bwe_update_; -} -const ::webrtc::rtclog::DelayBasedBweUpdate& -Event::_Internal::delay_based_bwe_update(const Event* msg) { - return *msg->subtype_.delay_based_bwe_update_; -} -const ::webrtc::rtclog::VideoReceiveConfig& -Event::_Internal::video_receiver_config(const Event* msg) { - return *msg->subtype_.video_receiver_config_; -} -const ::webrtc::rtclog::VideoSendConfig& -Event::_Internal::video_sender_config(const Event* msg) { - return *msg->subtype_.video_sender_config_; -} -const ::webrtc::rtclog::AudioReceiveConfig& -Event::_Internal::audio_receiver_config(const Event* msg) { - return *msg->subtype_.audio_receiver_config_; -} -const ::webrtc::rtclog::AudioSendConfig& -Event::_Internal::audio_sender_config(const Event* msg) { - return *msg->subtype_.audio_sender_config_; -} -const ::webrtc::rtclog::AudioNetworkAdaptation& -Event::_Internal::audio_network_adaptation(const Event* msg) { - return *msg->subtype_.audio_network_adaptation_; -} -const ::webrtc::rtclog::BweProbeCluster& -Event::_Internal::probe_cluster(const Event* msg) { - return *msg->subtype_.probe_cluster_; -} -const ::webrtc::rtclog::BweProbeResult& -Event::_Internal::probe_result(const Event* msg) { - return *msg->subtype_.probe_result_; -} -const ::webrtc::rtclog::AlrState& -Event::_Internal::alr_state(const Event* msg) { - return *msg->subtype_.alr_state_; -} -const ::webrtc::rtclog::IceCandidatePairConfig& -Event::_Internal::ice_candidate_pair_config(const Event* msg) { - return *msg->subtype_.ice_candidate_pair_config_; -} -const ::webrtc::rtclog::IceCandidatePairEvent& -Event::_Internal::ice_candidate_pair_event(const Event* msg) { - return *msg->subtype_.ice_candidate_pair_event_; -} -void 
Event::set_allocated_rtp_packet(::webrtc::rtclog::RtpPacket* rtp_packet) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (rtp_packet) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - rtp_packet = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, rtp_packet, submessage_arena); - } - set_has_rtp_packet(); - subtype_.rtp_packet_ = rtp_packet; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.rtp_packet) -} -void Event::set_allocated_rtcp_packet(::webrtc::rtclog::RtcpPacket* rtcp_packet) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (rtcp_packet) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - rtcp_packet = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, rtcp_packet, submessage_arena); - } - set_has_rtcp_packet(); - subtype_.rtcp_packet_ = rtcp_packet; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.rtcp_packet) -} -void Event::set_allocated_audio_playout_event(::webrtc::rtclog::AudioPlayoutEvent* audio_playout_event) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (audio_playout_event) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - audio_playout_event = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, audio_playout_event, submessage_arena); - } - set_has_audio_playout_event(); - subtype_.audio_playout_event_ = audio_playout_event; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.audio_playout_event) -} -void Event::set_allocated_loss_based_bwe_update(::webrtc::rtclog::LossBasedBweUpdate* loss_based_bwe_update) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if 
(loss_based_bwe_update) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - loss_based_bwe_update = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, loss_based_bwe_update, submessage_arena); - } - set_has_loss_based_bwe_update(); - subtype_.loss_based_bwe_update_ = loss_based_bwe_update; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.loss_based_bwe_update) -} -void Event::set_allocated_delay_based_bwe_update(::webrtc::rtclog::DelayBasedBweUpdate* delay_based_bwe_update) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (delay_based_bwe_update) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - delay_based_bwe_update = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, delay_based_bwe_update, submessage_arena); - } - set_has_delay_based_bwe_update(); - subtype_.delay_based_bwe_update_ = delay_based_bwe_update; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.delay_based_bwe_update) -} -void Event::set_allocated_video_receiver_config(::webrtc::rtclog::VideoReceiveConfig* video_receiver_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (video_receiver_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - video_receiver_config = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, video_receiver_config, submessage_arena); - } - set_has_video_receiver_config(); - subtype_.video_receiver_config_ = video_receiver_config; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.video_receiver_config) -} -void Event::set_allocated_video_sender_config(::webrtc::rtclog::VideoSendConfig* video_sender_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - 
clear_subtype(); - if (video_sender_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - video_sender_config = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, video_sender_config, submessage_arena); - } - set_has_video_sender_config(); - subtype_.video_sender_config_ = video_sender_config; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.video_sender_config) -} -void Event::set_allocated_audio_receiver_config(::webrtc::rtclog::AudioReceiveConfig* audio_receiver_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (audio_receiver_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - audio_receiver_config = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, audio_receiver_config, submessage_arena); - } - set_has_audio_receiver_config(); - subtype_.audio_receiver_config_ = audio_receiver_config; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.audio_receiver_config) -} -void Event::set_allocated_audio_sender_config(::webrtc::rtclog::AudioSendConfig* audio_sender_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (audio_sender_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - audio_sender_config = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, audio_sender_config, submessage_arena); - } - set_has_audio_sender_config(); - subtype_.audio_sender_config_ = audio_sender_config; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.audio_sender_config) -} -void Event::set_allocated_audio_network_adaptation(::webrtc::rtclog::AudioNetworkAdaptation* audio_network_adaptation) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - 
clear_subtype(); - if (audio_network_adaptation) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - audio_network_adaptation = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, audio_network_adaptation, submessage_arena); - } - set_has_audio_network_adaptation(); - subtype_.audio_network_adaptation_ = audio_network_adaptation; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.audio_network_adaptation) -} -void Event::set_allocated_probe_cluster(::webrtc::rtclog::BweProbeCluster* probe_cluster) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (probe_cluster) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - probe_cluster = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, probe_cluster, submessage_arena); - } - set_has_probe_cluster(); - subtype_.probe_cluster_ = probe_cluster; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.probe_cluster) -} -void Event::set_allocated_probe_result(::webrtc::rtclog::BweProbeResult* probe_result) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (probe_result) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - probe_result = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, probe_result, submessage_arena); - } - set_has_probe_result(); - subtype_.probe_result_ = probe_result; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.probe_result) -} -void Event::set_allocated_alr_state(::webrtc::rtclog::AlrState* alr_state) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (alr_state) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - alr_state 
= ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, alr_state, submessage_arena); - } - set_has_alr_state(); - subtype_.alr_state_ = alr_state; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.alr_state) -} -void Event::set_allocated_ice_candidate_pair_config(::webrtc::rtclog::IceCandidatePairConfig* ice_candidate_pair_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (ice_candidate_pair_config) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - ice_candidate_pair_config = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, ice_candidate_pair_config, submessage_arena); - } - set_has_ice_candidate_pair_config(); - subtype_.ice_candidate_pair_config_ = ice_candidate_pair_config; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.ice_candidate_pair_config) -} -void Event::set_allocated_ice_candidate_pair_event(::webrtc::rtclog::IceCandidatePairEvent* ice_candidate_pair_event) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - clear_subtype(); - if (ice_candidate_pair_event) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - ice_candidate_pair_event = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, ice_candidate_pair_event, submessage_arena); - } - set_has_ice_candidate_pair_event(); - subtype_.ice_candidate_pair_event_ = ice_candidate_pair_event; - } - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.Event.ice_candidate_pair_event) -} -Event::Event() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.Event) -} -Event::Event(const Event& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - 
_internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_us_, &from.timestamp_us_, - static_cast(reinterpret_cast(&type_) - - reinterpret_cast(×tamp_us_)) + sizeof(type_)); - clear_has_subtype(); - switch (from.subtype_case()) { - case kRtpPacket: { - mutable_rtp_packet()->::webrtc::rtclog::RtpPacket::MergeFrom(from.rtp_packet()); - break; - } - case kRtcpPacket: { - mutable_rtcp_packet()->::webrtc::rtclog::RtcpPacket::MergeFrom(from.rtcp_packet()); - break; - } - case kAudioPlayoutEvent: { - mutable_audio_playout_event()->::webrtc::rtclog::AudioPlayoutEvent::MergeFrom(from.audio_playout_event()); - break; - } - case kLossBasedBweUpdate: { - mutable_loss_based_bwe_update()->::webrtc::rtclog::LossBasedBweUpdate::MergeFrom(from.loss_based_bwe_update()); - break; - } - case kDelayBasedBweUpdate: { - mutable_delay_based_bwe_update()->::webrtc::rtclog::DelayBasedBweUpdate::MergeFrom(from.delay_based_bwe_update()); - break; - } - case kVideoReceiverConfig: { - mutable_video_receiver_config()->::webrtc::rtclog::VideoReceiveConfig::MergeFrom(from.video_receiver_config()); - break; - } - case kVideoSenderConfig: { - mutable_video_sender_config()->::webrtc::rtclog::VideoSendConfig::MergeFrom(from.video_sender_config()); - break; - } - case kAudioReceiverConfig: { - mutable_audio_receiver_config()->::webrtc::rtclog::AudioReceiveConfig::MergeFrom(from.audio_receiver_config()); - break; - } - case kAudioSenderConfig: { - mutable_audio_sender_config()->::webrtc::rtclog::AudioSendConfig::MergeFrom(from.audio_sender_config()); - break; - } - case kAudioNetworkAdaptation: { - mutable_audio_network_adaptation()->::webrtc::rtclog::AudioNetworkAdaptation::MergeFrom(from.audio_network_adaptation()); - break; - } - case kProbeCluster: { - mutable_probe_cluster()->::webrtc::rtclog::BweProbeCluster::MergeFrom(from.probe_cluster()); - break; - } - case kProbeResult: { - mutable_probe_result()->::webrtc::rtclog::BweProbeResult::MergeFrom(from.probe_result()); - break; - } - 
case kAlrState: { - mutable_alr_state()->::webrtc::rtclog::AlrState::MergeFrom(from.alr_state()); - break; - } - case kIceCandidatePairConfig: { - mutable_ice_candidate_pair_config()->::webrtc::rtclog::IceCandidatePairConfig::MergeFrom(from.ice_candidate_pair_config()); - break; - } - case kIceCandidatePairEvent: { - mutable_ice_candidate_pair_event()->::webrtc::rtclog::IceCandidatePairEvent::MergeFrom(from.ice_candidate_pair_event()); - break; - } - case SUBTYPE_NOT_SET: { - break; - } - } - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.Event) -} - -void Event::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_Event_rtc_5fevent_5flog_2eproto.base); - ::memset(×tamp_us_, 0, static_cast( - reinterpret_cast(&type_) - - reinterpret_cast(×tamp_us_)) + sizeof(type_)); - clear_has_subtype(); -} - -Event::~Event() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.Event) - SharedDtor(); -} - -void Event::SharedDtor() { - if (has_subtype()) { - clear_subtype(); - } -} - -void Event::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const Event& Event::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_Event_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void Event::clear_subtype() { -// @@protoc_insertion_point(one_of_clear_start:webrtc.rtclog.Event) - switch (subtype_case()) { - case kRtpPacket: { - delete subtype_.rtp_packet_; - break; - } - case kRtcpPacket: { - delete subtype_.rtcp_packet_; - break; - } - case kAudioPlayoutEvent: { - delete subtype_.audio_playout_event_; - break; - } - case kLossBasedBweUpdate: { - delete subtype_.loss_based_bwe_update_; - break; - } - case kDelayBasedBweUpdate: { - delete subtype_.delay_based_bwe_update_; - break; - } - case kVideoReceiverConfig: { - delete subtype_.video_receiver_config_; - break; - } - case kVideoSenderConfig: { - delete subtype_.video_sender_config_; - break; - } - case kAudioReceiverConfig: { - delete 
subtype_.audio_receiver_config_; - break; - } - case kAudioSenderConfig: { - delete subtype_.audio_sender_config_; - break; - } - case kAudioNetworkAdaptation: { - delete subtype_.audio_network_adaptation_; - break; - } - case kProbeCluster: { - delete subtype_.probe_cluster_; - break; - } - case kProbeResult: { - delete subtype_.probe_result_; - break; - } - case kAlrState: { - delete subtype_.alr_state_; - break; - } - case kIceCandidatePairConfig: { - delete subtype_.ice_candidate_pair_config_; - break; - } - case kIceCandidatePairEvent: { - delete subtype_.ice_candidate_pair_event_; - break; - } - case SUBTYPE_NOT_SET: { - break; - } - } - _oneof_case_[0] = SUBTYPE_NOT_SET; -} - - -void Event::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.Event) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(×tamp_us_, 0, static_cast( - reinterpret_cast(&type_) - - reinterpret_cast(×tamp_us_)) + sizeof(type_)); - } - clear_subtype(); - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* Event::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_us = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_us(&has_bits); - timestamp_us_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.Event.EventType type = 2; - case 2: - 
if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::Event_EventType_IsValid(val))) { - set_type(static_cast<::webrtc::rtclog::Event_EventType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.RtpPacket rtp_packet = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 26)) { - ptr = ctx->ParseMessage(mutable_rtp_packet(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.RtcpPacket rtcp_packet = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 34)) { - ptr = ctx->ParseMessage(mutable_rtcp_packet(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.AudioPlayoutEvent audio_playout_event = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ctx->ParseMessage(mutable_audio_playout_event(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.LossBasedBweUpdate loss_based_bwe_update = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ctx->ParseMessage(mutable_loss_based_bwe_update(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.DelayBasedBweUpdate delay_based_bwe_update = 7; - case 7: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ctx->ParseMessage(mutable_delay_based_bwe_update(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.VideoReceiveConfig video_receiver_config = 8; - case 8: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 66)) { - ptr = ctx->ParseMessage(mutable_video_receiver_config(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.VideoSendConfig video_sender_config = 9; - case 9: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 74)) { - ptr = ctx->ParseMessage(mutable_video_sender_config(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.AudioReceiveConfig audio_receiver_config = 10; - case 10: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 82)) { - ptr = ctx->ParseMessage(mutable_audio_receiver_config(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.AudioSendConfig audio_sender_config = 11; - case 11: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 90)) { - ptr = ctx->ParseMessage(mutable_audio_sender_config(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.AudioNetworkAdaptation audio_network_adaptation = 16; - case 16: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 130)) { - ptr = ctx->ParseMessage(mutable_audio_network_adaptation(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.BweProbeCluster probe_cluster = 17; - case 17: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 138)) { - ptr = ctx->ParseMessage(mutable_probe_cluster(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.BweProbeResult probe_result = 18; - case 18: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 146)) { - ptr = ctx->ParseMessage(mutable_probe_result(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.AlrState alr_state = 19; - case 19: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 154)) { - ptr = ctx->ParseMessage(mutable_alr_state(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig ice_candidate_pair_config = 20; - case 20: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 162)) { - ptr = ctx->ParseMessage(mutable_ice_candidate_pair_config(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairEvent ice_candidate_pair_event = 21; - case 21: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 170)) { - ptr = ctx->ParseMessage(mutable_ice_candidate_pair_event(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool Event::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.Event) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch 
(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_us = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_us(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_us_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.Event.EventType type = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::Event_EventType_IsValid(value)) { - set_type(static_cast< ::webrtc::rtclog::Event_EventType >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.RtpPacket rtp_packet = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (26 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_rtp_packet())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.RtcpPacket rtcp_packet = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (34 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_rtcp_packet())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.AudioPlayoutEvent audio_playout_event = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (42 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, 
mutable_audio_playout_event())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.LossBasedBweUpdate loss_based_bwe_update = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (50 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_loss_based_bwe_update())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.DelayBasedBweUpdate delay_based_bwe_update = 7; - case 7: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (58 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_delay_based_bwe_update())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.VideoReceiveConfig video_receiver_config = 8; - case 8: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (66 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_video_receiver_config())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.VideoSendConfig video_sender_config = 9; - case 9: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (74 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_video_sender_config())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.AudioReceiveConfig audio_receiver_config = 10; - case 10: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (82 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_audio_receiver_config())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.AudioSendConfig audio_sender_config = 11; - case 11: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (90 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_audio_sender_config())); - } 
else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.AudioNetworkAdaptation audio_network_adaptation = 16; - case 16: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (130 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_audio_network_adaptation())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.BweProbeCluster probe_cluster = 17; - case 17: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (138 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_probe_cluster())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.BweProbeResult probe_result = 18; - case 18: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (146 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_probe_result())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.AlrState alr_state = 19; - case 19: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (154 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_alr_state())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig ice_candidate_pair_config = 20; - case 20: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (162 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_ice_candidate_pair_config())); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairEvent ice_candidate_pair_event = 21; - case 21: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (170 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_ice_candidate_pair_event())); - } else { - goto handle_unusual; - } - break; - } - - 
default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.Event) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.Event) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void Event::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.Event) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_us = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_us(), output); - } - - // optional .webrtc.rtclog.Event.EventType type = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 2, this->type(), output); - } - - switch (subtype_case()) { - case kRtpPacket: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 3, _Internal::rtp_packet(this), output); - break; - case kRtcpPacket: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 4, _Internal::rtcp_packet(this), output); - break; - case kAudioPlayoutEvent: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 5, _Internal::audio_playout_event(this), output); - break; - case kLossBasedBweUpdate: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 6, _Internal::loss_based_bwe_update(this), output); - break; - case kDelayBasedBweUpdate: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 7, _Internal::delay_based_bwe_update(this), output); - break; - case kVideoReceiverConfig: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 8, 
_Internal::video_receiver_config(this), output); - break; - case kVideoSenderConfig: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 9, _Internal::video_sender_config(this), output); - break; - case kAudioReceiverConfig: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 10, _Internal::audio_receiver_config(this), output); - break; - case kAudioSenderConfig: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 11, _Internal::audio_sender_config(this), output); - break; - case kAudioNetworkAdaptation: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 16, _Internal::audio_network_adaptation(this), output); - break; - case kProbeCluster: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 17, _Internal::probe_cluster(this), output); - break; - case kProbeResult: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 18, _Internal::probe_result(this), output); - break; - case kAlrState: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 19, _Internal::alr_state(this), output); - break; - case kIceCandidatePairConfig: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 20, _Internal::ice_candidate_pair_config(this), output); - break; - case kIceCandidatePairEvent: - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 21, _Internal::ice_candidate_pair_event(this), output); - break; - default: ; - } - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.Event) -} - -size_t Event::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.Event) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) 
cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional int64 timestamp_us = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_us()); - } - - // optional .webrtc.rtclog.Event.EventType type = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->type()); - } - - } - switch (subtype_case()) { - // optional .webrtc.rtclog.RtpPacket rtp_packet = 3; - case kRtpPacket: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.rtp_packet_); - break; - } - // optional .webrtc.rtclog.RtcpPacket rtcp_packet = 4; - case kRtcpPacket: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.rtcp_packet_); - break; - } - // optional .webrtc.rtclog.AudioPlayoutEvent audio_playout_event = 5; - case kAudioPlayoutEvent: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.audio_playout_event_); - break; - } - // optional .webrtc.rtclog.LossBasedBweUpdate loss_based_bwe_update = 6; - case kLossBasedBweUpdate: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.loss_based_bwe_update_); - break; - } - // optional .webrtc.rtclog.DelayBasedBweUpdate delay_based_bwe_update = 7; - case kDelayBasedBweUpdate: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.delay_based_bwe_update_); - break; - } - // optional .webrtc.rtclog.VideoReceiveConfig video_receiver_config = 8; - case kVideoReceiverConfig: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.video_receiver_config_); - break; - } - // optional .webrtc.rtclog.VideoSendConfig video_sender_config = 9; - case kVideoSenderConfig: { - total_size += 1 
+ - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.video_sender_config_); - break; - } - // optional .webrtc.rtclog.AudioReceiveConfig audio_receiver_config = 10; - case kAudioReceiverConfig: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.audio_receiver_config_); - break; - } - // optional .webrtc.rtclog.AudioSendConfig audio_sender_config = 11; - case kAudioSenderConfig: { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.audio_sender_config_); - break; - } - // optional .webrtc.rtclog.AudioNetworkAdaptation audio_network_adaptation = 16; - case kAudioNetworkAdaptation: { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.audio_network_adaptation_); - break; - } - // optional .webrtc.rtclog.BweProbeCluster probe_cluster = 17; - case kProbeCluster: { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.probe_cluster_); - break; - } - // optional .webrtc.rtclog.BweProbeResult probe_result = 18; - case kProbeResult: { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.probe_result_); - break; - } - // optional .webrtc.rtclog.AlrState alr_state = 19; - case kAlrState: { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.alr_state_); - break; - } - // optional .webrtc.rtclog.IceCandidatePairConfig ice_candidate_pair_config = 20; - case kIceCandidatePairConfig: { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.ice_candidate_pair_config_); - break; - } - // optional .webrtc.rtclog.IceCandidatePairEvent ice_candidate_pair_event = 21; - case kIceCandidatePairEvent: { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *subtype_.ice_candidate_pair_event_); - break; - } - case 
SUBTYPE_NOT_SET: { - break; - } - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void Event::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void Event::MergeFrom(const Event& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.Event) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - timestamp_us_ = from.timestamp_us_; - } - if (cached_has_bits & 0x00000002u) { - type_ = from.type_; - } - _has_bits_[0] |= cached_has_bits; - } - switch (from.subtype_case()) { - case kRtpPacket: { - mutable_rtp_packet()->::webrtc::rtclog::RtpPacket::MergeFrom(from.rtp_packet()); - break; - } - case kRtcpPacket: { - mutable_rtcp_packet()->::webrtc::rtclog::RtcpPacket::MergeFrom(from.rtcp_packet()); - break; - } - case kAudioPlayoutEvent: { - mutable_audio_playout_event()->::webrtc::rtclog::AudioPlayoutEvent::MergeFrom(from.audio_playout_event()); - break; - } - case kLossBasedBweUpdate: { - mutable_loss_based_bwe_update()->::webrtc::rtclog::LossBasedBweUpdate::MergeFrom(from.loss_based_bwe_update()); - break; - } - case kDelayBasedBweUpdate: { - mutable_delay_based_bwe_update()->::webrtc::rtclog::DelayBasedBweUpdate::MergeFrom(from.delay_based_bwe_update()); - break; - } - case kVideoReceiverConfig: { - mutable_video_receiver_config()->::webrtc::rtclog::VideoReceiveConfig::MergeFrom(from.video_receiver_config()); - break; - } - case kVideoSenderConfig: { - mutable_video_sender_config()->::webrtc::rtclog::VideoSendConfig::MergeFrom(from.video_sender_config()); - break; - } - case kAudioReceiverConfig: { - 
mutable_audio_receiver_config()->::webrtc::rtclog::AudioReceiveConfig::MergeFrom(from.audio_receiver_config()); - break; - } - case kAudioSenderConfig: { - mutable_audio_sender_config()->::webrtc::rtclog::AudioSendConfig::MergeFrom(from.audio_sender_config()); - break; - } - case kAudioNetworkAdaptation: { - mutable_audio_network_adaptation()->::webrtc::rtclog::AudioNetworkAdaptation::MergeFrom(from.audio_network_adaptation()); - break; - } - case kProbeCluster: { - mutable_probe_cluster()->::webrtc::rtclog::BweProbeCluster::MergeFrom(from.probe_cluster()); - break; - } - case kProbeResult: { - mutable_probe_result()->::webrtc::rtclog::BweProbeResult::MergeFrom(from.probe_result()); - break; - } - case kAlrState: { - mutable_alr_state()->::webrtc::rtclog::AlrState::MergeFrom(from.alr_state()); - break; - } - case kIceCandidatePairConfig: { - mutable_ice_candidate_pair_config()->::webrtc::rtclog::IceCandidatePairConfig::MergeFrom(from.ice_candidate_pair_config()); - break; - } - case kIceCandidatePairEvent: { - mutable_ice_candidate_pair_event()->::webrtc::rtclog::IceCandidatePairEvent::MergeFrom(from.ice_candidate_pair_event()); - break; - } - case SUBTYPE_NOT_SET: { - break; - } - } -} - -void Event::CopyFrom(const Event& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.Event) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool Event::IsInitialized() const { - return true; -} - -void Event::InternalSwap(Event* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_us_, other->timestamp_us_); - swap(type_, other->type_); - swap(subtype_, other->subtype_); - swap(_oneof_case_[0], other->_oneof_case_[0]); -} - -std::string Event::GetTypeName() const { - return "webrtc.rtclog.Event"; -} - - -// =================================================================== - -void RtpPacket::InitAsDefaultInstance() { -} -class 
RtpPacket::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_incoming(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_type(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_packet_length(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_header(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_probe_cluster_id(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } -}; - -RtpPacket::RtpPacket() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.RtpPacket) -} -RtpPacket::RtpPacket(const RtpPacket& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - header_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_header()) { - header_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.header_); - } - ::memcpy(&incoming_, &from.incoming_, - static_cast(reinterpret_cast(&probe_cluster_id_) - - reinterpret_cast(&incoming_)) + sizeof(probe_cluster_id_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.RtpPacket) -} - -void RtpPacket::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_RtpPacket_rtc_5fevent_5flog_2eproto.base); - header_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(&incoming_, 0, static_cast( - reinterpret_cast(&probe_cluster_id_) - - reinterpret_cast(&incoming_)) + sizeof(probe_cluster_id_)); -} - -RtpPacket::~RtpPacket() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.RtpPacket) - SharedDtor(); -} - -void RtpPacket::SharedDtor() { - header_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void 
RtpPacket::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RtpPacket& RtpPacket::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RtpPacket_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void RtpPacket::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.RtpPacket) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - header_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x0000001eu) { - ::memset(&incoming_, 0, static_cast( - reinterpret_cast(&probe_cluster_id_) - - reinterpret_cast(&incoming_)) + sizeof(probe_cluster_id_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RtpPacket::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional bool incoming = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_incoming(&has_bits); - incoming_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::MediaType_IsValid(val))) { - 
set_type(static_cast<::webrtc::rtclog::MediaType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional uint32 packet_length = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_packet_length(&has_bits); - packet_length_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes header = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 34)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_header(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 probe_cluster_id = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_probe_cluster_id(&has_bits); - probe_cluster_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RtpPacket::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream 
unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.RtpPacket) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional bool incoming = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_incoming(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &incoming_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::MediaType_IsValid(value)) { - set_type(static_cast< ::webrtc::rtclog::MediaType >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 packet_length = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_packet_length(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &packet_length_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes header = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (34 & 0xFF)) { - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_header())); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 probe_cluster_id = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_probe_cluster_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &probe_cluster_id_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.RtpPacket) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.RtpPacket) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RtpPacket::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.RtpPacket) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional bool incoming = 1; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(1, this->incoming(), output); - } - - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 2, this->type(), output); - } - - // optional uint32 packet_length = 3; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->packet_length(), output); - } - - // optional bytes header = 4; - if (cached_has_bits & 0x00000001u) { - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 4, this->header(), output); - } - - // optional int32 probe_cluster_id = 5; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(5, this->probe_cluster_id(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.RtpPacket) -} - -size_t RtpPacket::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.RtpPacket) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - // optional bytes header = 4; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->header()); - } - - // optional bool incoming = 1; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + 1; - } - - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->type()); - } - - // optional uint32 packet_length = 3; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->packet_length()); - } - - // optional int32 probe_cluster_id = 5; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->probe_cluster_id()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void 
RtpPacket::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RtpPacket::MergeFrom(const RtpPacket& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.RtpPacket) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - header_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.header_); - } - if (cached_has_bits & 0x00000002u) { - incoming_ = from.incoming_; - } - if (cached_has_bits & 0x00000004u) { - type_ = from.type_; - } - if (cached_has_bits & 0x00000008u) { - packet_length_ = from.packet_length_; - } - if (cached_has_bits & 0x00000010u) { - probe_cluster_id_ = from.probe_cluster_id_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RtpPacket::CopyFrom(const RtpPacket& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.RtpPacket) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RtpPacket::IsInitialized() const { - return true; -} - -void RtpPacket::InternalSwap(RtpPacket* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - header_.Swap(&other->header_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(incoming_, other->incoming_); - swap(type_, other->type_); - swap(packet_length_, other->packet_length_); - swap(probe_cluster_id_, other->probe_cluster_id_); -} - -std::string RtpPacket::GetTypeName() const { - return "webrtc.rtclog.RtpPacket"; -} - - -// =================================================================== - -void 
RtcpPacket::InitAsDefaultInstance() { -} -class RtcpPacket::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_incoming(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_type(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_packet_data(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -RtcpPacket::RtcpPacket() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.RtcpPacket) -} -RtcpPacket::RtcpPacket(const RtcpPacket& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - packet_data_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_packet_data()) { - packet_data_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_data_); - } - ::memcpy(&incoming_, &from.incoming_, - static_cast(reinterpret_cast(&type_) - - reinterpret_cast(&incoming_)) + sizeof(type_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.RtcpPacket) -} - -void RtcpPacket::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_RtcpPacket_rtc_5fevent_5flog_2eproto.base); - packet_data_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(&incoming_, 0, static_cast( - reinterpret_cast(&type_) - - reinterpret_cast(&incoming_)) + sizeof(type_)); -} - -RtcpPacket::~RtcpPacket() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.RtcpPacket) - SharedDtor(); -} - -void RtcpPacket::SharedDtor() { - packet_data_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void RtcpPacket::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RtcpPacket& RtcpPacket::default_instance() { - 
::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RtcpPacket_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void RtcpPacket::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.RtcpPacket) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - packet_data_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000006u) { - ::memset(&incoming_, 0, static_cast( - reinterpret_cast(&type_) - - reinterpret_cast(&incoming_)) + sizeof(type_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RtcpPacket::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional bool incoming = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_incoming(&has_bits); - incoming_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::MediaType_IsValid(val))) { - set_type(static_cast<::webrtc::rtclog::MediaType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - 
continue; - // optional bytes packet_data = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 26)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_packet_data(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RtcpPacket::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.RtcpPacket) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional bool incoming = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_incoming(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &incoming_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.MediaType type = 
2 [deprecated = true]; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::MediaType_IsValid(value)) { - set_type(static_cast< ::webrtc::rtclog::MediaType >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional bytes packet_data = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (26 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_packet_data())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.RtcpPacket) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.RtcpPacket) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RtcpPacket::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.RtcpPacket) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional bool incoming = 1; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(1, this->incoming(), output); - } - - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 
2, this->type(), output); - } - - // optional bytes packet_data = 3; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 3, this->packet_data(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.RtcpPacket) -} - -size_t RtcpPacket::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.RtcpPacket) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional bytes packet_data = 3; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->packet_data()); - } - - // optional bool incoming = 1; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + 1; - } - - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->type()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RtcpPacket::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RtcpPacket::MergeFrom(const RtcpPacket& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.RtcpPacket) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - 
cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - packet_data_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_data_); - } - if (cached_has_bits & 0x00000002u) { - incoming_ = from.incoming_; - } - if (cached_has_bits & 0x00000004u) { - type_ = from.type_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RtcpPacket::CopyFrom(const RtcpPacket& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.RtcpPacket) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RtcpPacket::IsInitialized() const { - return true; -} - -void RtcpPacket::InternalSwap(RtcpPacket* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - packet_data_.Swap(&other->packet_data_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(incoming_, other->incoming_); - swap(type_, other->type_); -} - -std::string RtcpPacket::GetTypeName() const { - return "webrtc.rtclog.RtcpPacket"; -} - - -// =================================================================== - -void AudioPlayoutEvent::InitAsDefaultInstance() { -} -class AudioPlayoutEvent::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_local_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -AudioPlayoutEvent::AudioPlayoutEvent() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.AudioPlayoutEvent) -} -AudioPlayoutEvent::AudioPlayoutEvent(const AudioPlayoutEvent& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - local_ssrc_ = from.local_ssrc_; - // 
@@protoc_insertion_point(copy_constructor:webrtc.rtclog.AudioPlayoutEvent) -} - -void AudioPlayoutEvent::SharedCtor() { - local_ssrc_ = 0u; -} - -AudioPlayoutEvent::~AudioPlayoutEvent() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.AudioPlayoutEvent) - SharedDtor(); -} - -void AudioPlayoutEvent::SharedDtor() { -} - -void AudioPlayoutEvent::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioPlayoutEvent& AudioPlayoutEvent::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioPlayoutEvent_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void AudioPlayoutEvent::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.AudioPlayoutEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - local_ssrc_ = 0u; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioPlayoutEvent::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional uint32 local_ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_local_ssrc(&has_bits); - local_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return 
ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioPlayoutEvent::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.AudioPlayoutEvent) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional uint32 local_ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_local_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &local_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.AudioPlayoutEvent) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.AudioPlayoutEvent) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioPlayoutEvent::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) 
const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.AudioPlayoutEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional uint32 local_ssrc = 2; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->local_ssrc(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.AudioPlayoutEvent) -} - -size_t AudioPlayoutEvent::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.AudioPlayoutEvent) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // optional uint32 local_ssrc = 2; - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->local_ssrc()); - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioPlayoutEvent::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioPlayoutEvent::MergeFrom(const AudioPlayoutEvent& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.AudioPlayoutEvent) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_local_ssrc()) { - set_local_ssrc(from.local_ssrc()); - } -} - -void AudioPlayoutEvent::CopyFrom(const AudioPlayoutEvent& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.AudioPlayoutEvent) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioPlayoutEvent::IsInitialized() const { - return true; -} - -void AudioPlayoutEvent::InternalSwap(AudioPlayoutEvent* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(local_ssrc_, other->local_ssrc_); -} - -std::string AudioPlayoutEvent::GetTypeName() const { - return "webrtc.rtclog.AudioPlayoutEvent"; -} - - -// =================================================================== - -void LossBasedBweUpdate::InitAsDefaultInstance() { -} -class LossBasedBweUpdate::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_fraction_loss(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_total_packets(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -LossBasedBweUpdate::LossBasedBweUpdate() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.LossBasedBweUpdate) -} -LossBasedBweUpdate::LossBasedBweUpdate(const LossBasedBweUpdate& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&bitrate_bps_, &from.bitrate_bps_, - static_cast(reinterpret_cast(&total_packets_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(total_packets_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.LossBasedBweUpdate) -} - -void LossBasedBweUpdate::SharedCtor() { - ::memset(&bitrate_bps_, 0, static_cast( - reinterpret_cast(&total_packets_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(total_packets_)); -} - -LossBasedBweUpdate::~LossBasedBweUpdate() { - // 
@@protoc_insertion_point(destructor:webrtc.rtclog.LossBasedBweUpdate) - SharedDtor(); -} - -void LossBasedBweUpdate::SharedDtor() { -} - -void LossBasedBweUpdate::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const LossBasedBweUpdate& LossBasedBweUpdate::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_LossBasedBweUpdate_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void LossBasedBweUpdate::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.LossBasedBweUpdate) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - ::memset(&bitrate_bps_, 0, static_cast( - reinterpret_cast(&total_packets_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(total_packets_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* LossBasedBweUpdate::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int32 bitrate_bps = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 fraction_loss = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_fraction_loss(&has_bits); - fraction_loss_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - 
} else goto handle_unusual; - continue; - // optional int32 total_packets = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_total_packets(&has_bits); - total_packets_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool LossBasedBweUpdate::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.LossBasedBweUpdate) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int32 bitrate_bps = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, 
&bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 fraction_loss = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_fraction_loss(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &fraction_loss_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 total_packets = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_total_packets(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &total_packets_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.LossBasedBweUpdate) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.LossBasedBweUpdate) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void LossBasedBweUpdate::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.LossBasedBweUpdate) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int32 bitrate_bps = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(1, this->bitrate_bps(), output); - } - - // optional uint32 fraction_loss = 2; - if (cached_has_bits & 0x00000002u) { - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->fraction_loss(), output); - } - - // optional int32 total_packets = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(3, this->total_packets(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.LossBasedBweUpdate) -} - -size_t LossBasedBweUpdate::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.LossBasedBweUpdate) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional int32 bitrate_bps = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->bitrate_bps()); - } - - // optional uint32 fraction_loss = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->fraction_loss()); - } - - // optional int32 total_packets = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->total_packets()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void LossBasedBweUpdate::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void LossBasedBweUpdate::MergeFrom(const LossBasedBweUpdate& from) { -// 
@@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.LossBasedBweUpdate) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000002u) { - fraction_loss_ = from.fraction_loss_; - } - if (cached_has_bits & 0x00000004u) { - total_packets_ = from.total_packets_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void LossBasedBweUpdate::CopyFrom(const LossBasedBweUpdate& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.LossBasedBweUpdate) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool LossBasedBweUpdate::IsInitialized() const { - return true; -} - -void LossBasedBweUpdate::InternalSwap(LossBasedBweUpdate* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(bitrate_bps_, other->bitrate_bps_); - swap(fraction_loss_, other->fraction_loss_); - swap(total_packets_, other->total_packets_); -} - -std::string LossBasedBweUpdate::GetTypeName() const { - return "webrtc.rtclog.LossBasedBweUpdate"; -} - - -// =================================================================== - -void DelayBasedBweUpdate::InitAsDefaultInstance() { -} -class DelayBasedBweUpdate::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_detector_state(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -DelayBasedBweUpdate::DelayBasedBweUpdate() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.DelayBasedBweUpdate) -} 
-DelayBasedBweUpdate::DelayBasedBweUpdate(const DelayBasedBweUpdate& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&bitrate_bps_, &from.bitrate_bps_, - static_cast(reinterpret_cast(&detector_state_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(detector_state_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.DelayBasedBweUpdate) -} - -void DelayBasedBweUpdate::SharedCtor() { - ::memset(&bitrate_bps_, 0, static_cast( - reinterpret_cast(&detector_state_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(detector_state_)); -} - -DelayBasedBweUpdate::~DelayBasedBweUpdate() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.DelayBasedBweUpdate) - SharedDtor(); -} - -void DelayBasedBweUpdate::SharedDtor() { -} - -void DelayBasedBweUpdate::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const DelayBasedBweUpdate& DelayBasedBweUpdate::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_DelayBasedBweUpdate_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void DelayBasedBweUpdate::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.DelayBasedBweUpdate) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(&bitrate_bps_, 0, static_cast( - reinterpret_cast(&detector_state_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(detector_state_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DelayBasedBweUpdate::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits 
has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int32 bitrate_bps = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.DelayBasedBweUpdate.DetectorState detector_state = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::DelayBasedBweUpdate_DetectorState_IsValid(val))) { - set_detector_state(static_cast<::webrtc::rtclog::DelayBasedBweUpdate_DetectorState>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DelayBasedBweUpdate::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream 
unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.DelayBasedBweUpdate) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int32 bitrate_bps = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.DelayBasedBweUpdate.DetectorState detector_state = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::DelayBasedBweUpdate_DetectorState_IsValid(value)) { - set_detector_state(static_cast< ::webrtc::rtclog::DelayBasedBweUpdate_DetectorState >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.DelayBasedBweUpdate) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.DelayBasedBweUpdate) - return false; -#undef DO_ -} -#endif // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void DelayBasedBweUpdate::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.DelayBasedBweUpdate) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int32 bitrate_bps = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(1, this->bitrate_bps(), output); - } - - // optional .webrtc.rtclog.DelayBasedBweUpdate.DetectorState detector_state = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 2, this->detector_state(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.DelayBasedBweUpdate) -} - -size_t DelayBasedBweUpdate::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.DelayBasedBweUpdate) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional int32 bitrate_bps = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->bitrate_bps()); - } - - // optional .webrtc.rtclog.DelayBasedBweUpdate.DetectorState detector_state = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->detector_state()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - 
-void DelayBasedBweUpdate::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void DelayBasedBweUpdate::MergeFrom(const DelayBasedBweUpdate& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.DelayBasedBweUpdate) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000002u) { - detector_state_ = from.detector_state_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void DelayBasedBweUpdate::CopyFrom(const DelayBasedBweUpdate& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.DelayBasedBweUpdate) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool DelayBasedBweUpdate::IsInitialized() const { - return true; -} - -void DelayBasedBweUpdate::InternalSwap(DelayBasedBweUpdate* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(bitrate_bps_, other->bitrate_bps_); - swap(detector_state_, other->detector_state_); -} - -std::string DelayBasedBweUpdate::GetTypeName() const { - return "webrtc.rtclog.DelayBasedBweUpdate"; -} - - -// =================================================================== - -void VideoReceiveConfig::InitAsDefaultInstance() { -} -class VideoReceiveConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_remote_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_local_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_rtcp_mode(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void 
set_has_remb(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -VideoReceiveConfig::VideoReceiveConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.VideoReceiveConfig) -} -VideoReceiveConfig::VideoReceiveConfig(const VideoReceiveConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_), - rtx_map_(from.rtx_map_), - header_extensions_(from.header_extensions_), - decoders_(from.decoders_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&remote_ssrc_, &from.remote_ssrc_, - static_cast(reinterpret_cast(&rtcp_mode_) - - reinterpret_cast(&remote_ssrc_)) + sizeof(rtcp_mode_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.VideoReceiveConfig) -} - -void VideoReceiveConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_VideoReceiveConfig_rtc_5fevent_5flog_2eproto.base); - ::memset(&remote_ssrc_, 0, static_cast( - reinterpret_cast(&remb_) - - reinterpret_cast(&remote_ssrc_)) + sizeof(remb_)); - rtcp_mode_ = 1; -} - -VideoReceiveConfig::~VideoReceiveConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.VideoReceiveConfig) - SharedDtor(); -} - -void VideoReceiveConfig::SharedDtor() { -} - -void VideoReceiveConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const VideoReceiveConfig& VideoReceiveConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_VideoReceiveConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void VideoReceiveConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.VideoReceiveConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - rtx_map_.Clear(); - header_extensions_.Clear(); - decoders_.Clear(); - 
cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - ::memset(&remote_ssrc_, 0, static_cast( - reinterpret_cast(&remb_) - - reinterpret_cast(&remote_ssrc_)) + sizeof(remb_)); - rtcp_mode_ = 1; - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* VideoReceiveConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional uint32 remote_ssrc = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_remote_ssrc(&has_bits); - remote_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 local_ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_local_ssrc(&has_bits); - local_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.VideoReceiveConfig.RtcpMode rtcp_mode = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::VideoReceiveConfig_RtcpMode_IsValid(val))) { - set_rtcp_mode(static_cast<::webrtc::rtclog::VideoReceiveConfig_RtcpMode>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(3, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional bool remb = 4; - case 4: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_remb(&has_bits); - remb_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog.RtxMap rtx_map = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_rtx_map(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 42); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_header_extensions(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 50); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog.DecoderConfig decoders = 7; - case 7: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_decoders(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 58); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool 
VideoReceiveConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.VideoReceiveConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional uint32 remote_ssrc = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_remote_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &remote_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 local_ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_local_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &local_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.VideoReceiveConfig.RtcpMode rtcp_mode = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::VideoReceiveConfig_RtcpMode_IsValid(value)) { - set_rtcp_mode(static_cast< ::webrtc::rtclog::VideoReceiveConfig_RtcpMode >(value)); - } else { - unknown_fields_stream.WriteVarint32(24u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional bool remb = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_remb(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &remb_))); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog.RtxMap rtx_map = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (42 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_rtx_map())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (50 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog.DecoderConfig decoders = 7; - case 7: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (58 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_decoders())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.VideoReceiveConfig) - 
return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.VideoReceiveConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void VideoReceiveConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.VideoReceiveConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional uint32 remote_ssrc = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(1, this->remote_ssrc(), output); - } - - // optional uint32 local_ssrc = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->local_ssrc(), output); - } - - // optional .webrtc.rtclog.VideoReceiveConfig.RtcpMode rtcp_mode = 3; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 3, this->rtcp_mode(), output); - } - - // optional bool remb = 4; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(4, this->remb(), output); - } - - // repeated .webrtc.rtclog.RtxMap rtx_map = 5; - for (unsigned int i = 0, - n = static_cast(this->rtx_map_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 5, - this->rtx_map(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 6; - for (unsigned int i = 0, - n = static_cast(this->header_extensions_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 6, - this->header_extensions(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog.DecoderConfig decoders = 7; - for (unsigned int i = 0, - n = static_cast(this->decoders_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 
7, - this->decoders(static_cast(i)), - output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.VideoReceiveConfig) -} - -size_t VideoReceiveConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.VideoReceiveConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .webrtc.rtclog.RtxMap rtx_map = 5; - { - unsigned int count = static_cast(this->rtx_map_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->rtx_map(static_cast(i))); - } - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 6; - { - unsigned int count = static_cast(this->header_extensions_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->header_extensions(static_cast(i))); - } - } - - // repeated .webrtc.rtclog.DecoderConfig decoders = 7; - { - unsigned int count = static_cast(this->decoders_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->decoders(static_cast(i))); - } - } - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - // optional uint32 remote_ssrc = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->remote_ssrc()); - } - - // optional uint32 local_ssrc = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->local_ssrc()); - } - - // optional bool remb = 4; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + 1; - } - - // optional .webrtc.rtclog.VideoReceiveConfig.RtcpMode rtcp_mode = 3; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->rtcp_mode()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void VideoReceiveConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void VideoReceiveConfig::MergeFrom(const VideoReceiveConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.VideoReceiveConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - rtx_map_.MergeFrom(from.rtx_map_); - header_extensions_.MergeFrom(from.header_extensions_); - decoders_.MergeFrom(from.decoders_); - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - if (cached_has_bits & 0x00000001u) { - remote_ssrc_ = from.remote_ssrc_; - } - if (cached_has_bits & 0x00000002u) { - local_ssrc_ = from.local_ssrc_; - } - if (cached_has_bits & 0x00000004u) { - remb_ = from.remb_; - } - if (cached_has_bits & 0x00000008u) { - rtcp_mode_ = from.rtcp_mode_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void VideoReceiveConfig::CopyFrom(const VideoReceiveConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.VideoReceiveConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool VideoReceiveConfig::IsInitialized() const { - return true; -} - -void VideoReceiveConfig::InternalSwap(VideoReceiveConfig* other) { - using 
std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - CastToBase(&rtx_map_)->InternalSwap(CastToBase(&other->rtx_map_)); - CastToBase(&header_extensions_)->InternalSwap(CastToBase(&other->header_extensions_)); - CastToBase(&decoders_)->InternalSwap(CastToBase(&other->decoders_)); - swap(remote_ssrc_, other->remote_ssrc_); - swap(local_ssrc_, other->local_ssrc_); - swap(remb_, other->remb_); - swap(rtcp_mode_, other->rtcp_mode_); -} - -std::string VideoReceiveConfig::GetTypeName() const { - return "webrtc.rtclog.VideoReceiveConfig"; -} - - -// =================================================================== - -void DecoderConfig::InitAsDefaultInstance() { -} -class DecoderConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_name(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_payload_type(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -DecoderConfig::DecoderConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.DecoderConfig) -} -DecoderConfig::DecoderConfig(const DecoderConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_name()) { - name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.name_); - } - payload_type_ = from.payload_type_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.DecoderConfig) -} - -void DecoderConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_DecoderConfig_rtc_5fevent_5flog_2eproto.base); - name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
payload_type_ = 0; -} - -DecoderConfig::~DecoderConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.DecoderConfig) - SharedDtor(); -} - -void DecoderConfig::SharedDtor() { - name_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void DecoderConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const DecoderConfig& DecoderConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_DecoderConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void DecoderConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.DecoderConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - name_.ClearNonDefaultToEmptyNoArena(); - } - payload_type_ = 0; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DecoderConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional string name = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_name(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 payload_type = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_payload_type(&has_bits); - payload_type_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); 
- } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DecoderConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.DecoderConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional string name = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadString( - input, this->mutable_name())); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 payload_type = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_payload_type(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &payload_type_))); - } else { - goto handle_unusual; - } 
- break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.DecoderConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.DecoderConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void DecoderConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.DecoderConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional string name = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringMaybeAliased( - 1, this->name(), output); - } - - // optional int32 payload_type = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->payload_type(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.DecoderConfig) -} - -size_t DecoderConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.DecoderConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional string name = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize( - this->name()); - } - - // optional int32 
payload_type = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->payload_type()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void DecoderConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void DecoderConfig::MergeFrom(const DecoderConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.DecoderConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.name_); - } - if (cached_has_bits & 0x00000002u) { - payload_type_ = from.payload_type_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void DecoderConfig::CopyFrom(const DecoderConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.DecoderConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool DecoderConfig::IsInitialized() const { - return true; -} - -void DecoderConfig::InternalSwap(DecoderConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - name_.Swap(&other->name_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(payload_type_, other->payload_type_); -} - -std::string DecoderConfig::GetTypeName() const { - return "webrtc.rtclog.DecoderConfig"; -} - - -// =================================================================== - -void 
RtpHeaderExtension::InitAsDefaultInstance() { -} -class RtpHeaderExtension::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_name(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_id(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -RtpHeaderExtension::RtpHeaderExtension() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.RtpHeaderExtension) -} -RtpHeaderExtension::RtpHeaderExtension(const RtpHeaderExtension& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_name()) { - name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.name_); - } - id_ = from.id_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.RtpHeaderExtension) -} - -void RtpHeaderExtension::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto.base); - name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - id_ = 0; -} - -RtpHeaderExtension::~RtpHeaderExtension() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.RtpHeaderExtension) - SharedDtor(); -} - -void RtpHeaderExtension::SharedDtor() { - name_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void RtpHeaderExtension::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RtpHeaderExtension& RtpHeaderExtension::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RtpHeaderExtension_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void RtpHeaderExtension::Clear() { -// 
@@protoc_insertion_point(message_clear_start:webrtc.rtclog.RtpHeaderExtension) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - name_.ClearNonDefaultToEmptyNoArena(); - } - id_ = 0; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RtpHeaderExtension::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional string name = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_name(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 id = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_id(&has_bits); - id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RtpHeaderExtension::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure 
- ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.RtpHeaderExtension) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional string name = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadString( - input, this->mutable_name())); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 id = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &id_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.RtpHeaderExtension) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.RtpHeaderExtension) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RtpHeaderExtension::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // 
@@protoc_insertion_point(serialize_start:webrtc.rtclog.RtpHeaderExtension) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional string name = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringMaybeAliased( - 1, this->name(), output); - } - - // optional int32 id = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->id(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.RtpHeaderExtension) -} - -size_t RtpHeaderExtension::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.RtpHeaderExtension) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional string name = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize( - this->name()); - } - - // optional int32 id = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->id()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RtpHeaderExtension::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RtpHeaderExtension::MergeFrom(const RtpHeaderExtension& from) { -// 
@@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.RtpHeaderExtension) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.name_); - } - if (cached_has_bits & 0x00000002u) { - id_ = from.id_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RtpHeaderExtension::CopyFrom(const RtpHeaderExtension& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.RtpHeaderExtension) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RtpHeaderExtension::IsInitialized() const { - return true; -} - -void RtpHeaderExtension::InternalSwap(RtpHeaderExtension* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - name_.Swap(&other->name_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(id_, other->id_); -} - -std::string RtpHeaderExtension::GetTypeName() const { - return "webrtc.rtclog.RtpHeaderExtension"; -} - - -// =================================================================== - -void RtxConfig::InitAsDefaultInstance() { -} -class RtxConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_rtx_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_rtx_payload_type(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -RtxConfig::RtxConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.RtxConfig) -} -RtxConfig::RtxConfig(const RtxConfig& from) - : 
::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&rtx_ssrc_, &from.rtx_ssrc_, - static_cast(reinterpret_cast(&rtx_payload_type_) - - reinterpret_cast(&rtx_ssrc_)) + sizeof(rtx_payload_type_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.RtxConfig) -} - -void RtxConfig::SharedCtor() { - ::memset(&rtx_ssrc_, 0, static_cast( - reinterpret_cast(&rtx_payload_type_) - - reinterpret_cast(&rtx_ssrc_)) + sizeof(rtx_payload_type_)); -} - -RtxConfig::~RtxConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.RtxConfig) - SharedDtor(); -} - -void RtxConfig::SharedDtor() { -} - -void RtxConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RtxConfig& RtxConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RtxConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void RtxConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.RtxConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(&rtx_ssrc_, 0, static_cast( - reinterpret_cast(&rtx_payload_type_) - - reinterpret_cast(&rtx_ssrc_)) + sizeof(rtx_payload_type_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RtxConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional uint32 rtx_ssrc = 1; - case 1: - 
if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_rtx_ssrc(&has_bits); - rtx_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 rtx_payload_type = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_rtx_payload_type(&has_bits); - rtx_payload_type_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RtxConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.RtxConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional uint32 rtx_ssrc = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_rtx_ssrc(&_has_bits_); 
- DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &rtx_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 rtx_payload_type = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_rtx_payload_type(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &rtx_payload_type_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.RtxConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.RtxConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RtxConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.RtxConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional uint32 rtx_ssrc = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(1, this->rtx_ssrc(), output); - } - - // optional int32 rtx_payload_type = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->rtx_payload_type(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.RtxConfig) 
-} - -size_t RtxConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.RtxConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional uint32 rtx_ssrc = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->rtx_ssrc()); - } - - // optional int32 rtx_payload_type = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->rtx_payload_type()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RtxConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RtxConfig::MergeFrom(const RtxConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.RtxConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - rtx_ssrc_ = from.rtx_ssrc_; - } - if (cached_has_bits & 0x00000002u) { - rtx_payload_type_ = from.rtx_payload_type_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RtxConfig::CopyFrom(const RtxConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.RtxConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RtxConfig::IsInitialized() const { - return true; -} - -void 
RtxConfig::InternalSwap(RtxConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(rtx_ssrc_, other->rtx_ssrc_); - swap(rtx_payload_type_, other->rtx_payload_type_); -} - -std::string RtxConfig::GetTypeName() const { - return "webrtc.rtclog.RtxConfig"; -} - - -// =================================================================== - -void RtxMap::InitAsDefaultInstance() { - ::webrtc::rtclog::_RtxMap_default_instance_._instance.get_mutable()->config_ = const_cast< ::webrtc::rtclog::RtxConfig*>( - ::webrtc::rtclog::RtxConfig::internal_default_instance()); -} -class RtxMap::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_payload_type(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static const ::webrtc::rtclog::RtxConfig& config(const RtxMap* msg); - static void set_has_config(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -const ::webrtc::rtclog::RtxConfig& -RtxMap::_Internal::config(const RtxMap* msg) { - return *msg->config_; -} -RtxMap::RtxMap() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.RtxMap) -} -RtxMap::RtxMap(const RtxMap& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_config()) { - config_ = new ::webrtc::rtclog::RtxConfig(*from.config_); - } else { - config_ = nullptr; - } - payload_type_ = from.payload_type_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.RtxMap) -} - -void RtxMap::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_RtxMap_rtc_5fevent_5flog_2eproto.base); - ::memset(&config_, 0, static_cast( - reinterpret_cast(&payload_type_) - - reinterpret_cast(&config_)) + sizeof(payload_type_)); -} - -RtxMap::~RtxMap() { - // 
@@protoc_insertion_point(destructor:webrtc.rtclog.RtxMap) - SharedDtor(); -} - -void RtxMap::SharedDtor() { - if (this != internal_default_instance()) delete config_; -} - -void RtxMap::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RtxMap& RtxMap::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RtxMap_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void RtxMap::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.RtxMap) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - GOOGLE_DCHECK(config_ != nullptr); - config_->Clear(); - } - payload_type_ = 0; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RtxMap::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int32 payload_type = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_payload_type(&has_bits); - payload_type_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.RtxConfig config = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { - ptr = ctx->ParseMessage(mutable_config(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = 
UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RtxMap::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.RtxMap) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int32 payload_type = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_payload_type(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &payload_type_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.RtxConfig config = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_config())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - 
input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.RtxMap) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.RtxMap) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RtxMap::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.RtxMap) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int32 payload_type = 1; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(1, this->payload_type(), output); - } - - // optional .webrtc.rtclog.RtxConfig config = 2; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 2, _Internal::config(this), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.RtxMap) -} - -size_t RtxMap::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.RtxMap) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional .webrtc.rtclog.RtxConfig config = 2; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *config_); - } - - // optional int32 payload_type = 1; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->payload_type()); - } - - 
} - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RtxMap::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RtxMap::MergeFrom(const RtxMap& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.RtxMap) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - mutable_config()->::webrtc::rtclog::RtxConfig::MergeFrom(from.config()); - } - if (cached_has_bits & 0x00000002u) { - payload_type_ = from.payload_type_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RtxMap::CopyFrom(const RtxMap& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.RtxMap) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RtxMap::IsInitialized() const { - return true; -} - -void RtxMap::InternalSwap(RtxMap* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(config_, other->config_); - swap(payload_type_, other->payload_type_); -} - -std::string RtxMap::GetTypeName() const { - return "webrtc.rtclog.RtxMap"; -} - - -// =================================================================== - -void VideoSendConfig::InitAsDefaultInstance() { - ::webrtc::rtclog::_VideoSendConfig_default_instance_._instance.get_mutable()->encoder_ = const_cast< ::webrtc::rtclog::EncoderConfig*>( - ::webrtc::rtclog::EncoderConfig::internal_default_instance()); -} -class VideoSendConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_rtx_payload_type(HasBits* has_bits) 
{ - (*has_bits)[0] |= 2u; - } - static const ::webrtc::rtclog::EncoderConfig& encoder(const VideoSendConfig* msg); - static void set_has_encoder(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -const ::webrtc::rtclog::EncoderConfig& -VideoSendConfig::_Internal::encoder(const VideoSendConfig* msg) { - return *msg->encoder_; -} -VideoSendConfig::VideoSendConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.VideoSendConfig) -} -VideoSendConfig::VideoSendConfig(const VideoSendConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_), - ssrcs_(from.ssrcs_), - header_extensions_(from.header_extensions_), - rtx_ssrcs_(from.rtx_ssrcs_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_encoder()) { - encoder_ = new ::webrtc::rtclog::EncoderConfig(*from.encoder_); - } else { - encoder_ = nullptr; - } - rtx_payload_type_ = from.rtx_payload_type_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.VideoSendConfig) -} - -void VideoSendConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_VideoSendConfig_rtc_5fevent_5flog_2eproto.base); - ::memset(&encoder_, 0, static_cast( - reinterpret_cast(&rtx_payload_type_) - - reinterpret_cast(&encoder_)) + sizeof(rtx_payload_type_)); -} - -VideoSendConfig::~VideoSendConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.VideoSendConfig) - SharedDtor(); -} - -void VideoSendConfig::SharedDtor() { - if (this != internal_default_instance()) delete encoder_; -} - -void VideoSendConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const VideoSendConfig& VideoSendConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_VideoSendConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void VideoSendConfig::Clear() { -// 
@@protoc_insertion_point(message_clear_start:webrtc.rtclog.VideoSendConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - ssrcs_.Clear(); - header_extensions_.Clear(); - rtx_ssrcs_.Clear(); - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - GOOGLE_DCHECK(encoder_ != nullptr); - encoder_->Clear(); - } - rtx_payload_type_ = 0; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* VideoSendConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // repeated uint32 ssrcs = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - ptr -= 1; - do { - ptr += 1; - add_ssrcs(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr)); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 8); - } else if (static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedUInt32Parser(mutable_ssrcs(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_header_extensions(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 18); - } else goto handle_unusual; - continue; - 
// repeated uint32 rtx_ssrcs = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - ptr -= 1; - do { - ptr += 1; - add_rtx_ssrcs(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr)); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 24); - } else if (static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 26) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedUInt32Parser(mutable_rtx_ssrcs(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 rtx_payload_type = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_rtx_payload_type(&has_bits); - rtx_payload_type_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.EncoderConfig encoder = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ctx->ParseMessage(mutable_encoder(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool VideoSendConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( 
- unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.VideoSendConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // repeated uint32 ssrcs = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadRepeatedPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - 1, 8u, input, this->mutable_ssrcs()))); - } else if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPackedPrimitiveNoInline< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, this->mutable_ssrcs()))); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - // repeated uint32 rtx_ssrcs = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadRepeatedPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - 1, 24u, input, this->mutable_rtx_ssrcs()))); - } else if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (26 & 0xFF)) { - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPackedPrimitiveNoInline< - 
::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, this->mutable_rtx_ssrcs()))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 rtx_payload_type = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_rtx_payload_type(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &rtx_payload_type_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.EncoderConfig encoder = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (42 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_encoder())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.VideoSendConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.VideoSendConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void VideoSendConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.VideoSendConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated uint32 ssrcs = 1; - for (int i = 0, n = this->ssrcs_size(); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32( - 1, this->ssrcs(i), output); - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - for (unsigned int i = 0, - n = 
static_cast(this->header_extensions_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 2, - this->header_extensions(static_cast(i)), - output); - } - - // repeated uint32 rtx_ssrcs = 3; - for (int i = 0, n = this->rtx_ssrcs_size(); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32( - 3, this->rtx_ssrcs(i), output); - } - - cached_has_bits = _has_bits_[0]; - // optional int32 rtx_payload_type = 4; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(4, this->rtx_payload_type(), output); - } - - // optional .webrtc.rtclog.EncoderConfig encoder = 5; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 5, _Internal::encoder(this), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.VideoSendConfig) -} - -size_t VideoSendConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.VideoSendConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated uint32 ssrcs = 1; - { - size_t data_size = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite:: - UInt32Size(this->ssrcs_); - total_size += 1 * - ::PROTOBUF_NAMESPACE_ID::internal::FromIntSize(this->ssrcs_size()); - total_size += data_size; - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - { - unsigned int count = static_cast(this->header_extensions_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - 
this->header_extensions(static_cast(i))); - } - } - - // repeated uint32 rtx_ssrcs = 3; - { - size_t data_size = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite:: - UInt32Size(this->rtx_ssrcs_); - total_size += 1 * - ::PROTOBUF_NAMESPACE_ID::internal::FromIntSize(this->rtx_ssrcs_size()); - total_size += data_size; - } - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional .webrtc.rtclog.EncoderConfig encoder = 5; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *encoder_); - } - - // optional int32 rtx_payload_type = 4; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->rtx_payload_type()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void VideoSendConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void VideoSendConfig::MergeFrom(const VideoSendConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.VideoSendConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - ssrcs_.MergeFrom(from.ssrcs_); - header_extensions_.MergeFrom(from.header_extensions_); - rtx_ssrcs_.MergeFrom(from.rtx_ssrcs_); - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - mutable_encoder()->::webrtc::rtclog::EncoderConfig::MergeFrom(from.encoder()); - } - if (cached_has_bits & 0x00000002u) { - rtx_payload_type_ = from.rtx_payload_type_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void VideoSendConfig::CopyFrom(const VideoSendConfig& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.VideoSendConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool VideoSendConfig::IsInitialized() const { - return true; -} - -void VideoSendConfig::InternalSwap(VideoSendConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - ssrcs_.InternalSwap(&other->ssrcs_); - CastToBase(&header_extensions_)->InternalSwap(CastToBase(&other->header_extensions_)); - rtx_ssrcs_.InternalSwap(&other->rtx_ssrcs_); - swap(encoder_, other->encoder_); - swap(rtx_payload_type_, other->rtx_payload_type_); -} - -std::string VideoSendConfig::GetTypeName() const { - return "webrtc.rtclog.VideoSendConfig"; -} - - -// =================================================================== - -void EncoderConfig::InitAsDefaultInstance() { -} -class EncoderConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_name(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_payload_type(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -EncoderConfig::EncoderConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.EncoderConfig) -} -EncoderConfig::EncoderConfig(const EncoderConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_name()) { - name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.name_); - } - payload_type_ = from.payload_type_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.EncoderConfig) -} - -void EncoderConfig::SharedCtor() { - 
::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_EncoderConfig_rtc_5fevent_5flog_2eproto.base); - name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_type_ = 0; -} - -EncoderConfig::~EncoderConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.EncoderConfig) - SharedDtor(); -} - -void EncoderConfig::SharedDtor() { - name_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void EncoderConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const EncoderConfig& EncoderConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_EncoderConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void EncoderConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.EncoderConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - name_.ClearNonDefaultToEmptyNoArena(); - } - payload_type_ = 0; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* EncoderConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional string name = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_name(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 payload_type = 2; - case 2: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_payload_type(&has_bits); - payload_type_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool EncoderConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.EncoderConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional string name = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadString( - input, this->mutable_name())); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 payload_type = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_payload_type(&_has_bits_); - 
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &payload_type_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.EncoderConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.EncoderConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void EncoderConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.EncoderConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional string name = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringMaybeAliased( - 1, this->name(), output); - } - - // optional int32 payload_type = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->payload_type(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.EncoderConfig) -} - -size_t EncoderConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.EncoderConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if 
(cached_has_bits & 0x00000003u) { - // optional string name = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize( - this->name()); - } - - // optional int32 payload_type = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->payload_type()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void EncoderConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void EncoderConfig::MergeFrom(const EncoderConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.EncoderConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.name_); - } - if (cached_has_bits & 0x00000002u) { - payload_type_ = from.payload_type_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void EncoderConfig::CopyFrom(const EncoderConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.EncoderConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool EncoderConfig::IsInitialized() const { - return true; -} - -void EncoderConfig::InternalSwap(EncoderConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - name_.Swap(&other->name_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - 
GetArenaNoVirtual()); - swap(payload_type_, other->payload_type_); -} - -std::string EncoderConfig::GetTypeName() const { - return "webrtc.rtclog.EncoderConfig"; -} - - -// =================================================================== - -void AudioReceiveConfig::InitAsDefaultInstance() { -} -class AudioReceiveConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_remote_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_local_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -AudioReceiveConfig::AudioReceiveConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.AudioReceiveConfig) -} -AudioReceiveConfig::AudioReceiveConfig(const AudioReceiveConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_), - header_extensions_(from.header_extensions_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&remote_ssrc_, &from.remote_ssrc_, - static_cast(reinterpret_cast(&local_ssrc_) - - reinterpret_cast(&remote_ssrc_)) + sizeof(local_ssrc_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.AudioReceiveConfig) -} - -void AudioReceiveConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_AudioReceiveConfig_rtc_5fevent_5flog_2eproto.base); - ::memset(&remote_ssrc_, 0, static_cast( - reinterpret_cast(&local_ssrc_) - - reinterpret_cast(&remote_ssrc_)) + sizeof(local_ssrc_)); -} - -AudioReceiveConfig::~AudioReceiveConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.AudioReceiveConfig) - SharedDtor(); -} - -void AudioReceiveConfig::SharedDtor() { -} - -void AudioReceiveConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioReceiveConfig& AudioReceiveConfig::default_instance() { - 
::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioReceiveConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void AudioReceiveConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.AudioReceiveConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - header_extensions_.Clear(); - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(&remote_ssrc_, 0, static_cast( - reinterpret_cast(&local_ssrc_) - - reinterpret_cast(&remote_ssrc_)) + sizeof(local_ssrc_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioReceiveConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional uint32 remote_ssrc = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_remote_ssrc(&has_bits); - remote_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 local_ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_local_ssrc(&has_bits); - local_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 26)) { - ptr -= 1; - do { - ptr += 1; - ptr = 
ctx->ParseMessage(add_header_extensions(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 26); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioReceiveConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.AudioReceiveConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional uint32 remote_ssrc = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_remote_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &remote_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 local_ssrc = 2; - 
case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_local_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &local_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (26 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.AudioReceiveConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.AudioReceiveConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioReceiveConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.AudioReceiveConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional uint32 remote_ssrc = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(1, this->remote_ssrc(), output); - } - - // optional uint32 local_ssrc = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->local_ssrc(), output); - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 3; - for (unsigned int i = 0, - n = static_cast(this->header_extensions_size()); 
i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 3, - this->header_extensions(static_cast(i)), - output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.AudioReceiveConfig) -} - -size_t AudioReceiveConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.AudioReceiveConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 3; - { - unsigned int count = static_cast(this->header_extensions_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->header_extensions(static_cast(i))); - } - } - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional uint32 remote_ssrc = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->remote_ssrc()); - } - - // optional uint32 local_ssrc = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->local_ssrc()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioReceiveConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioReceiveConfig::MergeFrom(const AudioReceiveConfig& from) { -// 
@@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.AudioReceiveConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - header_extensions_.MergeFrom(from.header_extensions_); - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - remote_ssrc_ = from.remote_ssrc_; - } - if (cached_has_bits & 0x00000002u) { - local_ssrc_ = from.local_ssrc_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void AudioReceiveConfig::CopyFrom(const AudioReceiveConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.AudioReceiveConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioReceiveConfig::IsInitialized() const { - return true; -} - -void AudioReceiveConfig::InternalSwap(AudioReceiveConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - CastToBase(&header_extensions_)->InternalSwap(CastToBase(&other->header_extensions_)); - swap(remote_ssrc_, other->remote_ssrc_); - swap(local_ssrc_, other->local_ssrc_); -} - -std::string AudioReceiveConfig::GetTypeName() const { - return "webrtc.rtclog.AudioReceiveConfig"; -} - - -// =================================================================== - -void AudioSendConfig::InitAsDefaultInstance() { -} -class AudioSendConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -AudioSendConfig::AudioSendConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.AudioSendConfig) -} -AudioSendConfig::AudioSendConfig(const AudioSendConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - 
_internal_metadata_(nullptr), - _has_bits_(from._has_bits_), - header_extensions_(from.header_extensions_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ssrc_ = from.ssrc_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.AudioSendConfig) -} - -void AudioSendConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_AudioSendConfig_rtc_5fevent_5flog_2eproto.base); - ssrc_ = 0u; -} - -AudioSendConfig::~AudioSendConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.AudioSendConfig) - SharedDtor(); -} - -void AudioSendConfig::SharedDtor() { -} - -void AudioSendConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioSendConfig& AudioSendConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioSendConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void AudioSendConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.AudioSendConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - header_extensions_.Clear(); - ssrc_ = 0u; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioSendConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional uint32 ssrc = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_ssrc(&has_bits); - ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // repeated 
.webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_header_extensions(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 18); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioSendConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.AudioSendConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional uint32 ssrc = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.AudioSendConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.AudioSendConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioSendConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.AudioSendConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional uint32 ssrc = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(1, this->ssrc(), output); - } - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - for (unsigned int i = 0, - n = static_cast(this->header_extensions_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 2, - this->header_extensions(static_cast(i)), - output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.AudioSendConfig) -} - -size_t AudioSendConfig::ByteSizeLong() const { -// 
@@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.AudioSendConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - { - unsigned int count = static_cast(this->header_extensions_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->header_extensions(static_cast(i))); - } - } - - // optional uint32 ssrc = 1; - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->ssrc()); - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioSendConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioSendConfig::MergeFrom(const AudioSendConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.AudioSendConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - header_extensions_.MergeFrom(from.header_extensions_); - if (from.has_ssrc()) { - set_ssrc(from.ssrc()); - } -} - -void AudioSendConfig::CopyFrom(const AudioSendConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.AudioSendConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioSendConfig::IsInitialized() const { - return true; -} - -void AudioSendConfig::InternalSwap(AudioSendConfig* other) { - using 
std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - CastToBase(&header_extensions_)->InternalSwap(CastToBase(&other->header_extensions_)); - swap(ssrc_, other->ssrc_); -} - -std::string AudioSendConfig::GetTypeName() const { - return "webrtc.rtclog.AudioSendConfig"; -} - - -// =================================================================== - -void AudioNetworkAdaptation::InitAsDefaultInstance() { -} -class AudioNetworkAdaptation::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_frame_length_ms(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_uplink_packet_loss_fraction(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_enable_fec(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_enable_dtx(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_num_channels(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } -}; - -AudioNetworkAdaptation::AudioNetworkAdaptation() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.AudioNetworkAdaptation) -} -AudioNetworkAdaptation::AudioNetworkAdaptation(const AudioNetworkAdaptation& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&bitrate_bps_, &from.bitrate_bps_, - static_cast(reinterpret_cast(&num_channels_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(num_channels_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.AudioNetworkAdaptation) -} - -void AudioNetworkAdaptation::SharedCtor() { - ::memset(&bitrate_bps_, 0, static_cast( - reinterpret_cast(&num_channels_) - - reinterpret_cast(&bitrate_bps_)) + 
sizeof(num_channels_)); -} - -AudioNetworkAdaptation::~AudioNetworkAdaptation() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.AudioNetworkAdaptation) - SharedDtor(); -} - -void AudioNetworkAdaptation::SharedDtor() { -} - -void AudioNetworkAdaptation::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioNetworkAdaptation& AudioNetworkAdaptation::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioNetworkAdaptation_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void AudioNetworkAdaptation::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.AudioNetworkAdaptation) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000003fu) { - ::memset(&bitrate_bps_, 0, static_cast( - reinterpret_cast(&num_channels_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(num_channels_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioNetworkAdaptation::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int32 bitrate_bps = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 frame_length_ms = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - 
_Internal::set_has_frame_length_ms(&has_bits); - frame_length_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional float uplink_packet_loss_fraction = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 29)) { - _Internal::set_has_uplink_packet_loss_fraction(&has_bits); - uplink_packet_loss_fraction_ = ::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad(ptr); - ptr += sizeof(float); - } else goto handle_unusual; - continue; - // optional bool enable_fec = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_enable_fec(&has_bits); - enable_fec_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool enable_dtx = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_enable_dtx(&has_bits); - enable_dtx_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 num_channels = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 48)) { - _Internal::set_has_num_channels(&has_bits); - num_channels_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioNetworkAdaptation::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if 
(!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.AudioNetworkAdaptation) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int32 bitrate_bps = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 frame_length_ms = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_frame_length_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &frame_length_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional float uplink_packet_loss_fraction = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (29 & 0xFF)) { - _Internal::set_has_uplink_packet_loss_fraction(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - float, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_FLOAT>( - input, &uplink_packet_loss_fraction_))); - } else { 
- goto handle_unusual; - } - break; - } - - // optional bool enable_fec = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_enable_fec(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &enable_fec_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool enable_dtx = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_enable_dtx(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &enable_dtx_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 num_channels = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (48 & 0xFF)) { - _Internal::set_has_num_channels(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &num_channels_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.AudioNetworkAdaptation) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.AudioNetworkAdaptation) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioNetworkAdaptation::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.AudioNetworkAdaptation) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - 
(void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int32 bitrate_bps = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(1, this->bitrate_bps(), output); - } - - // optional int32 frame_length_ms = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->frame_length_ms(), output); - } - - // optional float uplink_packet_loss_fraction = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteFloat(3, this->uplink_packet_loss_fraction(), output); - } - - // optional bool enable_fec = 4; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(4, this->enable_fec(), output); - } - - // optional bool enable_dtx = 5; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(5, this->enable_dtx(), output); - } - - // optional uint32 num_channels = 6; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(6, this->num_channels(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.AudioNetworkAdaptation) -} - -size_t AudioNetworkAdaptation::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.AudioNetworkAdaptation) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000003fu) { - // optional int32 bitrate_bps = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->bitrate_bps()); - } - - // optional int32 frame_length_ms = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->frame_length_ms()); - } - - // optional float uplink_packet_loss_fraction = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + 4; - } - - // optional bool enable_fec = 4; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + 1; - } - - // optional bool enable_dtx = 5; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + 1; - } - - // optional uint32 num_channels = 6; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->num_channels()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioNetworkAdaptation::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioNetworkAdaptation::MergeFrom(const AudioNetworkAdaptation& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.AudioNetworkAdaptation) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000003fu) { - if (cached_has_bits & 0x00000001u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000002u) { - frame_length_ms_ = from.frame_length_ms_; - } - if (cached_has_bits & 0x00000004u) { - uplink_packet_loss_fraction_ = from.uplink_packet_loss_fraction_; - } - if (cached_has_bits & 0x00000008u) { - enable_fec_ = from.enable_fec_; - } - if (cached_has_bits & 0x00000010u) { - enable_dtx_ = from.enable_dtx_; - } - if 
(cached_has_bits & 0x00000020u) { - num_channels_ = from.num_channels_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void AudioNetworkAdaptation::CopyFrom(const AudioNetworkAdaptation& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.AudioNetworkAdaptation) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioNetworkAdaptation::IsInitialized() const { - return true; -} - -void AudioNetworkAdaptation::InternalSwap(AudioNetworkAdaptation* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(bitrate_bps_, other->bitrate_bps_); - swap(frame_length_ms_, other->frame_length_ms_); - swap(uplink_packet_loss_fraction_, other->uplink_packet_loss_fraction_); - swap(enable_fec_, other->enable_fec_); - swap(enable_dtx_, other->enable_dtx_); - swap(num_channels_, other->num_channels_); -} - -std::string AudioNetworkAdaptation::GetTypeName() const { - return "webrtc.rtclog.AudioNetworkAdaptation"; -} - - -// =================================================================== - -void BweProbeCluster::InitAsDefaultInstance() { -} -class BweProbeCluster::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_id(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_min_packets(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_min_bytes(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } -}; - -BweProbeCluster::BweProbeCluster() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.BweProbeCluster) -} -BweProbeCluster::BweProbeCluster(const BweProbeCluster& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - 
_internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&id_, &from.id_, - static_cast(reinterpret_cast(&min_bytes_) - - reinterpret_cast(&id_)) + sizeof(min_bytes_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.BweProbeCluster) -} - -void BweProbeCluster::SharedCtor() { - ::memset(&id_, 0, static_cast( - reinterpret_cast(&min_bytes_) - - reinterpret_cast(&id_)) + sizeof(min_bytes_)); -} - -BweProbeCluster::~BweProbeCluster() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.BweProbeCluster) - SharedDtor(); -} - -void BweProbeCluster::SharedDtor() { -} - -void BweProbeCluster::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const BweProbeCluster& BweProbeCluster::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_BweProbeCluster_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void BweProbeCluster::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.BweProbeCluster) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - ::memset(&id_, 0, static_cast( - reinterpret_cast(&min_bytes_) - - reinterpret_cast(&id_)) + sizeof(min_bytes_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* BweProbeCluster::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int32 id = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_id(&has_bits); 
- id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 bitrate_bps = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 min_packets = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_min_packets(&has_bits); - min_packets_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 min_bytes = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_min_bytes(&has_bits); - min_bytes_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool BweProbeCluster::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // 
@@protoc_insertion_point(parse_start:webrtc.rtclog.BweProbeCluster) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int32 id = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 bitrate_bps = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 min_packets = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_min_packets(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &min_packets_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 min_bytes = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_min_bytes(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &min_bytes_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - 
handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.BweProbeCluster) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.BweProbeCluster) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void BweProbeCluster::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.BweProbeCluster) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int32 id = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(1, this->id(), output); - } - - // optional int32 bitrate_bps = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->bitrate_bps(), output); - } - - // optional uint32 min_packets = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->min_packets(), output); - } - - // optional uint32 min_bytes = 4; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->min_bytes(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.BweProbeCluster) -} - -size_t BweProbeCluster::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.BweProbeCluster) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits 
being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - // optional int32 id = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->id()); - } - - // optional int32 bitrate_bps = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->bitrate_bps()); - } - - // optional uint32 min_packets = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->min_packets()); - } - - // optional uint32 min_bytes = 4; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->min_bytes()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void BweProbeCluster::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void BweProbeCluster::MergeFrom(const BweProbeCluster& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.BweProbeCluster) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - if (cached_has_bits & 0x00000001u) { - id_ = from.id_; - } - if (cached_has_bits & 0x00000002u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000004u) { - min_packets_ = from.min_packets_; - } - if (cached_has_bits & 0x00000008u) { - min_bytes_ = from.min_bytes_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void BweProbeCluster::CopyFrom(const BweProbeCluster& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.BweProbeCluster) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool BweProbeCluster::IsInitialized() const { - return true; -} - -void BweProbeCluster::InternalSwap(BweProbeCluster* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(id_, other->id_); - swap(bitrate_bps_, other->bitrate_bps_); - swap(min_packets_, other->min_packets_); - swap(min_bytes_, other->min_bytes_); -} - -std::string BweProbeCluster::GetTypeName() const { - return "webrtc.rtclog.BweProbeCluster"; -} - - -// =================================================================== - -void BweProbeResult::InitAsDefaultInstance() { -} -class BweProbeResult::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_id(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_result(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -BweProbeResult::BweProbeResult() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.BweProbeResult) -} -BweProbeResult::BweProbeResult(const BweProbeResult& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&id_, &from.id_, - static_cast(reinterpret_cast(&bitrate_bps_) - - reinterpret_cast(&id_)) + sizeof(bitrate_bps_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.BweProbeResult) -} - -void BweProbeResult::SharedCtor() { - ::memset(&id_, 0, static_cast( - reinterpret_cast(&bitrate_bps_) - - reinterpret_cast(&id_)) + sizeof(bitrate_bps_)); -} - -BweProbeResult::~BweProbeResult() { - // 
@@protoc_insertion_point(destructor:webrtc.rtclog.BweProbeResult) - SharedDtor(); -} - -void BweProbeResult::SharedDtor() { -} - -void BweProbeResult::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const BweProbeResult& BweProbeResult::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_BweProbeResult_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void BweProbeResult::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.BweProbeResult) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - ::memset(&id_, 0, static_cast( - reinterpret_cast(&bitrate_bps_) - - reinterpret_cast(&id_)) + sizeof(bitrate_bps_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* BweProbeResult::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int32 id = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_id(&has_bits); - id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.BweProbeResult.ResultType result = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if 
(PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::BweProbeResult_ResultType_IsValid(val))) { - set_result(static_cast<::webrtc::rtclog::BweProbeResult_ResultType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional int32 bitrate_bps = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool BweProbeResult::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.BweProbeResult) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int32 id = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - 
_Internal::set_has_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.BweProbeResult.ResultType result = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::BweProbeResult_ResultType_IsValid(value)) { - set_result(static_cast< ::webrtc::rtclog::BweProbeResult_ResultType >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional int32 bitrate_bps = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.BweProbeResult) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.BweProbeResult) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void BweProbeResult::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // 
@@protoc_insertion_point(serialize_start:webrtc.rtclog.BweProbeResult) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int32 id = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(1, this->id(), output); - } - - // optional .webrtc.rtclog.BweProbeResult.ResultType result = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 2, this->result(), output); - } - - // optional int32 bitrate_bps = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(3, this->bitrate_bps(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.BweProbeResult) -} - -size_t BweProbeResult::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.BweProbeResult) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional int32 id = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->id()); - } - - // optional .webrtc.rtclog.BweProbeResult.ResultType result = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->result()); - } - - // optional int32 bitrate_bps = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->bitrate_bps()); - } - - } - int cached_size = 
::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void BweProbeResult::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void BweProbeResult::MergeFrom(const BweProbeResult& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.BweProbeResult) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - id_ = from.id_; - } - if (cached_has_bits & 0x00000002u) { - result_ = from.result_; - } - if (cached_has_bits & 0x00000004u) { - bitrate_bps_ = from.bitrate_bps_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void BweProbeResult::CopyFrom(const BweProbeResult& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.BweProbeResult) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool BweProbeResult::IsInitialized() const { - return true; -} - -void BweProbeResult::InternalSwap(BweProbeResult* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(id_, other->id_); - swap(result_, other->result_); - swap(bitrate_bps_, other->bitrate_bps_); -} - -std::string BweProbeResult::GetTypeName() const { - return "webrtc.rtclog.BweProbeResult"; -} - - -// =================================================================== - -void AlrState::InitAsDefaultInstance() { -} -class AlrState::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_in_alr(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -AlrState::AlrState() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), 
_internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.AlrState) -} -AlrState::AlrState(const AlrState& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - in_alr_ = from.in_alr_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.AlrState) -} - -void AlrState::SharedCtor() { - in_alr_ = false; -} - -AlrState::~AlrState() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.AlrState) - SharedDtor(); -} - -void AlrState::SharedDtor() { -} - -void AlrState::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AlrState& AlrState::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AlrState_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void AlrState::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.AlrState) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - in_alr_ = false; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AlrState::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional bool in_alr = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_in_alr(&has_bits); - in_alr_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) 
{ - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AlrState::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.AlrState) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional bool in_alr = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_in_alr(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &in_alr_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.AlrState) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.AlrState) - return false; -#undef DO_ -} -#endif // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AlrState::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.AlrState) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional bool in_alr = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(1, this->in_alr(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.AlrState) -} - -size_t AlrState::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.AlrState) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // optional bool in_alr = 1; - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + 1; - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AlrState::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AlrState::MergeFrom(const AlrState& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.AlrState) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_in_alr()) { - set_in_alr(from.in_alr()); - } -} - -void AlrState::CopyFrom(const AlrState& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.AlrState) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AlrState::IsInitialized() const { - return true; -} - -void AlrState::InternalSwap(AlrState* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(in_alr_, other->in_alr_); -} - -std::string AlrState::GetTypeName() const { - return "webrtc.rtclog.AlrState"; -} - - -// =================================================================== - -void IceCandidatePairConfig::InitAsDefaultInstance() { -} -class IceCandidatePairConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_config_type(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_candidate_pair_id(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_local_candidate_type(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_local_relay_protocol(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_local_network_type(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_local_address_family(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_remote_candidate_type(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_remote_address_family(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_candidate_pair_protocol(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } -}; - -IceCandidatePairConfig::IceCandidatePairConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.IceCandidatePairConfig) -} -IceCandidatePairConfig::IceCandidatePairConfig(const IceCandidatePairConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - 
_internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&config_type_, &from.config_type_, - static_cast(reinterpret_cast(&candidate_pair_protocol_) - - reinterpret_cast(&config_type_)) + sizeof(candidate_pair_protocol_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.IceCandidatePairConfig) -} - -void IceCandidatePairConfig::SharedCtor() { - ::memset(&config_type_, 0, static_cast( - reinterpret_cast(&candidate_pair_protocol_) - - reinterpret_cast(&config_type_)) + sizeof(candidate_pair_protocol_)); -} - -IceCandidatePairConfig::~IceCandidatePairConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.IceCandidatePairConfig) - SharedDtor(); -} - -void IceCandidatePairConfig::SharedDtor() { -} - -void IceCandidatePairConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const IceCandidatePairConfig& IceCandidatePairConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_IceCandidatePairConfig_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void IceCandidatePairConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog.IceCandidatePairConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - ::memset(&config_type_, 0, static_cast( - reinterpret_cast(&remote_address_family_) - - reinterpret_cast(&config_type_)) + sizeof(remote_address_family_)); - } - candidate_pair_protocol_ = 0; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* IceCandidatePairConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - 
::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(val))) { - set_config_type(static_cast<::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(1, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional uint32 candidate_pair_id = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_candidate_pair_id(&has_bits); - candidate_pair_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType local_candidate_type = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType_IsValid(val))) { - set_local_candidate_type(static_cast<::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(3, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol local_relay_protocol = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = 
::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_Protocol_IsValid(val))) { - set_local_relay_protocol(static_cast<::webrtc::rtclog::IceCandidatePairConfig_Protocol>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(4, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig.NetworkType local_network_type = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_NetworkType_IsValid(val))) { - set_local_network_type(static_cast<::webrtc::rtclog::IceCandidatePairConfig_NetworkType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(5, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily local_address_family = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 48)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily_IsValid(val))) { - set_local_address_family(static_cast<::webrtc::rtclog::IceCandidatePairConfig_AddressFamily>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(6, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 7; - case 7: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 56)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if 
(PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType_IsValid(val))) { - set_remote_candidate_type(static_cast<::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(7, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily remote_address_family = 8; - case 8: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 64)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily_IsValid(val))) { - set_remote_address_family(static_cast<::webrtc::rtclog::IceCandidatePairConfig_AddressFamily>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(8, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol candidate_pair_protocol = 9; - case 9: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 72)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairConfig_Protocol_IsValid(val))) { - set_candidate_pair_protocol(static_cast<::webrtc::rtclog::IceCandidatePairConfig_Protocol>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(9, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool IceCandidatePairConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.IceCandidatePairConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(value)) { - set_config_type(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType >(value)); - } else { - unknown_fields_stream.WriteVarint32(8u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 candidate_pair_id = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_candidate_pair_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &candidate_pair_id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType local_candidate_type = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType_IsValid(value)) { - set_local_candidate_type(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType >(value)); - } else { - unknown_fields_stream.WriteVarint32(24u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol local_relay_protocol = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairConfig_Protocol_IsValid(value)) { - set_local_relay_protocol(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_Protocol >(value)); - } else { - unknown_fields_stream.WriteVarint32(32u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.NetworkType local_network_type = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); 
- if (::webrtc::rtclog::IceCandidatePairConfig_NetworkType_IsValid(value)) { - set_local_network_type(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_NetworkType >(value)); - } else { - unknown_fields_stream.WriteVarint32(40u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily local_address_family = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (48 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairConfig_AddressFamily_IsValid(value)) { - set_local_address_family(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily >(value)); - } else { - unknown_fields_stream.WriteVarint32(48u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 7; - case 7: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (56 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType_IsValid(value)) { - set_remote_candidate_type(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType >(value)); - } else { - unknown_fields_stream.WriteVarint32(56u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily remote_address_family = 8; - case 8: 
{ - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (64 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairConfig_AddressFamily_IsValid(value)) { - set_remote_address_family(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily >(value)); - } else { - unknown_fields_stream.WriteVarint32(64u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol candidate_pair_protocol = 9; - case 9: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (72 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairConfig_Protocol_IsValid(value)) { - set_candidate_pair_protocol(static_cast< ::webrtc::rtclog::IceCandidatePairConfig_Protocol >(value)); - } else { - unknown_fields_stream.WriteVarint32(72u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.IceCandidatePairConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.IceCandidatePairConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void IceCandidatePairConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - 
// @@protoc_insertion_point(serialize_start:webrtc.rtclog.IceCandidatePairConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 1, this->config_type(), output); - } - - // optional uint32 candidate_pair_id = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->candidate_pair_id(), output); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType local_candidate_type = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 3, this->local_candidate_type(), output); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol local_relay_protocol = 4; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 4, this->local_relay_protocol(), output); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.NetworkType local_network_type = 5; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 5, this->local_network_type(), output); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily local_address_family = 6; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 6, this->local_address_family(), output); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 7; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 7, this->remote_candidate_type(), output); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily remote_address_family = 8; - if (cached_has_bits & 0x00000080u) { - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 8, this->remote_address_family(), output); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol candidate_pair_protocol = 9; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 9, this->candidate_pair_protocol(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.IceCandidatePairConfig) -} - -size_t IceCandidatePairConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.IceCandidatePairConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->config_type()); - } - - // optional uint32 candidate_pair_id = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->candidate_pair_id()); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType local_candidate_type = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_candidate_type()); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol local_relay_protocol = 4; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_relay_protocol()); - } - - // optional 
.webrtc.rtclog.IceCandidatePairConfig.NetworkType local_network_type = 5; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_network_type()); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily local_address_family = 6; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_address_family()); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 7; - if (cached_has_bits & 0x00000040u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->remote_candidate_type()); - } - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily remote_address_family = 8; - if (cached_has_bits & 0x00000080u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->remote_address_family()); - } - - } - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol candidate_pair_protocol = 9; - if (cached_has_bits & 0x00000100u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->candidate_pair_protocol()); - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void IceCandidatePairConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void IceCandidatePairConfig::MergeFrom(const IceCandidatePairConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.IceCandidatePairConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if 
(cached_has_bits & 0x00000001u) { - config_type_ = from.config_type_; - } - if (cached_has_bits & 0x00000002u) { - candidate_pair_id_ = from.candidate_pair_id_; - } - if (cached_has_bits & 0x00000004u) { - local_candidate_type_ = from.local_candidate_type_; - } - if (cached_has_bits & 0x00000008u) { - local_relay_protocol_ = from.local_relay_protocol_; - } - if (cached_has_bits & 0x00000010u) { - local_network_type_ = from.local_network_type_; - } - if (cached_has_bits & 0x00000020u) { - local_address_family_ = from.local_address_family_; - } - if (cached_has_bits & 0x00000040u) { - remote_candidate_type_ = from.remote_candidate_type_; - } - if (cached_has_bits & 0x00000080u) { - remote_address_family_ = from.remote_address_family_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00000100u) { - set_candidate_pair_protocol(from.candidate_pair_protocol()); - } -} - -void IceCandidatePairConfig::CopyFrom(const IceCandidatePairConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.IceCandidatePairConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool IceCandidatePairConfig::IsInitialized() const { - return true; -} - -void IceCandidatePairConfig::InternalSwap(IceCandidatePairConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(config_type_, other->config_type_); - swap(candidate_pair_id_, other->candidate_pair_id_); - swap(local_candidate_type_, other->local_candidate_type_); - swap(local_relay_protocol_, other->local_relay_protocol_); - swap(local_network_type_, other->local_network_type_); - swap(local_address_family_, other->local_address_family_); - swap(remote_candidate_type_, other->remote_candidate_type_); - swap(remote_address_family_, other->remote_address_family_); - swap(candidate_pair_protocol_, other->candidate_pair_protocol_); -} - -std::string IceCandidatePairConfig::GetTypeName() 
const { - return "webrtc.rtclog.IceCandidatePairConfig"; -} - - -// =================================================================== - -void IceCandidatePairEvent::InitAsDefaultInstance() { -} -class IceCandidatePairEvent::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_event_type(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_candidate_pair_id(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -IceCandidatePairEvent::IceCandidatePairEvent() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog.IceCandidatePairEvent) -} -IceCandidatePairEvent::IceCandidatePairEvent(const IceCandidatePairEvent& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&event_type_, &from.event_type_, - static_cast(reinterpret_cast(&candidate_pair_id_) - - reinterpret_cast(&event_type_)) + sizeof(candidate_pair_id_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog.IceCandidatePairEvent) -} - -void IceCandidatePairEvent::SharedCtor() { - ::memset(&event_type_, 0, static_cast( - reinterpret_cast(&candidate_pair_id_) - - reinterpret_cast(&event_type_)) + sizeof(candidate_pair_id_)); -} - -IceCandidatePairEvent::~IceCandidatePairEvent() { - // @@protoc_insertion_point(destructor:webrtc.rtclog.IceCandidatePairEvent) - SharedDtor(); -} - -void IceCandidatePairEvent::SharedDtor() { -} - -void IceCandidatePairEvent::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const IceCandidatePairEvent& IceCandidatePairEvent::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_IceCandidatePairEvent_rtc_5fevent_5flog_2eproto.base); - return *internal_default_instance(); -} - - -void IceCandidatePairEvent::Clear() { -// 
@@protoc_insertion_point(message_clear_start:webrtc.rtclog.IceCandidatePairEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(&event_type_, 0, static_cast( - reinterpret_cast(&candidate_pair_id_) - - reinterpret_cast(&event_type_)) + sizeof(candidate_pair_id_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* IceCandidatePairEvent::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional .webrtc.rtclog.IceCandidatePairEvent.IceCandidatePairEventType event_type = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType_IsValid(val))) { - set_event_type(static_cast<::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(1, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional uint32 candidate_pair_id = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_candidate_pair_id(&has_bits); - candidate_pair_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - 
ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool IceCandidatePairEvent::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog.IceCandidatePairEvent) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional .webrtc.rtclog.IceCandidatePairEvent.IceCandidatePairEventType event_type = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType_IsValid(value)) { - set_event_type(static_cast< ::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType >(value)); - } else { - unknown_fields_stream.WriteVarint32(8u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional 
uint32 candidate_pair_id = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_candidate_pair_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &candidate_pair_id_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog.IceCandidatePairEvent) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog.IceCandidatePairEvent) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void IceCandidatePairEvent::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog.IceCandidatePairEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional .webrtc.rtclog.IceCandidatePairEvent.IceCandidatePairEventType event_type = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 1, this->event_type(), output); - } - - // optional uint32 candidate_pair_id = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->candidate_pair_id(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog.IceCandidatePairEvent) -} - -size_t IceCandidatePairEvent::ByteSizeLong() const { -// 
@@protoc_insertion_point(message_byte_size_start:webrtc.rtclog.IceCandidatePairEvent) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional .webrtc.rtclog.IceCandidatePairEvent.IceCandidatePairEventType event_type = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->event_type()); - } - - // optional uint32 candidate_pair_id = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->candidate_pair_id()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void IceCandidatePairEvent::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void IceCandidatePairEvent::MergeFrom(const IceCandidatePairEvent& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog.IceCandidatePairEvent) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - event_type_ = from.event_type_; - } - if (cached_has_bits & 0x00000002u) { - candidate_pair_id_ = from.candidate_pair_id_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void IceCandidatePairEvent::CopyFrom(const IceCandidatePairEvent& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog.IceCandidatePairEvent) - if (&from == this) 
return; - Clear(); - MergeFrom(from); -} - -bool IceCandidatePairEvent::IsInitialized() const { - return true; -} - -void IceCandidatePairEvent::InternalSwap(IceCandidatePairEvent* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(event_type_, other->event_type_); - swap(candidate_pair_id_, other->candidate_pair_id_); -} - -std::string IceCandidatePairEvent::GetTypeName() const { - return "webrtc.rtclog.IceCandidatePairEvent"; -} - - -// @@protoc_insertion_point(namespace_scope) -} // namespace rtclog -} // namespace webrtc -PROTOBUF_NAMESPACE_OPEN -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::EventStream* Arena::CreateMaybeMessage< ::webrtc::rtclog::EventStream >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::EventStream >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::Event* Arena::CreateMaybeMessage< ::webrtc::rtclog::Event >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::Event >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::RtpPacket* Arena::CreateMaybeMessage< ::webrtc::rtclog::RtpPacket >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::RtpPacket >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::RtcpPacket* Arena::CreateMaybeMessage< ::webrtc::rtclog::RtcpPacket >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::RtcpPacket >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::AudioPlayoutEvent* Arena::CreateMaybeMessage< ::webrtc::rtclog::AudioPlayoutEvent >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::AudioPlayoutEvent >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::LossBasedBweUpdate* Arena::CreateMaybeMessage< ::webrtc::rtclog::LossBasedBweUpdate >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::LossBasedBweUpdate >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::DelayBasedBweUpdate* 
Arena::CreateMaybeMessage< ::webrtc::rtclog::DelayBasedBweUpdate >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::DelayBasedBweUpdate >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::VideoReceiveConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog::VideoReceiveConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::VideoReceiveConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::DecoderConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog::DecoderConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::DecoderConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::RtpHeaderExtension* Arena::CreateMaybeMessage< ::webrtc::rtclog::RtpHeaderExtension >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::RtpHeaderExtension >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::RtxConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog::RtxConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::RtxConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::RtxMap* Arena::CreateMaybeMessage< ::webrtc::rtclog::RtxMap >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::RtxMap >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::VideoSendConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog::VideoSendConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::VideoSendConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::EncoderConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog::EncoderConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::EncoderConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::AudioReceiveConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog::AudioReceiveConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::AudioReceiveConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::AudioSendConfig* 
Arena::CreateMaybeMessage< ::webrtc::rtclog::AudioSendConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::AudioSendConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::AudioNetworkAdaptation* Arena::CreateMaybeMessage< ::webrtc::rtclog::AudioNetworkAdaptation >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::AudioNetworkAdaptation >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::BweProbeCluster* Arena::CreateMaybeMessage< ::webrtc::rtclog::BweProbeCluster >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::BweProbeCluster >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::BweProbeResult* Arena::CreateMaybeMessage< ::webrtc::rtclog::BweProbeResult >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::BweProbeResult >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::AlrState* Arena::CreateMaybeMessage< ::webrtc::rtclog::AlrState >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::AlrState >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::IceCandidatePairConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog::IceCandidatePairConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::IceCandidatePairConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog::IceCandidatePairEvent* Arena::CreateMaybeMessage< ::webrtc::rtclog::IceCandidatePairEvent >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog::IceCandidatePairEvent >(arena); -} -PROTOBUF_NAMESPACE_CLOSE - -// @@protoc_insertion_point(global_scope) -#include diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log.pb.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log.pb.h deleted file mode 100644 index d92dbb608..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log.pb.h +++ /dev/null @@ -1,6496 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: rtc_event_log.proto - -#ifndef GOOGLE_PROTOBUF_INCLUDED_rtc_5fevent_5flog_2eproto -#define GOOGLE_PROTOBUF_INCLUDED_rtc_5fevent_5flog_2eproto - -#include -#include - -#include -#if PROTOBUF_VERSION < 3009000 -#error This file was generated by a newer version of protoc which is -#error incompatible with your Protocol Buffer headers. Please update -#error your headers. -#endif -#if 3009000 < PROTOBUF_MIN_PROTOC_VERSION -#error This file was generated by an older version of protoc which is -#error incompatible with your Protocol Buffer headers. Please -#error regenerate this file with a newer version of protoc. -#endif - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include // IWYU pragma: export -#include // IWYU pragma: export -#include -// @@protoc_insertion_point(includes) -#include -#define PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog_2eproto -PROTOBUF_NAMESPACE_OPEN -namespace internal { -class AnyMetadata; -} // namespace internal -PROTOBUF_NAMESPACE_CLOSE - -// Internal implementation detail -- do not use these members. 
-struct TableStruct_rtc_5fevent_5flog_2eproto { - static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[22] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[]; - static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[]; - static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[]; -}; -namespace webrtc { -namespace rtclog { -class AlrState; -class AlrStateDefaultTypeInternal; -extern AlrStateDefaultTypeInternal _AlrState_default_instance_; -class AudioNetworkAdaptation; -class AudioNetworkAdaptationDefaultTypeInternal; -extern AudioNetworkAdaptationDefaultTypeInternal _AudioNetworkAdaptation_default_instance_; -class AudioPlayoutEvent; -class AudioPlayoutEventDefaultTypeInternal; -extern AudioPlayoutEventDefaultTypeInternal _AudioPlayoutEvent_default_instance_; -class AudioReceiveConfig; -class AudioReceiveConfigDefaultTypeInternal; -extern AudioReceiveConfigDefaultTypeInternal _AudioReceiveConfig_default_instance_; -class AudioSendConfig; -class AudioSendConfigDefaultTypeInternal; -extern AudioSendConfigDefaultTypeInternal _AudioSendConfig_default_instance_; -class BweProbeCluster; -class BweProbeClusterDefaultTypeInternal; -extern BweProbeClusterDefaultTypeInternal _BweProbeCluster_default_instance_; -class BweProbeResult; -class BweProbeResultDefaultTypeInternal; -extern BweProbeResultDefaultTypeInternal _BweProbeResult_default_instance_; -class DecoderConfig; -class DecoderConfigDefaultTypeInternal; -extern DecoderConfigDefaultTypeInternal _DecoderConfig_default_instance_; -class DelayBasedBweUpdate; -class DelayBasedBweUpdateDefaultTypeInternal; -extern DelayBasedBweUpdateDefaultTypeInternal 
_DelayBasedBweUpdate_default_instance_; -class EncoderConfig; -class EncoderConfigDefaultTypeInternal; -extern EncoderConfigDefaultTypeInternal _EncoderConfig_default_instance_; -class Event; -class EventDefaultTypeInternal; -extern EventDefaultTypeInternal _Event_default_instance_; -class EventStream; -class EventStreamDefaultTypeInternal; -extern EventStreamDefaultTypeInternal _EventStream_default_instance_; -class IceCandidatePairConfig; -class IceCandidatePairConfigDefaultTypeInternal; -extern IceCandidatePairConfigDefaultTypeInternal _IceCandidatePairConfig_default_instance_; -class IceCandidatePairEvent; -class IceCandidatePairEventDefaultTypeInternal; -extern IceCandidatePairEventDefaultTypeInternal _IceCandidatePairEvent_default_instance_; -class LossBasedBweUpdate; -class LossBasedBweUpdateDefaultTypeInternal; -extern LossBasedBweUpdateDefaultTypeInternal _LossBasedBweUpdate_default_instance_; -class RtcpPacket; -class RtcpPacketDefaultTypeInternal; -extern RtcpPacketDefaultTypeInternal _RtcpPacket_default_instance_; -class RtpHeaderExtension; -class RtpHeaderExtensionDefaultTypeInternal; -extern RtpHeaderExtensionDefaultTypeInternal _RtpHeaderExtension_default_instance_; -class RtpPacket; -class RtpPacketDefaultTypeInternal; -extern RtpPacketDefaultTypeInternal _RtpPacket_default_instance_; -class RtxConfig; -class RtxConfigDefaultTypeInternal; -extern RtxConfigDefaultTypeInternal _RtxConfig_default_instance_; -class RtxMap; -class RtxMapDefaultTypeInternal; -extern RtxMapDefaultTypeInternal _RtxMap_default_instance_; -class VideoReceiveConfig; -class VideoReceiveConfigDefaultTypeInternal; -extern VideoReceiveConfigDefaultTypeInternal _VideoReceiveConfig_default_instance_; -class VideoSendConfig; -class VideoSendConfigDefaultTypeInternal; -extern VideoSendConfigDefaultTypeInternal _VideoSendConfig_default_instance_; -} // namespace rtclog -} // namespace webrtc -PROTOBUF_NAMESPACE_OPEN -template<> ::webrtc::rtclog::AlrState* 
Arena::CreateMaybeMessage<::webrtc::rtclog::AlrState>(Arena*); -template<> ::webrtc::rtclog::AudioNetworkAdaptation* Arena::CreateMaybeMessage<::webrtc::rtclog::AudioNetworkAdaptation>(Arena*); -template<> ::webrtc::rtclog::AudioPlayoutEvent* Arena::CreateMaybeMessage<::webrtc::rtclog::AudioPlayoutEvent>(Arena*); -template<> ::webrtc::rtclog::AudioReceiveConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::AudioReceiveConfig>(Arena*); -template<> ::webrtc::rtclog::AudioSendConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::AudioSendConfig>(Arena*); -template<> ::webrtc::rtclog::BweProbeCluster* Arena::CreateMaybeMessage<::webrtc::rtclog::BweProbeCluster>(Arena*); -template<> ::webrtc::rtclog::BweProbeResult* Arena::CreateMaybeMessage<::webrtc::rtclog::BweProbeResult>(Arena*); -template<> ::webrtc::rtclog::DecoderConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::DecoderConfig>(Arena*); -template<> ::webrtc::rtclog::DelayBasedBweUpdate* Arena::CreateMaybeMessage<::webrtc::rtclog::DelayBasedBweUpdate>(Arena*); -template<> ::webrtc::rtclog::EncoderConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::EncoderConfig>(Arena*); -template<> ::webrtc::rtclog::Event* Arena::CreateMaybeMessage<::webrtc::rtclog::Event>(Arena*); -template<> ::webrtc::rtclog::EventStream* Arena::CreateMaybeMessage<::webrtc::rtclog::EventStream>(Arena*); -template<> ::webrtc::rtclog::IceCandidatePairConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::IceCandidatePairConfig>(Arena*); -template<> ::webrtc::rtclog::IceCandidatePairEvent* Arena::CreateMaybeMessage<::webrtc::rtclog::IceCandidatePairEvent>(Arena*); -template<> ::webrtc::rtclog::LossBasedBweUpdate* Arena::CreateMaybeMessage<::webrtc::rtclog::LossBasedBweUpdate>(Arena*); -template<> ::webrtc::rtclog::RtcpPacket* Arena::CreateMaybeMessage<::webrtc::rtclog::RtcpPacket>(Arena*); -template<> ::webrtc::rtclog::RtpHeaderExtension* Arena::CreateMaybeMessage<::webrtc::rtclog::RtpHeaderExtension>(Arena*); -template<> ::webrtc::rtclog::RtpPacket* 
Arena::CreateMaybeMessage<::webrtc::rtclog::RtpPacket>(Arena*); -template<> ::webrtc::rtclog::RtxConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::RtxConfig>(Arena*); -template<> ::webrtc::rtclog::RtxMap* Arena::CreateMaybeMessage<::webrtc::rtclog::RtxMap>(Arena*); -template<> ::webrtc::rtclog::VideoReceiveConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::VideoReceiveConfig>(Arena*); -template<> ::webrtc::rtclog::VideoSendConfig* Arena::CreateMaybeMessage<::webrtc::rtclog::VideoSendConfig>(Arena*); -PROTOBUF_NAMESPACE_CLOSE -namespace webrtc { -namespace rtclog { - -enum Event_EventType : int { - Event_EventType_UNKNOWN_EVENT = 0, - Event_EventType_LOG_START = 1, - Event_EventType_LOG_END = 2, - Event_EventType_RTP_EVENT = 3, - Event_EventType_RTCP_EVENT = 4, - Event_EventType_AUDIO_PLAYOUT_EVENT = 5, - Event_EventType_LOSS_BASED_BWE_UPDATE = 6, - Event_EventType_DELAY_BASED_BWE_UPDATE = 7, - Event_EventType_VIDEO_RECEIVER_CONFIG_EVENT = 8, - Event_EventType_VIDEO_SENDER_CONFIG_EVENT = 9, - Event_EventType_AUDIO_RECEIVER_CONFIG_EVENT = 10, - Event_EventType_AUDIO_SENDER_CONFIG_EVENT = 11, - Event_EventType_AUDIO_NETWORK_ADAPTATION_EVENT = 16, - Event_EventType_BWE_PROBE_CLUSTER_CREATED_EVENT = 17, - Event_EventType_BWE_PROBE_RESULT_EVENT = 18, - Event_EventType_ALR_STATE_EVENT = 19, - Event_EventType_ICE_CANDIDATE_PAIR_CONFIG = 20, - Event_EventType_ICE_CANDIDATE_PAIR_EVENT = 21 -}; -bool Event_EventType_IsValid(int value); -constexpr Event_EventType Event_EventType_EventType_MIN = Event_EventType_UNKNOWN_EVENT; -constexpr Event_EventType Event_EventType_EventType_MAX = Event_EventType_ICE_CANDIDATE_PAIR_EVENT; -constexpr int Event_EventType_EventType_ARRAYSIZE = Event_EventType_EventType_MAX + 1; - -const std::string& Event_EventType_Name(Event_EventType value); -template -inline const std::string& Event_EventType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function 
Event_EventType_Name."); - return Event_EventType_Name(static_cast(enum_t_value)); -} -bool Event_EventType_Parse( - const std::string& name, Event_EventType* value); -enum DelayBasedBweUpdate_DetectorState : int { - DelayBasedBweUpdate_DetectorState_BWE_NORMAL = 0, - DelayBasedBweUpdate_DetectorState_BWE_UNDERUSING = 1, - DelayBasedBweUpdate_DetectorState_BWE_OVERUSING = 2 -}; -bool DelayBasedBweUpdate_DetectorState_IsValid(int value); -constexpr DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate_DetectorState_DetectorState_MIN = DelayBasedBweUpdate_DetectorState_BWE_NORMAL; -constexpr DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate_DetectorState_DetectorState_MAX = DelayBasedBweUpdate_DetectorState_BWE_OVERUSING; -constexpr int DelayBasedBweUpdate_DetectorState_DetectorState_ARRAYSIZE = DelayBasedBweUpdate_DetectorState_DetectorState_MAX + 1; - -const std::string& DelayBasedBweUpdate_DetectorState_Name(DelayBasedBweUpdate_DetectorState value); -template -inline const std::string& DelayBasedBweUpdate_DetectorState_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function DelayBasedBweUpdate_DetectorState_Name."); - return DelayBasedBweUpdate_DetectorState_Name(static_cast(enum_t_value)); -} -bool DelayBasedBweUpdate_DetectorState_Parse( - const std::string& name, DelayBasedBweUpdate_DetectorState* value); -enum VideoReceiveConfig_RtcpMode : int { - VideoReceiveConfig_RtcpMode_RTCP_COMPOUND = 1, - VideoReceiveConfig_RtcpMode_RTCP_REDUCEDSIZE = 2 -}; -bool VideoReceiveConfig_RtcpMode_IsValid(int value); -constexpr VideoReceiveConfig_RtcpMode VideoReceiveConfig_RtcpMode_RtcpMode_MIN = VideoReceiveConfig_RtcpMode_RTCP_COMPOUND; -constexpr VideoReceiveConfig_RtcpMode VideoReceiveConfig_RtcpMode_RtcpMode_MAX = VideoReceiveConfig_RtcpMode_RTCP_REDUCEDSIZE; -constexpr int VideoReceiveConfig_RtcpMode_RtcpMode_ARRAYSIZE = VideoReceiveConfig_RtcpMode_RtcpMode_MAX + 1; - -const std::string& 
VideoReceiveConfig_RtcpMode_Name(VideoReceiveConfig_RtcpMode value); -template -inline const std::string& VideoReceiveConfig_RtcpMode_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function VideoReceiveConfig_RtcpMode_Name."); - return VideoReceiveConfig_RtcpMode_Name(static_cast(enum_t_value)); -} -bool VideoReceiveConfig_RtcpMode_Parse( - const std::string& name, VideoReceiveConfig_RtcpMode* value); -enum BweProbeResult_ResultType : int { - BweProbeResult_ResultType_SUCCESS = 0, - BweProbeResult_ResultType_INVALID_SEND_RECEIVE_INTERVAL = 1, - BweProbeResult_ResultType_INVALID_SEND_RECEIVE_RATIO = 2, - BweProbeResult_ResultType_TIMEOUT = 3 -}; -bool BweProbeResult_ResultType_IsValid(int value); -constexpr BweProbeResult_ResultType BweProbeResult_ResultType_ResultType_MIN = BweProbeResult_ResultType_SUCCESS; -constexpr BweProbeResult_ResultType BweProbeResult_ResultType_ResultType_MAX = BweProbeResult_ResultType_TIMEOUT; -constexpr int BweProbeResult_ResultType_ResultType_ARRAYSIZE = BweProbeResult_ResultType_ResultType_MAX + 1; - -const std::string& BweProbeResult_ResultType_Name(BweProbeResult_ResultType value); -template -inline const std::string& BweProbeResult_ResultType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function BweProbeResult_ResultType_Name."); - return BweProbeResult_ResultType_Name(static_cast(enum_t_value)); -} -bool BweProbeResult_ResultType_Parse( - const std::string& name, BweProbeResult_ResultType* value); -enum IceCandidatePairConfig_IceCandidatePairConfigType : int { - IceCandidatePairConfig_IceCandidatePairConfigType_ADDED = 0, - IceCandidatePairConfig_IceCandidatePairConfigType_UPDATED = 1, - IceCandidatePairConfig_IceCandidatePairConfigType_DESTROYED = 2, - IceCandidatePairConfig_IceCandidatePairConfigType_SELECTED = 3 -}; -bool 
IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(int value); -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MIN = IceCandidatePairConfig_IceCandidatePairConfigType_ADDED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MAX = IceCandidatePairConfig_IceCandidatePairConfigType_SELECTED; -constexpr int IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_ARRAYSIZE = IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MAX + 1; - -const std::string& IceCandidatePairConfig_IceCandidatePairConfigType_Name(IceCandidatePairConfig_IceCandidatePairConfigType value); -template -inline const std::string& IceCandidatePairConfig_IceCandidatePairConfigType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_IceCandidatePairConfigType_Name."); - return IceCandidatePairConfig_IceCandidatePairConfigType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_IceCandidatePairConfigType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidatePairConfigType* value); -enum IceCandidatePairConfig_IceCandidateType : int { - IceCandidatePairConfig_IceCandidateType_LOCAL = 0, - IceCandidatePairConfig_IceCandidateType_STUN = 1, - IceCandidatePairConfig_IceCandidateType_PRFLX = 2, - IceCandidatePairConfig_IceCandidateType_RELAY = 3, - IceCandidatePairConfig_IceCandidateType_UNKNOWN_CANDIDATE_TYPE = 4 -}; -bool IceCandidatePairConfig_IceCandidateType_IsValid(int value); -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig_IceCandidateType_IceCandidateType_MIN = IceCandidatePairConfig_IceCandidateType_LOCAL; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig_IceCandidateType_IceCandidateType_MAX = 
IceCandidatePairConfig_IceCandidateType_UNKNOWN_CANDIDATE_TYPE; -constexpr int IceCandidatePairConfig_IceCandidateType_IceCandidateType_ARRAYSIZE = IceCandidatePairConfig_IceCandidateType_IceCandidateType_MAX + 1; - -const std::string& IceCandidatePairConfig_IceCandidateType_Name(IceCandidatePairConfig_IceCandidateType value); -template -inline const std::string& IceCandidatePairConfig_IceCandidateType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_IceCandidateType_Name."); - return IceCandidatePairConfig_IceCandidateType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_IceCandidateType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidateType* value); -enum IceCandidatePairConfig_Protocol : int { - IceCandidatePairConfig_Protocol_UDP = 0, - IceCandidatePairConfig_Protocol_TCP = 1, - IceCandidatePairConfig_Protocol_SSLTCP = 2, - IceCandidatePairConfig_Protocol_TLS = 3, - IceCandidatePairConfig_Protocol_UNKNOWN_PROTOCOL = 4 -}; -bool IceCandidatePairConfig_Protocol_IsValid(int value); -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig_Protocol_Protocol_MIN = IceCandidatePairConfig_Protocol_UDP; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig_Protocol_Protocol_MAX = IceCandidatePairConfig_Protocol_UNKNOWN_PROTOCOL; -constexpr int IceCandidatePairConfig_Protocol_Protocol_ARRAYSIZE = IceCandidatePairConfig_Protocol_Protocol_MAX + 1; - -const std::string& IceCandidatePairConfig_Protocol_Name(IceCandidatePairConfig_Protocol value); -template -inline const std::string& IceCandidatePairConfig_Protocol_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_Protocol_Name."); - return IceCandidatePairConfig_Protocol_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_Protocol_Parse( - const 
std::string& name, IceCandidatePairConfig_Protocol* value); -enum IceCandidatePairConfig_AddressFamily : int { - IceCandidatePairConfig_AddressFamily_IPV4 = 0, - IceCandidatePairConfig_AddressFamily_IPV6 = 1, - IceCandidatePairConfig_AddressFamily_UNKNOWN_ADDRESS_FAMILY = 2 -}; -bool IceCandidatePairConfig_AddressFamily_IsValid(int value); -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig_AddressFamily_AddressFamily_MIN = IceCandidatePairConfig_AddressFamily_IPV4; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig_AddressFamily_AddressFamily_MAX = IceCandidatePairConfig_AddressFamily_UNKNOWN_ADDRESS_FAMILY; -constexpr int IceCandidatePairConfig_AddressFamily_AddressFamily_ARRAYSIZE = IceCandidatePairConfig_AddressFamily_AddressFamily_MAX + 1; - -const std::string& IceCandidatePairConfig_AddressFamily_Name(IceCandidatePairConfig_AddressFamily value); -template -inline const std::string& IceCandidatePairConfig_AddressFamily_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_AddressFamily_Name."); - return IceCandidatePairConfig_AddressFamily_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_AddressFamily_Parse( - const std::string& name, IceCandidatePairConfig_AddressFamily* value); -enum IceCandidatePairConfig_NetworkType : int { - IceCandidatePairConfig_NetworkType_ETHERNET = 0, - IceCandidatePairConfig_NetworkType_LOOPBACK = 1, - IceCandidatePairConfig_NetworkType_WIFI = 2, - IceCandidatePairConfig_NetworkType_VPN = 3, - IceCandidatePairConfig_NetworkType_CELLULAR = 4, - IceCandidatePairConfig_NetworkType_UNKNOWN_NETWORK_TYPE = 5 -}; -bool IceCandidatePairConfig_NetworkType_IsValid(int value); -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig_NetworkType_NetworkType_MIN = IceCandidatePairConfig_NetworkType_ETHERNET; -constexpr IceCandidatePairConfig_NetworkType 
IceCandidatePairConfig_NetworkType_NetworkType_MAX = IceCandidatePairConfig_NetworkType_UNKNOWN_NETWORK_TYPE; -constexpr int IceCandidatePairConfig_NetworkType_NetworkType_ARRAYSIZE = IceCandidatePairConfig_NetworkType_NetworkType_MAX + 1; - -const std::string& IceCandidatePairConfig_NetworkType_Name(IceCandidatePairConfig_NetworkType value); -template -inline const std::string& IceCandidatePairConfig_NetworkType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_NetworkType_Name."); - return IceCandidatePairConfig_NetworkType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_NetworkType_Parse( - const std::string& name, IceCandidatePairConfig_NetworkType* value); -enum IceCandidatePairEvent_IceCandidatePairEventType : int { - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_SENT = 0, - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RECEIVED = 1, - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_SENT = 2, - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_RECEIVED = 3 -}; -bool IceCandidatePairEvent_IceCandidatePairEventType_IsValid(int value); -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MIN = IceCandidatePairEvent_IceCandidatePairEventType_CHECK_SENT; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MAX = IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_RECEIVED; -constexpr int IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_ARRAYSIZE = IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MAX + 1; - -const std::string& IceCandidatePairEvent_IceCandidatePairEventType_Name(IceCandidatePairEvent_IceCandidatePairEventType value); -template -inline const std::string& 
IceCandidatePairEvent_IceCandidatePairEventType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairEvent_IceCandidatePairEventType_Name."); - return IceCandidatePairEvent_IceCandidatePairEventType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairEvent_IceCandidatePairEventType_Parse( - const std::string& name, IceCandidatePairEvent_IceCandidatePairEventType* value); -enum MediaType : int { - ANY = 0, - AUDIO = 1, - VIDEO = 2, - DATA = 3 -}; -bool MediaType_IsValid(int value); -constexpr MediaType MediaType_MIN = ANY; -constexpr MediaType MediaType_MAX = DATA; -constexpr int MediaType_ARRAYSIZE = MediaType_MAX + 1; - -const std::string& MediaType_Name(MediaType value); -template -inline const std::string& MediaType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function MediaType_Name."); - return MediaType_Name(static_cast(enum_t_value)); -} -bool MediaType_Parse( - const std::string& name, MediaType* value); -// =================================================================== - -class EventStream : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.EventStream) */ { - public: - EventStream(); - virtual ~EventStream(); - - EventStream(const EventStream& from); - EventStream(EventStream&& from) noexcept - : EventStream() { - *this = ::std::move(from); - } - - inline EventStream& operator=(const EventStream& from) { - CopyFrom(from); - return *this; - } - inline EventStream& operator=(EventStream&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return 
_internal_metadata_.mutable_unknown_fields(); - } - - static const EventStream& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const EventStream* internal_default_instance() { - return reinterpret_cast( - &_EventStream_default_instance_); - } - static constexpr int kIndexInFileMessages = - 0; - - friend void swap(EventStream& a, EventStream& b) { - a.Swap(&b); - } - inline void Swap(EventStream* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline EventStream* New() const final { - return CreateMaybeMessage(nullptr); - } - - EventStream* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const EventStream& from); - void MergeFrom(const EventStream& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(EventStream* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.EventStream"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* 
GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kStreamFieldNumber = 1, - }; - // repeated .webrtc.rtclog.Event stream = 1; - int stream_size() const; - void clear_stream(); - ::webrtc::rtclog::Event* mutable_stream(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::Event >* - mutable_stream(); - const ::webrtc::rtclog::Event& stream(int index) const; - ::webrtc::rtclog::Event* add_stream(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::Event >& - stream() const; - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.EventStream) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::Event > stream_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class Event : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.Event) */ { - public: - Event(); - virtual ~Event(); - - Event(const Event& from); - Event(Event&& from) noexcept - : Event() { - *this = ::std::move(from); - } - - inline Event& operator=(const Event& from) { - CopyFrom(from); - return *this; - } - inline Event& operator=(Event&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return 
_internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const Event& default_instance(); - - enum SubtypeCase { - kRtpPacket = 3, - kRtcpPacket = 4, - kAudioPlayoutEvent = 5, - kLossBasedBweUpdate = 6, - kDelayBasedBweUpdate = 7, - kVideoReceiverConfig = 8, - kVideoSenderConfig = 9, - kAudioReceiverConfig = 10, - kAudioSenderConfig = 11, - kAudioNetworkAdaptation = 16, - kProbeCluster = 17, - kProbeResult = 18, - kAlrState = 19, - kIceCandidatePairConfig = 20, - kIceCandidatePairEvent = 21, - SUBTYPE_NOT_SET = 0, - }; - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const Event* internal_default_instance() { - return reinterpret_cast( - &_Event_default_instance_); - } - static constexpr int kIndexInFileMessages = - 1; - - friend void swap(Event& a, Event& b) { - a.Swap(&b); - } - inline void Swap(Event* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline Event* New() const final { - return CreateMaybeMessage(nullptr); - } - - Event* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const Event& from); - void MergeFrom(const Event& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void 
DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(Event* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.Event"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef Event_EventType EventType; - static constexpr EventType UNKNOWN_EVENT = - Event_EventType_UNKNOWN_EVENT; - static constexpr EventType LOG_START = - Event_EventType_LOG_START; - static constexpr EventType LOG_END = - Event_EventType_LOG_END; - static constexpr EventType RTP_EVENT = - Event_EventType_RTP_EVENT; - static constexpr EventType RTCP_EVENT = - Event_EventType_RTCP_EVENT; - static constexpr EventType AUDIO_PLAYOUT_EVENT = - Event_EventType_AUDIO_PLAYOUT_EVENT; - static constexpr EventType LOSS_BASED_BWE_UPDATE = - Event_EventType_LOSS_BASED_BWE_UPDATE; - static constexpr EventType DELAY_BASED_BWE_UPDATE = - Event_EventType_DELAY_BASED_BWE_UPDATE; - static constexpr EventType VIDEO_RECEIVER_CONFIG_EVENT = - Event_EventType_VIDEO_RECEIVER_CONFIG_EVENT; - static constexpr EventType VIDEO_SENDER_CONFIG_EVENT = - Event_EventType_VIDEO_SENDER_CONFIG_EVENT; - static constexpr EventType AUDIO_RECEIVER_CONFIG_EVENT = - Event_EventType_AUDIO_RECEIVER_CONFIG_EVENT; - static constexpr EventType AUDIO_SENDER_CONFIG_EVENT = - Event_EventType_AUDIO_SENDER_CONFIG_EVENT; - static constexpr EventType AUDIO_NETWORK_ADAPTATION_EVENT = - Event_EventType_AUDIO_NETWORK_ADAPTATION_EVENT; - static constexpr EventType BWE_PROBE_CLUSTER_CREATED_EVENT = - 
Event_EventType_BWE_PROBE_CLUSTER_CREATED_EVENT; - static constexpr EventType BWE_PROBE_RESULT_EVENT = - Event_EventType_BWE_PROBE_RESULT_EVENT; - static constexpr EventType ALR_STATE_EVENT = - Event_EventType_ALR_STATE_EVENT; - static constexpr EventType ICE_CANDIDATE_PAIR_CONFIG = - Event_EventType_ICE_CANDIDATE_PAIR_CONFIG; - static constexpr EventType ICE_CANDIDATE_PAIR_EVENT = - Event_EventType_ICE_CANDIDATE_PAIR_EVENT; - static inline bool EventType_IsValid(int value) { - return Event_EventType_IsValid(value); - } - static constexpr EventType EventType_MIN = - Event_EventType_EventType_MIN; - static constexpr EventType EventType_MAX = - Event_EventType_EventType_MAX; - static constexpr int EventType_ARRAYSIZE = - Event_EventType_EventType_ARRAYSIZE; - template - static inline const std::string& EventType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function EventType_Name."); - return Event_EventType_Name(enum_t_value); - } - static inline bool EventType_Parse(const std::string& name, - EventType* value) { - return Event_EventType_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampUsFieldNumber = 1, - kTypeFieldNumber = 2, - kRtpPacketFieldNumber = 3, - kRtcpPacketFieldNumber = 4, - kAudioPlayoutEventFieldNumber = 5, - kLossBasedBweUpdateFieldNumber = 6, - kDelayBasedBweUpdateFieldNumber = 7, - kVideoReceiverConfigFieldNumber = 8, - kVideoSenderConfigFieldNumber = 9, - kAudioReceiverConfigFieldNumber = 10, - kAudioSenderConfigFieldNumber = 11, - kAudioNetworkAdaptationFieldNumber = 16, - kProbeClusterFieldNumber = 17, - kProbeResultFieldNumber = 18, - kAlrStateFieldNumber = 19, - kIceCandidatePairConfigFieldNumber = 20, - kIceCandidatePairEventFieldNumber = 21, - }; - // optional int64 timestamp_us = 1; - bool has_timestamp_us() const; - void clear_timestamp_us(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_us() 
const; - void set_timestamp_us(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional .webrtc.rtclog.Event.EventType type = 2; - bool has_type() const; - void clear_type(); - ::webrtc::rtclog::Event_EventType type() const; - void set_type(::webrtc::rtclog::Event_EventType value); - - // optional .webrtc.rtclog.RtpPacket rtp_packet = 3; - bool has_rtp_packet() const; - void clear_rtp_packet(); - const ::webrtc::rtclog::RtpPacket& rtp_packet() const; - ::webrtc::rtclog::RtpPacket* release_rtp_packet(); - ::webrtc::rtclog::RtpPacket* mutable_rtp_packet(); - void set_allocated_rtp_packet(::webrtc::rtclog::RtpPacket* rtp_packet); - - // optional .webrtc.rtclog.RtcpPacket rtcp_packet = 4; - bool has_rtcp_packet() const; - void clear_rtcp_packet(); - const ::webrtc::rtclog::RtcpPacket& rtcp_packet() const; - ::webrtc::rtclog::RtcpPacket* release_rtcp_packet(); - ::webrtc::rtclog::RtcpPacket* mutable_rtcp_packet(); - void set_allocated_rtcp_packet(::webrtc::rtclog::RtcpPacket* rtcp_packet); - - // optional .webrtc.rtclog.AudioPlayoutEvent audio_playout_event = 5; - bool has_audio_playout_event() const; - void clear_audio_playout_event(); - const ::webrtc::rtclog::AudioPlayoutEvent& audio_playout_event() const; - ::webrtc::rtclog::AudioPlayoutEvent* release_audio_playout_event(); - ::webrtc::rtclog::AudioPlayoutEvent* mutable_audio_playout_event(); - void set_allocated_audio_playout_event(::webrtc::rtclog::AudioPlayoutEvent* audio_playout_event); - - // optional .webrtc.rtclog.LossBasedBweUpdate loss_based_bwe_update = 6; - bool has_loss_based_bwe_update() const; - void clear_loss_based_bwe_update(); - const ::webrtc::rtclog::LossBasedBweUpdate& loss_based_bwe_update() const; - ::webrtc::rtclog::LossBasedBweUpdate* release_loss_based_bwe_update(); - ::webrtc::rtclog::LossBasedBweUpdate* mutable_loss_based_bwe_update(); - void set_allocated_loss_based_bwe_update(::webrtc::rtclog::LossBasedBweUpdate* loss_based_bwe_update); - - // optional .webrtc.rtclog.DelayBasedBweUpdate 
delay_based_bwe_update = 7; - bool has_delay_based_bwe_update() const; - void clear_delay_based_bwe_update(); - const ::webrtc::rtclog::DelayBasedBweUpdate& delay_based_bwe_update() const; - ::webrtc::rtclog::DelayBasedBweUpdate* release_delay_based_bwe_update(); - ::webrtc::rtclog::DelayBasedBweUpdate* mutable_delay_based_bwe_update(); - void set_allocated_delay_based_bwe_update(::webrtc::rtclog::DelayBasedBweUpdate* delay_based_bwe_update); - - // optional .webrtc.rtclog.VideoReceiveConfig video_receiver_config = 8; - bool has_video_receiver_config() const; - void clear_video_receiver_config(); - const ::webrtc::rtclog::VideoReceiveConfig& video_receiver_config() const; - ::webrtc::rtclog::VideoReceiveConfig* release_video_receiver_config(); - ::webrtc::rtclog::VideoReceiveConfig* mutable_video_receiver_config(); - void set_allocated_video_receiver_config(::webrtc::rtclog::VideoReceiveConfig* video_receiver_config); - - // optional .webrtc.rtclog.VideoSendConfig video_sender_config = 9; - bool has_video_sender_config() const; - void clear_video_sender_config(); - const ::webrtc::rtclog::VideoSendConfig& video_sender_config() const; - ::webrtc::rtclog::VideoSendConfig* release_video_sender_config(); - ::webrtc::rtclog::VideoSendConfig* mutable_video_sender_config(); - void set_allocated_video_sender_config(::webrtc::rtclog::VideoSendConfig* video_sender_config); - - // optional .webrtc.rtclog.AudioReceiveConfig audio_receiver_config = 10; - bool has_audio_receiver_config() const; - void clear_audio_receiver_config(); - const ::webrtc::rtclog::AudioReceiveConfig& audio_receiver_config() const; - ::webrtc::rtclog::AudioReceiveConfig* release_audio_receiver_config(); - ::webrtc::rtclog::AudioReceiveConfig* mutable_audio_receiver_config(); - void set_allocated_audio_receiver_config(::webrtc::rtclog::AudioReceiveConfig* audio_receiver_config); - - // optional .webrtc.rtclog.AudioSendConfig audio_sender_config = 11; - bool has_audio_sender_config() const; - void 
clear_audio_sender_config(); - const ::webrtc::rtclog::AudioSendConfig& audio_sender_config() const; - ::webrtc::rtclog::AudioSendConfig* release_audio_sender_config(); - ::webrtc::rtclog::AudioSendConfig* mutable_audio_sender_config(); - void set_allocated_audio_sender_config(::webrtc::rtclog::AudioSendConfig* audio_sender_config); - - // optional .webrtc.rtclog.AudioNetworkAdaptation audio_network_adaptation = 16; - bool has_audio_network_adaptation() const; - void clear_audio_network_adaptation(); - const ::webrtc::rtclog::AudioNetworkAdaptation& audio_network_adaptation() const; - ::webrtc::rtclog::AudioNetworkAdaptation* release_audio_network_adaptation(); - ::webrtc::rtclog::AudioNetworkAdaptation* mutable_audio_network_adaptation(); - void set_allocated_audio_network_adaptation(::webrtc::rtclog::AudioNetworkAdaptation* audio_network_adaptation); - - // optional .webrtc.rtclog.BweProbeCluster probe_cluster = 17; - bool has_probe_cluster() const; - void clear_probe_cluster(); - const ::webrtc::rtclog::BweProbeCluster& probe_cluster() const; - ::webrtc::rtclog::BweProbeCluster* release_probe_cluster(); - ::webrtc::rtclog::BweProbeCluster* mutable_probe_cluster(); - void set_allocated_probe_cluster(::webrtc::rtclog::BweProbeCluster* probe_cluster); - - // optional .webrtc.rtclog.BweProbeResult probe_result = 18; - bool has_probe_result() const; - void clear_probe_result(); - const ::webrtc::rtclog::BweProbeResult& probe_result() const; - ::webrtc::rtclog::BweProbeResult* release_probe_result(); - ::webrtc::rtclog::BweProbeResult* mutable_probe_result(); - void set_allocated_probe_result(::webrtc::rtclog::BweProbeResult* probe_result); - - // optional .webrtc.rtclog.AlrState alr_state = 19; - bool has_alr_state() const; - void clear_alr_state(); - const ::webrtc::rtclog::AlrState& alr_state() const; - ::webrtc::rtclog::AlrState* release_alr_state(); - ::webrtc::rtclog::AlrState* mutable_alr_state(); - void set_allocated_alr_state(::webrtc::rtclog::AlrState* 
alr_state); - - // optional .webrtc.rtclog.IceCandidatePairConfig ice_candidate_pair_config = 20; - bool has_ice_candidate_pair_config() const; - void clear_ice_candidate_pair_config(); - const ::webrtc::rtclog::IceCandidatePairConfig& ice_candidate_pair_config() const; - ::webrtc::rtclog::IceCandidatePairConfig* release_ice_candidate_pair_config(); - ::webrtc::rtclog::IceCandidatePairConfig* mutable_ice_candidate_pair_config(); - void set_allocated_ice_candidate_pair_config(::webrtc::rtclog::IceCandidatePairConfig* ice_candidate_pair_config); - - // optional .webrtc.rtclog.IceCandidatePairEvent ice_candidate_pair_event = 21; - bool has_ice_candidate_pair_event() const; - void clear_ice_candidate_pair_event(); - const ::webrtc::rtclog::IceCandidatePairEvent& ice_candidate_pair_event() const; - ::webrtc::rtclog::IceCandidatePairEvent* release_ice_candidate_pair_event(); - ::webrtc::rtclog::IceCandidatePairEvent* mutable_ice_candidate_pair_event(); - void set_allocated_ice_candidate_pair_event(::webrtc::rtclog::IceCandidatePairEvent* ice_candidate_pair_event); - - void clear_subtype(); - SubtypeCase subtype_case() const; - // @@protoc_insertion_point(class_scope:webrtc.rtclog.Event) - private: - class _Internal; - void set_has_rtp_packet(); - void set_has_rtcp_packet(); - void set_has_audio_playout_event(); - void set_has_loss_based_bwe_update(); - void set_has_delay_based_bwe_update(); - void set_has_video_receiver_config(); - void set_has_video_sender_config(); - void set_has_audio_receiver_config(); - void set_has_audio_sender_config(); - void set_has_audio_network_adaptation(); - void set_has_probe_cluster(); - void set_has_probe_result(); - void set_has_alr_state(); - void set_has_ice_candidate_pair_config(); - void set_has_ice_candidate_pair_event(); - - inline bool has_subtype() const; - inline void clear_has_subtype(); - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> 
_has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_us_; - int type_; - union SubtypeUnion { - SubtypeUnion() {} - ::webrtc::rtclog::RtpPacket* rtp_packet_; - ::webrtc::rtclog::RtcpPacket* rtcp_packet_; - ::webrtc::rtclog::AudioPlayoutEvent* audio_playout_event_; - ::webrtc::rtclog::LossBasedBweUpdate* loss_based_bwe_update_; - ::webrtc::rtclog::DelayBasedBweUpdate* delay_based_bwe_update_; - ::webrtc::rtclog::VideoReceiveConfig* video_receiver_config_; - ::webrtc::rtclog::VideoSendConfig* video_sender_config_; - ::webrtc::rtclog::AudioReceiveConfig* audio_receiver_config_; - ::webrtc::rtclog::AudioSendConfig* audio_sender_config_; - ::webrtc::rtclog::AudioNetworkAdaptation* audio_network_adaptation_; - ::webrtc::rtclog::BweProbeCluster* probe_cluster_; - ::webrtc::rtclog::BweProbeResult* probe_result_; - ::webrtc::rtclog::AlrState* alr_state_; - ::webrtc::rtclog::IceCandidatePairConfig* ice_candidate_pair_config_; - ::webrtc::rtclog::IceCandidatePairEvent* ice_candidate_pair_event_; - } subtype_; - ::PROTOBUF_NAMESPACE_ID::uint32 _oneof_case_[1]; - - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class RtpPacket : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.RtpPacket) */ { - public: - RtpPacket(); - virtual ~RtpPacket(); - - RtpPacket(const RtpPacket& from); - RtpPacket(RtpPacket&& from) noexcept - : RtpPacket() { - *this = ::std::move(from); - } - - inline RtpPacket& operator=(const RtpPacket& from) { - CopyFrom(from); - return *this; - } - inline RtpPacket& operator=(RtpPacket&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); 
- } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RtpPacket& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RtpPacket* internal_default_instance() { - return reinterpret_cast( - &_RtpPacket_default_instance_); - } - static constexpr int kIndexInFileMessages = - 2; - - friend void swap(RtpPacket& a, RtpPacket& b) { - a.Swap(&b); - } - inline void Swap(RtpPacket* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RtpPacket* New() const final { - return CreateMaybeMessage(nullptr); - } - - RtpPacket* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RtpPacket& from); - void MergeFrom(const RtpPacket& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(RtpPacket* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.RtpPacket"; - } - private: - inline 
::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kHeaderFieldNumber = 4, - kIncomingFieldNumber = 1, - kTypeFieldNumber = 2, - kPacketLengthFieldNumber = 3, - kProbeClusterIdFieldNumber = 5, - }; - // optional bytes header = 4; - bool has_header() const; - void clear_header(); - const std::string& header() const; - void set_header(const std::string& value); - void set_header(std::string&& value); - void set_header(const char* value); - void set_header(const void* value, size_t size); - std::string* mutable_header(); - std::string* release_header(); - void set_allocated_header(std::string* header); - - // optional bool incoming = 1; - bool has_incoming() const; - void clear_incoming(); - bool incoming() const; - void set_incoming(bool value); - - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - PROTOBUF_DEPRECATED bool has_type() const; - PROTOBUF_DEPRECATED void clear_type(); - PROTOBUF_DEPRECATED ::webrtc::rtclog::MediaType type() const; - PROTOBUF_DEPRECATED void set_type(::webrtc::rtclog::MediaType value); - - // optional uint32 packet_length = 3; - bool has_packet_length() const; - void clear_packet_length(); - ::PROTOBUF_NAMESPACE_ID::uint32 packet_length() const; - void set_packet_length(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional int32 probe_cluster_id = 5; - bool has_probe_cluster_id() const; - void clear_probe_cluster_id(); - ::PROTOBUF_NAMESPACE_ID::int32 probe_cluster_id() const; - void set_probe_cluster_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.RtpPacket) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite 
_internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr header_; - bool incoming_; - int type_; - ::PROTOBUF_NAMESPACE_ID::uint32 packet_length_; - ::PROTOBUF_NAMESPACE_ID::int32 probe_cluster_id_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class RtcpPacket : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.RtcpPacket) */ { - public: - RtcpPacket(); - virtual ~RtcpPacket(); - - RtcpPacket(const RtcpPacket& from); - RtcpPacket(RtcpPacket&& from) noexcept - : RtcpPacket() { - *this = ::std::move(from); - } - - inline RtcpPacket& operator=(const RtcpPacket& from) { - CopyFrom(from); - return *this; - } - inline RtcpPacket& operator=(RtcpPacket&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RtcpPacket& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RtcpPacket* internal_default_instance() { - return reinterpret_cast( - &_RtcpPacket_default_instance_); - } - static constexpr int kIndexInFileMessages = - 3; - - friend void swap(RtcpPacket& a, RtcpPacket& b) { - a.Swap(&b); - } - inline void Swap(RtcpPacket* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RtcpPacket* New() const final { - return CreateMaybeMessage(nullptr); - } - - RtcpPacket* 
New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RtcpPacket& from); - void MergeFrom(const RtcpPacket& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(RtcpPacket* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.RtcpPacket"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kPacketDataFieldNumber = 3, - kIncomingFieldNumber = 1, - kTypeFieldNumber = 2, - }; - // optional bytes packet_data = 3; - bool has_packet_data() const; - void clear_packet_data(); - const std::string& packet_data() const; - void set_packet_data(const std::string& value); - void set_packet_data(std::string&& value); - void set_packet_data(const char* value); - void 
set_packet_data(const void* value, size_t size); - std::string* mutable_packet_data(); - std::string* release_packet_data(); - void set_allocated_packet_data(std::string* packet_data); - - // optional bool incoming = 1; - bool has_incoming() const; - void clear_incoming(); - bool incoming() const; - void set_incoming(bool value); - - // optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; - PROTOBUF_DEPRECATED bool has_type() const; - PROTOBUF_DEPRECATED void clear_type(); - PROTOBUF_DEPRECATED ::webrtc::rtclog::MediaType type() const; - PROTOBUF_DEPRECATED void set_type(::webrtc::rtclog::MediaType value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.RtcpPacket) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr packet_data_; - bool incoming_; - int type_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioPlayoutEvent : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.AudioPlayoutEvent) */ { - public: - AudioPlayoutEvent(); - virtual ~AudioPlayoutEvent(); - - AudioPlayoutEvent(const AudioPlayoutEvent& from); - AudioPlayoutEvent(AudioPlayoutEvent&& from) noexcept - : AudioPlayoutEvent() { - *this = ::std::move(from); - } - - inline AudioPlayoutEvent& operator=(const AudioPlayoutEvent& from) { - CopyFrom(from); - return *this; - } - inline AudioPlayoutEvent& operator=(AudioPlayoutEvent&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return 
_internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioPlayoutEvent& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioPlayoutEvent* internal_default_instance() { - return reinterpret_cast( - &_AudioPlayoutEvent_default_instance_); - } - static constexpr int kIndexInFileMessages = - 4; - - friend void swap(AudioPlayoutEvent& a, AudioPlayoutEvent& b) { - a.Swap(&b); - } - inline void Swap(AudioPlayoutEvent* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioPlayoutEvent* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioPlayoutEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioPlayoutEvent& from); - void MergeFrom(const AudioPlayoutEvent& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioPlayoutEvent* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static 
::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.AudioPlayoutEvent"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kLocalSsrcFieldNumber = 2, - }; - // optional uint32 local_ssrc = 2; - bool has_local_ssrc() const; - void clear_local_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc() const; - void set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.AudioPlayoutEvent) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class LossBasedBweUpdate : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.LossBasedBweUpdate) */ { - public: - LossBasedBweUpdate(); - virtual ~LossBasedBweUpdate(); - - LossBasedBweUpdate(const LossBasedBweUpdate& from); - LossBasedBweUpdate(LossBasedBweUpdate&& from) noexcept - : LossBasedBweUpdate() { - *this = ::std::move(from); - } - - inline LossBasedBweUpdate& operator=(const LossBasedBweUpdate& from) { - CopyFrom(from); - return *this; - } - inline LossBasedBweUpdate& operator=(LossBasedBweUpdate&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const 
std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const LossBasedBweUpdate& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const LossBasedBweUpdate* internal_default_instance() { - return reinterpret_cast( - &_LossBasedBweUpdate_default_instance_); - } - static constexpr int kIndexInFileMessages = - 5; - - friend void swap(LossBasedBweUpdate& a, LossBasedBweUpdate& b) { - a.Swap(&b); - } - inline void Swap(LossBasedBweUpdate* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline LossBasedBweUpdate* New() const final { - return CreateMaybeMessage(nullptr); - } - - LossBasedBweUpdate* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const LossBasedBweUpdate& from); - void MergeFrom(const LossBasedBweUpdate& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(LossBasedBweUpdate* other); - friend 
class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.LossBasedBweUpdate"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kBitrateBpsFieldNumber = 1, - kFractionLossFieldNumber = 2, - kTotalPacketsFieldNumber = 3, - }; - // optional int32 bitrate_bps = 1; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional uint32 fraction_loss = 2; - bool has_fraction_loss() const; - void clear_fraction_loss(); - ::PROTOBUF_NAMESPACE_ID::uint32 fraction_loss() const; - void set_fraction_loss(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional int32 total_packets = 3; - bool has_total_packets() const; - void clear_total_packets(); - ::PROTOBUF_NAMESPACE_ID::int32 total_packets() const; - void set_total_packets(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.LossBasedBweUpdate) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps_; - ::PROTOBUF_NAMESPACE_ID::uint32 fraction_loss_; - ::PROTOBUF_NAMESPACE_ID::int32 total_packets_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class DelayBasedBweUpdate : - public ::PROTOBUF_NAMESPACE_ID::MessageLite 
/* @@protoc_insertion_point(class_definition:webrtc.rtclog.DelayBasedBweUpdate) */ { - public: - DelayBasedBweUpdate(); - virtual ~DelayBasedBweUpdate(); - - DelayBasedBweUpdate(const DelayBasedBweUpdate& from); - DelayBasedBweUpdate(DelayBasedBweUpdate&& from) noexcept - : DelayBasedBweUpdate() { - *this = ::std::move(from); - } - - inline DelayBasedBweUpdate& operator=(const DelayBasedBweUpdate& from) { - CopyFrom(from); - return *this; - } - inline DelayBasedBweUpdate& operator=(DelayBasedBweUpdate&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const DelayBasedBweUpdate& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DelayBasedBweUpdate* internal_default_instance() { - return reinterpret_cast( - &_DelayBasedBweUpdate_default_instance_); - } - static constexpr int kIndexInFileMessages = - 6; - - friend void swap(DelayBasedBweUpdate& a, DelayBasedBweUpdate& b) { - a.Swap(&b); - } - inline void Swap(DelayBasedBweUpdate* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline DelayBasedBweUpdate* New() const final { - return CreateMaybeMessage(nullptr); - } - - DelayBasedBweUpdate* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const DelayBasedBweUpdate& from); - void MergeFrom(const DelayBasedBweUpdate& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t 
ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(DelayBasedBweUpdate* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.DelayBasedBweUpdate"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef DelayBasedBweUpdate_DetectorState DetectorState; - static constexpr DetectorState BWE_NORMAL = - DelayBasedBweUpdate_DetectorState_BWE_NORMAL; - static constexpr DetectorState BWE_UNDERUSING = - DelayBasedBweUpdate_DetectorState_BWE_UNDERUSING; - static constexpr DetectorState BWE_OVERUSING = - DelayBasedBweUpdate_DetectorState_BWE_OVERUSING; - static inline bool DetectorState_IsValid(int value) { - return DelayBasedBweUpdate_DetectorState_IsValid(value); - } - static constexpr DetectorState DetectorState_MIN = - DelayBasedBweUpdate_DetectorState_DetectorState_MIN; - static constexpr DetectorState DetectorState_MAX = - DelayBasedBweUpdate_DetectorState_DetectorState_MAX; - static constexpr int DetectorState_ARRAYSIZE = - DelayBasedBweUpdate_DetectorState_DetectorState_ARRAYSIZE; - template - static inline 
const std::string& DetectorState_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function DetectorState_Name."); - return DelayBasedBweUpdate_DetectorState_Name(enum_t_value); - } - static inline bool DetectorState_Parse(const std::string& name, - DetectorState* value) { - return DelayBasedBweUpdate_DetectorState_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kBitrateBpsFieldNumber = 1, - kDetectorStateFieldNumber = 2, - }; - // optional int32 bitrate_bps = 1; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional .webrtc.rtclog.DelayBasedBweUpdate.DetectorState detector_state = 2; - bool has_detector_state() const; - void clear_detector_state(); - ::webrtc::rtclog::DelayBasedBweUpdate_DetectorState detector_state() const; - void set_detector_state(::webrtc::rtclog::DelayBasedBweUpdate_DetectorState value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.DelayBasedBweUpdate) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps_; - int detector_state_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class VideoReceiveConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.VideoReceiveConfig) */ { - public: - VideoReceiveConfig(); - virtual ~VideoReceiveConfig(); - - VideoReceiveConfig(const VideoReceiveConfig& from); - VideoReceiveConfig(VideoReceiveConfig&& from) noexcept - : VideoReceiveConfig() { - 
*this = ::std::move(from); - } - - inline VideoReceiveConfig& operator=(const VideoReceiveConfig& from) { - CopyFrom(from); - return *this; - } - inline VideoReceiveConfig& operator=(VideoReceiveConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const VideoReceiveConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const VideoReceiveConfig* internal_default_instance() { - return reinterpret_cast( - &_VideoReceiveConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 7; - - friend void swap(VideoReceiveConfig& a, VideoReceiveConfig& b) { - a.Swap(&b); - } - inline void Swap(VideoReceiveConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline VideoReceiveConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - VideoReceiveConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const VideoReceiveConfig& from); - void MergeFrom(const VideoReceiveConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(VideoReceiveConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.VideoReceiveConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef VideoReceiveConfig_RtcpMode RtcpMode; - static constexpr RtcpMode RTCP_COMPOUND = - VideoReceiveConfig_RtcpMode_RTCP_COMPOUND; - static constexpr RtcpMode RTCP_REDUCEDSIZE = - VideoReceiveConfig_RtcpMode_RTCP_REDUCEDSIZE; - static inline bool RtcpMode_IsValid(int value) { - return VideoReceiveConfig_RtcpMode_IsValid(value); - } - static constexpr RtcpMode RtcpMode_MIN = - VideoReceiveConfig_RtcpMode_RtcpMode_MIN; - static constexpr RtcpMode RtcpMode_MAX = - VideoReceiveConfig_RtcpMode_RtcpMode_MAX; - static constexpr int RtcpMode_ARRAYSIZE = - VideoReceiveConfig_RtcpMode_RtcpMode_ARRAYSIZE; - template - static inline const std::string& RtcpMode_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function RtcpMode_Name."); - return VideoReceiveConfig_RtcpMode_Name(enum_t_value); - } - static inline bool RtcpMode_Parse(const std::string& name, - RtcpMode* value) { - return VideoReceiveConfig_RtcpMode_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kRtxMapFieldNumber = 5, 
- kHeaderExtensionsFieldNumber = 6, - kDecodersFieldNumber = 7, - kRemoteSsrcFieldNumber = 1, - kLocalSsrcFieldNumber = 2, - kRembFieldNumber = 4, - kRtcpModeFieldNumber = 3, - }; - // repeated .webrtc.rtclog.RtxMap rtx_map = 5; - int rtx_map_size() const; - void clear_rtx_map(); - ::webrtc::rtclog::RtxMap* mutable_rtx_map(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtxMap >* - mutable_rtx_map(); - const ::webrtc::rtclog::RtxMap& rtx_map(int index) const; - ::webrtc::rtclog::RtxMap* add_rtx_map(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtxMap >& - rtx_map() const; - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 6; - int header_extensions_size() const; - void clear_header_extensions(); - ::webrtc::rtclog::RtpHeaderExtension* mutable_header_extensions(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* - mutable_header_extensions(); - const ::webrtc::rtclog::RtpHeaderExtension& header_extensions(int index) const; - ::webrtc::rtclog::RtpHeaderExtension* add_header_extensions(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& - header_extensions() const; - - // repeated .webrtc.rtclog.DecoderConfig decoders = 7; - int decoders_size() const; - void clear_decoders(); - ::webrtc::rtclog::DecoderConfig* mutable_decoders(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::DecoderConfig >* - mutable_decoders(); - const ::webrtc::rtclog::DecoderConfig& decoders(int index) const; - ::webrtc::rtclog::DecoderConfig* add_decoders(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::DecoderConfig >& - decoders() const; - - // optional uint32 remote_ssrc = 1; - bool has_remote_ssrc() const; - void clear_remote_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc() const; - void set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 local_ssrc = 2; 
- bool has_local_ssrc() const; - void clear_local_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc() const; - void set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional bool remb = 4; - bool has_remb() const; - void clear_remb(); - bool remb() const; - void set_remb(bool value); - - // optional .webrtc.rtclog.VideoReceiveConfig.RtcpMode rtcp_mode = 3; - bool has_rtcp_mode() const; - void clear_rtcp_mode(); - ::webrtc::rtclog::VideoReceiveConfig_RtcpMode rtcp_mode() const; - void set_rtcp_mode(::webrtc::rtclog::VideoReceiveConfig_RtcpMode value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.VideoReceiveConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtxMap > rtx_map_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension > header_extensions_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::DecoderConfig > decoders_; - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc_; - bool remb_; - int rtcp_mode_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class DecoderConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.DecoderConfig) */ { - public: - DecoderConfig(); - virtual ~DecoderConfig(); - - DecoderConfig(const DecoderConfig& from); - DecoderConfig(DecoderConfig&& from) noexcept - : DecoderConfig() { - *this = ::std::move(from); - } - - inline DecoderConfig& operator=(const DecoderConfig& from) { - CopyFrom(from); - return *this; - } - inline DecoderConfig& operator=(DecoderConfig&& from) noexcept { - if (GetArenaNoVirtual() == 
from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const DecoderConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DecoderConfig* internal_default_instance() { - return reinterpret_cast( - &_DecoderConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 8; - - friend void swap(DecoderConfig& a, DecoderConfig& b) { - a.Swap(&b); - } - inline void Swap(DecoderConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline DecoderConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - DecoderConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const DecoderConfig& from); - void MergeFrom(const DecoderConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void 
SetCachedSize(int size) const; - void InternalSwap(DecoderConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.DecoderConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kNameFieldNumber = 1, - kPayloadTypeFieldNumber = 2, - }; - // optional string name = 1; - bool has_name() const; - void clear_name(); - const std::string& name() const; - void set_name(const std::string& value); - void set_name(std::string&& value); - void set_name(const char* value); - void set_name(const char* value, size_t size); - std::string* mutable_name(); - std::string* release_name(); - void set_allocated_name(std::string* name); - - // optional int32 payload_type = 2; - bool has_payload_type() const; - void clear_payload_type(); - ::PROTOBUF_NAMESPACE_ID::int32 payload_type() const; - void set_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.DecoderConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_; - ::PROTOBUF_NAMESPACE_ID::int32 payload_type_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class RtpHeaderExtension : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* 
@@protoc_insertion_point(class_definition:webrtc.rtclog.RtpHeaderExtension) */ { - public: - RtpHeaderExtension(); - virtual ~RtpHeaderExtension(); - - RtpHeaderExtension(const RtpHeaderExtension& from); - RtpHeaderExtension(RtpHeaderExtension&& from) noexcept - : RtpHeaderExtension() { - *this = ::std::move(from); - } - - inline RtpHeaderExtension& operator=(const RtpHeaderExtension& from) { - CopyFrom(from); - return *this; - } - inline RtpHeaderExtension& operator=(RtpHeaderExtension&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RtpHeaderExtension& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RtpHeaderExtension* internal_default_instance() { - return reinterpret_cast( - &_RtpHeaderExtension_default_instance_); - } - static constexpr int kIndexInFileMessages = - 9; - - friend void swap(RtpHeaderExtension& a, RtpHeaderExtension& b) { - a.Swap(&b); - } - inline void Swap(RtpHeaderExtension* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RtpHeaderExtension* New() const final { - return CreateMaybeMessage(nullptr); - } - - RtpHeaderExtension* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RtpHeaderExtension& from); - void MergeFrom(const RtpHeaderExtension& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(RtpHeaderExtension* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.RtpHeaderExtension"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kNameFieldNumber = 1, - kIdFieldNumber = 2, - }; - // optional string name = 1; - bool has_name() const; - void clear_name(); - const std::string& name() const; - void set_name(const std::string& value); - void set_name(std::string&& value); - void set_name(const char* value); - void set_name(const char* value, size_t size); - std::string* mutable_name(); - std::string* release_name(); - void set_allocated_name(std::string* name); - - // optional int32 id = 2; - bool has_id() const; - void clear_id(); - ::PROTOBUF_NAMESPACE_ID::int32 id() const; - void set_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.RtpHeaderExtension) - private: - class _Internal; - - 
::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_; - ::PROTOBUF_NAMESPACE_ID::int32 id_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class RtxConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.RtxConfig) */ { - public: - RtxConfig(); - virtual ~RtxConfig(); - - RtxConfig(const RtxConfig& from); - RtxConfig(RtxConfig&& from) noexcept - : RtxConfig() { - *this = ::std::move(from); - } - - inline RtxConfig& operator=(const RtxConfig& from) { - CopyFrom(from); - return *this; - } - inline RtxConfig& operator=(RtxConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RtxConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RtxConfig* internal_default_instance() { - return reinterpret_cast( - &_RtxConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 10; - - friend void swap(RtxConfig& a, RtxConfig& b) { - a.Swap(&b); - } - inline void Swap(RtxConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RtxConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - RtxConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return 
CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RtxConfig& from); - void MergeFrom(const RtxConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(RtxConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.RtxConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kRtxSsrcFieldNumber = 1, - kRtxPayloadTypeFieldNumber = 2, - }; - // optional uint32 rtx_ssrc = 1; - bool has_rtx_ssrc() const; - void clear_rtx_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 rtx_ssrc() const; - void set_rtx_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional int32 rtx_payload_type = 2; - bool has_rtx_payload_type() const; - void clear_rtx_payload_type(); - ::PROTOBUF_NAMESPACE_ID::int32 rtx_payload_type() const; - void 
set_rtx_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.RtxConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 rtx_ssrc_; - ::PROTOBUF_NAMESPACE_ID::int32 rtx_payload_type_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class RtxMap : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.RtxMap) */ { - public: - RtxMap(); - virtual ~RtxMap(); - - RtxMap(const RtxMap& from); - RtxMap(RtxMap&& from) noexcept - : RtxMap() { - *this = ::std::move(from); - } - - inline RtxMap& operator=(const RtxMap& from) { - CopyFrom(from); - return *this; - } - inline RtxMap& operator=(RtxMap&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RtxMap& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RtxMap* internal_default_instance() { - return reinterpret_cast( - &_RtxMap_default_instance_); - } - static constexpr int kIndexInFileMessages = - 11; - - friend void swap(RtxMap& a, RtxMap& b) { - a.Swap(&b); - } - inline void Swap(RtxMap* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RtxMap* New() const final { - return CreateMaybeMessage(nullptr); 
- } - - RtxMap* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RtxMap& from); - void MergeFrom(const RtxMap& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(RtxMap* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.RtxMap"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kConfigFieldNumber = 2, - kPayloadTypeFieldNumber = 1, - }; - // optional .webrtc.rtclog.RtxConfig config = 2; - bool has_config() const; - void clear_config(); - const ::webrtc::rtclog::RtxConfig& config() const; - ::webrtc::rtclog::RtxConfig* release_config(); - ::webrtc::rtclog::RtxConfig* mutable_config(); - void set_allocated_config(::webrtc::rtclog::RtxConfig* config); - - // 
optional int32 payload_type = 1; - bool has_payload_type() const; - void clear_payload_type(); - ::PROTOBUF_NAMESPACE_ID::int32 payload_type() const; - void set_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.RtxMap) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::webrtc::rtclog::RtxConfig* config_; - ::PROTOBUF_NAMESPACE_ID::int32 payload_type_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class VideoSendConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.VideoSendConfig) */ { - public: - VideoSendConfig(); - virtual ~VideoSendConfig(); - - VideoSendConfig(const VideoSendConfig& from); - VideoSendConfig(VideoSendConfig&& from) noexcept - : VideoSendConfig() { - *this = ::std::move(from); - } - - inline VideoSendConfig& operator=(const VideoSendConfig& from) { - CopyFrom(from); - return *this; - } - inline VideoSendConfig& operator=(VideoSendConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const VideoSendConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const VideoSendConfig* internal_default_instance() { - return reinterpret_cast( - &_VideoSendConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 12; - - friend void 
swap(VideoSendConfig& a, VideoSendConfig& b) { - a.Swap(&b); - } - inline void Swap(VideoSendConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline VideoSendConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - VideoSendConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const VideoSendConfig& from); - void MergeFrom(const VideoSendConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(VideoSendConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.VideoSendConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - 
kSsrcsFieldNumber = 1, - kHeaderExtensionsFieldNumber = 2, - kRtxSsrcsFieldNumber = 3, - kEncoderFieldNumber = 5, - kRtxPayloadTypeFieldNumber = 4, - }; - // repeated uint32 ssrcs = 1; - int ssrcs_size() const; - void clear_ssrcs(); - ::PROTOBUF_NAMESPACE_ID::uint32 ssrcs(int index) const; - void set_ssrcs(int index, ::PROTOBUF_NAMESPACE_ID::uint32 value); - void add_ssrcs(::PROTOBUF_NAMESPACE_ID::uint32 value); - const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >& - ssrcs() const; - ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >* - mutable_ssrcs(); - - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - int header_extensions_size() const; - void clear_header_extensions(); - ::webrtc::rtclog::RtpHeaderExtension* mutable_header_extensions(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* - mutable_header_extensions(); - const ::webrtc::rtclog::RtpHeaderExtension& header_extensions(int index) const; - ::webrtc::rtclog::RtpHeaderExtension* add_header_extensions(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& - header_extensions() const; - - // repeated uint32 rtx_ssrcs = 3; - int rtx_ssrcs_size() const; - void clear_rtx_ssrcs(); - ::PROTOBUF_NAMESPACE_ID::uint32 rtx_ssrcs(int index) const; - void set_rtx_ssrcs(int index, ::PROTOBUF_NAMESPACE_ID::uint32 value); - void add_rtx_ssrcs(::PROTOBUF_NAMESPACE_ID::uint32 value); - const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >& - rtx_ssrcs() const; - ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >* - mutable_rtx_ssrcs(); - - // optional .webrtc.rtclog.EncoderConfig encoder = 5; - bool has_encoder() const; - void clear_encoder(); - const ::webrtc::rtclog::EncoderConfig& encoder() const; - ::webrtc::rtclog::EncoderConfig* release_encoder(); - ::webrtc::rtclog::EncoderConfig* mutable_encoder(); - void 
set_allocated_encoder(::webrtc::rtclog::EncoderConfig* encoder); - - // optional int32 rtx_payload_type = 4; - bool has_rtx_payload_type() const; - void clear_rtx_payload_type(); - ::PROTOBUF_NAMESPACE_ID::int32 rtx_payload_type() const; - void set_rtx_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.VideoSendConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 > ssrcs_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension > header_extensions_; - ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 > rtx_ssrcs_; - ::webrtc::rtclog::EncoderConfig* encoder_; - ::PROTOBUF_NAMESPACE_ID::int32 rtx_payload_type_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class EncoderConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.EncoderConfig) */ { - public: - EncoderConfig(); - virtual ~EncoderConfig(); - - EncoderConfig(const EncoderConfig& from); - EncoderConfig(EncoderConfig&& from) noexcept - : EncoderConfig() { - *this = ::std::move(from); - } - - inline EncoderConfig& operator=(const EncoderConfig& from) { - CopyFrom(from); - return *this; - } - inline EncoderConfig& operator=(EncoderConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return 
_internal_metadata_.mutable_unknown_fields(); - } - - static const EncoderConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const EncoderConfig* internal_default_instance() { - return reinterpret_cast( - &_EncoderConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 13; - - friend void swap(EncoderConfig& a, EncoderConfig& b) { - a.Swap(&b); - } - inline void Swap(EncoderConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline EncoderConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - EncoderConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const EncoderConfig& from); - void MergeFrom(const EncoderConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(EncoderConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.EncoderConfig"; - } - private: - inline 
::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kNameFieldNumber = 1, - kPayloadTypeFieldNumber = 2, - }; - // optional string name = 1; - bool has_name() const; - void clear_name(); - const std::string& name() const; - void set_name(const std::string& value); - void set_name(std::string&& value); - void set_name(const char* value); - void set_name(const char* value, size_t size); - std::string* mutable_name(); - std::string* release_name(); - void set_allocated_name(std::string* name); - - // optional int32 payload_type = 2; - bool has_payload_type() const; - void clear_payload_type(); - ::PROTOBUF_NAMESPACE_ID::int32 payload_type() const; - void set_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.EncoderConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_; - ::PROTOBUF_NAMESPACE_ID::int32 payload_type_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioReceiveConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.AudioReceiveConfig) */ { - public: - AudioReceiveConfig(); - virtual ~AudioReceiveConfig(); - - AudioReceiveConfig(const AudioReceiveConfig& from); - AudioReceiveConfig(AudioReceiveConfig&& from) noexcept - : AudioReceiveConfig() { - *this = ::std::move(from); - } - - inline 
AudioReceiveConfig& operator=(const AudioReceiveConfig& from) { - CopyFrom(from); - return *this; - } - inline AudioReceiveConfig& operator=(AudioReceiveConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioReceiveConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioReceiveConfig* internal_default_instance() { - return reinterpret_cast( - &_AudioReceiveConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 14; - - friend void swap(AudioReceiveConfig& a, AudioReceiveConfig& b) { - a.Swap(&b); - } - inline void Swap(AudioReceiveConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioReceiveConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioReceiveConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioReceiveConfig& from); - void MergeFrom(const AudioReceiveConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void 
SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioReceiveConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.AudioReceiveConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kHeaderExtensionsFieldNumber = 3, - kRemoteSsrcFieldNumber = 1, - kLocalSsrcFieldNumber = 2, - }; - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 3; - int header_extensions_size() const; - void clear_header_extensions(); - ::webrtc::rtclog::RtpHeaderExtension* mutable_header_extensions(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* - mutable_header_extensions(); - const ::webrtc::rtclog::RtpHeaderExtension& header_extensions(int index) const; - ::webrtc::rtclog::RtpHeaderExtension* add_header_extensions(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& - header_extensions() const; - - // optional uint32 remote_ssrc = 1; - bool has_remote_ssrc() const; - void clear_remote_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc() const; - void set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 local_ssrc = 2; - bool has_local_ssrc() const; - void clear_local_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc() const; - void 
set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.AudioReceiveConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension > header_extensions_; - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioSendConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.AudioSendConfig) */ { - public: - AudioSendConfig(); - virtual ~AudioSendConfig(); - - AudioSendConfig(const AudioSendConfig& from); - AudioSendConfig(AudioSendConfig&& from) noexcept - : AudioSendConfig() { - *this = ::std::move(from); - } - - inline AudioSendConfig& operator=(const AudioSendConfig& from) { - CopyFrom(from); - return *this; - } - inline AudioSendConfig& operator=(AudioSendConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioSendConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioSendConfig* internal_default_instance() { - return reinterpret_cast( - &_AudioSendConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 15; - - friend void swap(AudioSendConfig& a, 
AudioSendConfig& b) { - a.Swap(&b); - } - inline void Swap(AudioSendConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioSendConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioSendConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioSendConfig& from); - void MergeFrom(const AudioSendConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioSendConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.AudioSendConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kHeaderExtensionsFieldNumber = 2, - 
kSsrcFieldNumber = 1, - }; - // repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; - int header_extensions_size() const; - void clear_header_extensions(); - ::webrtc::rtclog::RtpHeaderExtension* mutable_header_extensions(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* - mutable_header_extensions(); - const ::webrtc::rtclog::RtpHeaderExtension& header_extensions(int index) const; - ::webrtc::rtclog::RtpHeaderExtension* add_header_extensions(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& - header_extensions() const; - - // optional uint32 ssrc = 1; - bool has_ssrc() const; - void clear_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc() const; - void set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.AudioSendConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension > header_extensions_; - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioNetworkAdaptation : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.AudioNetworkAdaptation) */ { - public: - AudioNetworkAdaptation(); - virtual ~AudioNetworkAdaptation(); - - AudioNetworkAdaptation(const AudioNetworkAdaptation& from); - AudioNetworkAdaptation(AudioNetworkAdaptation&& from) noexcept - : AudioNetworkAdaptation() { - *this = ::std::move(from); - } - - inline AudioNetworkAdaptation& operator=(const AudioNetworkAdaptation& from) { - CopyFrom(from); - return *this; - } - inline 
AudioNetworkAdaptation& operator=(AudioNetworkAdaptation&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioNetworkAdaptation& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioNetworkAdaptation* internal_default_instance() { - return reinterpret_cast( - &_AudioNetworkAdaptation_default_instance_); - } - static constexpr int kIndexInFileMessages = - 16; - - friend void swap(AudioNetworkAdaptation& a, AudioNetworkAdaptation& b) { - a.Swap(&b); - } - inline void Swap(AudioNetworkAdaptation* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioNetworkAdaptation* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioNetworkAdaptation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioNetworkAdaptation& from); - void MergeFrom(const AudioNetworkAdaptation& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* 
output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioNetworkAdaptation* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.AudioNetworkAdaptation"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kBitrateBpsFieldNumber = 1, - kFrameLengthMsFieldNumber = 2, - kUplinkPacketLossFractionFieldNumber = 3, - kEnableFecFieldNumber = 4, - kEnableDtxFieldNumber = 5, - kNumChannelsFieldNumber = 6, - }; - // optional int32 bitrate_bps = 1; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 frame_length_ms = 2; - bool has_frame_length_ms() const; - void clear_frame_length_ms(); - ::PROTOBUF_NAMESPACE_ID::int32 frame_length_ms() const; - void set_frame_length_ms(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional float uplink_packet_loss_fraction = 3; - bool has_uplink_packet_loss_fraction() const; - void clear_uplink_packet_loss_fraction(); - float uplink_packet_loss_fraction() const; - void set_uplink_packet_loss_fraction(float value); - - // optional bool enable_fec = 4; - bool has_enable_fec() const; - void clear_enable_fec(); - bool enable_fec() const; - void set_enable_fec(bool value); - - // optional bool enable_dtx = 5; - bool has_enable_dtx() const; - void clear_enable_dtx(); - bool 
enable_dtx() const; - void set_enable_dtx(bool value); - - // optional uint32 num_channels = 6; - bool has_num_channels() const; - void clear_num_channels(); - ::PROTOBUF_NAMESPACE_ID::uint32 num_channels() const; - void set_num_channels(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.AudioNetworkAdaptation) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps_; - ::PROTOBUF_NAMESPACE_ID::int32 frame_length_ms_; - float uplink_packet_loss_fraction_; - bool enable_fec_; - bool enable_dtx_; - ::PROTOBUF_NAMESPACE_ID::uint32 num_channels_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class BweProbeCluster : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.BweProbeCluster) */ { - public: - BweProbeCluster(); - virtual ~BweProbeCluster(); - - BweProbeCluster(const BweProbeCluster& from); - BweProbeCluster(BweProbeCluster&& from) noexcept - : BweProbeCluster() { - *this = ::std::move(from); - } - - inline BweProbeCluster& operator=(const BweProbeCluster& from) { - CopyFrom(from); - return *this; - } - inline BweProbeCluster& operator=(BweProbeCluster&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const BweProbeCluster& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL 
USE ONLY - static inline const BweProbeCluster* internal_default_instance() { - return reinterpret_cast( - &_BweProbeCluster_default_instance_); - } - static constexpr int kIndexInFileMessages = - 17; - - friend void swap(BweProbeCluster& a, BweProbeCluster& b) { - a.Swap(&b); - } - inline void Swap(BweProbeCluster* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline BweProbeCluster* New() const final { - return CreateMaybeMessage(nullptr); - } - - BweProbeCluster* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const BweProbeCluster& from); - void MergeFrom(const BweProbeCluster& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(BweProbeCluster* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.BweProbeCluster"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - 
std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kIdFieldNumber = 1, - kBitrateBpsFieldNumber = 2, - kMinPacketsFieldNumber = 3, - kMinBytesFieldNumber = 4, - }; - // optional int32 id = 1; - bool has_id() const; - void clear_id(); - ::PROTOBUF_NAMESPACE_ID::int32 id() const; - void set_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 bitrate_bps = 2; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional uint32 min_packets = 3; - bool has_min_packets() const; - void clear_min_packets(); - ::PROTOBUF_NAMESPACE_ID::uint32 min_packets() const; - void set_min_packets(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 min_bytes = 4; - bool has_min_bytes() const; - void clear_min_bytes(); - ::PROTOBUF_NAMESPACE_ID::uint32 min_bytes() const; - void set_min_bytes(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.BweProbeCluster) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int32 id_; - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps_; - ::PROTOBUF_NAMESPACE_ID::uint32 min_packets_; - ::PROTOBUF_NAMESPACE_ID::uint32 min_bytes_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class BweProbeResult : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.BweProbeResult) */ { - public: - BweProbeResult(); - virtual ~BweProbeResult(); - - BweProbeResult(const 
BweProbeResult& from); - BweProbeResult(BweProbeResult&& from) noexcept - : BweProbeResult() { - *this = ::std::move(from); - } - - inline BweProbeResult& operator=(const BweProbeResult& from) { - CopyFrom(from); - return *this; - } - inline BweProbeResult& operator=(BweProbeResult&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const BweProbeResult& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const BweProbeResult* internal_default_instance() { - return reinterpret_cast( - &_BweProbeResult_default_instance_); - } - static constexpr int kIndexInFileMessages = - 18; - - friend void swap(BweProbeResult& a, BweProbeResult& b) { - a.Swap(&b); - } - inline void Swap(BweProbeResult* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline BweProbeResult* New() const final { - return CreateMaybeMessage(nullptr); - } - - BweProbeResult* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const BweProbeResult& from); - void MergeFrom(const BweProbeResult& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) 
final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(BweProbeResult* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.BweProbeResult"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef BweProbeResult_ResultType ResultType; - static constexpr ResultType SUCCESS = - BweProbeResult_ResultType_SUCCESS; - static constexpr ResultType INVALID_SEND_RECEIVE_INTERVAL = - BweProbeResult_ResultType_INVALID_SEND_RECEIVE_INTERVAL; - static constexpr ResultType INVALID_SEND_RECEIVE_RATIO = - BweProbeResult_ResultType_INVALID_SEND_RECEIVE_RATIO; - static constexpr ResultType TIMEOUT = - BweProbeResult_ResultType_TIMEOUT; - static inline bool ResultType_IsValid(int value) { - return BweProbeResult_ResultType_IsValid(value); - } - static constexpr ResultType ResultType_MIN = - BweProbeResult_ResultType_ResultType_MIN; - static constexpr ResultType ResultType_MAX = - BweProbeResult_ResultType_ResultType_MAX; - static constexpr int ResultType_ARRAYSIZE = - BweProbeResult_ResultType_ResultType_ARRAYSIZE; - template - static inline const std::string& ResultType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function ResultType_Name."); - return BweProbeResult_ResultType_Name(enum_t_value); - } - static inline bool 
ResultType_Parse(const std::string& name, - ResultType* value) { - return BweProbeResult_ResultType_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kIdFieldNumber = 1, - kResultFieldNumber = 2, - kBitrateBpsFieldNumber = 3, - }; - // optional int32 id = 1; - bool has_id() const; - void clear_id(); - ::PROTOBUF_NAMESPACE_ID::int32 id() const; - void set_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional .webrtc.rtclog.BweProbeResult.ResultType result = 2; - bool has_result() const; - void clear_result(); - ::webrtc::rtclog::BweProbeResult_ResultType result() const; - void set_result(::webrtc::rtclog::BweProbeResult_ResultType value); - - // optional int32 bitrate_bps = 3; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.BweProbeResult) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int32 id_; - int result_; - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class AlrState : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.AlrState) */ { - public: - AlrState(); - virtual ~AlrState(); - - AlrState(const AlrState& from); - AlrState(AlrState&& from) noexcept - : AlrState() { - *this = ::std::move(from); - } - - inline AlrState& operator=(const AlrState& from) { - CopyFrom(from); - return *this; - } - inline AlrState& operator=(AlrState&& from) noexcept { - if (GetArenaNoVirtual() == 
from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AlrState& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AlrState* internal_default_instance() { - return reinterpret_cast( - &_AlrState_default_instance_); - } - static constexpr int kIndexInFileMessages = - 19; - - friend void swap(AlrState& a, AlrState& b) { - a.Swap(&b); - } - inline void Swap(AlrState* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AlrState* New() const final { - return CreateMaybeMessage(nullptr); - } - - AlrState* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AlrState& from); - void MergeFrom(const AlrState& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void 
InternalSwap(AlrState* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.AlrState"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kInAlrFieldNumber = 1, - }; - // optional bool in_alr = 1; - bool has_in_alr() const; - void clear_in_alr(); - bool in_alr() const; - void set_in_alr(bool value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.AlrState) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - bool in_alr_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class IceCandidatePairConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.IceCandidatePairConfig) */ { - public: - IceCandidatePairConfig(); - virtual ~IceCandidatePairConfig(); - - IceCandidatePairConfig(const IceCandidatePairConfig& from); - IceCandidatePairConfig(IceCandidatePairConfig&& from) noexcept - : IceCandidatePairConfig() { - *this = ::std::move(from); - } - - inline IceCandidatePairConfig& operator=(const IceCandidatePairConfig& from) { - CopyFrom(from); - return *this; - } - inline IceCandidatePairConfig& operator=(IceCandidatePairConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return 
*this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const IceCandidatePairConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const IceCandidatePairConfig* internal_default_instance() { - return reinterpret_cast( - &_IceCandidatePairConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 20; - - friend void swap(IceCandidatePairConfig& a, IceCandidatePairConfig& b) { - a.Swap(&b); - } - inline void Swap(IceCandidatePairConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline IceCandidatePairConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - IceCandidatePairConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const IceCandidatePairConfig& from); - void MergeFrom(const IceCandidatePairConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) 
const; - void InternalSwap(IceCandidatePairConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.IceCandidatePairConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfigType; - static constexpr IceCandidatePairConfigType ADDED = - IceCandidatePairConfig_IceCandidatePairConfigType_ADDED; - static constexpr IceCandidatePairConfigType UPDATED = - IceCandidatePairConfig_IceCandidatePairConfigType_UPDATED; - static constexpr IceCandidatePairConfigType DESTROYED = - IceCandidatePairConfig_IceCandidatePairConfigType_DESTROYED; - static constexpr IceCandidatePairConfigType SELECTED = - IceCandidatePairConfig_IceCandidatePairConfigType_SELECTED; - static inline bool IceCandidatePairConfigType_IsValid(int value) { - return IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(value); - } - static constexpr IceCandidatePairConfigType IceCandidatePairConfigType_MIN = - IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MIN; - static constexpr IceCandidatePairConfigType IceCandidatePairConfigType_MAX = - IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MAX; - static constexpr int IceCandidatePairConfigType_ARRAYSIZE = - IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_ARRAYSIZE; - template - static inline const std::string& IceCandidatePairConfigType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfigType_Name."); - return 
IceCandidatePairConfig_IceCandidatePairConfigType_Name(enum_t_value); - } - static inline bool IceCandidatePairConfigType_Parse(const std::string& name, - IceCandidatePairConfigType* value) { - return IceCandidatePairConfig_IceCandidatePairConfigType_Parse(name, value); - } - - typedef IceCandidatePairConfig_IceCandidateType IceCandidateType; - static constexpr IceCandidateType LOCAL = - IceCandidatePairConfig_IceCandidateType_LOCAL; - static constexpr IceCandidateType STUN = - IceCandidatePairConfig_IceCandidateType_STUN; - static constexpr IceCandidateType PRFLX = - IceCandidatePairConfig_IceCandidateType_PRFLX; - static constexpr IceCandidateType RELAY = - IceCandidatePairConfig_IceCandidateType_RELAY; - static constexpr IceCandidateType UNKNOWN_CANDIDATE_TYPE = - IceCandidatePairConfig_IceCandidateType_UNKNOWN_CANDIDATE_TYPE; - static inline bool IceCandidateType_IsValid(int value) { - return IceCandidatePairConfig_IceCandidateType_IsValid(value); - } - static constexpr IceCandidateType IceCandidateType_MIN = - IceCandidatePairConfig_IceCandidateType_IceCandidateType_MIN; - static constexpr IceCandidateType IceCandidateType_MAX = - IceCandidatePairConfig_IceCandidateType_IceCandidateType_MAX; - static constexpr int IceCandidateType_ARRAYSIZE = - IceCandidatePairConfig_IceCandidateType_IceCandidateType_ARRAYSIZE; - template - static inline const std::string& IceCandidateType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidateType_Name."); - return IceCandidatePairConfig_IceCandidateType_Name(enum_t_value); - } - static inline bool IceCandidateType_Parse(const std::string& name, - IceCandidateType* value) { - return IceCandidatePairConfig_IceCandidateType_Parse(name, value); - } - - typedef IceCandidatePairConfig_Protocol Protocol; - static constexpr Protocol UDP = - IceCandidatePairConfig_Protocol_UDP; - static constexpr Protocol TCP = - 
IceCandidatePairConfig_Protocol_TCP; - static constexpr Protocol SSLTCP = - IceCandidatePairConfig_Protocol_SSLTCP; - static constexpr Protocol TLS = - IceCandidatePairConfig_Protocol_TLS; - static constexpr Protocol UNKNOWN_PROTOCOL = - IceCandidatePairConfig_Protocol_UNKNOWN_PROTOCOL; - static inline bool Protocol_IsValid(int value) { - return IceCandidatePairConfig_Protocol_IsValid(value); - } - static constexpr Protocol Protocol_MIN = - IceCandidatePairConfig_Protocol_Protocol_MIN; - static constexpr Protocol Protocol_MAX = - IceCandidatePairConfig_Protocol_Protocol_MAX; - static constexpr int Protocol_ARRAYSIZE = - IceCandidatePairConfig_Protocol_Protocol_ARRAYSIZE; - template - static inline const std::string& Protocol_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function Protocol_Name."); - return IceCandidatePairConfig_Protocol_Name(enum_t_value); - } - static inline bool Protocol_Parse(const std::string& name, - Protocol* value) { - return IceCandidatePairConfig_Protocol_Parse(name, value); - } - - typedef IceCandidatePairConfig_AddressFamily AddressFamily; - static constexpr AddressFamily IPV4 = - IceCandidatePairConfig_AddressFamily_IPV4; - static constexpr AddressFamily IPV6 = - IceCandidatePairConfig_AddressFamily_IPV6; - static constexpr AddressFamily UNKNOWN_ADDRESS_FAMILY = - IceCandidatePairConfig_AddressFamily_UNKNOWN_ADDRESS_FAMILY; - static inline bool AddressFamily_IsValid(int value) { - return IceCandidatePairConfig_AddressFamily_IsValid(value); - } - static constexpr AddressFamily AddressFamily_MIN = - IceCandidatePairConfig_AddressFamily_AddressFamily_MIN; - static constexpr AddressFamily AddressFamily_MAX = - IceCandidatePairConfig_AddressFamily_AddressFamily_MAX; - static constexpr int AddressFamily_ARRAYSIZE = - IceCandidatePairConfig_AddressFamily_AddressFamily_ARRAYSIZE; - template - static inline const std::string& AddressFamily_Name(T enum_t_value) { - 
static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function AddressFamily_Name."); - return IceCandidatePairConfig_AddressFamily_Name(enum_t_value); - } - static inline bool AddressFamily_Parse(const std::string& name, - AddressFamily* value) { - return IceCandidatePairConfig_AddressFamily_Parse(name, value); - } - - typedef IceCandidatePairConfig_NetworkType NetworkType; - static constexpr NetworkType ETHERNET = - IceCandidatePairConfig_NetworkType_ETHERNET; - static constexpr NetworkType LOOPBACK = - IceCandidatePairConfig_NetworkType_LOOPBACK; - static constexpr NetworkType WIFI = - IceCandidatePairConfig_NetworkType_WIFI; - static constexpr NetworkType VPN = - IceCandidatePairConfig_NetworkType_VPN; - static constexpr NetworkType CELLULAR = - IceCandidatePairConfig_NetworkType_CELLULAR; - static constexpr NetworkType UNKNOWN_NETWORK_TYPE = - IceCandidatePairConfig_NetworkType_UNKNOWN_NETWORK_TYPE; - static inline bool NetworkType_IsValid(int value) { - return IceCandidatePairConfig_NetworkType_IsValid(value); - } - static constexpr NetworkType NetworkType_MIN = - IceCandidatePairConfig_NetworkType_NetworkType_MIN; - static constexpr NetworkType NetworkType_MAX = - IceCandidatePairConfig_NetworkType_NetworkType_MAX; - static constexpr int NetworkType_ARRAYSIZE = - IceCandidatePairConfig_NetworkType_NetworkType_ARRAYSIZE; - template - static inline const std::string& NetworkType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function NetworkType_Name."); - return IceCandidatePairConfig_NetworkType_Name(enum_t_value); - } - static inline bool NetworkType_Parse(const std::string& name, - NetworkType* value) { - return IceCandidatePairConfig_NetworkType_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kConfigTypeFieldNumber = 1, - kCandidatePairIdFieldNumber = 2, - 
kLocalCandidateTypeFieldNumber = 3, - kLocalRelayProtocolFieldNumber = 4, - kLocalNetworkTypeFieldNumber = 5, - kLocalAddressFamilyFieldNumber = 6, - kRemoteCandidateTypeFieldNumber = 7, - kRemoteAddressFamilyFieldNumber = 8, - kCandidatePairProtocolFieldNumber = 9, - }; - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 1; - bool has_config_type() const; - void clear_config_type(); - ::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType config_type() const; - void set_config_type(::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType value); - - // optional uint32 candidate_pair_id = 2; - bool has_candidate_pair_id() const; - void clear_candidate_pair_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id() const; - void set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType local_candidate_type = 3; - bool has_local_candidate_type() const; - void clear_local_candidate_type(); - ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType local_candidate_type() const; - void set_local_candidate_type(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType value); - - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol local_relay_protocol = 4; - bool has_local_relay_protocol() const; - void clear_local_relay_protocol(); - ::webrtc::rtclog::IceCandidatePairConfig_Protocol local_relay_protocol() const; - void set_local_relay_protocol(::webrtc::rtclog::IceCandidatePairConfig_Protocol value); - - // optional .webrtc.rtclog.IceCandidatePairConfig.NetworkType local_network_type = 5; - bool has_local_network_type() const; - void clear_local_network_type(); - ::webrtc::rtclog::IceCandidatePairConfig_NetworkType local_network_type() const; - void set_local_network_type(::webrtc::rtclog::IceCandidatePairConfig_NetworkType value); - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily local_address_family = 6; 
- bool has_local_address_family() const; - void clear_local_address_family(); - ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily local_address_family() const; - void set_local_address_family(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily value); - - // optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 7; - bool has_remote_candidate_type() const; - void clear_remote_candidate_type(); - ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType remote_candidate_type() const; - void set_remote_candidate_type(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType value); - - // optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily remote_address_family = 8; - bool has_remote_address_family() const; - void clear_remote_address_family(); - ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily remote_address_family() const; - void set_remote_address_family(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily value); - - // optional .webrtc.rtclog.IceCandidatePairConfig.Protocol candidate_pair_protocol = 9; - bool has_candidate_pair_protocol() const; - void clear_candidate_pair_protocol(); - ::webrtc::rtclog::IceCandidatePairConfig_Protocol candidate_pair_protocol() const; - void set_candidate_pair_protocol(::webrtc::rtclog::IceCandidatePairConfig_Protocol value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog.IceCandidatePairConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - int config_type_; - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id_; - int local_candidate_type_; - int local_relay_protocol_; - int local_network_type_; - int local_address_family_; - int remote_candidate_type_; - int remote_address_family_; - int candidate_pair_protocol_; - friend struct 
::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// ------------------------------------------------------------------- - -class IceCandidatePairEvent : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog.IceCandidatePairEvent) */ { - public: - IceCandidatePairEvent(); - virtual ~IceCandidatePairEvent(); - - IceCandidatePairEvent(const IceCandidatePairEvent& from); - IceCandidatePairEvent(IceCandidatePairEvent&& from) noexcept - : IceCandidatePairEvent() { - *this = ::std::move(from); - } - - inline IceCandidatePairEvent& operator=(const IceCandidatePairEvent& from) { - CopyFrom(from); - return *this; - } - inline IceCandidatePairEvent& operator=(IceCandidatePairEvent&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const IceCandidatePairEvent& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const IceCandidatePairEvent* internal_default_instance() { - return reinterpret_cast( - &_IceCandidatePairEvent_default_instance_); - } - static constexpr int kIndexInFileMessages = - 21; - - friend void swap(IceCandidatePairEvent& a, IceCandidatePairEvent& b) { - a.Swap(&b); - } - inline void Swap(IceCandidatePairEvent* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline IceCandidatePairEvent* New() const final { - return CreateMaybeMessage(nullptr); - } - - IceCandidatePairEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const 
::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const IceCandidatePairEvent& from); - void MergeFrom(const IceCandidatePairEvent& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(IceCandidatePairEvent* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog.IceCandidatePairEvent"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEventType; - static constexpr IceCandidatePairEventType CHECK_SENT = - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_SENT; - static constexpr IceCandidatePairEventType CHECK_RECEIVED = - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RECEIVED; - static constexpr IceCandidatePairEventType CHECK_RESPONSE_SENT = - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_SENT; - static constexpr IceCandidatePairEventType CHECK_RESPONSE_RECEIVED = - 
IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_RECEIVED; - static inline bool IceCandidatePairEventType_IsValid(int value) { - return IceCandidatePairEvent_IceCandidatePairEventType_IsValid(value); - } - static constexpr IceCandidatePairEventType IceCandidatePairEventType_MIN = - IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MIN; - static constexpr IceCandidatePairEventType IceCandidatePairEventType_MAX = - IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MAX; - static constexpr int IceCandidatePairEventType_ARRAYSIZE = - IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_ARRAYSIZE; - template - static inline const std::string& IceCandidatePairEventType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairEventType_Name."); - return IceCandidatePairEvent_IceCandidatePairEventType_Name(enum_t_value); - } - static inline bool IceCandidatePairEventType_Parse(const std::string& name, - IceCandidatePairEventType* value) { - return IceCandidatePairEvent_IceCandidatePairEventType_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kEventTypeFieldNumber = 1, - kCandidatePairIdFieldNumber = 2, - }; - // optional .webrtc.rtclog.IceCandidatePairEvent.IceCandidatePairEventType event_type = 1; - bool has_event_type() const; - void clear_event_type(); - ::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType event_type() const; - void set_event_type(::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType value); - - // optional uint32 candidate_pair_id = 2; - bool has_candidate_pair_id() const; - void clear_candidate_pair_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id() const; - void set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // 
@@protoc_insertion_point(class_scope:webrtc.rtclog.IceCandidatePairEvent) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - int event_type_; - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id_; - friend struct ::TableStruct_rtc_5fevent_5flog_2eproto; -}; -// =================================================================== - - -// =================================================================== - -#ifdef __GNUC__ - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wstrict-aliasing" -#endif // __GNUC__ -// EventStream - -// repeated .webrtc.rtclog.Event stream = 1; -inline int EventStream::stream_size() const { - return stream_.size(); -} -inline void EventStream::clear_stream() { - stream_.Clear(); -} -inline ::webrtc::rtclog::Event* EventStream::mutable_stream(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.EventStream.stream) - return stream_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::Event >* -EventStream::mutable_stream() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog.EventStream.stream) - return &stream_; -} -inline const ::webrtc::rtclog::Event& EventStream::stream(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.EventStream.stream) - return stream_.Get(index); -} -inline ::webrtc::rtclog::Event* EventStream::add_stream() { - // @@protoc_insertion_point(field_add:webrtc.rtclog.EventStream.stream) - return stream_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::Event >& -EventStream::stream() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.EventStream.stream) - return stream_; -} - -// ------------------------------------------------------------------- - -// Event - -// optional 
int64 timestamp_us = 1; -inline bool Event::has_timestamp_us() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void Event::clear_timestamp_us() { - timestamp_us_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 Event::timestamp_us() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.timestamp_us) - return timestamp_us_; -} -inline void Event::set_timestamp_us(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_us_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.Event.timestamp_us) -} - -// optional .webrtc.rtclog.Event.EventType type = 2; -inline bool Event::has_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void Event::clear_type() { - type_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::webrtc::rtclog::Event_EventType Event::type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.type) - return static_cast< ::webrtc::rtclog::Event_EventType >(type_); -} -inline void Event::set_type(::webrtc::rtclog::Event_EventType value) { - assert(::webrtc::rtclog::Event_EventType_IsValid(value)); - _has_bits_[0] |= 0x00000002u; - type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.Event.type) -} - -// optional .webrtc.rtclog.RtpPacket rtp_packet = 3; -inline bool Event::has_rtp_packet() const { - return subtype_case() == kRtpPacket; -} -inline void Event::set_has_rtp_packet() { - _oneof_case_[0] = kRtpPacket; -} -inline void Event::clear_rtp_packet() { - if (has_rtp_packet()) { - delete subtype_.rtp_packet_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::RtpPacket* Event::release_rtp_packet() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.rtp_packet) - if (has_rtp_packet()) { - clear_has_subtype(); - ::webrtc::rtclog::RtpPacket* temp = subtype_.rtp_packet_; - subtype_.rtp_packet_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const 
::webrtc::rtclog::RtpPacket& Event::rtp_packet() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.rtp_packet) - return has_rtp_packet() - ? *subtype_.rtp_packet_ - : *reinterpret_cast< ::webrtc::rtclog::RtpPacket*>(&::webrtc::rtclog::_RtpPacket_default_instance_); -} -inline ::webrtc::rtclog::RtpPacket* Event::mutable_rtp_packet() { - if (!has_rtp_packet()) { - clear_subtype(); - set_has_rtp_packet(); - subtype_.rtp_packet_ = CreateMaybeMessage< ::webrtc::rtclog::RtpPacket >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.rtp_packet) - return subtype_.rtp_packet_; -} - -// optional .webrtc.rtclog.RtcpPacket rtcp_packet = 4; -inline bool Event::has_rtcp_packet() const { - return subtype_case() == kRtcpPacket; -} -inline void Event::set_has_rtcp_packet() { - _oneof_case_[0] = kRtcpPacket; -} -inline void Event::clear_rtcp_packet() { - if (has_rtcp_packet()) { - delete subtype_.rtcp_packet_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::RtcpPacket* Event::release_rtcp_packet() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.rtcp_packet) - if (has_rtcp_packet()) { - clear_has_subtype(); - ::webrtc::rtclog::RtcpPacket* temp = subtype_.rtcp_packet_; - subtype_.rtcp_packet_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::RtcpPacket& Event::rtcp_packet() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.rtcp_packet) - return has_rtcp_packet() - ? 
*subtype_.rtcp_packet_ - : *reinterpret_cast< ::webrtc::rtclog::RtcpPacket*>(&::webrtc::rtclog::_RtcpPacket_default_instance_); -} -inline ::webrtc::rtclog::RtcpPacket* Event::mutable_rtcp_packet() { - if (!has_rtcp_packet()) { - clear_subtype(); - set_has_rtcp_packet(); - subtype_.rtcp_packet_ = CreateMaybeMessage< ::webrtc::rtclog::RtcpPacket >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.rtcp_packet) - return subtype_.rtcp_packet_; -} - -// optional .webrtc.rtclog.AudioPlayoutEvent audio_playout_event = 5; -inline bool Event::has_audio_playout_event() const { - return subtype_case() == kAudioPlayoutEvent; -} -inline void Event::set_has_audio_playout_event() { - _oneof_case_[0] = kAudioPlayoutEvent; -} -inline void Event::clear_audio_playout_event() { - if (has_audio_playout_event()) { - delete subtype_.audio_playout_event_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::AudioPlayoutEvent* Event::release_audio_playout_event() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.audio_playout_event) - if (has_audio_playout_event()) { - clear_has_subtype(); - ::webrtc::rtclog::AudioPlayoutEvent* temp = subtype_.audio_playout_event_; - subtype_.audio_playout_event_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::AudioPlayoutEvent& Event::audio_playout_event() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.audio_playout_event) - return has_audio_playout_event() - ? 
*subtype_.audio_playout_event_ - : *reinterpret_cast< ::webrtc::rtclog::AudioPlayoutEvent*>(&::webrtc::rtclog::_AudioPlayoutEvent_default_instance_); -} -inline ::webrtc::rtclog::AudioPlayoutEvent* Event::mutable_audio_playout_event() { - if (!has_audio_playout_event()) { - clear_subtype(); - set_has_audio_playout_event(); - subtype_.audio_playout_event_ = CreateMaybeMessage< ::webrtc::rtclog::AudioPlayoutEvent >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.audio_playout_event) - return subtype_.audio_playout_event_; -} - -// optional .webrtc.rtclog.LossBasedBweUpdate loss_based_bwe_update = 6; -inline bool Event::has_loss_based_bwe_update() const { - return subtype_case() == kLossBasedBweUpdate; -} -inline void Event::set_has_loss_based_bwe_update() { - _oneof_case_[0] = kLossBasedBweUpdate; -} -inline void Event::clear_loss_based_bwe_update() { - if (has_loss_based_bwe_update()) { - delete subtype_.loss_based_bwe_update_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::LossBasedBweUpdate* Event::release_loss_based_bwe_update() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.loss_based_bwe_update) - if (has_loss_based_bwe_update()) { - clear_has_subtype(); - ::webrtc::rtclog::LossBasedBweUpdate* temp = subtype_.loss_based_bwe_update_; - subtype_.loss_based_bwe_update_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::LossBasedBweUpdate& Event::loss_based_bwe_update() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.loss_based_bwe_update) - return has_loss_based_bwe_update() - ? 
*subtype_.loss_based_bwe_update_ - : *reinterpret_cast< ::webrtc::rtclog::LossBasedBweUpdate*>(&::webrtc::rtclog::_LossBasedBweUpdate_default_instance_); -} -inline ::webrtc::rtclog::LossBasedBweUpdate* Event::mutable_loss_based_bwe_update() { - if (!has_loss_based_bwe_update()) { - clear_subtype(); - set_has_loss_based_bwe_update(); - subtype_.loss_based_bwe_update_ = CreateMaybeMessage< ::webrtc::rtclog::LossBasedBweUpdate >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.loss_based_bwe_update) - return subtype_.loss_based_bwe_update_; -} - -// optional .webrtc.rtclog.DelayBasedBweUpdate delay_based_bwe_update = 7; -inline bool Event::has_delay_based_bwe_update() const { - return subtype_case() == kDelayBasedBweUpdate; -} -inline void Event::set_has_delay_based_bwe_update() { - _oneof_case_[0] = kDelayBasedBweUpdate; -} -inline void Event::clear_delay_based_bwe_update() { - if (has_delay_based_bwe_update()) { - delete subtype_.delay_based_bwe_update_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::DelayBasedBweUpdate* Event::release_delay_based_bwe_update() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.delay_based_bwe_update) - if (has_delay_based_bwe_update()) { - clear_has_subtype(); - ::webrtc::rtclog::DelayBasedBweUpdate* temp = subtype_.delay_based_bwe_update_; - subtype_.delay_based_bwe_update_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::DelayBasedBweUpdate& Event::delay_based_bwe_update() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.delay_based_bwe_update) - return has_delay_based_bwe_update() - ? 
*subtype_.delay_based_bwe_update_ - : *reinterpret_cast< ::webrtc::rtclog::DelayBasedBweUpdate*>(&::webrtc::rtclog::_DelayBasedBweUpdate_default_instance_); -} -inline ::webrtc::rtclog::DelayBasedBweUpdate* Event::mutable_delay_based_bwe_update() { - if (!has_delay_based_bwe_update()) { - clear_subtype(); - set_has_delay_based_bwe_update(); - subtype_.delay_based_bwe_update_ = CreateMaybeMessage< ::webrtc::rtclog::DelayBasedBweUpdate >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.delay_based_bwe_update) - return subtype_.delay_based_bwe_update_; -} - -// optional .webrtc.rtclog.VideoReceiveConfig video_receiver_config = 8; -inline bool Event::has_video_receiver_config() const { - return subtype_case() == kVideoReceiverConfig; -} -inline void Event::set_has_video_receiver_config() { - _oneof_case_[0] = kVideoReceiverConfig; -} -inline void Event::clear_video_receiver_config() { - if (has_video_receiver_config()) { - delete subtype_.video_receiver_config_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::VideoReceiveConfig* Event::release_video_receiver_config() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.video_receiver_config) - if (has_video_receiver_config()) { - clear_has_subtype(); - ::webrtc::rtclog::VideoReceiveConfig* temp = subtype_.video_receiver_config_; - subtype_.video_receiver_config_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::VideoReceiveConfig& Event::video_receiver_config() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.video_receiver_config) - return has_video_receiver_config() - ? 
*subtype_.video_receiver_config_ - : *reinterpret_cast< ::webrtc::rtclog::VideoReceiveConfig*>(&::webrtc::rtclog::_VideoReceiveConfig_default_instance_); -} -inline ::webrtc::rtclog::VideoReceiveConfig* Event::mutable_video_receiver_config() { - if (!has_video_receiver_config()) { - clear_subtype(); - set_has_video_receiver_config(); - subtype_.video_receiver_config_ = CreateMaybeMessage< ::webrtc::rtclog::VideoReceiveConfig >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.video_receiver_config) - return subtype_.video_receiver_config_; -} - -// optional .webrtc.rtclog.VideoSendConfig video_sender_config = 9; -inline bool Event::has_video_sender_config() const { - return subtype_case() == kVideoSenderConfig; -} -inline void Event::set_has_video_sender_config() { - _oneof_case_[0] = kVideoSenderConfig; -} -inline void Event::clear_video_sender_config() { - if (has_video_sender_config()) { - delete subtype_.video_sender_config_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::VideoSendConfig* Event::release_video_sender_config() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.video_sender_config) - if (has_video_sender_config()) { - clear_has_subtype(); - ::webrtc::rtclog::VideoSendConfig* temp = subtype_.video_sender_config_; - subtype_.video_sender_config_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::VideoSendConfig& Event::video_sender_config() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.video_sender_config) - return has_video_sender_config() - ? 
*subtype_.video_sender_config_ - : *reinterpret_cast< ::webrtc::rtclog::VideoSendConfig*>(&::webrtc::rtclog::_VideoSendConfig_default_instance_); -} -inline ::webrtc::rtclog::VideoSendConfig* Event::mutable_video_sender_config() { - if (!has_video_sender_config()) { - clear_subtype(); - set_has_video_sender_config(); - subtype_.video_sender_config_ = CreateMaybeMessage< ::webrtc::rtclog::VideoSendConfig >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.video_sender_config) - return subtype_.video_sender_config_; -} - -// optional .webrtc.rtclog.AudioReceiveConfig audio_receiver_config = 10; -inline bool Event::has_audio_receiver_config() const { - return subtype_case() == kAudioReceiverConfig; -} -inline void Event::set_has_audio_receiver_config() { - _oneof_case_[0] = kAudioReceiverConfig; -} -inline void Event::clear_audio_receiver_config() { - if (has_audio_receiver_config()) { - delete subtype_.audio_receiver_config_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::AudioReceiveConfig* Event::release_audio_receiver_config() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.audio_receiver_config) - if (has_audio_receiver_config()) { - clear_has_subtype(); - ::webrtc::rtclog::AudioReceiveConfig* temp = subtype_.audio_receiver_config_; - subtype_.audio_receiver_config_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::AudioReceiveConfig& Event::audio_receiver_config() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.audio_receiver_config) - return has_audio_receiver_config() - ? 
*subtype_.audio_receiver_config_ - : *reinterpret_cast< ::webrtc::rtclog::AudioReceiveConfig*>(&::webrtc::rtclog::_AudioReceiveConfig_default_instance_); -} -inline ::webrtc::rtclog::AudioReceiveConfig* Event::mutable_audio_receiver_config() { - if (!has_audio_receiver_config()) { - clear_subtype(); - set_has_audio_receiver_config(); - subtype_.audio_receiver_config_ = CreateMaybeMessage< ::webrtc::rtclog::AudioReceiveConfig >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.audio_receiver_config) - return subtype_.audio_receiver_config_; -} - -// optional .webrtc.rtclog.AudioSendConfig audio_sender_config = 11; -inline bool Event::has_audio_sender_config() const { - return subtype_case() == kAudioSenderConfig; -} -inline void Event::set_has_audio_sender_config() { - _oneof_case_[0] = kAudioSenderConfig; -} -inline void Event::clear_audio_sender_config() { - if (has_audio_sender_config()) { - delete subtype_.audio_sender_config_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::AudioSendConfig* Event::release_audio_sender_config() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.audio_sender_config) - if (has_audio_sender_config()) { - clear_has_subtype(); - ::webrtc::rtclog::AudioSendConfig* temp = subtype_.audio_sender_config_; - subtype_.audio_sender_config_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::AudioSendConfig& Event::audio_sender_config() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.audio_sender_config) - return has_audio_sender_config() - ? 
*subtype_.audio_sender_config_ - : *reinterpret_cast< ::webrtc::rtclog::AudioSendConfig*>(&::webrtc::rtclog::_AudioSendConfig_default_instance_); -} -inline ::webrtc::rtclog::AudioSendConfig* Event::mutable_audio_sender_config() { - if (!has_audio_sender_config()) { - clear_subtype(); - set_has_audio_sender_config(); - subtype_.audio_sender_config_ = CreateMaybeMessage< ::webrtc::rtclog::AudioSendConfig >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.audio_sender_config) - return subtype_.audio_sender_config_; -} - -// optional .webrtc.rtclog.AudioNetworkAdaptation audio_network_adaptation = 16; -inline bool Event::has_audio_network_adaptation() const { - return subtype_case() == kAudioNetworkAdaptation; -} -inline void Event::set_has_audio_network_adaptation() { - _oneof_case_[0] = kAudioNetworkAdaptation; -} -inline void Event::clear_audio_network_adaptation() { - if (has_audio_network_adaptation()) { - delete subtype_.audio_network_adaptation_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::AudioNetworkAdaptation* Event::release_audio_network_adaptation() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.audio_network_adaptation) - if (has_audio_network_adaptation()) { - clear_has_subtype(); - ::webrtc::rtclog::AudioNetworkAdaptation* temp = subtype_.audio_network_adaptation_; - subtype_.audio_network_adaptation_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::AudioNetworkAdaptation& Event::audio_network_adaptation() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.audio_network_adaptation) - return has_audio_network_adaptation() - ? 
*subtype_.audio_network_adaptation_ - : *reinterpret_cast< ::webrtc::rtclog::AudioNetworkAdaptation*>(&::webrtc::rtclog::_AudioNetworkAdaptation_default_instance_); -} -inline ::webrtc::rtclog::AudioNetworkAdaptation* Event::mutable_audio_network_adaptation() { - if (!has_audio_network_adaptation()) { - clear_subtype(); - set_has_audio_network_adaptation(); - subtype_.audio_network_adaptation_ = CreateMaybeMessage< ::webrtc::rtclog::AudioNetworkAdaptation >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.audio_network_adaptation) - return subtype_.audio_network_adaptation_; -} - -// optional .webrtc.rtclog.BweProbeCluster probe_cluster = 17; -inline bool Event::has_probe_cluster() const { - return subtype_case() == kProbeCluster; -} -inline void Event::set_has_probe_cluster() { - _oneof_case_[0] = kProbeCluster; -} -inline void Event::clear_probe_cluster() { - if (has_probe_cluster()) { - delete subtype_.probe_cluster_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::BweProbeCluster* Event::release_probe_cluster() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.probe_cluster) - if (has_probe_cluster()) { - clear_has_subtype(); - ::webrtc::rtclog::BweProbeCluster* temp = subtype_.probe_cluster_; - subtype_.probe_cluster_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::BweProbeCluster& Event::probe_cluster() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.probe_cluster) - return has_probe_cluster() - ? 
*subtype_.probe_cluster_ - : *reinterpret_cast< ::webrtc::rtclog::BweProbeCluster*>(&::webrtc::rtclog::_BweProbeCluster_default_instance_); -} -inline ::webrtc::rtclog::BweProbeCluster* Event::mutable_probe_cluster() { - if (!has_probe_cluster()) { - clear_subtype(); - set_has_probe_cluster(); - subtype_.probe_cluster_ = CreateMaybeMessage< ::webrtc::rtclog::BweProbeCluster >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.probe_cluster) - return subtype_.probe_cluster_; -} - -// optional .webrtc.rtclog.BweProbeResult probe_result = 18; -inline bool Event::has_probe_result() const { - return subtype_case() == kProbeResult; -} -inline void Event::set_has_probe_result() { - _oneof_case_[0] = kProbeResult; -} -inline void Event::clear_probe_result() { - if (has_probe_result()) { - delete subtype_.probe_result_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::BweProbeResult* Event::release_probe_result() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.probe_result) - if (has_probe_result()) { - clear_has_subtype(); - ::webrtc::rtclog::BweProbeResult* temp = subtype_.probe_result_; - subtype_.probe_result_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::BweProbeResult& Event::probe_result() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.probe_result) - return has_probe_result() - ? 
*subtype_.probe_result_ - : *reinterpret_cast< ::webrtc::rtclog::BweProbeResult*>(&::webrtc::rtclog::_BweProbeResult_default_instance_); -} -inline ::webrtc::rtclog::BweProbeResult* Event::mutable_probe_result() { - if (!has_probe_result()) { - clear_subtype(); - set_has_probe_result(); - subtype_.probe_result_ = CreateMaybeMessage< ::webrtc::rtclog::BweProbeResult >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.probe_result) - return subtype_.probe_result_; -} - -// optional .webrtc.rtclog.AlrState alr_state = 19; -inline bool Event::has_alr_state() const { - return subtype_case() == kAlrState; -} -inline void Event::set_has_alr_state() { - _oneof_case_[0] = kAlrState; -} -inline void Event::clear_alr_state() { - if (has_alr_state()) { - delete subtype_.alr_state_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::AlrState* Event::release_alr_state() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.alr_state) - if (has_alr_state()) { - clear_has_subtype(); - ::webrtc::rtclog::AlrState* temp = subtype_.alr_state_; - subtype_.alr_state_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::AlrState& Event::alr_state() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.alr_state) - return has_alr_state() - ? 
*subtype_.alr_state_ - : *reinterpret_cast< ::webrtc::rtclog::AlrState*>(&::webrtc::rtclog::_AlrState_default_instance_); -} -inline ::webrtc::rtclog::AlrState* Event::mutable_alr_state() { - if (!has_alr_state()) { - clear_subtype(); - set_has_alr_state(); - subtype_.alr_state_ = CreateMaybeMessage< ::webrtc::rtclog::AlrState >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.alr_state) - return subtype_.alr_state_; -} - -// optional .webrtc.rtclog.IceCandidatePairConfig ice_candidate_pair_config = 20; -inline bool Event::has_ice_candidate_pair_config() const { - return subtype_case() == kIceCandidatePairConfig; -} -inline void Event::set_has_ice_candidate_pair_config() { - _oneof_case_[0] = kIceCandidatePairConfig; -} -inline void Event::clear_ice_candidate_pair_config() { - if (has_ice_candidate_pair_config()) { - delete subtype_.ice_candidate_pair_config_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::IceCandidatePairConfig* Event::release_ice_candidate_pair_config() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.ice_candidate_pair_config) - if (has_ice_candidate_pair_config()) { - clear_has_subtype(); - ::webrtc::rtclog::IceCandidatePairConfig* temp = subtype_.ice_candidate_pair_config_; - subtype_.ice_candidate_pair_config_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::IceCandidatePairConfig& Event::ice_candidate_pair_config() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.ice_candidate_pair_config) - return has_ice_candidate_pair_config() - ? 
*subtype_.ice_candidate_pair_config_ - : *reinterpret_cast< ::webrtc::rtclog::IceCandidatePairConfig*>(&::webrtc::rtclog::_IceCandidatePairConfig_default_instance_); -} -inline ::webrtc::rtclog::IceCandidatePairConfig* Event::mutable_ice_candidate_pair_config() { - if (!has_ice_candidate_pair_config()) { - clear_subtype(); - set_has_ice_candidate_pair_config(); - subtype_.ice_candidate_pair_config_ = CreateMaybeMessage< ::webrtc::rtclog::IceCandidatePairConfig >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.ice_candidate_pair_config) - return subtype_.ice_candidate_pair_config_; -} - -// optional .webrtc.rtclog.IceCandidatePairEvent ice_candidate_pair_event = 21; -inline bool Event::has_ice_candidate_pair_event() const { - return subtype_case() == kIceCandidatePairEvent; -} -inline void Event::set_has_ice_candidate_pair_event() { - _oneof_case_[0] = kIceCandidatePairEvent; -} -inline void Event::clear_ice_candidate_pair_event() { - if (has_ice_candidate_pair_event()) { - delete subtype_.ice_candidate_pair_event_; - clear_has_subtype(); - } -} -inline ::webrtc::rtclog::IceCandidatePairEvent* Event::release_ice_candidate_pair_event() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.Event.ice_candidate_pair_event) - if (has_ice_candidate_pair_event()) { - clear_has_subtype(); - ::webrtc::rtclog::IceCandidatePairEvent* temp = subtype_.ice_candidate_pair_event_; - subtype_.ice_candidate_pair_event_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::webrtc::rtclog::IceCandidatePairEvent& Event::ice_candidate_pair_event() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.Event.ice_candidate_pair_event) - return has_ice_candidate_pair_event() - ? 
*subtype_.ice_candidate_pair_event_ - : *reinterpret_cast< ::webrtc::rtclog::IceCandidatePairEvent*>(&::webrtc::rtclog::_IceCandidatePairEvent_default_instance_); -} -inline ::webrtc::rtclog::IceCandidatePairEvent* Event::mutable_ice_candidate_pair_event() { - if (!has_ice_candidate_pair_event()) { - clear_subtype(); - set_has_ice_candidate_pair_event(); - subtype_.ice_candidate_pair_event_ = CreateMaybeMessage< ::webrtc::rtclog::IceCandidatePairEvent >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.Event.ice_candidate_pair_event) - return subtype_.ice_candidate_pair_event_; -} - -inline bool Event::has_subtype() const { - return subtype_case() != SUBTYPE_NOT_SET; -} -inline void Event::clear_has_subtype() { - _oneof_case_[0] = SUBTYPE_NOT_SET; -} -inline Event::SubtypeCase Event::subtype_case() const { - return Event::SubtypeCase(_oneof_case_[0]); -} -// ------------------------------------------------------------------- - -// RtpPacket - -// optional bool incoming = 1; -inline bool RtpPacket::has_incoming() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RtpPacket::clear_incoming() { - incoming_ = false; - _has_bits_[0] &= ~0x00000002u; -} -inline bool RtpPacket::incoming() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtpPacket.incoming) - return incoming_; -} -inline void RtpPacket::set_incoming(bool value) { - _has_bits_[0] |= 0x00000002u; - incoming_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtpPacket.incoming) -} - -// optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; -inline bool RtpPacket::has_type() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void RtpPacket::clear_type() { - type_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline ::webrtc::rtclog::MediaType RtpPacket::type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtpPacket.type) - return static_cast< ::webrtc::rtclog::MediaType >(type_); -} -inline void 
RtpPacket::set_type(::webrtc::rtclog::MediaType value) { - assert(::webrtc::rtclog::MediaType_IsValid(value)); - _has_bits_[0] |= 0x00000004u; - type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtpPacket.type) -} - -// optional uint32 packet_length = 3; -inline bool RtpPacket::has_packet_length() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void RtpPacket::clear_packet_length() { - packet_length_ = 0u; - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 RtpPacket::packet_length() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtpPacket.packet_length) - return packet_length_; -} -inline void RtpPacket::set_packet_length(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - packet_length_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtpPacket.packet_length) -} - -// optional bytes header = 4; -inline bool RtpPacket::has_header() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void RtpPacket::clear_header() { - header_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& RtpPacket::header() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtpPacket.header) - return header_.GetNoArena(); -} -inline void RtpPacket::set_header(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - header_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtpPacket.header) -} -inline void RtpPacket::set_header(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - header_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog.RtpPacket.header) -} -inline void RtpPacket::set_header(const char* value) { - GOOGLE_DCHECK(value != nullptr); - 
_has_bits_[0] |= 0x00000001u; - header_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog.RtpPacket.header) -} -inline void RtpPacket::set_header(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - header_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog.RtpPacket.header) -} -inline std::string* RtpPacket::mutable_header() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.RtpPacket.header) - return header_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* RtpPacket::release_header() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.RtpPacket.header) - if (!has_header()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return header_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void RtpPacket::set_allocated_header(std::string* header) { - if (header != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - header_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), header); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.RtpPacket.header) -} - -// optional int32 probe_cluster_id = 5; -inline bool RtpPacket::has_probe_cluster_id() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void RtpPacket::clear_probe_cluster_id() { - probe_cluster_id_ = 0; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtpPacket::probe_cluster_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtpPacket.probe_cluster_id) - return probe_cluster_id_; -} -inline void 
RtpPacket::set_probe_cluster_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000010u; - probe_cluster_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtpPacket.probe_cluster_id) -} - -// ------------------------------------------------------------------- - -// RtcpPacket - -// optional bool incoming = 1; -inline bool RtcpPacket::has_incoming() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RtcpPacket::clear_incoming() { - incoming_ = false; - _has_bits_[0] &= ~0x00000002u; -} -inline bool RtcpPacket::incoming() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtcpPacket.incoming) - return incoming_; -} -inline void RtcpPacket::set_incoming(bool value) { - _has_bits_[0] |= 0x00000002u; - incoming_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtcpPacket.incoming) -} - -// optional .webrtc.rtclog.MediaType type = 2 [deprecated = true]; -inline bool RtcpPacket::has_type() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void RtcpPacket::clear_type() { - type_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline ::webrtc::rtclog::MediaType RtcpPacket::type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtcpPacket.type) - return static_cast< ::webrtc::rtclog::MediaType >(type_); -} -inline void RtcpPacket::set_type(::webrtc::rtclog::MediaType value) { - assert(::webrtc::rtclog::MediaType_IsValid(value)); - _has_bits_[0] |= 0x00000004u; - type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtcpPacket.type) -} - -// optional bytes packet_data = 3; -inline bool RtcpPacket::has_packet_data() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void RtcpPacket::clear_packet_data() { - packet_data_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& RtcpPacket::packet_data() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog.RtcpPacket.packet_data) - return packet_data_.GetNoArena(); -} -inline void RtcpPacket::set_packet_data(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - packet_data_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtcpPacket.packet_data) -} -inline void RtcpPacket::set_packet_data(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - packet_data_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog.RtcpPacket.packet_data) -} -inline void RtcpPacket::set_packet_data(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - packet_data_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog.RtcpPacket.packet_data) -} -inline void RtcpPacket::set_packet_data(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - packet_data_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog.RtcpPacket.packet_data) -} -inline std::string* RtcpPacket::mutable_packet_data() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.RtcpPacket.packet_data) - return packet_data_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* RtcpPacket::release_packet_data() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.RtcpPacket.packet_data) - if (!has_packet_data()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return packet_data_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void 
RtcpPacket::set_allocated_packet_data(std::string* packet_data) { - if (packet_data != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - packet_data_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), packet_data); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.RtcpPacket.packet_data) -} - -// ------------------------------------------------------------------- - -// AudioPlayoutEvent - -// optional uint32 local_ssrc = 2; -inline bool AudioPlayoutEvent::has_local_ssrc() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioPlayoutEvent::clear_local_ssrc() { - local_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioPlayoutEvent::local_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioPlayoutEvent.local_ssrc) - return local_ssrc_; -} -inline void AudioPlayoutEvent::set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000001u; - local_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioPlayoutEvent.local_ssrc) -} - -// ------------------------------------------------------------------- - -// LossBasedBweUpdate - -// optional int32 bitrate_bps = 1; -inline bool LossBasedBweUpdate::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void LossBasedBweUpdate::clear_bitrate_bps() { - bitrate_bps_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 LossBasedBweUpdate::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.LossBasedBweUpdate.bitrate_bps) - return bitrate_bps_; -} -inline void LossBasedBweUpdate::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000001u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.LossBasedBweUpdate.bitrate_bps) -} - -// optional uint32 fraction_loss = 2; -inline bool 
LossBasedBweUpdate::has_fraction_loss() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void LossBasedBweUpdate::clear_fraction_loss() { - fraction_loss_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 LossBasedBweUpdate::fraction_loss() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.LossBasedBweUpdate.fraction_loss) - return fraction_loss_; -} -inline void LossBasedBweUpdate::set_fraction_loss(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - fraction_loss_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.LossBasedBweUpdate.fraction_loss) -} - -// optional int32 total_packets = 3; -inline bool LossBasedBweUpdate::has_total_packets() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void LossBasedBweUpdate::clear_total_packets() { - total_packets_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 LossBasedBweUpdate::total_packets() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.LossBasedBweUpdate.total_packets) - return total_packets_; -} -inline void LossBasedBweUpdate::set_total_packets(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000004u; - total_packets_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.LossBasedBweUpdate.total_packets) -} - -// ------------------------------------------------------------------- - -// DelayBasedBweUpdate - -// optional int32 bitrate_bps = 1; -inline bool DelayBasedBweUpdate::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void DelayBasedBweUpdate::clear_bitrate_bps() { - bitrate_bps_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 DelayBasedBweUpdate::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.DelayBasedBweUpdate.bitrate_bps) - return bitrate_bps_; -} -inline void DelayBasedBweUpdate::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value) { 
- _has_bits_[0] |= 0x00000001u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.DelayBasedBweUpdate.bitrate_bps) -} - -// optional .webrtc.rtclog.DelayBasedBweUpdate.DetectorState detector_state = 2; -inline bool DelayBasedBweUpdate::has_detector_state() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void DelayBasedBweUpdate::clear_detector_state() { - detector_state_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::webrtc::rtclog::DelayBasedBweUpdate_DetectorState DelayBasedBweUpdate::detector_state() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.DelayBasedBweUpdate.detector_state) - return static_cast< ::webrtc::rtclog::DelayBasedBweUpdate_DetectorState >(detector_state_); -} -inline void DelayBasedBweUpdate::set_detector_state(::webrtc::rtclog::DelayBasedBweUpdate_DetectorState value) { - assert(::webrtc::rtclog::DelayBasedBweUpdate_DetectorState_IsValid(value)); - _has_bits_[0] |= 0x00000002u; - detector_state_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.DelayBasedBweUpdate.detector_state) -} - -// ------------------------------------------------------------------- - -// VideoReceiveConfig - -// optional uint32 remote_ssrc = 1; -inline bool VideoReceiveConfig::has_remote_ssrc() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void VideoReceiveConfig::clear_remote_ssrc() { - remote_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoReceiveConfig::remote_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoReceiveConfig.remote_ssrc) - return remote_ssrc_; -} -inline void VideoReceiveConfig::set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000001u; - remote_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.VideoReceiveConfig.remote_ssrc) -} - -// optional uint32 local_ssrc = 2; -inline bool VideoReceiveConfig::has_local_ssrc() const { - return 
(_has_bits_[0] & 0x00000002u) != 0; -} -inline void VideoReceiveConfig::clear_local_ssrc() { - local_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoReceiveConfig::local_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoReceiveConfig.local_ssrc) - return local_ssrc_; -} -inline void VideoReceiveConfig::set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - local_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.VideoReceiveConfig.local_ssrc) -} - -// optional .webrtc.rtclog.VideoReceiveConfig.RtcpMode rtcp_mode = 3; -inline bool VideoReceiveConfig::has_rtcp_mode() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void VideoReceiveConfig::clear_rtcp_mode() { - rtcp_mode_ = 1; - _has_bits_[0] &= ~0x00000008u; -} -inline ::webrtc::rtclog::VideoReceiveConfig_RtcpMode VideoReceiveConfig::rtcp_mode() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoReceiveConfig.rtcp_mode) - return static_cast< ::webrtc::rtclog::VideoReceiveConfig_RtcpMode >(rtcp_mode_); -} -inline void VideoReceiveConfig::set_rtcp_mode(::webrtc::rtclog::VideoReceiveConfig_RtcpMode value) { - assert(::webrtc::rtclog::VideoReceiveConfig_RtcpMode_IsValid(value)); - _has_bits_[0] |= 0x00000008u; - rtcp_mode_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.VideoReceiveConfig.rtcp_mode) -} - -// optional bool remb = 4; -inline bool VideoReceiveConfig::has_remb() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void VideoReceiveConfig::clear_remb() { - remb_ = false; - _has_bits_[0] &= ~0x00000004u; -} -inline bool VideoReceiveConfig::remb() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoReceiveConfig.remb) - return remb_; -} -inline void VideoReceiveConfig::set_remb(bool value) { - _has_bits_[0] |= 0x00000004u; - remb_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.VideoReceiveConfig.remb) -} 
- -// repeated .webrtc.rtclog.RtxMap rtx_map = 5; -inline int VideoReceiveConfig::rtx_map_size() const { - return rtx_map_.size(); -} -inline void VideoReceiveConfig::clear_rtx_map() { - rtx_map_.Clear(); -} -inline ::webrtc::rtclog::RtxMap* VideoReceiveConfig::mutable_rtx_map(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.VideoReceiveConfig.rtx_map) - return rtx_map_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtxMap >* -VideoReceiveConfig::mutable_rtx_map() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog.VideoReceiveConfig.rtx_map) - return &rtx_map_; -} -inline const ::webrtc::rtclog::RtxMap& VideoReceiveConfig::rtx_map(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoReceiveConfig.rtx_map) - return rtx_map_.Get(index); -} -inline ::webrtc::rtclog::RtxMap* VideoReceiveConfig::add_rtx_map() { - // @@protoc_insertion_point(field_add:webrtc.rtclog.VideoReceiveConfig.rtx_map) - return rtx_map_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtxMap >& -VideoReceiveConfig::rtx_map() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.VideoReceiveConfig.rtx_map) - return rtx_map_; -} - -// repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 6; -inline int VideoReceiveConfig::header_extensions_size() const { - return header_extensions_.size(); -} -inline void VideoReceiveConfig::clear_header_extensions() { - header_extensions_.Clear(); -} -inline ::webrtc::rtclog::RtpHeaderExtension* VideoReceiveConfig::mutable_header_extensions(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.VideoReceiveConfig.header_extensions) - return header_extensions_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* -VideoReceiveConfig::mutable_header_extensions() { - // 
@@protoc_insertion_point(field_mutable_list:webrtc.rtclog.VideoReceiveConfig.header_extensions) - return &header_extensions_; -} -inline const ::webrtc::rtclog::RtpHeaderExtension& VideoReceiveConfig::header_extensions(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoReceiveConfig.header_extensions) - return header_extensions_.Get(index); -} -inline ::webrtc::rtclog::RtpHeaderExtension* VideoReceiveConfig::add_header_extensions() { - // @@protoc_insertion_point(field_add:webrtc.rtclog.VideoReceiveConfig.header_extensions) - return header_extensions_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& -VideoReceiveConfig::header_extensions() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.VideoReceiveConfig.header_extensions) - return header_extensions_; -} - -// repeated .webrtc.rtclog.DecoderConfig decoders = 7; -inline int VideoReceiveConfig::decoders_size() const { - return decoders_.size(); -} -inline void VideoReceiveConfig::clear_decoders() { - decoders_.Clear(); -} -inline ::webrtc::rtclog::DecoderConfig* VideoReceiveConfig::mutable_decoders(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.VideoReceiveConfig.decoders) - return decoders_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::DecoderConfig >* -VideoReceiveConfig::mutable_decoders() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog.VideoReceiveConfig.decoders) - return &decoders_; -} -inline const ::webrtc::rtclog::DecoderConfig& VideoReceiveConfig::decoders(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoReceiveConfig.decoders) - return decoders_.Get(index); -} -inline ::webrtc::rtclog::DecoderConfig* VideoReceiveConfig::add_decoders() { - // @@protoc_insertion_point(field_add:webrtc.rtclog.VideoReceiveConfig.decoders) - return decoders_.Add(); -} -inline const 
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::DecoderConfig >& -VideoReceiveConfig::decoders() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.VideoReceiveConfig.decoders) - return decoders_; -} - -// ------------------------------------------------------------------- - -// DecoderConfig - -// optional string name = 1; -inline bool DecoderConfig::has_name() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void DecoderConfig::clear_name() { - name_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& DecoderConfig::name() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.DecoderConfig.name) - return name_.GetNoArena(); -} -inline void DecoderConfig::set_name(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog.DecoderConfig.name) -} -inline void DecoderConfig::set_name(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog.DecoderConfig.name) -} -inline void DecoderConfig::set_name(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog.DecoderConfig.name) -} -inline void DecoderConfig::set_name(const char* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog.DecoderConfig.name) -} -inline std::string* 
DecoderConfig::mutable_name() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.DecoderConfig.name) - return name_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* DecoderConfig::release_name() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.DecoderConfig.name) - if (!has_name()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return name_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void DecoderConfig::set_allocated_name(std::string* name) { - if (name != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - name_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), name); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.DecoderConfig.name) -} - -// optional int32 payload_type = 2; -inline bool DecoderConfig::has_payload_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void DecoderConfig::clear_payload_type() { - payload_type_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 DecoderConfig::payload_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.DecoderConfig.payload_type) - return payload_type_; -} -inline void DecoderConfig::set_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - payload_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.DecoderConfig.payload_type) -} - -// ------------------------------------------------------------------- - -// RtpHeaderExtension - -// optional string name = 1; -inline bool RtpHeaderExtension::has_name() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void RtpHeaderExtension::clear_name() { - name_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} 
-inline const std::string& RtpHeaderExtension::name() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtpHeaderExtension.name) - return name_.GetNoArena(); -} -inline void RtpHeaderExtension::set_name(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtpHeaderExtension.name) -} -inline void RtpHeaderExtension::set_name(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog.RtpHeaderExtension.name) -} -inline void RtpHeaderExtension::set_name(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog.RtpHeaderExtension.name) -} -inline void RtpHeaderExtension::set_name(const char* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog.RtpHeaderExtension.name) -} -inline std::string* RtpHeaderExtension::mutable_name() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.RtpHeaderExtension.name) - return name_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* RtpHeaderExtension::release_name() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.RtpHeaderExtension.name) - if (!has_name()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return 
name_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void RtpHeaderExtension::set_allocated_name(std::string* name) { - if (name != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - name_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), name); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.RtpHeaderExtension.name) -} - -// optional int32 id = 2; -inline bool RtpHeaderExtension::has_id() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RtpHeaderExtension::clear_id() { - id_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtpHeaderExtension::id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtpHeaderExtension.id) - return id_; -} -inline void RtpHeaderExtension::set_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtpHeaderExtension.id) -} - -// ------------------------------------------------------------------- - -// RtxConfig - -// optional uint32 rtx_ssrc = 1; -inline bool RtxConfig::has_rtx_ssrc() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void RtxConfig::clear_rtx_ssrc() { - rtx_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 RtxConfig::rtx_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtxConfig.rtx_ssrc) - return rtx_ssrc_; -} -inline void RtxConfig::set_rtx_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000001u; - rtx_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtxConfig.rtx_ssrc) -} - -// optional int32 rtx_payload_type = 2; -inline bool RtxConfig::has_rtx_payload_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RtxConfig::clear_rtx_payload_type() { - rtx_payload_type_ = 0; - _has_bits_[0] 
&= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtxConfig::rtx_payload_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtxConfig.rtx_payload_type) - return rtx_payload_type_; -} -inline void RtxConfig::set_rtx_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - rtx_payload_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtxConfig.rtx_payload_type) -} - -// ------------------------------------------------------------------- - -// RtxMap - -// optional int32 payload_type = 1; -inline bool RtxMap::has_payload_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RtxMap::clear_payload_type() { - payload_type_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtxMap::payload_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtxMap.payload_type) - return payload_type_; -} -inline void RtxMap::set_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - payload_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.RtxMap.payload_type) -} - -// optional .webrtc.rtclog.RtxConfig config = 2; -inline bool RtxMap::has_config() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void RtxMap::clear_config() { - if (config_ != nullptr) config_->Clear(); - _has_bits_[0] &= ~0x00000001u; -} -inline const ::webrtc::rtclog::RtxConfig& RtxMap::config() const { - const ::webrtc::rtclog::RtxConfig* p = config_; - // @@protoc_insertion_point(field_get:webrtc.rtclog.RtxMap.config) - return p != nullptr ? 
*p : *reinterpret_cast( - &::webrtc::rtclog::_RtxConfig_default_instance_); -} -inline ::webrtc::rtclog::RtxConfig* RtxMap::release_config() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.RtxMap.config) - _has_bits_[0] &= ~0x00000001u; - ::webrtc::rtclog::RtxConfig* temp = config_; - config_ = nullptr; - return temp; -} -inline ::webrtc::rtclog::RtxConfig* RtxMap::mutable_config() { - _has_bits_[0] |= 0x00000001u; - if (config_ == nullptr) { - auto* p = CreateMaybeMessage<::webrtc::rtclog::RtxConfig>(GetArenaNoVirtual()); - config_ = p; - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.RtxMap.config) - return config_; -} -inline void RtxMap::set_allocated_config(::webrtc::rtclog::RtxConfig* config) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete config_; - } - if (config) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - config = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, config, submessage_arena); - } - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - config_ = config; - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.RtxMap.config) -} - -// ------------------------------------------------------------------- - -// VideoSendConfig - -// repeated uint32 ssrcs = 1; -inline int VideoSendConfig::ssrcs_size() const { - return ssrcs_.size(); -} -inline void VideoSendConfig::clear_ssrcs() { - ssrcs_.Clear(); -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoSendConfig::ssrcs(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoSendConfig.ssrcs) - return ssrcs_.Get(index); -} -inline void VideoSendConfig::set_ssrcs(int index, ::PROTOBUF_NAMESPACE_ID::uint32 value) { - ssrcs_.Set(index, value); - // @@protoc_insertion_point(field_set:webrtc.rtclog.VideoSendConfig.ssrcs) -} -inline void 
VideoSendConfig::add_ssrcs(::PROTOBUF_NAMESPACE_ID::uint32 value) { - ssrcs_.Add(value); - // @@protoc_insertion_point(field_add:webrtc.rtclog.VideoSendConfig.ssrcs) -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >& -VideoSendConfig::ssrcs() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.VideoSendConfig.ssrcs) - return ssrcs_; -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >* -VideoSendConfig::mutable_ssrcs() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog.VideoSendConfig.ssrcs) - return &ssrcs_; -} - -// repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; -inline int VideoSendConfig::header_extensions_size() const { - return header_extensions_.size(); -} -inline void VideoSendConfig::clear_header_extensions() { - header_extensions_.Clear(); -} -inline ::webrtc::rtclog::RtpHeaderExtension* VideoSendConfig::mutable_header_extensions(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.VideoSendConfig.header_extensions) - return header_extensions_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* -VideoSendConfig::mutable_header_extensions() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog.VideoSendConfig.header_extensions) - return &header_extensions_; -} -inline const ::webrtc::rtclog::RtpHeaderExtension& VideoSendConfig::header_extensions(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoSendConfig.header_extensions) - return header_extensions_.Get(index); -} -inline ::webrtc::rtclog::RtpHeaderExtension* VideoSendConfig::add_header_extensions() { - // @@protoc_insertion_point(field_add:webrtc.rtclog.VideoSendConfig.header_extensions) - return header_extensions_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& -VideoSendConfig::header_extensions() const { - // 
@@protoc_insertion_point(field_list:webrtc.rtclog.VideoSendConfig.header_extensions) - return header_extensions_; -} - -// repeated uint32 rtx_ssrcs = 3; -inline int VideoSendConfig::rtx_ssrcs_size() const { - return rtx_ssrcs_.size(); -} -inline void VideoSendConfig::clear_rtx_ssrcs() { - rtx_ssrcs_.Clear(); -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoSendConfig::rtx_ssrcs(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoSendConfig.rtx_ssrcs) - return rtx_ssrcs_.Get(index); -} -inline void VideoSendConfig::set_rtx_ssrcs(int index, ::PROTOBUF_NAMESPACE_ID::uint32 value) { - rtx_ssrcs_.Set(index, value); - // @@protoc_insertion_point(field_set:webrtc.rtclog.VideoSendConfig.rtx_ssrcs) -} -inline void VideoSendConfig::add_rtx_ssrcs(::PROTOBUF_NAMESPACE_ID::uint32 value) { - rtx_ssrcs_.Add(value); - // @@protoc_insertion_point(field_add:webrtc.rtclog.VideoSendConfig.rtx_ssrcs) -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >& -VideoSendConfig::rtx_ssrcs() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.VideoSendConfig.rtx_ssrcs) - return rtx_ssrcs_; -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >* -VideoSendConfig::mutable_rtx_ssrcs() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog.VideoSendConfig.rtx_ssrcs) - return &rtx_ssrcs_; -} - -// optional int32 rtx_payload_type = 4; -inline bool VideoSendConfig::has_rtx_payload_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void VideoSendConfig::clear_rtx_payload_type() { - rtx_payload_type_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 VideoSendConfig::rtx_payload_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoSendConfig.rtx_payload_type) - return rtx_payload_type_; -} -inline void VideoSendConfig::set_rtx_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - 
rtx_payload_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.VideoSendConfig.rtx_payload_type) -} - -// optional .webrtc.rtclog.EncoderConfig encoder = 5; -inline bool VideoSendConfig::has_encoder() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void VideoSendConfig::clear_encoder() { - if (encoder_ != nullptr) encoder_->Clear(); - _has_bits_[0] &= ~0x00000001u; -} -inline const ::webrtc::rtclog::EncoderConfig& VideoSendConfig::encoder() const { - const ::webrtc::rtclog::EncoderConfig* p = encoder_; - // @@protoc_insertion_point(field_get:webrtc.rtclog.VideoSendConfig.encoder) - return p != nullptr ? *p : *reinterpret_cast( - &::webrtc::rtclog::_EncoderConfig_default_instance_); -} -inline ::webrtc::rtclog::EncoderConfig* VideoSendConfig::release_encoder() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.VideoSendConfig.encoder) - _has_bits_[0] &= ~0x00000001u; - ::webrtc::rtclog::EncoderConfig* temp = encoder_; - encoder_ = nullptr; - return temp; -} -inline ::webrtc::rtclog::EncoderConfig* VideoSendConfig::mutable_encoder() { - _has_bits_[0] |= 0x00000001u; - if (encoder_ == nullptr) { - auto* p = CreateMaybeMessage<::webrtc::rtclog::EncoderConfig>(GetArenaNoVirtual()); - encoder_ = p; - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.VideoSendConfig.encoder) - return encoder_; -} -inline void VideoSendConfig::set_allocated_encoder(::webrtc::rtclog::EncoderConfig* encoder) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete encoder_; - } - if (encoder) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - encoder = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, encoder, submessage_arena); - } - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - encoder_ = encoder; - // 
@@protoc_insertion_point(field_set_allocated:webrtc.rtclog.VideoSendConfig.encoder) -} - -// ------------------------------------------------------------------- - -// EncoderConfig - -// optional string name = 1; -inline bool EncoderConfig::has_name() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void EncoderConfig::clear_name() { - name_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& EncoderConfig::name() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.EncoderConfig.name) - return name_.GetNoArena(); -} -inline void EncoderConfig::set_name(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog.EncoderConfig.name) -} -inline void EncoderConfig::set_name(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog.EncoderConfig.name) -} -inline void EncoderConfig::set_name(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog.EncoderConfig.name) -} -inline void EncoderConfig::set_name(const char* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog.EncoderConfig.name) -} -inline std::string* EncoderConfig::mutable_name() { - _has_bits_[0] |= 0x00000001u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog.EncoderConfig.name) - return name_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* EncoderConfig::release_name() { - // @@protoc_insertion_point(field_release:webrtc.rtclog.EncoderConfig.name) - if (!has_name()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return name_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void EncoderConfig::set_allocated_name(std::string* name) { - if (name != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - name_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), name); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog.EncoderConfig.name) -} - -// optional int32 payload_type = 2; -inline bool EncoderConfig::has_payload_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void EncoderConfig::clear_payload_type() { - payload_type_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 EncoderConfig::payload_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.EncoderConfig.payload_type) - return payload_type_; -} -inline void EncoderConfig::set_payload_type(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - payload_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.EncoderConfig.payload_type) -} - -// ------------------------------------------------------------------- - -// AudioReceiveConfig - -// optional uint32 remote_ssrc = 1; -inline bool AudioReceiveConfig::has_remote_ssrc() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioReceiveConfig::clear_remote_ssrc() { - remote_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioReceiveConfig::remote_ssrc() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog.AudioReceiveConfig.remote_ssrc) - return remote_ssrc_; -} -inline void AudioReceiveConfig::set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000001u; - remote_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioReceiveConfig.remote_ssrc) -} - -// optional uint32 local_ssrc = 2; -inline bool AudioReceiveConfig::has_local_ssrc() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void AudioReceiveConfig::clear_local_ssrc() { - local_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioReceiveConfig::local_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioReceiveConfig.local_ssrc) - return local_ssrc_; -} -inline void AudioReceiveConfig::set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - local_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioReceiveConfig.local_ssrc) -} - -// repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 3; -inline int AudioReceiveConfig::header_extensions_size() const { - return header_extensions_.size(); -} -inline void AudioReceiveConfig::clear_header_extensions() { - header_extensions_.Clear(); -} -inline ::webrtc::rtclog::RtpHeaderExtension* AudioReceiveConfig::mutable_header_extensions(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.AudioReceiveConfig.header_extensions) - return header_extensions_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* -AudioReceiveConfig::mutable_header_extensions() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog.AudioReceiveConfig.header_extensions) - return &header_extensions_; -} -inline const ::webrtc::rtclog::RtpHeaderExtension& AudioReceiveConfig::header_extensions(int index) const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog.AudioReceiveConfig.header_extensions) - return header_extensions_.Get(index); -} -inline ::webrtc::rtclog::RtpHeaderExtension* AudioReceiveConfig::add_header_extensions() { - // @@protoc_insertion_point(field_add:webrtc.rtclog.AudioReceiveConfig.header_extensions) - return header_extensions_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& -AudioReceiveConfig::header_extensions() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.AudioReceiveConfig.header_extensions) - return header_extensions_; -} - -// ------------------------------------------------------------------- - -// AudioSendConfig - -// optional uint32 ssrc = 1; -inline bool AudioSendConfig::has_ssrc() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioSendConfig::clear_ssrc() { - ssrc_ = 0u; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioSendConfig::ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioSendConfig.ssrc) - return ssrc_; -} -inline void AudioSendConfig::set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000001u; - ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioSendConfig.ssrc) -} - -// repeated .webrtc.rtclog.RtpHeaderExtension header_extensions = 2; -inline int AudioSendConfig::header_extensions_size() const { - return header_extensions_.size(); -} -inline void AudioSendConfig::clear_header_extensions() { - header_extensions_.Clear(); -} -inline ::webrtc::rtclog::RtpHeaderExtension* AudioSendConfig::mutable_header_extensions(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog.AudioSendConfig.header_extensions) - return header_extensions_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >* -AudioSendConfig::mutable_header_extensions() { - // 
@@protoc_insertion_point(field_mutable_list:webrtc.rtclog.AudioSendConfig.header_extensions) - return &header_extensions_; -} -inline const ::webrtc::rtclog::RtpHeaderExtension& AudioSendConfig::header_extensions(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioSendConfig.header_extensions) - return header_extensions_.Get(index); -} -inline ::webrtc::rtclog::RtpHeaderExtension* AudioSendConfig::add_header_extensions() { - // @@protoc_insertion_point(field_add:webrtc.rtclog.AudioSendConfig.header_extensions) - return header_extensions_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog::RtpHeaderExtension >& -AudioSendConfig::header_extensions() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog.AudioSendConfig.header_extensions) - return header_extensions_; -} - -// ------------------------------------------------------------------- - -// AudioNetworkAdaptation - -// optional int32 bitrate_bps = 1; -inline bool AudioNetworkAdaptation::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioNetworkAdaptation::clear_bitrate_bps() { - bitrate_bps_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 AudioNetworkAdaptation::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioNetworkAdaptation.bitrate_bps) - return bitrate_bps_; -} -inline void AudioNetworkAdaptation::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000001u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioNetworkAdaptation.bitrate_bps) -} - -// optional int32 frame_length_ms = 2; -inline bool AudioNetworkAdaptation::has_frame_length_ms() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void AudioNetworkAdaptation::clear_frame_length_ms() { - frame_length_ms_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 
AudioNetworkAdaptation::frame_length_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioNetworkAdaptation.frame_length_ms) - return frame_length_ms_; -} -inline void AudioNetworkAdaptation::set_frame_length_ms(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - frame_length_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioNetworkAdaptation.frame_length_ms) -} - -// optional float uplink_packet_loss_fraction = 3; -inline bool AudioNetworkAdaptation::has_uplink_packet_loss_fraction() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void AudioNetworkAdaptation::clear_uplink_packet_loss_fraction() { - uplink_packet_loss_fraction_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline float AudioNetworkAdaptation::uplink_packet_loss_fraction() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioNetworkAdaptation.uplink_packet_loss_fraction) - return uplink_packet_loss_fraction_; -} -inline void AudioNetworkAdaptation::set_uplink_packet_loss_fraction(float value) { - _has_bits_[0] |= 0x00000004u; - uplink_packet_loss_fraction_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioNetworkAdaptation.uplink_packet_loss_fraction) -} - -// optional bool enable_fec = 4; -inline bool AudioNetworkAdaptation::has_enable_fec() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void AudioNetworkAdaptation::clear_enable_fec() { - enable_fec_ = false; - _has_bits_[0] &= ~0x00000008u; -} -inline bool AudioNetworkAdaptation::enable_fec() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioNetworkAdaptation.enable_fec) - return enable_fec_; -} -inline void AudioNetworkAdaptation::set_enable_fec(bool value) { - _has_bits_[0] |= 0x00000008u; - enable_fec_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioNetworkAdaptation.enable_fec) -} - -// optional bool enable_dtx = 5; -inline bool AudioNetworkAdaptation::has_enable_dtx() const { - return 
(_has_bits_[0] & 0x00000010u) != 0; -} -inline void AudioNetworkAdaptation::clear_enable_dtx() { - enable_dtx_ = false; - _has_bits_[0] &= ~0x00000010u; -} -inline bool AudioNetworkAdaptation::enable_dtx() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioNetworkAdaptation.enable_dtx) - return enable_dtx_; -} -inline void AudioNetworkAdaptation::set_enable_dtx(bool value) { - _has_bits_[0] |= 0x00000010u; - enable_dtx_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioNetworkAdaptation.enable_dtx) -} - -// optional uint32 num_channels = 6; -inline bool AudioNetworkAdaptation::has_num_channels() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void AudioNetworkAdaptation::clear_num_channels() { - num_channels_ = 0u; - _has_bits_[0] &= ~0x00000020u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioNetworkAdaptation::num_channels() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AudioNetworkAdaptation.num_channels) - return num_channels_; -} -inline void AudioNetworkAdaptation::set_num_channels(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000020u; - num_channels_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AudioNetworkAdaptation.num_channels) -} - -// ------------------------------------------------------------------- - -// BweProbeCluster - -// optional int32 id = 1; -inline bool BweProbeCluster::has_id() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void BweProbeCluster::clear_id() { - id_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 BweProbeCluster::id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.BweProbeCluster.id) - return id_; -} -inline void BweProbeCluster::set_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000001u; - id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.BweProbeCluster.id) -} - -// optional int32 bitrate_bps = 2; -inline bool 
BweProbeCluster::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void BweProbeCluster::clear_bitrate_bps() { - bitrate_bps_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 BweProbeCluster::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.BweProbeCluster.bitrate_bps) - return bitrate_bps_; -} -inline void BweProbeCluster::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.BweProbeCluster.bitrate_bps) -} - -// optional uint32 min_packets = 3; -inline bool BweProbeCluster::has_min_packets() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void BweProbeCluster::clear_min_packets() { - min_packets_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeCluster::min_packets() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.BweProbeCluster.min_packets) - return min_packets_; -} -inline void BweProbeCluster::set_min_packets(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - min_packets_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.BweProbeCluster.min_packets) -} - -// optional uint32 min_bytes = 4; -inline bool BweProbeCluster::has_min_bytes() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void BweProbeCluster::clear_min_bytes() { - min_bytes_ = 0u; - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeCluster::min_bytes() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.BweProbeCluster.min_bytes) - return min_bytes_; -} -inline void BweProbeCluster::set_min_bytes(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - min_bytes_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.BweProbeCluster.min_bytes) -} - -// 
------------------------------------------------------------------- - -// BweProbeResult - -// optional int32 id = 1; -inline bool BweProbeResult::has_id() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void BweProbeResult::clear_id() { - id_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 BweProbeResult::id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.BweProbeResult.id) - return id_; -} -inline void BweProbeResult::set_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000001u; - id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.BweProbeResult.id) -} - -// optional .webrtc.rtclog.BweProbeResult.ResultType result = 2; -inline bool BweProbeResult::has_result() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void BweProbeResult::clear_result() { - result_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::webrtc::rtclog::BweProbeResult_ResultType BweProbeResult::result() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.BweProbeResult.result) - return static_cast< ::webrtc::rtclog::BweProbeResult_ResultType >(result_); -} -inline void BweProbeResult::set_result(::webrtc::rtclog::BweProbeResult_ResultType value) { - assert(::webrtc::rtclog::BweProbeResult_ResultType_IsValid(value)); - _has_bits_[0] |= 0x00000002u; - result_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.BweProbeResult.result) -} - -// optional int32 bitrate_bps = 3; -inline bool BweProbeResult::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void BweProbeResult::clear_bitrate_bps() { - bitrate_bps_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 BweProbeResult::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.BweProbeResult.bitrate_bps) - return bitrate_bps_; -} -inline void BweProbeResult::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 
0x00000004u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.BweProbeResult.bitrate_bps) -} - -// ------------------------------------------------------------------- - -// AlrState - -// optional bool in_alr = 1; -inline bool AlrState::has_in_alr() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AlrState::clear_in_alr() { - in_alr_ = false; - _has_bits_[0] &= ~0x00000001u; -} -inline bool AlrState::in_alr() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.AlrState.in_alr) - return in_alr_; -} -inline void AlrState::set_in_alr(bool value) { - _has_bits_[0] |= 0x00000001u; - in_alr_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.AlrState.in_alr) -} - -// ------------------------------------------------------------------- - -// IceCandidatePairConfig - -// optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 1; -inline bool IceCandidatePairConfig::has_config_type() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void IceCandidatePairConfig::clear_config_type() { - config_type_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::config_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.config_type) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType >(config_type_); -} -inline void IceCandidatePairConfig::set_config_type(::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(value)); - _has_bits_[0] |= 0x00000001u; - config_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.config_type) -} - -// optional uint32 candidate_pair_id = 2; -inline bool IceCandidatePairConfig::has_candidate_pair_id() const { - return (_has_bits_[0] & 
0x00000002u) != 0; -} -inline void IceCandidatePairConfig::clear_candidate_pair_id() { - candidate_pair_id_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IceCandidatePairConfig::candidate_pair_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.candidate_pair_id) - return candidate_pair_id_; -} -inline void IceCandidatePairConfig::set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - candidate_pair_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.candidate_pair_id) -} - -// optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType local_candidate_type = 3; -inline bool IceCandidatePairConfig::has_local_candidate_type() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void IceCandidatePairConfig::clear_local_candidate_type() { - local_candidate_type_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::local_candidate_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.local_candidate_type) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType >(local_candidate_type_); -} -inline void IceCandidatePairConfig::set_local_candidate_type(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType_IsValid(value)); - _has_bits_[0] |= 0x00000004u; - local_candidate_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.local_candidate_type) -} - -// optional .webrtc.rtclog.IceCandidatePairConfig.Protocol local_relay_protocol = 4; -inline bool IceCandidatePairConfig::has_local_relay_protocol() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void IceCandidatePairConfig::clear_local_relay_protocol() { - local_relay_protocol_ = 0; - 
_has_bits_[0] &= ~0x00000008u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_Protocol IceCandidatePairConfig::local_relay_protocol() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.local_relay_protocol) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_Protocol >(local_relay_protocol_); -} -inline void IceCandidatePairConfig::set_local_relay_protocol(::webrtc::rtclog::IceCandidatePairConfig_Protocol value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_Protocol_IsValid(value)); - _has_bits_[0] |= 0x00000008u; - local_relay_protocol_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.local_relay_protocol) -} - -// optional .webrtc.rtclog.IceCandidatePairConfig.NetworkType local_network_type = 5; -inline bool IceCandidatePairConfig::has_local_network_type() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void IceCandidatePairConfig::clear_local_network_type() { - local_network_type_ = 0; - _has_bits_[0] &= ~0x00000010u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_NetworkType IceCandidatePairConfig::local_network_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.local_network_type) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_NetworkType >(local_network_type_); -} -inline void IceCandidatePairConfig::set_local_network_type(::webrtc::rtclog::IceCandidatePairConfig_NetworkType value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_NetworkType_IsValid(value)); - _has_bits_[0] |= 0x00000010u; - local_network_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.local_network_type) -} - -// optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily local_address_family = 6; -inline bool IceCandidatePairConfig::has_local_address_family() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void 
IceCandidatePairConfig::clear_local_address_family() { - local_address_family_ = 0; - _has_bits_[0] &= ~0x00000020u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::local_address_family() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.local_address_family) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily >(local_address_family_); -} -inline void IceCandidatePairConfig::set_local_address_family(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily_IsValid(value)); - _has_bits_[0] |= 0x00000020u; - local_address_family_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.local_address_family) -} - -// optional .webrtc.rtclog.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 7; -inline bool IceCandidatePairConfig::has_remote_candidate_type() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void IceCandidatePairConfig::clear_remote_candidate_type() { - remote_candidate_type_ = 0; - _has_bits_[0] &= ~0x00000040u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::remote_candidate_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.remote_candidate_type) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType >(remote_candidate_type_); -} -inline void IceCandidatePairConfig::set_remote_candidate_type(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType_IsValid(value)); - _has_bits_[0] |= 0x00000040u; - remote_candidate_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.remote_candidate_type) -} - -// optional .webrtc.rtclog.IceCandidatePairConfig.AddressFamily remote_address_family = 8; -inline bool 
IceCandidatePairConfig::has_remote_address_family() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void IceCandidatePairConfig::clear_remote_address_family() { - remote_address_family_ = 0; - _has_bits_[0] &= ~0x00000080u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::remote_address_family() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.remote_address_family) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily >(remote_address_family_); -} -inline void IceCandidatePairConfig::set_remote_address_family(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_AddressFamily_IsValid(value)); - _has_bits_[0] |= 0x00000080u; - remote_address_family_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.remote_address_family) -} - -// optional .webrtc.rtclog.IceCandidatePairConfig.Protocol candidate_pair_protocol = 9; -inline bool IceCandidatePairConfig::has_candidate_pair_protocol() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void IceCandidatePairConfig::clear_candidate_pair_protocol() { - candidate_pair_protocol_ = 0; - _has_bits_[0] &= ~0x00000100u; -} -inline ::webrtc::rtclog::IceCandidatePairConfig_Protocol IceCandidatePairConfig::candidate_pair_protocol() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairConfig.candidate_pair_protocol) - return static_cast< ::webrtc::rtclog::IceCandidatePairConfig_Protocol >(candidate_pair_protocol_); -} -inline void IceCandidatePairConfig::set_candidate_pair_protocol(::webrtc::rtclog::IceCandidatePairConfig_Protocol value) { - assert(::webrtc::rtclog::IceCandidatePairConfig_Protocol_IsValid(value)); - _has_bits_[0] |= 0x00000100u; - candidate_pair_protocol_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairConfig.candidate_pair_protocol) -} - -// 
------------------------------------------------------------------- - -// IceCandidatePairEvent - -// optional .webrtc.rtclog.IceCandidatePairEvent.IceCandidatePairEventType event_type = 1; -inline bool IceCandidatePairEvent::has_event_type() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void IceCandidatePairEvent::clear_event_type() { - event_type_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::event_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairEvent.event_type) - return static_cast< ::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType >(event_type_); -} -inline void IceCandidatePairEvent::set_event_type(::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType value) { - assert(::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType_IsValid(value)); - _has_bits_[0] |= 0x00000001u; - event_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairEvent.event_type) -} - -// optional uint32 candidate_pair_id = 2; -inline bool IceCandidatePairEvent::has_candidate_pair_id() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void IceCandidatePairEvent::clear_candidate_pair_id() { - candidate_pair_id_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IceCandidatePairEvent::candidate_pair_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog.IceCandidatePairEvent.candidate_pair_id) - return candidate_pair_id_; -} -inline void IceCandidatePairEvent::set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - candidate_pair_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog.IceCandidatePairEvent.candidate_pair_id) -} - -#ifdef __GNUC__ - #pragma GCC diagnostic pop -#endif // __GNUC__ -// ------------------------------------------------------------------- - -// 
------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - - -// @@protoc_insertion_point(namespace_scope) - -} // namespace rtclog -} // namespace webrtc - -PROTOBUF_NAMESPACE_OPEN - -template <> struct is_proto_enum< ::webrtc::rtclog::Event_EventType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::DelayBasedBweUpdate_DetectorState> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::VideoReceiveConfig_RtcpMode> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::BweProbeResult_ResultType> : 
::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidatePairConfigType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::IceCandidatePairConfig_IceCandidateType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::IceCandidatePairConfig_Protocol> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::IceCandidatePairConfig_AddressFamily> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::IceCandidatePairConfig_NetworkType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::IceCandidatePairEvent_IceCandidatePairEventType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog::MediaType> : ::std::true_type {}; - -PROTOBUF_NAMESPACE_CLOSE - -// @@protoc_insertion_point(global_scope) - -#include -#endif // GOOGLE_PROTOBUF_INCLUDED_GOOGLE_PROTOBUF_INCLUDED_rtc_5fevent_5flog_2eproto diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2.pb.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2.pb.cc deleted file mode 100644 index ca7036346..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2.pb.cc +++ /dev/null @@ -1,18636 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: rtc_event_log2.proto - -#include "rtc_event_log2.pb.h" - -#include - -#include -#include -#include -#include -#include -// @@protoc_insertion_point(includes) -#include -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AlrState_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioNetworkAdaptations_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioPlayoutEvents_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioRecvStreamConfig_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioSendStreamConfig_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BeginLogEvent_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeCluster_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeResultFailure_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeResultSuccess_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DelayBasedBweUpdates_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DtlsTransportStateEvent_rtc_5fevent_5flog2_2eproto; -extern 
PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DtlsWritableState_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_EndLogEvent_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_Event_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_FrameDecodedEvents_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_GenericAckReceived_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_GenericPacketReceived_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_GenericPacketSent_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairConfig_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairEvent_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IncomingRtcpPackets_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IncomingRtpPackets_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_LossBasedBweUpdates_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto 
::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_OutgoingRtcpPackets_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_OutgoingRtpPackets_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RemoteEstimates_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RouteChange_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_VideoRecvStreamConfig_rtc_5fevent_5flog2_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_VideoSendStreamConfig_rtc_5fevent_5flog2_2eproto; -namespace webrtc { -namespace rtclog2 { -class EventStreamDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _EventStream_default_instance_; -class EventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _Event_default_instance_; -class GenericPacketReceivedDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _GenericPacketReceived_default_instance_; -class GenericPacketSentDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _GenericPacketSent_default_instance_; -class GenericAckReceivedDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _GenericAckReceived_default_instance_; -class IncomingRtpPacketsDefaultTypeInternal { - public: - 
::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _IncomingRtpPackets_default_instance_; -class OutgoingRtpPacketsDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _OutgoingRtpPackets_default_instance_; -class IncomingRtcpPacketsDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _IncomingRtcpPackets_default_instance_; -class OutgoingRtcpPacketsDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _OutgoingRtcpPackets_default_instance_; -class AudioPlayoutEventsDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioPlayoutEvents_default_instance_; -class FrameDecodedEventsDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _FrameDecodedEvents_default_instance_; -class BeginLogEventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _BeginLogEvent_default_instance_; -class EndLogEventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _EndLogEvent_default_instance_; -class LossBasedBweUpdatesDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _LossBasedBweUpdates_default_instance_; -class DelayBasedBweUpdatesDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _DelayBasedBweUpdates_default_instance_; -class RtpHeaderExtensionConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RtpHeaderExtensionConfig_default_instance_; -class VideoRecvStreamConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _VideoRecvStreamConfig_default_instance_; -class 
VideoSendStreamConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _VideoSendStreamConfig_default_instance_; -class AudioRecvStreamConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioRecvStreamConfig_default_instance_; -class AudioSendStreamConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioSendStreamConfig_default_instance_; -class AudioNetworkAdaptationsDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AudioNetworkAdaptations_default_instance_; -class BweProbeClusterDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _BweProbeCluster_default_instance_; -class BweProbeResultSuccessDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _BweProbeResultSuccess_default_instance_; -class BweProbeResultFailureDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _BweProbeResultFailure_default_instance_; -class AlrStateDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _AlrState_default_instance_; -class IceCandidatePairConfigDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _IceCandidatePairConfig_default_instance_; -class IceCandidatePairEventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _IceCandidatePairEvent_default_instance_; -class DtlsTransportStateEventDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _DtlsTransportStateEvent_default_instance_; -class DtlsWritableStateDefaultTypeInternal { - public: - 
::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _DtlsWritableState_default_instance_; -class RouteChangeDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RouteChange_default_instance_; -class RemoteEstimatesDefaultTypeInternal { - public: - ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; -} _RemoteEstimates_default_instance_; -} // namespace rtclog2 -} // namespace webrtc -static void InitDefaultsscc_info_AlrState_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_AlrState_default_instance_; - new (ptr) ::webrtc::rtclog2::AlrState(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::AlrState::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AlrState_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_AlrState_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_AudioNetworkAdaptations_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_AudioNetworkAdaptations_default_instance_; - new (ptr) ::webrtc::rtclog2::AudioNetworkAdaptations(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::AudioNetworkAdaptations::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioNetworkAdaptations_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_AudioNetworkAdaptations_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_AudioPlayoutEvents_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_AudioPlayoutEvents_default_instance_; - new (ptr) 
::webrtc::rtclog2::AudioPlayoutEvents(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::AudioPlayoutEvents::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_AudioPlayoutEvents_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_AudioPlayoutEvents_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_AudioRecvStreamConfig_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_AudioRecvStreamConfig_default_instance_; - new (ptr) ::webrtc::rtclog2::AudioRecvStreamConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::AudioRecvStreamConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioRecvStreamConfig_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_AudioRecvStreamConfig_rtc_5fevent_5flog2_2eproto}, { - &scc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto.base,}}; - -static void InitDefaultsscc_info_AudioSendStreamConfig_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_AudioSendStreamConfig_default_instance_; - new (ptr) ::webrtc::rtclog2::AudioSendStreamConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::AudioSendStreamConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_AudioSendStreamConfig_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_AudioSendStreamConfig_rtc_5fevent_5flog2_2eproto}, { - &scc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto.base,}}; - -static void 
InitDefaultsscc_info_BeginLogEvent_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_BeginLogEvent_default_instance_; - new (ptr) ::webrtc::rtclog2::BeginLogEvent(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::BeginLogEvent::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BeginLogEvent_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_BeginLogEvent_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_BweProbeCluster_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_BweProbeCluster_default_instance_; - new (ptr) ::webrtc::rtclog2::BweProbeCluster(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::BweProbeCluster::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeCluster_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_BweProbeCluster_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_BweProbeResultFailure_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_BweProbeResultFailure_default_instance_; - new (ptr) ::webrtc::rtclog2::BweProbeResultFailure(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::BweProbeResultFailure::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeResultFailure_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_BweProbeResultFailure_rtc_5fevent_5flog2_2eproto}, {}}; - -static void 
InitDefaultsscc_info_BweProbeResultSuccess_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_BweProbeResultSuccess_default_instance_; - new (ptr) ::webrtc::rtclog2::BweProbeResultSuccess(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::BweProbeResultSuccess::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_BweProbeResultSuccess_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_BweProbeResultSuccess_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_DelayBasedBweUpdates_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_DelayBasedBweUpdates_default_instance_; - new (ptr) ::webrtc::rtclog2::DelayBasedBweUpdates(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::DelayBasedBweUpdates::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DelayBasedBweUpdates_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_DelayBasedBweUpdates_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_DtlsTransportStateEvent_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_DtlsTransportStateEvent_default_instance_; - new (ptr) ::webrtc::rtclog2::DtlsTransportStateEvent(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::DtlsTransportStateEvent::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DtlsTransportStateEvent_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, 
InitDefaultsscc_info_DtlsTransportStateEvent_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_DtlsWritableState_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_DtlsWritableState_default_instance_; - new (ptr) ::webrtc::rtclog2::DtlsWritableState(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::DtlsWritableState::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_DtlsWritableState_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_DtlsWritableState_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_EndLogEvent_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_EndLogEvent_default_instance_; - new (ptr) ::webrtc::rtclog2::EndLogEvent(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::EndLogEvent::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_EndLogEvent_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_EndLogEvent_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_Event_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_Event_default_instance_; - new (ptr) ::webrtc::rtclog2::Event(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::Event::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_Event_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_Event_rtc_5fevent_5flog2_2eproto}, {}}; - -static void 
InitDefaultsscc_info_EventStream_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_EventStream_default_instance_; - new (ptr) ::webrtc::rtclog2::EventStream(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::EventStream::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<29> scc_info_EventStream_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 29, InitDefaultsscc_info_EventStream_rtc_5fevent_5flog2_2eproto}, { - &scc_info_Event_rtc_5fevent_5flog2_2eproto.base, - &scc_info_IncomingRtpPackets_rtc_5fevent_5flog2_2eproto.base, - &scc_info_OutgoingRtpPackets_rtc_5fevent_5flog2_2eproto.base, - &scc_info_IncomingRtcpPackets_rtc_5fevent_5flog2_2eproto.base, - &scc_info_OutgoingRtcpPackets_rtc_5fevent_5flog2_2eproto.base, - &scc_info_AudioPlayoutEvents_rtc_5fevent_5flog2_2eproto.base, - &scc_info_FrameDecodedEvents_rtc_5fevent_5flog2_2eproto.base, - &scc_info_BeginLogEvent_rtc_5fevent_5flog2_2eproto.base, - &scc_info_EndLogEvent_rtc_5fevent_5flog2_2eproto.base, - &scc_info_LossBasedBweUpdates_rtc_5fevent_5flog2_2eproto.base, - &scc_info_DelayBasedBweUpdates_rtc_5fevent_5flog2_2eproto.base, - &scc_info_AudioNetworkAdaptations_rtc_5fevent_5flog2_2eproto.base, - &scc_info_BweProbeCluster_rtc_5fevent_5flog2_2eproto.base, - &scc_info_BweProbeResultSuccess_rtc_5fevent_5flog2_2eproto.base, - &scc_info_BweProbeResultFailure_rtc_5fevent_5flog2_2eproto.base, - &scc_info_AlrState_rtc_5fevent_5flog2_2eproto.base, - &scc_info_IceCandidatePairConfig_rtc_5fevent_5flog2_2eproto.base, - &scc_info_IceCandidatePairEvent_rtc_5fevent_5flog2_2eproto.base, - &scc_info_DtlsTransportStateEvent_rtc_5fevent_5flog2_2eproto.base, - &scc_info_DtlsWritableState_rtc_5fevent_5flog2_2eproto.base, - &scc_info_GenericPacketSent_rtc_5fevent_5flog2_2eproto.base, - 
&scc_info_GenericPacketReceived_rtc_5fevent_5flog2_2eproto.base, - &scc_info_GenericAckReceived_rtc_5fevent_5flog2_2eproto.base, - &scc_info_RouteChange_rtc_5fevent_5flog2_2eproto.base, - &scc_info_RemoteEstimates_rtc_5fevent_5flog2_2eproto.base, - &scc_info_AudioRecvStreamConfig_rtc_5fevent_5flog2_2eproto.base, - &scc_info_AudioSendStreamConfig_rtc_5fevent_5flog2_2eproto.base, - &scc_info_VideoRecvStreamConfig_rtc_5fevent_5flog2_2eproto.base, - &scc_info_VideoSendStreamConfig_rtc_5fevent_5flog2_2eproto.base,}}; - -static void InitDefaultsscc_info_FrameDecodedEvents_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_FrameDecodedEvents_default_instance_; - new (ptr) ::webrtc::rtclog2::FrameDecodedEvents(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::FrameDecodedEvents::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_FrameDecodedEvents_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_FrameDecodedEvents_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_GenericAckReceived_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_GenericAckReceived_default_instance_; - new (ptr) ::webrtc::rtclog2::GenericAckReceived(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::GenericAckReceived::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_GenericAckReceived_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_GenericAckReceived_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_GenericPacketReceived_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = 
&::webrtc::rtclog2::_GenericPacketReceived_default_instance_; - new (ptr) ::webrtc::rtclog2::GenericPacketReceived(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::GenericPacketReceived::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_GenericPacketReceived_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_GenericPacketReceived_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_GenericPacketSent_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_GenericPacketSent_default_instance_; - new (ptr) ::webrtc::rtclog2::GenericPacketSent(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::GenericPacketSent::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_GenericPacketSent_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_GenericPacketSent_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_IceCandidatePairConfig_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_IceCandidatePairConfig_default_instance_; - new (ptr) ::webrtc::rtclog2::IceCandidatePairConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::IceCandidatePairConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairConfig_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_IceCandidatePairConfig_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_IceCandidatePairEvent_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; 
- - { - void* ptr = &::webrtc::rtclog2::_IceCandidatePairEvent_default_instance_; - new (ptr) ::webrtc::rtclog2::IceCandidatePairEvent(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::IceCandidatePairEvent::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IceCandidatePairEvent_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_IceCandidatePairEvent_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_IncomingRtcpPackets_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_IncomingRtcpPackets_default_instance_; - new (ptr) ::webrtc::rtclog2::IncomingRtcpPackets(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::IncomingRtcpPackets::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IncomingRtcpPackets_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_IncomingRtcpPackets_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_IncomingRtpPackets_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_IncomingRtpPackets_default_instance_; - new (ptr) ::webrtc::rtclog2::IncomingRtpPackets(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::IncomingRtpPackets::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_IncomingRtpPackets_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_IncomingRtpPackets_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_LossBasedBweUpdates_rtc_5fevent_5flog2_2eproto() { - 
GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_LossBasedBweUpdates_default_instance_; - new (ptr) ::webrtc::rtclog2::LossBasedBweUpdates(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::LossBasedBweUpdates::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_LossBasedBweUpdates_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_LossBasedBweUpdates_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_OutgoingRtcpPackets_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_OutgoingRtcpPackets_default_instance_; - new (ptr) ::webrtc::rtclog2::OutgoingRtcpPackets(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::OutgoingRtcpPackets::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_OutgoingRtcpPackets_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_OutgoingRtcpPackets_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_OutgoingRtpPackets_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_OutgoingRtpPackets_default_instance_; - new (ptr) ::webrtc::rtclog2::OutgoingRtpPackets(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::OutgoingRtpPackets::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_OutgoingRtpPackets_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_OutgoingRtpPackets_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_RemoteEstimates_rtc_5fevent_5flog2_2eproto() { - 
GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_RemoteEstimates_default_instance_; - new (ptr) ::webrtc::rtclog2::RemoteEstimates(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::RemoteEstimates::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RemoteEstimates_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_RemoteEstimates_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_RouteChange_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_RouteChange_default_instance_; - new (ptr) ::webrtc::rtclog2::RouteChange(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::RouteChange::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RouteChange_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_RouteChange_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_RtpHeaderExtensionConfig_default_instance_; - new (ptr) ::webrtc::rtclog2::RtpHeaderExtensionConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::RtpHeaderExtensionConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto}, {}}; - -static void InitDefaultsscc_info_VideoRecvStreamConfig_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - 
- { - void* ptr = &::webrtc::rtclog2::_VideoRecvStreamConfig_default_instance_; - new (ptr) ::webrtc::rtclog2::VideoRecvStreamConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::VideoRecvStreamConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_VideoRecvStreamConfig_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_VideoRecvStreamConfig_rtc_5fevent_5flog2_2eproto}, { - &scc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto.base,}}; - -static void InitDefaultsscc_info_VideoSendStreamConfig_rtc_5fevent_5flog2_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::webrtc::rtclog2::_VideoSendStreamConfig_default_instance_; - new (ptr) ::webrtc::rtclog2::VideoSendStreamConfig(); - ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); - } - ::webrtc::rtclog2::VideoSendStreamConfig::InitAsDefaultInstance(); -} - -::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_VideoSendStreamConfig_rtc_5fevent_5flog2_2eproto = - {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_VideoSendStreamConfig_rtc_5fevent_5flog2_2eproto}, { - &scc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto.base,}}; - -namespace webrtc { -namespace rtclog2 { -bool FrameDecodedEvents_Codec_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed FrameDecodedEvents_Codec_strings[7] = {}; - -static const char FrameDecodedEvents_Codec_names[] = - "CODEC_AV1" - "CODEC_GENERIC" - "CODEC_H264" - "CODEC_H265" - "CODEC_UNKNOWN" - "CODEC_VP8" - "CODEC_VP9"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry FrameDecodedEvents_Codec_entries[] = { - { 
{FrameDecodedEvents_Codec_names + 0, 9}, 4 }, - { {FrameDecodedEvents_Codec_names + 9, 13}, 1 }, - { {FrameDecodedEvents_Codec_names + 22, 10}, 5 }, - { {FrameDecodedEvents_Codec_names + 32, 10}, 6 }, - { {FrameDecodedEvents_Codec_names + 42, 13}, 0 }, - { {FrameDecodedEvents_Codec_names + 55, 9}, 2 }, - { {FrameDecodedEvents_Codec_names + 64, 9}, 3 }, -}; - -static const int FrameDecodedEvents_Codec_entries_by_number[] = { - 4, // 0 -> CODEC_UNKNOWN - 1, // 1 -> CODEC_GENERIC - 5, // 2 -> CODEC_VP8 - 6, // 3 -> CODEC_VP9 - 0, // 4 -> CODEC_AV1 - 2, // 5 -> CODEC_H264 - 3, // 6 -> CODEC_H265 -}; - -const std::string& FrameDecodedEvents_Codec_Name( - FrameDecodedEvents_Codec value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - FrameDecodedEvents_Codec_entries, - FrameDecodedEvents_Codec_entries_by_number, - 7, FrameDecodedEvents_Codec_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - FrameDecodedEvents_Codec_entries, - FrameDecodedEvents_Codec_entries_by_number, - 7, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - FrameDecodedEvents_Codec_strings[idx].get(); -} -bool FrameDecodedEvents_Codec_Parse( - const std::string& name, FrameDecodedEvents_Codec* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - FrameDecodedEvents_Codec_entries, 7, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::CODEC_UNKNOWN; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::CODEC_GENERIC; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::CODEC_VP8; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::CODEC_VP9; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::CODEC_AV1; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::CODEC_H264; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::CODEC_H265; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::Codec_MIN; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents::Codec_MAX; -constexpr int FrameDecodedEvents::Codec_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool DelayBasedBweUpdates_DetectorState_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed DelayBasedBweUpdates_DetectorState_strings[4] = {}; - -static const char DelayBasedBweUpdates_DetectorState_names[] = - "BWE_NORMAL" - "BWE_OVERUSING" - "BWE_UNDERUSING" - "BWE_UNKNOWN_STATE"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry DelayBasedBweUpdates_DetectorState_entries[] = { - { {DelayBasedBweUpdates_DetectorState_names + 0, 10}, 1 }, - { {DelayBasedBweUpdates_DetectorState_names + 10, 13}, 3 }, - { {DelayBasedBweUpdates_DetectorState_names + 23, 14}, 2 }, - { 
{DelayBasedBweUpdates_DetectorState_names + 37, 17}, 0 }, -}; - -static const int DelayBasedBweUpdates_DetectorState_entries_by_number[] = { - 3, // 0 -> BWE_UNKNOWN_STATE - 0, // 1 -> BWE_NORMAL - 2, // 2 -> BWE_UNDERUSING - 1, // 3 -> BWE_OVERUSING -}; - -const std::string& DelayBasedBweUpdates_DetectorState_Name( - DelayBasedBweUpdates_DetectorState value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - DelayBasedBweUpdates_DetectorState_entries, - DelayBasedBweUpdates_DetectorState_entries_by_number, - 4, DelayBasedBweUpdates_DetectorState_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - DelayBasedBweUpdates_DetectorState_entries, - DelayBasedBweUpdates_DetectorState_entries_by_number, - 4, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - DelayBasedBweUpdates_DetectorState_strings[idx].get(); -} -bool DelayBasedBweUpdates_DetectorState_Parse( - const std::string& name, DelayBasedBweUpdates_DetectorState* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - DelayBasedBweUpdates_DetectorState_entries, 4, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates::BWE_UNKNOWN_STATE; -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates::BWE_NORMAL; -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates::BWE_UNDERUSING; -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates::BWE_OVERUSING; -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates::DetectorState_MIN; -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates::DetectorState_MAX; -constexpr int DelayBasedBweUpdates::DetectorState_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 
1900) -bool BweProbeResultFailure_FailureReason_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed BweProbeResultFailure_FailureReason_strings[4] = {}; - -static const char BweProbeResultFailure_FailureReason_names[] = - "INVALID_SEND_RECEIVE_INTERVAL" - "INVALID_SEND_RECEIVE_RATIO" - "TIMEOUT" - "UNKNOWN"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry BweProbeResultFailure_FailureReason_entries[] = { - { {BweProbeResultFailure_FailureReason_names + 0, 29}, 1 }, - { {BweProbeResultFailure_FailureReason_names + 29, 26}, 2 }, - { {BweProbeResultFailure_FailureReason_names + 55, 7}, 3 }, - { {BweProbeResultFailure_FailureReason_names + 62, 7}, 0 }, -}; - -static const int BweProbeResultFailure_FailureReason_entries_by_number[] = { - 3, // 0 -> UNKNOWN - 0, // 1 -> INVALID_SEND_RECEIVE_INTERVAL - 1, // 2 -> INVALID_SEND_RECEIVE_RATIO - 2, // 3 -> TIMEOUT -}; - -const std::string& BweProbeResultFailure_FailureReason_Name( - BweProbeResultFailure_FailureReason value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - BweProbeResultFailure_FailureReason_entries, - BweProbeResultFailure_FailureReason_entries_by_number, - 4, BweProbeResultFailure_FailureReason_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - BweProbeResultFailure_FailureReason_entries, - BweProbeResultFailure_FailureReason_entries_by_number, - 4, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - BweProbeResultFailure_FailureReason_strings[idx].get(); -} -bool BweProbeResultFailure_FailureReason_Parse( - const std::string& name, BweProbeResultFailure_FailureReason* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - BweProbeResultFailure_FailureReason_entries, 4, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure::UNKNOWN; -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure::INVALID_SEND_RECEIVE_INTERVAL; -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure::INVALID_SEND_RECEIVE_RATIO; -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure::TIMEOUT; -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure::FailureReason_MIN; -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure::FailureReason_MAX; -constexpr int BweProbeResultFailure::FailureReason_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_IceCandidatePairConfigType_strings[5] = {}; - -static const char IceCandidatePairConfig_IceCandidatePairConfigType_names[] = - "ADDED" - "DESTROYED" - "SELECTED" - "UNKNOWN_CONFIG_TYPE" - "UPDATED"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_IceCandidatePairConfigType_entries[] = { - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 0, 5}, 1 }, - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 5, 9}, 3 }, - { 
{IceCandidatePairConfig_IceCandidatePairConfigType_names + 14, 8}, 4 }, - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 22, 19}, 0 }, - { {IceCandidatePairConfig_IceCandidatePairConfigType_names + 41, 7}, 2 }, -}; - -static const int IceCandidatePairConfig_IceCandidatePairConfigType_entries_by_number[] = { - 3, // 0 -> UNKNOWN_CONFIG_TYPE - 0, // 1 -> ADDED - 4, // 2 -> UPDATED - 1, // 3 -> DESTROYED - 2, // 4 -> SELECTED -}; - -const std::string& IceCandidatePairConfig_IceCandidatePairConfigType_Name( - IceCandidatePairConfig_IceCandidatePairConfigType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_IceCandidatePairConfigType_entries, - IceCandidatePairConfig_IceCandidatePairConfigType_entries_by_number, - 5, IceCandidatePairConfig_IceCandidatePairConfigType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_IceCandidatePairConfigType_entries, - IceCandidatePairConfig_IceCandidatePairConfigType_entries_by_number, - 5, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_IceCandidatePairConfigType_strings[idx].get(); -} -bool IceCandidatePairConfig_IceCandidatePairConfigType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidatePairConfigType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_IceCandidatePairConfigType_entries, 5, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::UNKNOWN_CONFIG_TYPE; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::ADDED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::UPDATED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::DESTROYED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::SELECTED; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::IceCandidatePairConfigType_MIN; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::IceCandidatePairConfigType_MAX; -constexpr int IceCandidatePairConfig::IceCandidatePairConfigType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_IceCandidateType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_IceCandidateType_strings[5] = {}; - -static const char IceCandidatePairConfig_IceCandidateType_names[] = - "LOCAL" - "PRFLX" - "RELAY" - "STUN" - "UNKNOWN_CANDIDATE_TYPE"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry 
IceCandidatePairConfig_IceCandidateType_entries[] = { - { {IceCandidatePairConfig_IceCandidateType_names + 0, 5}, 1 }, - { {IceCandidatePairConfig_IceCandidateType_names + 5, 5}, 3 }, - { {IceCandidatePairConfig_IceCandidateType_names + 10, 5}, 4 }, - { {IceCandidatePairConfig_IceCandidateType_names + 15, 4}, 2 }, - { {IceCandidatePairConfig_IceCandidateType_names + 19, 22}, 0 }, -}; - -static const int IceCandidatePairConfig_IceCandidateType_entries_by_number[] = { - 4, // 0 -> UNKNOWN_CANDIDATE_TYPE - 0, // 1 -> LOCAL - 3, // 2 -> STUN - 1, // 3 -> PRFLX - 2, // 4 -> RELAY -}; - -const std::string& IceCandidatePairConfig_IceCandidateType_Name( - IceCandidatePairConfig_IceCandidateType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_IceCandidateType_entries, - IceCandidatePairConfig_IceCandidateType_entries_by_number, - 5, IceCandidatePairConfig_IceCandidateType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_IceCandidateType_entries, - IceCandidatePairConfig_IceCandidateType_entries_by_number, - 5, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_IceCandidateType_strings[idx].get(); -} -bool IceCandidatePairConfig_IceCandidateType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidateType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_IceCandidateType_entries, 5, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::LOCAL; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::STUN; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::PRFLX; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::RELAY; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::IceCandidateType_MIN; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::IceCandidateType_MAX; -constexpr int IceCandidatePairConfig::IceCandidateType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_Protocol_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_Protocol_strings[5] = {}; - -static const char IceCandidatePairConfig_Protocol_names[] = - "SSLTCP" - "TCP" - "TLS" - "UDP" - "UNKNOWN_PROTOCOL"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_Protocol_entries[] = { - { {IceCandidatePairConfig_Protocol_names + 0, 6}, 3 }, - { {IceCandidatePairConfig_Protocol_names + 6, 3}, 2 }, - { {IceCandidatePairConfig_Protocol_names 
+ 9, 3}, 4 }, - { {IceCandidatePairConfig_Protocol_names + 12, 3}, 1 }, - { {IceCandidatePairConfig_Protocol_names + 15, 16}, 0 }, -}; - -static const int IceCandidatePairConfig_Protocol_entries_by_number[] = { - 4, // 0 -> UNKNOWN_PROTOCOL - 3, // 1 -> UDP - 1, // 2 -> TCP - 0, // 3 -> SSLTCP - 2, // 4 -> TLS -}; - -const std::string& IceCandidatePairConfig_Protocol_Name( - IceCandidatePairConfig_Protocol value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_Protocol_entries, - IceCandidatePairConfig_Protocol_entries_by_number, - 5, IceCandidatePairConfig_Protocol_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_Protocol_entries, - IceCandidatePairConfig_Protocol_entries_by_number, - 5, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_Protocol_strings[idx].get(); -} -bool IceCandidatePairConfig_Protocol_Parse( - const std::string& name, IceCandidatePairConfig_Protocol* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_Protocol_entries, 5, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::UNKNOWN_PROTOCOL; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::UDP; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::TCP; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::SSLTCP; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::TLS; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::Protocol_MIN; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig::Protocol_MAX; -constexpr int IceCandidatePairConfig::Protocol_ARRAYSIZE; -#endif // 
(__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_AddressFamily_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_AddressFamily_strings[3] = {}; - -static const char IceCandidatePairConfig_AddressFamily_names[] = - "IPV4" - "IPV6" - "UNKNOWN_ADDRESS_FAMILY"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_AddressFamily_entries[] = { - { {IceCandidatePairConfig_AddressFamily_names + 0, 4}, 1 }, - { {IceCandidatePairConfig_AddressFamily_names + 4, 4}, 2 }, - { {IceCandidatePairConfig_AddressFamily_names + 8, 22}, 0 }, -}; - -static const int IceCandidatePairConfig_AddressFamily_entries_by_number[] = { - 2, // 0 -> UNKNOWN_ADDRESS_FAMILY - 0, // 1 -> IPV4 - 1, // 2 -> IPV6 -}; - -const std::string& IceCandidatePairConfig_AddressFamily_Name( - IceCandidatePairConfig_AddressFamily value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_AddressFamily_entries, - IceCandidatePairConfig_AddressFamily_entries_by_number, - 3, IceCandidatePairConfig_AddressFamily_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_AddressFamily_entries, - IceCandidatePairConfig_AddressFamily_entries_by_number, - 3, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_AddressFamily_strings[idx].get(); -} -bool IceCandidatePairConfig_AddressFamily_Parse( - const std::string& name, IceCandidatePairConfig_AddressFamily* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_AddressFamily_entries, 3, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::UNKNOWN_ADDRESS_FAMILY; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::IPV4; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::IPV6; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::AddressFamily_MIN; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::AddressFamily_MAX; -constexpr int IceCandidatePairConfig::AddressFamily_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairConfig_NetworkType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairConfig_NetworkType_strings[6] = {}; - -static const char IceCandidatePairConfig_NetworkType_names[] = - "CELLULAR" - "ETHERNET" - "LOOPBACK" - "UNKNOWN_NETWORK_TYPE" - "VPN" - "WIFI"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairConfig_NetworkType_entries[] = { - { {IceCandidatePairConfig_NetworkType_names + 0, 8}, 3 }, - { {IceCandidatePairConfig_NetworkType_names + 8, 8}, 1 }, - { {IceCandidatePairConfig_NetworkType_names + 16, 8}, 5 }, - { {IceCandidatePairConfig_NetworkType_names + 24, 20}, 0 }, - { {IceCandidatePairConfig_NetworkType_names + 44, 3}, 4 }, - { 
{IceCandidatePairConfig_NetworkType_names + 47, 4}, 2 }, -}; - -static const int IceCandidatePairConfig_NetworkType_entries_by_number[] = { - 3, // 0 -> UNKNOWN_NETWORK_TYPE - 1, // 1 -> ETHERNET - 5, // 2 -> WIFI - 0, // 3 -> CELLULAR - 4, // 4 -> VPN - 2, // 5 -> LOOPBACK -}; - -const std::string& IceCandidatePairConfig_NetworkType_Name( - IceCandidatePairConfig_NetworkType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairConfig_NetworkType_entries, - IceCandidatePairConfig_NetworkType_entries_by_number, - 6, IceCandidatePairConfig_NetworkType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairConfig_NetworkType_entries, - IceCandidatePairConfig_NetworkType_entries_by_number, - 6, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairConfig_NetworkType_strings[idx].get(); -} -bool IceCandidatePairConfig_NetworkType_Parse( - const std::string& name, IceCandidatePairConfig_NetworkType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairConfig_NetworkType_entries, 6, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::UNKNOWN_NETWORK_TYPE; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::ETHERNET; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::WIFI; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::CELLULAR; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::VPN; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::LOOPBACK; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig::NetworkType_MIN; -constexpr IceCandidatePairConfig_NetworkType 
IceCandidatePairConfig::NetworkType_MAX; -constexpr int IceCandidatePairConfig::NetworkType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool IceCandidatePairEvent_IceCandidatePairEventType_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed IceCandidatePairEvent_IceCandidatePairEventType_strings[5] = {}; - -static const char IceCandidatePairEvent_IceCandidatePairEventType_names[] = - "CHECK_RECEIVED" - "CHECK_RESPONSE_RECEIVED" - "CHECK_RESPONSE_SENT" - "CHECK_SENT" - "UNKNOWN_CHECK_TYPE"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry IceCandidatePairEvent_IceCandidatePairEventType_entries[] = { - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 0, 14}, 2 }, - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 14, 23}, 4 }, - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 37, 19}, 3 }, - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 56, 10}, 1 }, - { {IceCandidatePairEvent_IceCandidatePairEventType_names + 66, 18}, 0 }, -}; - -static const int IceCandidatePairEvent_IceCandidatePairEventType_entries_by_number[] = { - 4, // 0 -> UNKNOWN_CHECK_TYPE - 3, // 1 -> CHECK_SENT - 0, // 2 -> CHECK_RECEIVED - 2, // 3 -> CHECK_RESPONSE_SENT - 1, // 4 -> CHECK_RESPONSE_RECEIVED -}; - -const std::string& IceCandidatePairEvent_IceCandidatePairEventType_Name( - IceCandidatePairEvent_IceCandidatePairEventType value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - IceCandidatePairEvent_IceCandidatePairEventType_entries, - IceCandidatePairEvent_IceCandidatePairEventType_entries_by_number, - 5, IceCandidatePairEvent_IceCandidatePairEventType_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - IceCandidatePairEvent_IceCandidatePairEventType_entries, - 
IceCandidatePairEvent_IceCandidatePairEventType_entries_by_number, - 5, value); - return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - IceCandidatePairEvent_IceCandidatePairEventType_strings[idx].get(); -} -bool IceCandidatePairEvent_IceCandidatePairEventType_Parse( - const std::string& name, IceCandidatePairEvent_IceCandidatePairEventType* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - IceCandidatePairEvent_IceCandidatePairEventType_entries, 5, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::UNKNOWN_CHECK_TYPE; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_SENT; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_RECEIVED; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_RESPONSE_SENT; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::CHECK_RESPONSE_RECEIVED; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::IceCandidatePairEventType_MIN; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::IceCandidatePairEventType_MAX; -constexpr int IceCandidatePairEvent::IceCandidatePairEventType_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -bool DtlsTransportStateEvent_DtlsTransportState_IsValid(int value) { - switch (value) { - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - return true; - default: - return false; - } -} - -static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed DtlsTransportStateEvent_DtlsTransportState_strings[6] = {}; - -static const char DtlsTransportStateEvent_DtlsTransportState_names[] = - "DTLS_TRANSPORT_CLOSED" - 
"DTLS_TRANSPORT_CONNECTED" - "DTLS_TRANSPORT_CONNECTING" - "DTLS_TRANSPORT_FAILED" - "DTLS_TRANSPORT_NEW" - "UNKNOWN_DTLS_TRANSPORT_STATE"; - -static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry DtlsTransportStateEvent_DtlsTransportState_entries[] = { - { {DtlsTransportStateEvent_DtlsTransportState_names + 0, 21}, 4 }, - { {DtlsTransportStateEvent_DtlsTransportState_names + 21, 24}, 3 }, - { {DtlsTransportStateEvent_DtlsTransportState_names + 45, 25}, 2 }, - { {DtlsTransportStateEvent_DtlsTransportState_names + 70, 21}, 5 }, - { {DtlsTransportStateEvent_DtlsTransportState_names + 91, 18}, 1 }, - { {DtlsTransportStateEvent_DtlsTransportState_names + 109, 28}, 0 }, -}; - -static const int DtlsTransportStateEvent_DtlsTransportState_entries_by_number[] = { - 5, // 0 -> UNKNOWN_DTLS_TRANSPORT_STATE - 4, // 1 -> DTLS_TRANSPORT_NEW - 2, // 2 -> DTLS_TRANSPORT_CONNECTING - 1, // 3 -> DTLS_TRANSPORT_CONNECTED - 0, // 4 -> DTLS_TRANSPORT_CLOSED - 3, // 5 -> DTLS_TRANSPORT_FAILED -}; - -const std::string& DtlsTransportStateEvent_DtlsTransportState_Name( - DtlsTransportStateEvent_DtlsTransportState value) { - static const bool dummy = - ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings( - DtlsTransportStateEvent_DtlsTransportState_entries, - DtlsTransportStateEvent_DtlsTransportState_entries_by_number, - 6, DtlsTransportStateEvent_DtlsTransportState_strings); - (void) dummy; - int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName( - DtlsTransportStateEvent_DtlsTransportState_entries, - DtlsTransportStateEvent_DtlsTransportState_entries_by_number, - 6, value); - return idx == -1 ? 
::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() : - DtlsTransportStateEvent_DtlsTransportState_strings[idx].get(); -} -bool DtlsTransportStateEvent_DtlsTransportState_Parse( - const std::string& name, DtlsTransportStateEvent_DtlsTransportState* value) { - int int_value; - bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue( - DtlsTransportStateEvent_DtlsTransportState_entries, 6, name, &int_value); - if (success) { - *value = static_cast(int_value); - } - return success; -} -#if (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::UNKNOWN_DTLS_TRANSPORT_STATE; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::DTLS_TRANSPORT_NEW; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::DTLS_TRANSPORT_CONNECTING; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::DTLS_TRANSPORT_CONNECTED; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::DTLS_TRANSPORT_CLOSED; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::DTLS_TRANSPORT_FAILED; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::DtlsTransportState_MIN; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::DtlsTransportState_MAX; -constexpr int DtlsTransportStateEvent::DtlsTransportState_ARRAYSIZE; -#endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || _MSC_VER >= 1900) - -// =================================================================== - -void EventStream::InitAsDefaultInstance() { -} -class EventStream::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); -}; - -EventStream::EventStream() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.EventStream) -} -EventStream::EventStream(const 
EventStream& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_), - stream_(from.stream_), - incoming_rtp_packets_(from.incoming_rtp_packets_), - outgoing_rtp_packets_(from.outgoing_rtp_packets_), - incoming_rtcp_packets_(from.incoming_rtcp_packets_), - outgoing_rtcp_packets_(from.outgoing_rtcp_packets_), - audio_playout_events_(from.audio_playout_events_), - frame_decoded_events_(from.frame_decoded_events_), - begin_log_events_(from.begin_log_events_), - end_log_events_(from.end_log_events_), - loss_based_bwe_updates_(from.loss_based_bwe_updates_), - delay_based_bwe_updates_(from.delay_based_bwe_updates_), - audio_network_adaptations_(from.audio_network_adaptations_), - probe_clusters_(from.probe_clusters_), - probe_success_(from.probe_success_), - probe_failure_(from.probe_failure_), - alr_states_(from.alr_states_), - ice_candidate_configs_(from.ice_candidate_configs_), - ice_candidate_events_(from.ice_candidate_events_), - dtls_transport_state_events_(from.dtls_transport_state_events_), - dtls_writable_states_(from.dtls_writable_states_), - generic_packets_sent_(from.generic_packets_sent_), - generic_packets_received_(from.generic_packets_received_), - generic_acks_received_(from.generic_acks_received_), - route_changes_(from.route_changes_), - remote_estimates_(from.remote_estimates_), - audio_recv_stream_configs_(from.audio_recv_stream_configs_), - audio_send_stream_configs_(from.audio_send_stream_configs_), - video_recv_stream_configs_(from.video_recv_stream_configs_), - video_send_stream_configs_(from.video_send_stream_configs_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.EventStream) -} - -void EventStream::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_EventStream_rtc_5fevent_5flog2_2eproto.base); -} - -EventStream::~EventStream() { - // 
@@protoc_insertion_point(destructor:webrtc.rtclog2.EventStream) - SharedDtor(); -} - -void EventStream::SharedDtor() { -} - -void EventStream::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const EventStream& EventStream::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_EventStream_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void EventStream::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.EventStream) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - stream_.Clear(); - incoming_rtp_packets_.Clear(); - outgoing_rtp_packets_.Clear(); - incoming_rtcp_packets_.Clear(); - outgoing_rtcp_packets_.Clear(); - audio_playout_events_.Clear(); - frame_decoded_events_.Clear(); - begin_log_events_.Clear(); - end_log_events_.Clear(); - loss_based_bwe_updates_.Clear(); - delay_based_bwe_updates_.Clear(); - audio_network_adaptations_.Clear(); - probe_clusters_.Clear(); - probe_success_.Clear(); - probe_failure_.Clear(); - alr_states_.Clear(); - ice_candidate_configs_.Clear(); - ice_candidate_events_.Clear(); - dtls_transport_state_events_.Clear(); - dtls_writable_states_.Clear(); - generic_packets_sent_.Clear(); - generic_packets_received_.Clear(); - generic_acks_received_.Clear(); - route_changes_.Clear(); - remote_estimates_.Clear(); - audio_recv_stream_configs_.Clear(); - audio_send_stream_configs_.Clear(); - video_recv_stream_configs_.Clear(); - video_send_stream_configs_.Clear(); - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* EventStream::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = 
::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // repeated .webrtc.rtclog2.Event stream = 1 [deprecated = true]; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_stream(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 10); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.IncomingRtpPackets incoming_rtp_packets = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_incoming_rtp_packets(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 18); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.OutgoingRtpPackets outgoing_rtp_packets = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 26)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_outgoing_rtp_packets(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 26); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.IncomingRtcpPackets incoming_rtcp_packets = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 34)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_incoming_rtcp_packets(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 34); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.OutgoingRtcpPackets outgoing_rtcp_packets = 5; - 
case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_outgoing_rtcp_packets(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 42); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.AudioPlayoutEvents audio_playout_events = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_audio_playout_events(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 50); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.FrameDecodedEvents frame_decoded_events = 7; - case 7: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr -= 1; - do { - ptr += 1; - ptr = ctx->ParseMessage(add_frame_decoded_events(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 58); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.BeginLogEvent begin_log_events = 16; - case 16: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 130)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_begin_log_events(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 386); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.EndLogEvent end_log_events = 17; - case 17: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 138)) { - ptr -= 2; - do { - ptr += 2; - ptr = 
ctx->ParseMessage(add_end_log_events(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 394); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.LossBasedBweUpdates loss_based_bwe_updates = 18; - case 18: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 146)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_loss_based_bwe_updates(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 402); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.DelayBasedBweUpdates delay_based_bwe_updates = 19; - case 19: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 154)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_delay_based_bwe_updates(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 410); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.AudioNetworkAdaptations audio_network_adaptations = 20; - case 20: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 162)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_audio_network_adaptations(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 418); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.BweProbeCluster probe_clusters = 21; - case 21: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 170)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_probe_clusters(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while 
(::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 426); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.BweProbeResultSuccess probe_success = 22; - case 22: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 178)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_probe_success(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 434); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.BweProbeResultFailure probe_failure = 23; - case 23: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 186)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_probe_failure(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 442); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.AlrState alr_states = 24; - case 24: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 194)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_alr_states(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 450); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.IceCandidatePairConfig ice_candidate_configs = 25; - case 25: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 202)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_ice_candidate_configs(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 458); - } else goto handle_unusual; - continue; - // repeated 
.webrtc.rtclog2.IceCandidatePairEvent ice_candidate_events = 26; - case 26: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 210)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_ice_candidate_events(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 466); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.DtlsTransportStateEvent dtls_transport_state_events = 27; - case 27: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 218)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_dtls_transport_state_events(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 474); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.DtlsWritableState dtls_writable_states = 28; - case 28: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 226)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_dtls_writable_states(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 482); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.GenericPacketSent generic_packets_sent = 29; - case 29: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 234)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_generic_packets_sent(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 490); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.GenericPacketReceived generic_packets_received = 30; - case 30: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 242)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_generic_packets_received(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 498); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.GenericAckReceived generic_acks_received = 31; - case 31: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 250)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_generic_acks_received(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 506); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.RouteChange route_changes = 32; - case 32: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 2)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_route_changes(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 642); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.RemoteEstimates remote_estimates = 33; - case 33: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_remote_estimates(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 650); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.AudioRecvStreamConfig audio_recv_stream_configs = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr -= 2; - do { - ptr += 2; - ptr = 
ctx->ParseMessage(add_audio_recv_stream_configs(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 1706); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.AudioSendStreamConfig audio_send_stream_configs = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_audio_send_stream_configs(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 1714); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.VideoRecvStreamConfig video_recv_stream_configs = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_video_recv_stream_configs(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 1722); - } else goto handle_unusual; - continue; - // repeated .webrtc.rtclog2.VideoSendStreamConfig video_send_stream_configs = 104; - case 104: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 66)) { - ptr -= 2; - do { - ptr += 2; - ptr = ctx->ParseMessage(add_video_send_stream_configs(), ptr); - CHK_(ptr); - if (!ctx->DataAvailable(ptr)) break; - } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint16>(ptr) == 1730); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - return ptr; -failure: - ptr = nullptr; - 
goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool EventStream::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.EventStream) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // repeated .webrtc.rtclog2.Event stream = 1 [deprecated = true]; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_stream())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.IncomingRtpPackets incoming_rtp_packets = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_incoming_rtp_packets())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.OutgoingRtpPackets outgoing_rtp_packets = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (26 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_outgoing_rtp_packets())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.IncomingRtcpPackets incoming_rtcp_packets = 4; - case 4: { - if 
(static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (34 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_incoming_rtcp_packets())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.OutgoingRtcpPackets outgoing_rtcp_packets = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (42 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_outgoing_rtcp_packets())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.AudioPlayoutEvents audio_playout_events = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (50 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_audio_playout_events())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.FrameDecodedEvents frame_decoded_events = 7; - case 7: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (58 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_frame_decoded_events())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.BeginLogEvent begin_log_events = 16; - case 16: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (130 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_begin_log_events())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.EndLogEvent end_log_events = 17; - case 17: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (138 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_end_log_events())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.LossBasedBweUpdates loss_based_bwe_updates = 18; - case 18: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (146 & 0xFF)) { - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_loss_based_bwe_updates())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.DelayBasedBweUpdates delay_based_bwe_updates = 19; - case 19: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (154 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_delay_based_bwe_updates())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.AudioNetworkAdaptations audio_network_adaptations = 20; - case 20: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (162 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_audio_network_adaptations())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.BweProbeCluster probe_clusters = 21; - case 21: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (170 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_probe_clusters())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.BweProbeResultSuccess probe_success = 22; - case 22: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (178 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_probe_success())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.BweProbeResultFailure probe_failure = 23; - case 23: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (186 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_probe_failure())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.AlrState alr_states = 24; - case 24: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (194 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - 
input, add_alr_states())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.IceCandidatePairConfig ice_candidate_configs = 25; - case 25: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (202 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_ice_candidate_configs())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.IceCandidatePairEvent ice_candidate_events = 26; - case 26: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (210 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_ice_candidate_events())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.DtlsTransportStateEvent dtls_transport_state_events = 27; - case 27: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (218 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_dtls_transport_state_events())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.DtlsWritableState dtls_writable_states = 28; - case 28: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (226 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_dtls_writable_states())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.GenericPacketSent generic_packets_sent = 29; - case 29: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (234 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_generic_packets_sent())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.GenericPacketReceived generic_packets_received = 30; - case 30: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (242 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, 
add_generic_packets_received())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.GenericAckReceived generic_acks_received = 31; - case 31: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (250 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_generic_acks_received())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.RouteChange route_changes = 32; - case 32: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (258 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_route_changes())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.RemoteEstimates remote_estimates = 33; - case 33: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (266 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_remote_estimates())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.AudioRecvStreamConfig audio_recv_stream_configs = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_audio_recv_stream_configs())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.AudioSendStreamConfig audio_send_stream_configs = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_audio_send_stream_configs())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.VideoRecvStreamConfig video_recv_stream_configs = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, 
add_video_recv_stream_configs())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .webrtc.rtclog2.VideoSendStreamConfig video_send_stream_configs = 104; - case 104: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (834 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, add_video_send_stream_configs())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.EventStream) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.EventStream) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void EventStream::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.EventStream) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .webrtc.rtclog2.Event stream = 1 [deprecated = true]; - for (unsigned int i = 0, - n = static_cast(this->stream_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 1, - this->stream(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.IncomingRtpPackets incoming_rtp_packets = 2; - for (unsigned int i = 0, - n = static_cast(this->incoming_rtp_packets_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 2, - this->incoming_rtp_packets(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.OutgoingRtpPackets outgoing_rtp_packets = 3; - for (unsigned int i = 0, - n = static_cast(this->outgoing_rtp_packets_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 3, - 
this->outgoing_rtp_packets(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.IncomingRtcpPackets incoming_rtcp_packets = 4; - for (unsigned int i = 0, - n = static_cast(this->incoming_rtcp_packets_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 4, - this->incoming_rtcp_packets(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.OutgoingRtcpPackets outgoing_rtcp_packets = 5; - for (unsigned int i = 0, - n = static_cast(this->outgoing_rtcp_packets_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 5, - this->outgoing_rtcp_packets(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.AudioPlayoutEvents audio_playout_events = 6; - for (unsigned int i = 0, - n = static_cast(this->audio_playout_events_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 6, - this->audio_playout_events(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.FrameDecodedEvents frame_decoded_events = 7; - for (unsigned int i = 0, - n = static_cast(this->frame_decoded_events_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 7, - this->frame_decoded_events(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.BeginLogEvent begin_log_events = 16; - for (unsigned int i = 0, - n = static_cast(this->begin_log_events_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 16, - this->begin_log_events(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.EndLogEvent end_log_events = 17; - for (unsigned int i = 0, - n = static_cast(this->end_log_events_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 17, - this->end_log_events(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.LossBasedBweUpdates loss_based_bwe_updates = 18; - for (unsigned int i = 0, - n = 
static_cast(this->loss_based_bwe_updates_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 18, - this->loss_based_bwe_updates(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.DelayBasedBweUpdates delay_based_bwe_updates = 19; - for (unsigned int i = 0, - n = static_cast(this->delay_based_bwe_updates_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 19, - this->delay_based_bwe_updates(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.AudioNetworkAdaptations audio_network_adaptations = 20; - for (unsigned int i = 0, - n = static_cast(this->audio_network_adaptations_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 20, - this->audio_network_adaptations(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.BweProbeCluster probe_clusters = 21; - for (unsigned int i = 0, - n = static_cast(this->probe_clusters_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 21, - this->probe_clusters(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.BweProbeResultSuccess probe_success = 22; - for (unsigned int i = 0, - n = static_cast(this->probe_success_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 22, - this->probe_success(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.BweProbeResultFailure probe_failure = 23; - for (unsigned int i = 0, - n = static_cast(this->probe_failure_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 23, - this->probe_failure(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.AlrState alr_states = 24; - for (unsigned int i = 0, - n = static_cast(this->alr_states_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 24, - this->alr_states(static_cast(i)), - output); - } - - // repeated 
.webrtc.rtclog2.IceCandidatePairConfig ice_candidate_configs = 25; - for (unsigned int i = 0, - n = static_cast(this->ice_candidate_configs_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 25, - this->ice_candidate_configs(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.IceCandidatePairEvent ice_candidate_events = 26; - for (unsigned int i = 0, - n = static_cast(this->ice_candidate_events_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 26, - this->ice_candidate_events(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.DtlsTransportStateEvent dtls_transport_state_events = 27; - for (unsigned int i = 0, - n = static_cast(this->dtls_transport_state_events_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 27, - this->dtls_transport_state_events(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.DtlsWritableState dtls_writable_states = 28; - for (unsigned int i = 0, - n = static_cast(this->dtls_writable_states_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 28, - this->dtls_writable_states(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.GenericPacketSent generic_packets_sent = 29; - for (unsigned int i = 0, - n = static_cast(this->generic_packets_sent_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 29, - this->generic_packets_sent(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.GenericPacketReceived generic_packets_received = 30; - for (unsigned int i = 0, - n = static_cast(this->generic_packets_received_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 30, - this->generic_packets_received(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.GenericAckReceived generic_acks_received = 31; - for (unsigned int i = 0, - n = 
static_cast(this->generic_acks_received_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 31, - this->generic_acks_received(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.RouteChange route_changes = 32; - for (unsigned int i = 0, - n = static_cast(this->route_changes_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 32, - this->route_changes(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.RemoteEstimates remote_estimates = 33; - for (unsigned int i = 0, - n = static_cast(this->remote_estimates_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 33, - this->remote_estimates(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.AudioRecvStreamConfig audio_recv_stream_configs = 101; - for (unsigned int i = 0, - n = static_cast(this->audio_recv_stream_configs_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 101, - this->audio_recv_stream_configs(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.AudioSendStreamConfig audio_send_stream_configs = 102; - for (unsigned int i = 0, - n = static_cast(this->audio_send_stream_configs_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 102, - this->audio_send_stream_configs(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.VideoRecvStreamConfig video_recv_stream_configs = 103; - for (unsigned int i = 0, - n = static_cast(this->video_recv_stream_configs_size()); i < n; i++) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 103, - this->video_recv_stream_configs(static_cast(i)), - output); - } - - // repeated .webrtc.rtclog2.VideoSendStreamConfig video_send_stream_configs = 104; - for (unsigned int i = 0, - n = static_cast(this->video_send_stream_configs_size()); i < n; i++) { - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 104, - this->video_send_stream_configs(static_cast(i)), - output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.EventStream) -} - -size_t EventStream::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.EventStream) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .webrtc.rtclog2.Event stream = 1 [deprecated = true]; - { - unsigned int count = static_cast(this->stream_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->stream(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.IncomingRtpPackets incoming_rtp_packets = 2; - { - unsigned int count = static_cast(this->incoming_rtp_packets_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->incoming_rtp_packets(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.OutgoingRtpPackets outgoing_rtp_packets = 3; - { - unsigned int count = static_cast(this->outgoing_rtp_packets_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->outgoing_rtp_packets(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.IncomingRtcpPackets incoming_rtcp_packets = 4; - { - unsigned int count = static_cast(this->incoming_rtcp_packets_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size 
+= - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->incoming_rtcp_packets(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.OutgoingRtcpPackets outgoing_rtcp_packets = 5; - { - unsigned int count = static_cast(this->outgoing_rtcp_packets_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->outgoing_rtcp_packets(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.AudioPlayoutEvents audio_playout_events = 6; - { - unsigned int count = static_cast(this->audio_playout_events_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->audio_playout_events(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.FrameDecodedEvents frame_decoded_events = 7; - { - unsigned int count = static_cast(this->frame_decoded_events_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->frame_decoded_events(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.BeginLogEvent begin_log_events = 16; - { - unsigned int count = static_cast(this->begin_log_events_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->begin_log_events(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.EndLogEvent end_log_events = 17; - { - unsigned int count = static_cast(this->end_log_events_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->end_log_events(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.LossBasedBweUpdates loss_based_bwe_updates = 18; - { - unsigned 
int count = static_cast(this->loss_based_bwe_updates_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->loss_based_bwe_updates(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.DelayBasedBweUpdates delay_based_bwe_updates = 19; - { - unsigned int count = static_cast(this->delay_based_bwe_updates_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->delay_based_bwe_updates(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.AudioNetworkAdaptations audio_network_adaptations = 20; - { - unsigned int count = static_cast(this->audio_network_adaptations_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->audio_network_adaptations(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.BweProbeCluster probe_clusters = 21; - { - unsigned int count = static_cast(this->probe_clusters_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->probe_clusters(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.BweProbeResultSuccess probe_success = 22; - { - unsigned int count = static_cast(this->probe_success_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->probe_success(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.BweProbeResultFailure probe_failure = 23; - { - unsigned int count = static_cast(this->probe_failure_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->probe_failure(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.AlrState alr_states = 24; - { - unsigned int count = static_cast(this->alr_states_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->alr_states(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.IceCandidatePairConfig ice_candidate_configs = 25; - { - unsigned int count = static_cast(this->ice_candidate_configs_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->ice_candidate_configs(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.IceCandidatePairEvent ice_candidate_events = 26; - { - unsigned int count = static_cast(this->ice_candidate_events_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->ice_candidate_events(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.DtlsTransportStateEvent dtls_transport_state_events = 27; - { - unsigned int count = static_cast(this->dtls_transport_state_events_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->dtls_transport_state_events(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.DtlsWritableState dtls_writable_states = 28; - { - unsigned int count = static_cast(this->dtls_writable_states_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->dtls_writable_states(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.GenericPacketSent generic_packets_sent = 
29; - { - unsigned int count = static_cast(this->generic_packets_sent_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->generic_packets_sent(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.GenericPacketReceived generic_packets_received = 30; - { - unsigned int count = static_cast(this->generic_packets_received_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->generic_packets_received(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.GenericAckReceived generic_acks_received = 31; - { - unsigned int count = static_cast(this->generic_acks_received_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->generic_acks_received(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.RouteChange route_changes = 32; - { - unsigned int count = static_cast(this->route_changes_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->route_changes(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.RemoteEstimates remote_estimates = 33; - { - unsigned int count = static_cast(this->remote_estimates_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->remote_estimates(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.AudioRecvStreamConfig audio_recv_stream_configs = 101; - { - unsigned int count = static_cast(this->audio_recv_stream_configs_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->audio_recv_stream_configs(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.AudioSendStreamConfig audio_send_stream_configs = 102; - { - unsigned int count = static_cast(this->audio_send_stream_configs_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->audio_send_stream_configs(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.VideoRecvStreamConfig video_recv_stream_configs = 103; - { - unsigned int count = static_cast(this->video_recv_stream_configs_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->video_recv_stream_configs(static_cast(i))); - } - } - - // repeated .webrtc.rtclog2.VideoSendStreamConfig video_send_stream_configs = 104; - { - unsigned int count = static_cast(this->video_send_stream_configs_size()); - total_size += 2UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - this->video_send_stream_configs(static_cast(i))); - } - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void EventStream::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void EventStream::MergeFrom(const EventStream& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.EventStream) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - stream_.MergeFrom(from.stream_); - incoming_rtp_packets_.MergeFrom(from.incoming_rtp_packets_); 
- outgoing_rtp_packets_.MergeFrom(from.outgoing_rtp_packets_); - incoming_rtcp_packets_.MergeFrom(from.incoming_rtcp_packets_); - outgoing_rtcp_packets_.MergeFrom(from.outgoing_rtcp_packets_); - audio_playout_events_.MergeFrom(from.audio_playout_events_); - frame_decoded_events_.MergeFrom(from.frame_decoded_events_); - begin_log_events_.MergeFrom(from.begin_log_events_); - end_log_events_.MergeFrom(from.end_log_events_); - loss_based_bwe_updates_.MergeFrom(from.loss_based_bwe_updates_); - delay_based_bwe_updates_.MergeFrom(from.delay_based_bwe_updates_); - audio_network_adaptations_.MergeFrom(from.audio_network_adaptations_); - probe_clusters_.MergeFrom(from.probe_clusters_); - probe_success_.MergeFrom(from.probe_success_); - probe_failure_.MergeFrom(from.probe_failure_); - alr_states_.MergeFrom(from.alr_states_); - ice_candidate_configs_.MergeFrom(from.ice_candidate_configs_); - ice_candidate_events_.MergeFrom(from.ice_candidate_events_); - dtls_transport_state_events_.MergeFrom(from.dtls_transport_state_events_); - dtls_writable_states_.MergeFrom(from.dtls_writable_states_); - generic_packets_sent_.MergeFrom(from.generic_packets_sent_); - generic_packets_received_.MergeFrom(from.generic_packets_received_); - generic_acks_received_.MergeFrom(from.generic_acks_received_); - route_changes_.MergeFrom(from.route_changes_); - remote_estimates_.MergeFrom(from.remote_estimates_); - audio_recv_stream_configs_.MergeFrom(from.audio_recv_stream_configs_); - audio_send_stream_configs_.MergeFrom(from.audio_send_stream_configs_); - video_recv_stream_configs_.MergeFrom(from.video_recv_stream_configs_); - video_send_stream_configs_.MergeFrom(from.video_send_stream_configs_); -} - -void EventStream::CopyFrom(const EventStream& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.EventStream) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool EventStream::IsInitialized() const { - return true; -} - -void 
EventStream::InternalSwap(EventStream* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - CastToBase(&stream_)->InternalSwap(CastToBase(&other->stream_)); - CastToBase(&incoming_rtp_packets_)->InternalSwap(CastToBase(&other->incoming_rtp_packets_)); - CastToBase(&outgoing_rtp_packets_)->InternalSwap(CastToBase(&other->outgoing_rtp_packets_)); - CastToBase(&incoming_rtcp_packets_)->InternalSwap(CastToBase(&other->incoming_rtcp_packets_)); - CastToBase(&outgoing_rtcp_packets_)->InternalSwap(CastToBase(&other->outgoing_rtcp_packets_)); - CastToBase(&audio_playout_events_)->InternalSwap(CastToBase(&other->audio_playout_events_)); - CastToBase(&frame_decoded_events_)->InternalSwap(CastToBase(&other->frame_decoded_events_)); - CastToBase(&begin_log_events_)->InternalSwap(CastToBase(&other->begin_log_events_)); - CastToBase(&end_log_events_)->InternalSwap(CastToBase(&other->end_log_events_)); - CastToBase(&loss_based_bwe_updates_)->InternalSwap(CastToBase(&other->loss_based_bwe_updates_)); - CastToBase(&delay_based_bwe_updates_)->InternalSwap(CastToBase(&other->delay_based_bwe_updates_)); - CastToBase(&audio_network_adaptations_)->InternalSwap(CastToBase(&other->audio_network_adaptations_)); - CastToBase(&probe_clusters_)->InternalSwap(CastToBase(&other->probe_clusters_)); - CastToBase(&probe_success_)->InternalSwap(CastToBase(&other->probe_success_)); - CastToBase(&probe_failure_)->InternalSwap(CastToBase(&other->probe_failure_)); - CastToBase(&alr_states_)->InternalSwap(CastToBase(&other->alr_states_)); - CastToBase(&ice_candidate_configs_)->InternalSwap(CastToBase(&other->ice_candidate_configs_)); - CastToBase(&ice_candidate_events_)->InternalSwap(CastToBase(&other->ice_candidate_events_)); - CastToBase(&dtls_transport_state_events_)->InternalSwap(CastToBase(&other->dtls_transport_state_events_)); - 
CastToBase(&dtls_writable_states_)->InternalSwap(CastToBase(&other->dtls_writable_states_)); - CastToBase(&generic_packets_sent_)->InternalSwap(CastToBase(&other->generic_packets_sent_)); - CastToBase(&generic_packets_received_)->InternalSwap(CastToBase(&other->generic_packets_received_)); - CastToBase(&generic_acks_received_)->InternalSwap(CastToBase(&other->generic_acks_received_)); - CastToBase(&route_changes_)->InternalSwap(CastToBase(&other->route_changes_)); - CastToBase(&remote_estimates_)->InternalSwap(CastToBase(&other->remote_estimates_)); - CastToBase(&audio_recv_stream_configs_)->InternalSwap(CastToBase(&other->audio_recv_stream_configs_)); - CastToBase(&audio_send_stream_configs_)->InternalSwap(CastToBase(&other->audio_send_stream_configs_)); - CastToBase(&video_recv_stream_configs_)->InternalSwap(CastToBase(&other->video_recv_stream_configs_)); - CastToBase(&video_send_stream_configs_)->InternalSwap(CastToBase(&other->video_send_stream_configs_)); -} - -std::string EventStream::GetTypeName() const { - return "webrtc.rtclog2.EventStream"; -} - - -// =================================================================== - -void Event::InitAsDefaultInstance() { -} -class Event::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); -}; - -Event::Event() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.Event) -} -Event::Event(const Event& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.Event) -} - -void Event::SharedCtor() { -} - -Event::~Event() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.Event) - SharedDtor(); -} - -void Event::SharedDtor() { -} - -void Event::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const Event& 
Event::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_Event_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void Event::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.Event) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* Event::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - default: { - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool Event::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.Event) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - 
handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.Event) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.Event) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void Event::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.Event) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.Event) -} - -size_t Event::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.Event) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void Event::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void Event::MergeFrom(const Event& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.Event) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - -} - -void Event::CopyFrom(const Event& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.Event) - if (&from == this) return; - Clear(); - 
MergeFrom(from); -} - -bool Event::IsInitialized() const { - return true; -} - -void Event::InternalSwap(Event* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); -} - -std::string Event::GetTypeName() const { - return "webrtc.rtclog2.Event"; -} - - -// =================================================================== - -void GenericPacketReceived::InitAsDefaultInstance() { -} -class GenericPacketReceived::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_packet_number(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_packet_length(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_packet_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_packet_length_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -GenericPacketReceived::GenericPacketReceived() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.GenericPacketReceived) -} -GenericPacketReceived::GenericPacketReceived(const GenericPacketReceived& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - 
packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_packet_number_deltas()) { - packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_number_deltas_); - } - packet_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_packet_length_deltas()) { - packet_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_length_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.GenericPacketReceived) -} - -void GenericPacketReceived::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_GenericPacketReceived_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -GenericPacketReceived::~GenericPacketReceived() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.GenericPacketReceived) - SharedDtor(); -} - -void GenericPacketReceived::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_length_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void 
GenericPacketReceived::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const GenericPacketReceived& GenericPacketReceived::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_GenericPacketReceived_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void GenericPacketReceived::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.GenericPacketReceived) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - packet_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - packet_length_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x00000078u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* GenericPacketReceived::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int64 packet_number = 2; - case 
2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_packet_number(&has_bits); - packet_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 packet_length = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_packet_length(&has_bits); - packet_length_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 16; - case 16: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 128)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 17; - case 17: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 138)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes packet_number_deltas = 18; - case 18: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 146)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_packet_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes packet_length_deltas = 19; - case 19: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 154)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_packet_length_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, 
ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool GenericPacketReceived::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.GenericPacketReceived) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int64 packet_number = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_packet_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, &packet_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 
packet_length = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_packet_length(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &packet_length_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 16; - case 16: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (128 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 17; - case 17: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (138 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes packet_number_deltas = 18; - case 18: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (146 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_packet_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes packet_length_deltas = 19; - case 19: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (154 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_packet_length_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // 
@@protoc_insertion_point(parse_success:webrtc.rtclog2.GenericPacketReceived) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.GenericPacketReceived) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void GenericPacketReceived::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.GenericPacketReceived) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional int64 packet_number = 2; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(2, this->packet_number(), output); - } - - // optional int32 packet_length = 3; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(3, this->packet_length(), output); - } - - // optional uint32 number_of_deltas = 16; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(16, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 17; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 17, this->timestamp_ms_deltas(), output); - } - - // optional bytes packet_number_deltas = 18; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 18, this->packet_number_deltas(), output); - } - - // optional bytes packet_length_deltas = 19; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 19, this->packet_length_deltas(), output); - } - - 
output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.GenericPacketReceived) -} - -size_t GenericPacketReceived::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.GenericPacketReceived) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000007fu) { - // optional bytes timestamp_ms_deltas = 17; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes packet_number_deltas = 18; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->packet_number_deltas()); - } - - // optional bytes packet_length_deltas = 19; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->packet_length_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional int64 packet_number = 2; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->packet_number()); - } - - // optional int32 packet_length = 3; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->packet_length()); - } - - // optional uint32 number_of_deltas = 16; - if (cached_has_bits & 0x00000040u) { - total_size += 2 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void GenericPacketReceived::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void GenericPacketReceived::MergeFrom(const GenericPacketReceived& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.GenericPacketReceived) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000007fu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_number_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - packet_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_length_deltas_); - } - if (cached_has_bits & 0x00000008u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000010u) { - packet_number_ = from.packet_number_; - } - if (cached_has_bits & 0x00000020u) { - packet_length_ = from.packet_length_; - } - if (cached_has_bits & 0x00000040u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void GenericPacketReceived::CopyFrom(const GenericPacketReceived& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.GenericPacketReceived) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool GenericPacketReceived::IsInitialized() const { - return true; -} - -void GenericPacketReceived::InternalSwap(GenericPacketReceived* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - packet_number_deltas_.Swap(&other->packet_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - packet_length_deltas_.Swap(&other->packet_length_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(packet_number_, other->packet_number_); - swap(packet_length_, other->packet_length_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string GenericPacketReceived::GetTypeName() const { - return "webrtc.rtclog2.GenericPacketReceived"; -} - - -// =================================================================== - -void GenericPacketSent::InitAsDefaultInstance() { -} -class GenericPacketSent::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_packet_number(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_overhead_length(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_payload_length(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void set_has_padding_length(HasBits* has_bits) { - (*has_bits)[0] |= 512u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1024u; - } - static void set_has_timestamp_ms_deltas(HasBits* 
has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_packet_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_overhead_length_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_payload_length_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_padding_length_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } -}; - -GenericPacketSent::GenericPacketSent() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.GenericPacketSent) -} -GenericPacketSent::GenericPacketSent(const GenericPacketSent& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_packet_number_deltas()) { - packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_number_deltas_); - } - overhead_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_overhead_length_deltas()) { - overhead_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.overhead_length_deltas_); - } - payload_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_payload_length_deltas()) { - 
payload_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_length_deltas_); - } - padding_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_padding_length_deltas()) { - padding_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.padding_length_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.GenericPacketSent) -} - -void GenericPacketSent::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_GenericPacketSent_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - overhead_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - padding_length_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -GenericPacketSent::~GenericPacketSent() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.GenericPacketSent) - SharedDtor(); -} - -void GenericPacketSent::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
overhead_length_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_length_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - padding_length_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void GenericPacketSent::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const GenericPacketSent& GenericPacketSent::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_GenericPacketSent_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void GenericPacketSent::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.GenericPacketSent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - packet_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - overhead_length_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000008u) { - payload_length_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000010u) { - padding_length_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x000000e0u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&overhead_length_) - - reinterpret_cast(×tamp_ms_)) + sizeof(overhead_length_)); - } - if (cached_has_bits & 0x00000700u) { - ::memset(&payload_length_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(&payload_length_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* 
GenericPacketSent::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int64 packet_number = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_packet_number(&has_bits); - packet_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 overhead_length = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_overhead_length(&has_bits); - overhead_length_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 payload_length = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_payload_length(&has_bits); - payload_length_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 padding_length = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_padding_length(&has_bits); - padding_length_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 16; - case 16: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 128)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 17; - case 17: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 138)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes packet_number_deltas = 18; - case 18: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 146)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_packet_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes overhead_length_deltas = 19; - case 19: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 154)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_overhead_length_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes payload_length_deltas = 20; - case 20: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 162)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_payload_length_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes padding_length_deltas = 21; - case 21: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 170)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_padding_length_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, 
ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool GenericPacketSent::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.GenericPacketSent) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int64 packet_number = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_packet_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, &packet_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 overhead_length 
= 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_overhead_length(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &overhead_length_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 payload_length = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_payload_length(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &payload_length_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 padding_length = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_padding_length(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &padding_length_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 16; - case 16: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (128 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 17; - case 17: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (138 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } 
- - // optional bytes packet_number_deltas = 18; - case 18: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (146 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_packet_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes overhead_length_deltas = 19; - case 19: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (154 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_overhead_length_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes payload_length_deltas = 20; - case 20: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (162 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_payload_length_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes padding_length_deltas = 21; - case 21: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (170 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_padding_length_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.GenericPacketSent) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.GenericPacketSent) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void GenericPacketSent::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.GenericPacketSent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - 
cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional int64 packet_number = 2; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(2, this->packet_number(), output); - } - - // optional int32 overhead_length = 3; - if (cached_has_bits & 0x00000080u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(3, this->overhead_length(), output); - } - - // optional int32 payload_length = 4; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(4, this->payload_length(), output); - } - - // optional int32 padding_length = 5; - if (cached_has_bits & 0x00000200u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(5, this->padding_length(), output); - } - - // optional uint32 number_of_deltas = 16; - if (cached_has_bits & 0x00000400u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(16, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 17; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 17, this->timestamp_ms_deltas(), output); - } - - // optional bytes packet_number_deltas = 18; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 18, this->packet_number_deltas(), output); - } - - // optional bytes overhead_length_deltas = 19; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 19, this->overhead_length_deltas(), output); - } - - // optional bytes payload_length_deltas = 20; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 20, this->payload_length_deltas(), 
output); - } - - // optional bytes padding_length_deltas = 21; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 21, this->padding_length_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.GenericPacketSent) -} - -size_t GenericPacketSent::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.GenericPacketSent) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional bytes timestamp_ms_deltas = 17; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes packet_number_deltas = 18; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->packet_number_deltas()); - } - - // optional bytes overhead_length_deltas = 19; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->overhead_length_deltas()); - } - - // optional bytes payload_length_deltas = 20; - if (cached_has_bits & 0x00000008u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->payload_length_deltas()); - } - - // optional bytes padding_length_deltas = 21; - if (cached_has_bits & 0x00000010u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->padding_length_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits 
& 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional int64 packet_number = 2; - if (cached_has_bits & 0x00000040u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->packet_number()); - } - - // optional int32 overhead_length = 3; - if (cached_has_bits & 0x00000080u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->overhead_length()); - } - - } - if (cached_has_bits & 0x00000700u) { - // optional int32 payload_length = 4; - if (cached_has_bits & 0x00000100u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->payload_length()); - } - - // optional int32 padding_length = 5; - if (cached_has_bits & 0x00000200u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->padding_length()); - } - - // optional uint32 number_of_deltas = 16; - if (cached_has_bits & 0x00000400u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void GenericPacketSent::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void GenericPacketSent::MergeFrom(const GenericPacketSent& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.GenericPacketSent) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - 
timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_number_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - overhead_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.overhead_length_deltas_); - } - if (cached_has_bits & 0x00000008u) { - _has_bits_[0] |= 0x00000008u; - payload_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_length_deltas_); - } - if (cached_has_bits & 0x00000010u) { - _has_bits_[0] |= 0x00000010u; - padding_length_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.padding_length_deltas_); - } - if (cached_has_bits & 0x00000020u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000040u) { - packet_number_ = from.packet_number_; - } - if (cached_has_bits & 0x00000080u) { - overhead_length_ = from.overhead_length_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00000700u) { - if (cached_has_bits & 0x00000100u) { - payload_length_ = from.payload_length_; - } - if (cached_has_bits & 0x00000200u) { - padding_length_ = from.padding_length_; - } - if (cached_has_bits & 0x00000400u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void GenericPacketSent::CopyFrom(const GenericPacketSent& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.GenericPacketSent) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool GenericPacketSent::IsInitialized() const { - return true; -} - -void GenericPacketSent::InternalSwap(GenericPacketSent* other) { - using 
std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - packet_number_deltas_.Swap(&other->packet_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - overhead_length_deltas_.Swap(&other->overhead_length_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - payload_length_deltas_.Swap(&other->payload_length_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - padding_length_deltas_.Swap(&other->padding_length_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(packet_number_, other->packet_number_); - swap(overhead_length_, other->overhead_length_); - swap(payload_length_, other->payload_length_); - swap(padding_length_, other->padding_length_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string GenericPacketSent::GetTypeName() const { - return "webrtc.rtclog2.GenericPacketSent"; -} - - -// =================================================================== - -void GenericAckReceived::InitAsDefaultInstance() { -} -class GenericAckReceived::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_packet_number(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_acked_packet_number(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_receive_acked_packet_time_ms(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void 
set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_packet_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_acked_packet_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_receive_acked_packet_time_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } -}; - -GenericAckReceived::GenericAckReceived() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.GenericAckReceived) -} -GenericAckReceived::GenericAckReceived(const GenericAckReceived& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_packet_number_deltas()) { - packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_number_deltas_); - } - acked_packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_acked_packet_number_deltas()) { - acked_packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.acked_packet_number_deltas_); - } - receive_acked_packet_time_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_receive_acked_packet_time_ms_deltas()) { - 
receive_acked_packet_time_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.receive_acked_packet_time_ms_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.GenericAckReceived) -} - -void GenericAckReceived::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_GenericAckReceived_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - acked_packet_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - receive_acked_packet_time_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -GenericAckReceived::~GenericAckReceived() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.GenericAckReceived) - SharedDtor(); -} - -void GenericAckReceived::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - packet_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - acked_packet_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - receive_acked_packet_time_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void GenericAckReceived::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const GenericAckReceived& GenericAckReceived::default_instance() { - 
::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_GenericAckReceived_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void GenericAckReceived::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.GenericAckReceived) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - packet_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - acked_packet_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000008u) { - receive_acked_packet_time_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x000000f0u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&receive_acked_packet_time_ms_) - - reinterpret_cast(×tamp_ms_)) + sizeof(receive_acked_packet_time_ms_)); - } - number_of_deltas_ = 0u; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* GenericAckReceived::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int64 packet_number = 2; - case 2: 
- if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_packet_number(&has_bits); - packet_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int64 acked_packet_number = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_acked_packet_number(&has_bits); - acked_packet_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int64 receive_acked_packet_time_ms = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_receive_acked_packet_time_ms(&has_bits); - receive_acked_packet_time_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 16; - case 16: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 128)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 17; - case 17: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 138)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes packet_number_deltas = 18; - case 18: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 146)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_packet_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes acked_packet_number_deltas = 19; - case 19: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 154)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_acked_packet_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes receive_acked_packet_time_ms_deltas = 20; - case 20: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 162)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_receive_acked_packet_time_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool GenericAckReceived::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.GenericAckReceived) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 
0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int64 packet_number = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_packet_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, &packet_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int64 acked_packet_number = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_acked_packet_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, &acked_packet_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int64 receive_acked_packet_time_ms = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_receive_acked_packet_time_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, &receive_acked_packet_time_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 16; - case 16: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (128 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 17; - case 17: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (138 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes packet_number_deltas = 18; - case 18: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (146 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_packet_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes acked_packet_number_deltas = 19; - case 19: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (154 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_acked_packet_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes receive_acked_packet_time_ms_deltas = 20; - case 20: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (162 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_receive_acked_packet_time_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.GenericAckReceived) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.GenericAckReceived) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void GenericAckReceived::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) 
const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.GenericAckReceived) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional int64 packet_number = 2; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(2, this->packet_number(), output); - } - - // optional int64 acked_packet_number = 3; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(3, this->acked_packet_number(), output); - } - - // optional int64 receive_acked_packet_time_ms = 4; - if (cached_has_bits & 0x00000080u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(4, this->receive_acked_packet_time_ms(), output); - } - - // optional uint32 number_of_deltas = 16; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(16, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 17; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 17, this->timestamp_ms_deltas(), output); - } - - // optional bytes packet_number_deltas = 18; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 18, this->packet_number_deltas(), output); - } - - // optional bytes acked_packet_number_deltas = 19; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 19, this->acked_packet_number_deltas(), output); - } - - // optional bytes receive_acked_packet_time_ms_deltas = 20; - if (cached_has_bits & 0x00000008u) { - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 20, this->receive_acked_packet_time_ms_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.GenericAckReceived) -} - -size_t GenericAckReceived::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.GenericAckReceived) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional bytes timestamp_ms_deltas = 17; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes packet_number_deltas = 18; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->packet_number_deltas()); - } - - // optional bytes acked_packet_number_deltas = 19; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->acked_packet_number_deltas()); - } - - // optional bytes receive_acked_packet_time_ms_deltas = 20; - if (cached_has_bits & 0x00000008u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->receive_acked_packet_time_ms_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional int64 packet_number = 2; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->packet_number()); - } - - // optional int64 acked_packet_number = 3; - if (cached_has_bits & 0x00000040u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->acked_packet_number()); - } - - // optional int64 receive_acked_packet_time_ms = 4; - if (cached_has_bits & 0x00000080u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->receive_acked_packet_time_ms()); - } - - } - // optional uint32 number_of_deltas = 16; - if (cached_has_bits & 0x00000100u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void GenericAckReceived::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void GenericAckReceived::MergeFrom(const GenericAckReceived& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.GenericAckReceived) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.packet_number_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - 
acked_packet_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.acked_packet_number_deltas_); - } - if (cached_has_bits & 0x00000008u) { - _has_bits_[0] |= 0x00000008u; - receive_acked_packet_time_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.receive_acked_packet_time_ms_deltas_); - } - if (cached_has_bits & 0x00000010u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000020u) { - packet_number_ = from.packet_number_; - } - if (cached_has_bits & 0x00000040u) { - acked_packet_number_ = from.acked_packet_number_; - } - if (cached_has_bits & 0x00000080u) { - receive_acked_packet_time_ms_ = from.receive_acked_packet_time_ms_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00000100u) { - set_number_of_deltas(from.number_of_deltas()); - } -} - -void GenericAckReceived::CopyFrom(const GenericAckReceived& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.GenericAckReceived) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool GenericAckReceived::IsInitialized() const { - return true; -} - -void GenericAckReceived::InternalSwap(GenericAckReceived* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - packet_number_deltas_.Swap(&other->packet_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - acked_packet_number_deltas_.Swap(&other->acked_packet_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - receive_acked_packet_time_ms_deltas_.Swap(&other->receive_acked_packet_time_ms_deltas_, 
&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(packet_number_, other->packet_number_); - swap(acked_packet_number_, other->acked_packet_number_); - swap(receive_acked_packet_time_ms_, other->receive_acked_packet_time_ms_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string GenericAckReceived::GetTypeName() const { - return "webrtc.rtclog2.GenericAckReceived"; -} - - -// =================================================================== - -void IncomingRtpPackets::InitAsDefaultInstance() { -} -class IncomingRtpPackets::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 32768u; - } - static void set_has_marker(HasBits* has_bits) { - (*has_bits)[0] |= 16777216u; - } - static void set_has_payload_type(HasBits* has_bits) { - (*has_bits)[0] |= 65536u; - } - static void set_has_sequence_number(HasBits* has_bits) { - (*has_bits)[0] |= 131072u; - } - static void set_has_rtp_timestamp(HasBits* has_bits) { - (*has_bits)[0] |= 262144u; - } - static void set_has_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 524288u; - } - static void set_has_payload_size(HasBits* has_bits) { - (*has_bits)[0] |= 1048576u; - } - static void set_has_header_size(HasBits* has_bits) { - (*has_bits)[0] |= 2097152u; - } - static void set_has_padding_size(HasBits* has_bits) { - (*has_bits)[0] |= 4194304u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8388608u; - } - static void set_has_transport_sequence_number(HasBits* has_bits) { - (*has_bits)[0] |= 67108864u; - } - static void set_has_transmission_time_offset(HasBits* has_bits) { - (*has_bits)[0] |= 134217728u; - } - static void set_has_absolute_send_time(HasBits* has_bits) { - (*has_bits)[0] |= 268435456u; - } - static void set_has_video_rotation(HasBits* has_bits) { - (*has_bits)[0] |= 
536870912u; - } - static void set_has_audio_level(HasBits* has_bits) { - (*has_bits)[0] |= 1073741824u; - } - static void set_has_voice_activity(HasBits* has_bits) { - (*has_bits)[0] |= 33554432u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_marker_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_payload_type_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_sequence_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_rtp_timestamp_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_ssrc_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_payload_size_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_header_size_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_padding_size_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void set_has_transport_sequence_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 512u; - } - static void set_has_transmission_time_offset_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1024u; - } - static void set_has_absolute_send_time_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2048u; - } - static void set_has_video_rotation_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4096u; - } - static void set_has_audio_level_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8192u; - } - static void set_has_voice_activity_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16384u; - } -}; - -IncomingRtpPackets::IncomingRtpPackets() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.IncomingRtpPackets) -} -IncomingRtpPackets::IncomingRtpPackets(const IncomingRtpPackets& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - 
_has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - marker_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_marker_deltas()) { - marker_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.marker_deltas_); - } - payload_type_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_payload_type_deltas()) { - payload_type_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_type_deltas_); - } - sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_sequence_number_deltas()) { - sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.sequence_number_deltas_); - } - rtp_timestamp_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_rtp_timestamp_deltas()) { - rtp_timestamp_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.rtp_timestamp_deltas_); - } - ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_ssrc_deltas()) { - ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.ssrc_deltas_); - } - payload_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_payload_size_deltas()) { - 
payload_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_size_deltas_); - } - header_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_header_size_deltas()) { - header_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.header_size_deltas_); - } - padding_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_padding_size_deltas()) { - padding_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.padding_size_deltas_); - } - transport_sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_transport_sequence_number_deltas()) { - transport_sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transport_sequence_number_deltas_); - } - transmission_time_offset_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_transmission_time_offset_deltas()) { - transmission_time_offset_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transmission_time_offset_deltas_); - } - absolute_send_time_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_absolute_send_time_deltas()) { - absolute_send_time_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.absolute_send_time_deltas_); - } - video_rotation_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_video_rotation_deltas()) { - video_rotation_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.video_rotation_deltas_); - } - 
audio_level_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_audio_level_deltas()) { - audio_level_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.audio_level_deltas_); - } - voice_activity_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_voice_activity_deltas()) { - voice_activity_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.voice_activity_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&audio_level_) - - reinterpret_cast(×tamp_ms_)) + sizeof(audio_level_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.IncomingRtpPackets) -} - -void IncomingRtpPackets::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_IncomingRtpPackets_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - marker_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_type_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - rtp_timestamp_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - header_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - padding_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
transport_sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - transmission_time_offset_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - absolute_send_time_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - video_rotation_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - audio_level_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - voice_activity_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&audio_level_) - - reinterpret_cast(×tamp_ms_)) + sizeof(audio_level_)); -} - -IncomingRtpPackets::~IncomingRtpPackets() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.IncomingRtpPackets) - SharedDtor(); -} - -void IncomingRtpPackets::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - marker_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_type_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - sequence_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - rtp_timestamp_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ssrc_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_size_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - header_size_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - padding_size_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
transport_sequence_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - transmission_time_offset_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - absolute_send_time_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - video_rotation_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - audio_level_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - voice_activity_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void IncomingRtpPackets::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const IncomingRtpPackets& IncomingRtpPackets::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_IncomingRtpPackets_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void IncomingRtpPackets::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.IncomingRtpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - marker_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - payload_type_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000008u) { - sequence_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000010u) { - rtp_timestamp_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000020u) { - ssrc_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000040u) { - 
payload_size_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000080u) { - header_size_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x00007f00u) { - if (cached_has_bits & 0x00000100u) { - padding_size_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000200u) { - transport_sequence_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000400u) { - transmission_time_offset_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000800u) { - absolute_send_time_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00001000u) { - video_rotation_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00002000u) { - audio_level_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00004000u) { - voice_activity_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - timestamp_ms_ = PROTOBUF_LONGLONG(0); - if (cached_has_bits & 0x00ff0000u) { - ::memset(&payload_type_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(&payload_type_)) + sizeof(number_of_deltas_)); - } - if (cached_has_bits & 0x7f000000u) { - ::memset(&marker_, 0, static_cast( - reinterpret_cast(&audio_level_) - - reinterpret_cast(&marker_)) + sizeof(audio_level_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* IncomingRtpPackets::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - 
_Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool marker = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_marker(&has_bits); - marker_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 payload_type = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_payload_type(&has_bits); - payload_type_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 sequence_number = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_sequence_number(&has_bits); - sequence_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional fixed32 rtp_timestamp = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 45)) { - _Internal::set_has_rtp_timestamp(&has_bits); - rtp_timestamp_ = ::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint32>(ptr); - ptr += sizeof(::PROTOBUF_NAMESPACE_ID::uint32); - } else goto handle_unusual; - continue; - // optional fixed32 ssrc = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 53)) { - _Internal::set_has_ssrc(&has_bits); - ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint32>(ptr); - ptr += sizeof(::PROTOBUF_NAMESPACE_ID::uint32); - } else goto handle_unusual; - continue; - // optional uint32 payload_size = 8; - case 8: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 64)) { - 
_Internal::set_has_payload_size(&has_bits); - payload_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 header_size = 9; - case 9: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 72)) { - _Internal::set_has_header_size(&has_bits); - header_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 padding_size = 10; - case 10: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 80)) { - _Internal::set_has_padding_size(&has_bits); - padding_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 11; - case 11: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 88)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 transport_sequence_number = 15; - case 15: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 120)) { - _Internal::set_has_transport_sequence_number(&has_bits); - transport_sequence_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 transmission_time_offset = 16; - case 16: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 128)) { - _Internal::set_has_transmission_time_offset(&has_bits); - transmission_time_offset_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 absolute_send_time = 17; - case 17: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 136)) { - _Internal::set_has_absolute_send_time(&has_bits); 
- absolute_send_time_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 video_rotation = 18; - case 18: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 144)) { - _Internal::set_has_video_rotation(&has_bits); - video_rotation_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 audio_level = 19; - case 19: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 152)) { - _Internal::set_has_audio_level(&has_bits); - audio_level_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool voice_activity = 20; - case 20: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 160)) { - _Internal::set_has_voice_activity(&has_bits); - voice_activity_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes marker_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_marker_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes payload_type_deltas = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_payload_type_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // 
optional bytes sequence_number_deltas = 104; - case 104: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 66)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_sequence_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes rtp_timestamp_deltas = 105; - case 105: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 74)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_rtp_timestamp_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes ssrc_deltas = 106; - case 106: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 82)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_ssrc_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes payload_size_deltas = 108; - case 108: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 98)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_payload_size_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes header_size_deltas = 109; - case 109: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 106)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_header_size_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes padding_size_deltas = 110; - case 110: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 114)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_padding_size_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes transport_sequence_number_deltas = 115; - case 115: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 154)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_transport_sequence_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes transmission_time_offset_deltas = 116; - case 116: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 162)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_transmission_time_offset_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes absolute_send_time_deltas = 117; - case 117: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 170)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_absolute_send_time_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes video_rotation_deltas = 118; - case 118: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 178)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_video_rotation_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes audio_level_deltas = 119; - case 119: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 186)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_audio_level_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes voice_activity_deltas = 120; - case 120: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 194)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_voice_activity_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = 
UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool IncomingRtpPackets::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.IncomingRtpPackets) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool marker = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_marker(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &marker_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 payload_type 
= 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_payload_type(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &payload_type_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 sequence_number = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_sequence_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &sequence_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional fixed32 rtp_timestamp = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (45 & 0xFF)) { - _Internal::set_has_rtp_timestamp(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_FIXED32>( - input, &rtp_timestamp_))); - } else { - goto handle_unusual; - } - break; - } - - // optional fixed32 ssrc = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (53 & 0xFF)) { - _Internal::set_has_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_FIXED32>( - input, &ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 payload_size = 8; - case 8: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (64 & 0xFF)) { - _Internal::set_has_payload_size(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &payload_size_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 header_size = 9; - case 9: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (72 & 0xFF)) { - _Internal::set_has_header_size(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &header_size_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 padding_size = 10; - case 10: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (80 & 0xFF)) { - _Internal::set_has_padding_size(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &padding_size_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 11; - case 11: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (88 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 transport_sequence_number = 15; - case 15: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (120 & 0xFF)) { - _Internal::set_has_transport_sequence_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &transport_sequence_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 transmission_time_offset = 16; - case 16: { - if (static_cast< 
::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (128 & 0xFF)) { - _Internal::set_has_transmission_time_offset(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &transmission_time_offset_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 absolute_send_time = 17; - case 17: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (136 & 0xFF)) { - _Internal::set_has_absolute_send_time(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &absolute_send_time_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 video_rotation = 18; - case 18: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (144 & 0xFF)) { - _Internal::set_has_video_rotation(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &video_rotation_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 audio_level = 19; - case 19: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (152 & 0xFF)) { - _Internal::set_has_audio_level(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &audio_level_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool voice_activity = 20; - case 20: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (160 & 0xFF)) { - _Internal::set_has_voice_activity(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &voice_activity_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes marker_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_marker_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes payload_type_deltas = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_payload_type_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes sequence_number_deltas = 104; - case 104: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (834 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_sequence_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes rtp_timestamp_deltas = 105; - case 105: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (842 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_rtp_timestamp_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes ssrc_deltas = 106; - case 106: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (850 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_ssrc_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes payload_size_deltas = 
108; - case 108: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (866 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_payload_size_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes header_size_deltas = 109; - case 109: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (874 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_header_size_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes padding_size_deltas = 110; - case 110: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (882 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_padding_size_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes transport_sequence_number_deltas = 115; - case 115: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (922 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_transport_sequence_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes transmission_time_offset_deltas = 116; - case 116: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (930 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_transmission_time_offset_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes absolute_send_time_deltas = 117; - case 117: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (938 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_absolute_send_time_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes video_rotation_deltas = 118; - case 118: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (946 & 0xFF)) { - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_video_rotation_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes audio_level_deltas = 119; - case 119: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (954 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_audio_level_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes voice_activity_deltas = 120; - case 120: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (962 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_voice_activity_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.IncomingRtpPackets) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.IncomingRtpPackets) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void IncomingRtpPackets::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.IncomingRtpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00008000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional bool marker = 2; - if (cached_has_bits & 0x01000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(2, this->marker(), output); - } - - // optional uint32 payload_type = 3; - if (cached_has_bits & 0x00010000u) 
{ - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->payload_type(), output); - } - - // optional uint32 sequence_number = 4; - if (cached_has_bits & 0x00020000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->sequence_number(), output); - } - - // optional fixed32 rtp_timestamp = 5; - if (cached_has_bits & 0x00040000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteFixed32(5, this->rtp_timestamp(), output); - } - - // optional fixed32 ssrc = 6; - if (cached_has_bits & 0x00080000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteFixed32(6, this->ssrc(), output); - } - - // optional uint32 payload_size = 8; - if (cached_has_bits & 0x00100000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(8, this->payload_size(), output); - } - - // optional uint32 header_size = 9; - if (cached_has_bits & 0x00200000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(9, this->header_size(), output); - } - - // optional uint32 padding_size = 10; - if (cached_has_bits & 0x00400000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(10, this->padding_size(), output); - } - - // optional uint32 number_of_deltas = 11; - if (cached_has_bits & 0x00800000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(11, this->number_of_deltas(), output); - } - - // optional uint32 transport_sequence_number = 15; - if (cached_has_bits & 0x04000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(15, this->transport_sequence_number(), output); - } - - // optional int32 transmission_time_offset = 16; - if (cached_has_bits & 0x08000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(16, this->transmission_time_offset(), output); - } - - // optional uint32 absolute_send_time = 17; - if (cached_has_bits & 0x10000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(17, this->absolute_send_time(), 
output); - } - - // optional uint32 video_rotation = 18; - if (cached_has_bits & 0x20000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(18, this->video_rotation(), output); - } - - // optional uint32 audio_level = 19; - if (cached_has_bits & 0x40000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(19, this->audio_level(), output); - } - - // optional bool voice_activity = 20; - if (cached_has_bits & 0x02000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(20, this->voice_activity(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes marker_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->marker_deltas(), output); - } - - // optional bytes payload_type_deltas = 103; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 103, this->payload_type_deltas(), output); - } - - // optional bytes sequence_number_deltas = 104; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 104, this->sequence_number_deltas(), output); - } - - // optional bytes rtp_timestamp_deltas = 105; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 105, this->rtp_timestamp_deltas(), output); - } - - // optional bytes ssrc_deltas = 106; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 106, this->ssrc_deltas(), output); - } - - // optional bytes payload_size_deltas = 108; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( 
- 108, this->payload_size_deltas(), output); - } - - // optional bytes header_size_deltas = 109; - if (cached_has_bits & 0x00000080u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 109, this->header_size_deltas(), output); - } - - // optional bytes padding_size_deltas = 110; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 110, this->padding_size_deltas(), output); - } - - // optional bytes transport_sequence_number_deltas = 115; - if (cached_has_bits & 0x00000200u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 115, this->transport_sequence_number_deltas(), output); - } - - // optional bytes transmission_time_offset_deltas = 116; - if (cached_has_bits & 0x00000400u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 116, this->transmission_time_offset_deltas(), output); - } - - // optional bytes absolute_send_time_deltas = 117; - if (cached_has_bits & 0x00000800u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 117, this->absolute_send_time_deltas(), output); - } - - // optional bytes video_rotation_deltas = 118; - if (cached_has_bits & 0x00001000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 118, this->video_rotation_deltas(), output); - } - - // optional bytes audio_level_deltas = 119; - if (cached_has_bits & 0x00002000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 119, this->audio_level_deltas(), output); - } - - // optional bytes voice_activity_deltas = 120; - if (cached_has_bits & 0x00004000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 120, this->voice_activity_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // 
@@protoc_insertion_point(serialize_end:webrtc.rtclog2.IncomingRtpPackets) -} - -size_t IncomingRtpPackets::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.IncomingRtpPackets) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes marker_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->marker_deltas()); - } - - // optional bytes payload_type_deltas = 103; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->payload_type_deltas()); - } - - // optional bytes sequence_number_deltas = 104; - if (cached_has_bits & 0x00000008u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->sequence_number_deltas()); - } - - // optional bytes rtp_timestamp_deltas = 105; - if (cached_has_bits & 0x00000010u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->rtp_timestamp_deltas()); - } - - // optional bytes ssrc_deltas = 106; - if (cached_has_bits & 0x00000020u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->ssrc_deltas()); - } - - // optional bytes payload_size_deltas = 108; - if (cached_has_bits & 0x00000040u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->payload_size_deltas()); - } - - // optional bytes 
header_size_deltas = 109; - if (cached_has_bits & 0x00000080u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->header_size_deltas()); - } - - } - if (cached_has_bits & 0x0000ff00u) { - // optional bytes padding_size_deltas = 110; - if (cached_has_bits & 0x00000100u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->padding_size_deltas()); - } - - // optional bytes transport_sequence_number_deltas = 115; - if (cached_has_bits & 0x00000200u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->transport_sequence_number_deltas()); - } - - // optional bytes transmission_time_offset_deltas = 116; - if (cached_has_bits & 0x00000400u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->transmission_time_offset_deltas()); - } - - // optional bytes absolute_send_time_deltas = 117; - if (cached_has_bits & 0x00000800u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->absolute_send_time_deltas()); - } - - // optional bytes video_rotation_deltas = 118; - if (cached_has_bits & 0x00001000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->video_rotation_deltas()); - } - - // optional bytes audio_level_deltas = 119; - if (cached_has_bits & 0x00002000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->audio_level_deltas()); - } - - // optional bytes voice_activity_deltas = 120; - if (cached_has_bits & 0x00004000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->voice_activity_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00008000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - } - if (cached_has_bits & 0x00ff0000u) { - // 
optional uint32 payload_type = 3; - if (cached_has_bits & 0x00010000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->payload_type()); - } - - // optional uint32 sequence_number = 4; - if (cached_has_bits & 0x00020000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->sequence_number()); - } - - // optional fixed32 rtp_timestamp = 5; - if (cached_has_bits & 0x00040000u) { - total_size += 1 + 4; - } - - // optional fixed32 ssrc = 6; - if (cached_has_bits & 0x00080000u) { - total_size += 1 + 4; - } - - // optional uint32 payload_size = 8; - if (cached_has_bits & 0x00100000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->payload_size()); - } - - // optional uint32 header_size = 9; - if (cached_has_bits & 0x00200000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->header_size()); - } - - // optional uint32 padding_size = 10; - if (cached_has_bits & 0x00400000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->padding_size()); - } - - // optional uint32 number_of_deltas = 11; - if (cached_has_bits & 0x00800000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - if (cached_has_bits & 0x7f000000u) { - // optional bool marker = 2; - if (cached_has_bits & 0x01000000u) { - total_size += 1 + 1; - } - - // optional bool voice_activity = 20; - if (cached_has_bits & 0x02000000u) { - total_size += 2 + 1; - } - - // optional uint32 transport_sequence_number = 15; - if (cached_has_bits & 0x04000000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->transport_sequence_number()); - } - - // optional int32 transmission_time_offset = 16; - if (cached_has_bits & 0x08000000u) { - total_size += 2 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->transmission_time_offset()); - } - - // optional uint32 absolute_send_time = 17; - if (cached_has_bits & 0x10000000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->absolute_send_time()); - } - - // optional uint32 video_rotation = 18; - if (cached_has_bits & 0x20000000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->video_rotation()); - } - - // optional uint32 audio_level = 19; - if (cached_has_bits & 0x40000000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->audio_level()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void IncomingRtpPackets::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void IncomingRtpPackets::MergeFrom(const IncomingRtpPackets& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.IncomingRtpPackets) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.marker_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - 
payload_type_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_type_deltas_); - } - if (cached_has_bits & 0x00000008u) { - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.sequence_number_deltas_); - } - if (cached_has_bits & 0x00000010u) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.rtp_timestamp_deltas_); - } - if (cached_has_bits & 0x00000020u) { - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.ssrc_deltas_); - } - if (cached_has_bits & 0x00000040u) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_size_deltas_); - } - if (cached_has_bits & 0x00000080u) { - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.header_size_deltas_); - } - } - if (cached_has_bits & 0x0000ff00u) { - if (cached_has_bits & 0x00000100u) { - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.padding_size_deltas_); - } - if (cached_has_bits & 0x00000200u) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transport_sequence_number_deltas_); - } - if (cached_has_bits & 0x00000400u) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transmission_time_offset_deltas_); - } - if (cached_has_bits & 0x00000800u) { - _has_bits_[0] |= 0x00000800u; - 
absolute_send_time_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.absolute_send_time_deltas_); - } - if (cached_has_bits & 0x00001000u) { - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.video_rotation_deltas_); - } - if (cached_has_bits & 0x00002000u) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.audio_level_deltas_); - } - if (cached_has_bits & 0x00004000u) { - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.voice_activity_deltas_); - } - if (cached_has_bits & 0x00008000u) { - timestamp_ms_ = from.timestamp_ms_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00ff0000u) { - if (cached_has_bits & 0x00010000u) { - payload_type_ = from.payload_type_; - } - if (cached_has_bits & 0x00020000u) { - sequence_number_ = from.sequence_number_; - } - if (cached_has_bits & 0x00040000u) { - rtp_timestamp_ = from.rtp_timestamp_; - } - if (cached_has_bits & 0x00080000u) { - ssrc_ = from.ssrc_; - } - if (cached_has_bits & 0x00100000u) { - payload_size_ = from.payload_size_; - } - if (cached_has_bits & 0x00200000u) { - header_size_ = from.header_size_; - } - if (cached_has_bits & 0x00400000u) { - padding_size_ = from.padding_size_; - } - if (cached_has_bits & 0x00800000u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x7f000000u) { - if (cached_has_bits & 0x01000000u) { - marker_ = from.marker_; - } - if (cached_has_bits & 0x02000000u) { - voice_activity_ = from.voice_activity_; - } - if (cached_has_bits & 0x04000000u) { - transport_sequence_number_ = from.transport_sequence_number_; - } - if (cached_has_bits & 0x08000000u) { - 
transmission_time_offset_ = from.transmission_time_offset_; - } - if (cached_has_bits & 0x10000000u) { - absolute_send_time_ = from.absolute_send_time_; - } - if (cached_has_bits & 0x20000000u) { - video_rotation_ = from.video_rotation_; - } - if (cached_has_bits & 0x40000000u) { - audio_level_ = from.audio_level_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void IncomingRtpPackets::CopyFrom(const IncomingRtpPackets& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.IncomingRtpPackets) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool IncomingRtpPackets::IsInitialized() const { - return true; -} - -void IncomingRtpPackets::InternalSwap(IncomingRtpPackets* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - marker_deltas_.Swap(&other->marker_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - payload_type_deltas_.Swap(&other->payload_type_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - sequence_number_deltas_.Swap(&other->sequence_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - rtp_timestamp_deltas_.Swap(&other->rtp_timestamp_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - ssrc_deltas_.Swap(&other->ssrc_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - payload_size_deltas_.Swap(&other->payload_size_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - header_size_deltas_.Swap(&other->header_size_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), 
- GetArenaNoVirtual()); - padding_size_deltas_.Swap(&other->padding_size_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - transport_sequence_number_deltas_.Swap(&other->transport_sequence_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - transmission_time_offset_deltas_.Swap(&other->transmission_time_offset_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - absolute_send_time_deltas_.Swap(&other->absolute_send_time_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - video_rotation_deltas_.Swap(&other->video_rotation_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - audio_level_deltas_.Swap(&other->audio_level_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - voice_activity_deltas_.Swap(&other->voice_activity_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(payload_type_, other->payload_type_); - swap(sequence_number_, other->sequence_number_); - swap(rtp_timestamp_, other->rtp_timestamp_); - swap(ssrc_, other->ssrc_); - swap(payload_size_, other->payload_size_); - swap(header_size_, other->header_size_); - swap(padding_size_, other->padding_size_); - swap(number_of_deltas_, other->number_of_deltas_); - swap(marker_, other->marker_); - swap(voice_activity_, other->voice_activity_); - swap(transport_sequence_number_, other->transport_sequence_number_); - swap(transmission_time_offset_, other->transmission_time_offset_); - swap(absolute_send_time_, other->absolute_send_time_); - swap(video_rotation_, other->video_rotation_); - swap(audio_level_, other->audio_level_); -} - -std::string IncomingRtpPackets::GetTypeName() const { - return 
"webrtc.rtclog2.IncomingRtpPackets"; -} - - -// =================================================================== - -void OutgoingRtpPackets::InitAsDefaultInstance() { -} -class OutgoingRtpPackets::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 32768u; - } - static void set_has_marker(HasBits* has_bits) { - (*has_bits)[0] |= 16777216u; - } - static void set_has_payload_type(HasBits* has_bits) { - (*has_bits)[0] |= 65536u; - } - static void set_has_sequence_number(HasBits* has_bits) { - (*has_bits)[0] |= 131072u; - } - static void set_has_rtp_timestamp(HasBits* has_bits) { - (*has_bits)[0] |= 262144u; - } - static void set_has_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 524288u; - } - static void set_has_payload_size(HasBits* has_bits) { - (*has_bits)[0] |= 1048576u; - } - static void set_has_header_size(HasBits* has_bits) { - (*has_bits)[0] |= 2097152u; - } - static void set_has_padding_size(HasBits* has_bits) { - (*has_bits)[0] |= 4194304u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8388608u; - } - static void set_has_transport_sequence_number(HasBits* has_bits) { - (*has_bits)[0] |= 67108864u; - } - static void set_has_transmission_time_offset(HasBits* has_bits) { - (*has_bits)[0] |= 134217728u; - } - static void set_has_absolute_send_time(HasBits* has_bits) { - (*has_bits)[0] |= 268435456u; - } - static void set_has_video_rotation(HasBits* has_bits) { - (*has_bits)[0] |= 536870912u; - } - static void set_has_audio_level(HasBits* has_bits) { - (*has_bits)[0] |= 1073741824u; - } - static void set_has_voice_activity(HasBits* has_bits) { - (*has_bits)[0] |= 33554432u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_marker_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_payload_type_deltas(HasBits* has_bits) { - 
(*has_bits)[0] |= 4u; - } - static void set_has_sequence_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_rtp_timestamp_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_ssrc_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_payload_size_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_header_size_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_padding_size_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void set_has_transport_sequence_number_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 512u; - } - static void set_has_transmission_time_offset_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1024u; - } - static void set_has_absolute_send_time_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2048u; - } - static void set_has_video_rotation_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4096u; - } - static void set_has_audio_level_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8192u; - } - static void set_has_voice_activity_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16384u; - } -}; - -OutgoingRtpPackets::OutgoingRtpPackets() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.OutgoingRtpPackets) -} -OutgoingRtpPackets::OutgoingRtpPackets(const OutgoingRtpPackets& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - 
marker_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_marker_deltas()) { - marker_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.marker_deltas_); - } - payload_type_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_payload_type_deltas()) { - payload_type_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_type_deltas_); - } - sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_sequence_number_deltas()) { - sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.sequence_number_deltas_); - } - rtp_timestamp_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_rtp_timestamp_deltas()) { - rtp_timestamp_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.rtp_timestamp_deltas_); - } - ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_ssrc_deltas()) { - ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.ssrc_deltas_); - } - payload_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_payload_size_deltas()) { - payload_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_size_deltas_); - } - header_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_header_size_deltas()) { - header_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.header_size_deltas_); - } - 
padding_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_padding_size_deltas()) { - padding_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.padding_size_deltas_); - } - transport_sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_transport_sequence_number_deltas()) { - transport_sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transport_sequence_number_deltas_); - } - transmission_time_offset_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_transmission_time_offset_deltas()) { - transmission_time_offset_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transmission_time_offset_deltas_); - } - absolute_send_time_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_absolute_send_time_deltas()) { - absolute_send_time_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.absolute_send_time_deltas_); - } - video_rotation_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_video_rotation_deltas()) { - video_rotation_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.video_rotation_deltas_); - } - audio_level_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_audio_level_deltas()) { - audio_level_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.audio_level_deltas_); - } - voice_activity_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if 
(from.has_voice_activity_deltas()) { - voice_activity_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.voice_activity_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&audio_level_) - - reinterpret_cast(×tamp_ms_)) + sizeof(audio_level_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.OutgoingRtpPackets) -} - -void OutgoingRtpPackets::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_OutgoingRtpPackets_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - marker_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_type_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - rtp_timestamp_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - header_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - padding_size_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - transport_sequence_number_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - transmission_time_offset_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - absolute_send_time_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - video_rotation_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
audio_level_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - voice_activity_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&audio_level_) - - reinterpret_cast(×tamp_ms_)) + sizeof(audio_level_)); -} - -OutgoingRtpPackets::~OutgoingRtpPackets() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.OutgoingRtpPackets) - SharedDtor(); -} - -void OutgoingRtpPackets::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - marker_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_type_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - sequence_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - rtp_timestamp_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ssrc_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - payload_size_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - header_size_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - padding_size_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - transport_sequence_number_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - transmission_time_offset_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - absolute_send_time_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - video_rotation_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
audio_level_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - voice_activity_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void OutgoingRtpPackets::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const OutgoingRtpPackets& OutgoingRtpPackets::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_OutgoingRtpPackets_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void OutgoingRtpPackets::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.OutgoingRtpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - marker_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - payload_type_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000008u) { - sequence_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000010u) { - rtp_timestamp_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000020u) { - ssrc_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000040u) { - payload_size_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000080u) { - header_size_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x00007f00u) { - if (cached_has_bits & 0x00000100u) { - padding_size_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000200u) { - transport_sequence_number_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000400u) { - 
transmission_time_offset_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000800u) { - absolute_send_time_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00001000u) { - video_rotation_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00002000u) { - audio_level_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00004000u) { - voice_activity_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - timestamp_ms_ = PROTOBUF_LONGLONG(0); - if (cached_has_bits & 0x00ff0000u) { - ::memset(&payload_type_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(&payload_type_)) + sizeof(number_of_deltas_)); - } - if (cached_has_bits & 0x7f000000u) { - ::memset(&marker_, 0, static_cast( - reinterpret_cast(&audio_level_) - - reinterpret_cast(&marker_)) + sizeof(audio_level_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* OutgoingRtpPackets::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool marker = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_marker(&has_bits); - marker_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 
payload_type = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_payload_type(&has_bits); - payload_type_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 sequence_number = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_sequence_number(&has_bits); - sequence_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional fixed32 rtp_timestamp = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 45)) { - _Internal::set_has_rtp_timestamp(&has_bits); - rtp_timestamp_ = ::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint32>(ptr); - ptr += sizeof(::PROTOBUF_NAMESPACE_ID::uint32); - } else goto handle_unusual; - continue; - // optional fixed32 ssrc = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 53)) { - _Internal::set_has_ssrc(&has_bits); - ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint32>(ptr); - ptr += sizeof(::PROTOBUF_NAMESPACE_ID::uint32); - } else goto handle_unusual; - continue; - // optional uint32 payload_size = 8; - case 8: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 64)) { - _Internal::set_has_payload_size(&has_bits); - payload_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 header_size = 9; - case 9: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 72)) { - _Internal::set_has_header_size(&has_bits); - header_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 padding_size = 10; - 
case 10: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 80)) { - _Internal::set_has_padding_size(&has_bits); - padding_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 11; - case 11: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 88)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 transport_sequence_number = 15; - case 15: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 120)) { - _Internal::set_has_transport_sequence_number(&has_bits); - transport_sequence_number_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 transmission_time_offset = 16; - case 16: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 128)) { - _Internal::set_has_transmission_time_offset(&has_bits); - transmission_time_offset_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 absolute_send_time = 17; - case 17: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 136)) { - _Internal::set_has_absolute_send_time(&has_bits); - absolute_send_time_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 video_rotation = 18; - case 18: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 144)) { - _Internal::set_has_video_rotation(&has_bits); - video_rotation_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 audio_level = 19; - case 19: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 152)) { - _Internal::set_has_audio_level(&has_bits); - audio_level_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool voice_activity = 20; - case 20: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 160)) { - _Internal::set_has_voice_activity(&has_bits); - voice_activity_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes marker_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_marker_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes payload_type_deltas = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_payload_type_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes sequence_number_deltas = 104; - case 104: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 66)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_sequence_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes rtp_timestamp_deltas = 105; - case 105: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 74)) { - ptr = 
::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_rtp_timestamp_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes ssrc_deltas = 106; - case 106: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 82)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_ssrc_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes payload_size_deltas = 108; - case 108: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 98)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_payload_size_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes header_size_deltas = 109; - case 109: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 106)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_header_size_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes padding_size_deltas = 110; - case 110: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 114)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_padding_size_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes transport_sequence_number_deltas = 115; - case 115: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 154)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_transport_sequence_number_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes transmission_time_offset_deltas = 116; - case 116: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 162)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_transmission_time_offset_deltas(), ptr, 
ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes absolute_send_time_deltas = 117; - case 117: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 170)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_absolute_send_time_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes video_rotation_deltas = 118; - case 118: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 178)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_video_rotation_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes audio_level_deltas = 119; - case 119: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 186)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_audio_level_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes voice_activity_deltas = 120; - case 120: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 194)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_voice_activity_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool OutgoingRtpPackets::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - 
::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.OutgoingRtpPackets) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool marker = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_marker(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &marker_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 payload_type = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_payload_type(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &payload_type_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 sequence_number = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 
0xFF)) { - _Internal::set_has_sequence_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &sequence_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional fixed32 rtp_timestamp = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (45 & 0xFF)) { - _Internal::set_has_rtp_timestamp(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_FIXED32>( - input, &rtp_timestamp_))); - } else { - goto handle_unusual; - } - break; - } - - // optional fixed32 ssrc = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (53 & 0xFF)) { - _Internal::set_has_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_FIXED32>( - input, &ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 payload_size = 8; - case 8: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (64 & 0xFF)) { - _Internal::set_has_payload_size(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &payload_size_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 header_size = 9; - case 9: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (72 & 0xFF)) { - _Internal::set_has_header_size(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &header_size_))); - } else { - goto handle_unusual; - } - break; 
- } - - // optional uint32 padding_size = 10; - case 10: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (80 & 0xFF)) { - _Internal::set_has_padding_size(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &padding_size_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 11; - case 11: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (88 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 transport_sequence_number = 15; - case 15: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (120 & 0xFF)) { - _Internal::set_has_transport_sequence_number(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &transport_sequence_number_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 transmission_time_offset = 16; - case 16: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (128 & 0xFF)) { - _Internal::set_has_transmission_time_offset(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &transmission_time_offset_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 absolute_send_time = 17; - case 17: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (136 & 0xFF)) { - _Internal::set_has_absolute_send_time(&_has_bits_); - 
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &absolute_send_time_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 video_rotation = 18; - case 18: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (144 & 0xFF)) { - _Internal::set_has_video_rotation(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &video_rotation_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 audio_level = 19; - case 19: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (152 & 0xFF)) { - _Internal::set_has_audio_level(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &audio_level_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool voice_activity = 20; - case 20: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (160 & 0xFF)) { - _Internal::set_has_voice_activity(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &voice_activity_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes marker_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_marker_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes payload_type_deltas = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_payload_type_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes sequence_number_deltas = 104; - case 104: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (834 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_sequence_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes rtp_timestamp_deltas = 105; - case 105: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (842 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_rtp_timestamp_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes ssrc_deltas = 106; - case 106: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (850 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_ssrc_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes payload_size_deltas = 108; - case 108: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (866 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_payload_size_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes header_size_deltas = 109; - case 109: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (874 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_header_size_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional 
bytes padding_size_deltas = 110; - case 110: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (882 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_padding_size_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes transport_sequence_number_deltas = 115; - case 115: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (922 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_transport_sequence_number_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes transmission_time_offset_deltas = 116; - case 116: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (930 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_transmission_time_offset_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes absolute_send_time_deltas = 117; - case 117: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (938 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_absolute_send_time_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes video_rotation_deltas = 118; - case 118: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (946 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_video_rotation_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes audio_level_deltas = 119; - case 119: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (954 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_audio_level_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes voice_activity_deltas = 120; - case 120: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) 
== (962 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_voice_activity_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.OutgoingRtpPackets) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.OutgoingRtpPackets) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void OutgoingRtpPackets::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.OutgoingRtpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00008000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional bool marker = 2; - if (cached_has_bits & 0x01000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(2, this->marker(), output); - } - - // optional uint32 payload_type = 3; - if (cached_has_bits & 0x00010000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->payload_type(), output); - } - - // optional uint32 sequence_number = 4; - if (cached_has_bits & 0x00020000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->sequence_number(), output); - } - - // optional fixed32 rtp_timestamp = 5; - if (cached_has_bits & 0x00040000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteFixed32(5, this->rtp_timestamp(), output); - } - - // optional fixed32 ssrc = 6; - if (cached_has_bits & 0x00080000u) { - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteFixed32(6, this->ssrc(), output); - } - - // optional uint32 payload_size = 8; - if (cached_has_bits & 0x00100000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(8, this->payload_size(), output); - } - - // optional uint32 header_size = 9; - if (cached_has_bits & 0x00200000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(9, this->header_size(), output); - } - - // optional uint32 padding_size = 10; - if (cached_has_bits & 0x00400000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(10, this->padding_size(), output); - } - - // optional uint32 number_of_deltas = 11; - if (cached_has_bits & 0x00800000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(11, this->number_of_deltas(), output); - } - - // optional uint32 transport_sequence_number = 15; - if (cached_has_bits & 0x04000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(15, this->transport_sequence_number(), output); - } - - // optional int32 transmission_time_offset = 16; - if (cached_has_bits & 0x08000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(16, this->transmission_time_offset(), output); - } - - // optional uint32 absolute_send_time = 17; - if (cached_has_bits & 0x10000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(17, this->absolute_send_time(), output); - } - - // optional uint32 video_rotation = 18; - if (cached_has_bits & 0x20000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(18, this->video_rotation(), output); - } - - // optional uint32 audio_level = 19; - if (cached_has_bits & 0x40000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(19, this->audio_level(), output); - } - - // optional bool voice_activity = 20; - if (cached_has_bits & 0x02000000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(20, this->voice_activity(), 
output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes marker_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->marker_deltas(), output); - } - - // optional bytes payload_type_deltas = 103; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 103, this->payload_type_deltas(), output); - } - - // optional bytes sequence_number_deltas = 104; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 104, this->sequence_number_deltas(), output); - } - - // optional bytes rtp_timestamp_deltas = 105; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 105, this->rtp_timestamp_deltas(), output); - } - - // optional bytes ssrc_deltas = 106; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 106, this->ssrc_deltas(), output); - } - - // optional bytes payload_size_deltas = 108; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 108, this->payload_size_deltas(), output); - } - - // optional bytes header_size_deltas = 109; - if (cached_has_bits & 0x00000080u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 109, this->header_size_deltas(), output); - } - - // optional bytes padding_size_deltas = 110; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 110, this->padding_size_deltas(), output); - } - - // optional bytes transport_sequence_number_deltas = 115; - if 
(cached_has_bits & 0x00000200u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 115, this->transport_sequence_number_deltas(), output); - } - - // optional bytes transmission_time_offset_deltas = 116; - if (cached_has_bits & 0x00000400u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 116, this->transmission_time_offset_deltas(), output); - } - - // optional bytes absolute_send_time_deltas = 117; - if (cached_has_bits & 0x00000800u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 117, this->absolute_send_time_deltas(), output); - } - - // optional bytes video_rotation_deltas = 118; - if (cached_has_bits & 0x00001000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 118, this->video_rotation_deltas(), output); - } - - // optional bytes audio_level_deltas = 119; - if (cached_has_bits & 0x00002000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 119, this->audio_level_deltas(), output); - } - - // optional bytes voice_activity_deltas = 120; - if (cached_has_bits & 0x00004000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 120, this->voice_activity_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.OutgoingRtpPackets) -} - -size_t OutgoingRtpPackets::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.OutgoingRtpPackets) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional bytes timestamp_ms_deltas = 101; - if 
(cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes marker_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->marker_deltas()); - } - - // optional bytes payload_type_deltas = 103; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->payload_type_deltas()); - } - - // optional bytes sequence_number_deltas = 104; - if (cached_has_bits & 0x00000008u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->sequence_number_deltas()); - } - - // optional bytes rtp_timestamp_deltas = 105; - if (cached_has_bits & 0x00000010u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->rtp_timestamp_deltas()); - } - - // optional bytes ssrc_deltas = 106; - if (cached_has_bits & 0x00000020u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->ssrc_deltas()); - } - - // optional bytes payload_size_deltas = 108; - if (cached_has_bits & 0x00000040u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->payload_size_deltas()); - } - - // optional bytes header_size_deltas = 109; - if (cached_has_bits & 0x00000080u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->header_size_deltas()); - } - - } - if (cached_has_bits & 0x0000ff00u) { - // optional bytes padding_size_deltas = 110; - if (cached_has_bits & 0x00000100u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->padding_size_deltas()); - } - - // optional bytes transport_sequence_number_deltas = 115; - if (cached_has_bits & 0x00000200u) { - total_size += 2 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->transport_sequence_number_deltas()); - } - - // optional bytes transmission_time_offset_deltas = 116; - if (cached_has_bits & 0x00000400u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->transmission_time_offset_deltas()); - } - - // optional bytes absolute_send_time_deltas = 117; - if (cached_has_bits & 0x00000800u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->absolute_send_time_deltas()); - } - - // optional bytes video_rotation_deltas = 118; - if (cached_has_bits & 0x00001000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->video_rotation_deltas()); - } - - // optional bytes audio_level_deltas = 119; - if (cached_has_bits & 0x00002000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->audio_level_deltas()); - } - - // optional bytes voice_activity_deltas = 120; - if (cached_has_bits & 0x00004000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->voice_activity_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00008000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - } - if (cached_has_bits & 0x00ff0000u) { - // optional uint32 payload_type = 3; - if (cached_has_bits & 0x00010000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->payload_type()); - } - - // optional uint32 sequence_number = 4; - if (cached_has_bits & 0x00020000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->sequence_number()); - } - - // optional fixed32 rtp_timestamp = 5; - if (cached_has_bits & 0x00040000u) { - total_size += 1 + 4; - } - - // optional fixed32 ssrc = 6; - if (cached_has_bits & 
0x00080000u) { - total_size += 1 + 4; - } - - // optional uint32 payload_size = 8; - if (cached_has_bits & 0x00100000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->payload_size()); - } - - // optional uint32 header_size = 9; - if (cached_has_bits & 0x00200000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->header_size()); - } - - // optional uint32 padding_size = 10; - if (cached_has_bits & 0x00400000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->padding_size()); - } - - // optional uint32 number_of_deltas = 11; - if (cached_has_bits & 0x00800000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - if (cached_has_bits & 0x7f000000u) { - // optional bool marker = 2; - if (cached_has_bits & 0x01000000u) { - total_size += 1 + 1; - } - - // optional bool voice_activity = 20; - if (cached_has_bits & 0x02000000u) { - total_size += 2 + 1; - } - - // optional uint32 transport_sequence_number = 15; - if (cached_has_bits & 0x04000000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->transport_sequence_number()); - } - - // optional int32 transmission_time_offset = 16; - if (cached_has_bits & 0x08000000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->transmission_time_offset()); - } - - // optional uint32 absolute_send_time = 17; - if (cached_has_bits & 0x10000000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->absolute_send_time()); - } - - // optional uint32 video_rotation = 18; - if (cached_has_bits & 0x20000000u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->video_rotation()); - } - - // optional uint32 audio_level = 19; - if (cached_has_bits & 0x40000000u) { 
- total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->audio_level()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void OutgoingRtpPackets::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void OutgoingRtpPackets::MergeFrom(const OutgoingRtpPackets& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.OutgoingRtpPackets) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.marker_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_type_deltas_); - } - if (cached_has_bits & 0x00000008u) { - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.sequence_number_deltas_); - } - if (cached_has_bits & 0x00000010u) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.rtp_timestamp_deltas_); - } - if (cached_has_bits & 0x00000020u) { - _has_bits_[0] |= 0x00000020u; - 
ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.ssrc_deltas_); - } - if (cached_has_bits & 0x00000040u) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.payload_size_deltas_); - } - if (cached_has_bits & 0x00000080u) { - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.header_size_deltas_); - } - } - if (cached_has_bits & 0x0000ff00u) { - if (cached_has_bits & 0x00000100u) { - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.padding_size_deltas_); - } - if (cached_has_bits & 0x00000200u) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transport_sequence_number_deltas_); - } - if (cached_has_bits & 0x00000400u) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.transmission_time_offset_deltas_); - } - if (cached_has_bits & 0x00000800u) { - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.absolute_send_time_deltas_); - } - if (cached_has_bits & 0x00001000u) { - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.video_rotation_deltas_); - } - if (cached_has_bits & 0x00002000u) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.audio_level_deltas_); - } - if (cached_has_bits & 0x00004000u) { - _has_bits_[0] |= 0x00004000u; - 
voice_activity_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.voice_activity_deltas_); - } - if (cached_has_bits & 0x00008000u) { - timestamp_ms_ = from.timestamp_ms_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00ff0000u) { - if (cached_has_bits & 0x00010000u) { - payload_type_ = from.payload_type_; - } - if (cached_has_bits & 0x00020000u) { - sequence_number_ = from.sequence_number_; - } - if (cached_has_bits & 0x00040000u) { - rtp_timestamp_ = from.rtp_timestamp_; - } - if (cached_has_bits & 0x00080000u) { - ssrc_ = from.ssrc_; - } - if (cached_has_bits & 0x00100000u) { - payload_size_ = from.payload_size_; - } - if (cached_has_bits & 0x00200000u) { - header_size_ = from.header_size_; - } - if (cached_has_bits & 0x00400000u) { - padding_size_ = from.padding_size_; - } - if (cached_has_bits & 0x00800000u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x7f000000u) { - if (cached_has_bits & 0x01000000u) { - marker_ = from.marker_; - } - if (cached_has_bits & 0x02000000u) { - voice_activity_ = from.voice_activity_; - } - if (cached_has_bits & 0x04000000u) { - transport_sequence_number_ = from.transport_sequence_number_; - } - if (cached_has_bits & 0x08000000u) { - transmission_time_offset_ = from.transmission_time_offset_; - } - if (cached_has_bits & 0x10000000u) { - absolute_send_time_ = from.absolute_send_time_; - } - if (cached_has_bits & 0x20000000u) { - video_rotation_ = from.video_rotation_; - } - if (cached_has_bits & 0x40000000u) { - audio_level_ = from.audio_level_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void OutgoingRtpPackets::CopyFrom(const OutgoingRtpPackets& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.OutgoingRtpPackets) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool OutgoingRtpPackets::IsInitialized() const { - return true; -} - -void 
OutgoingRtpPackets::InternalSwap(OutgoingRtpPackets* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - marker_deltas_.Swap(&other->marker_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - payload_type_deltas_.Swap(&other->payload_type_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - sequence_number_deltas_.Swap(&other->sequence_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - rtp_timestamp_deltas_.Swap(&other->rtp_timestamp_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - ssrc_deltas_.Swap(&other->ssrc_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - payload_size_deltas_.Swap(&other->payload_size_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - header_size_deltas_.Swap(&other->header_size_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - padding_size_deltas_.Swap(&other->padding_size_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - transport_sequence_number_deltas_.Swap(&other->transport_sequence_number_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - transmission_time_offset_deltas_.Swap(&other->transmission_time_offset_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - absolute_send_time_deltas_.Swap(&other->absolute_send_time_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - 
GetArenaNoVirtual()); - video_rotation_deltas_.Swap(&other->video_rotation_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - audio_level_deltas_.Swap(&other->audio_level_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - voice_activity_deltas_.Swap(&other->voice_activity_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(payload_type_, other->payload_type_); - swap(sequence_number_, other->sequence_number_); - swap(rtp_timestamp_, other->rtp_timestamp_); - swap(ssrc_, other->ssrc_); - swap(payload_size_, other->payload_size_); - swap(header_size_, other->header_size_); - swap(padding_size_, other->padding_size_); - swap(number_of_deltas_, other->number_of_deltas_); - swap(marker_, other->marker_); - swap(voice_activity_, other->voice_activity_); - swap(transport_sequence_number_, other->transport_sequence_number_); - swap(transmission_time_offset_, other->transmission_time_offset_); - swap(absolute_send_time_, other->absolute_send_time_); - swap(video_rotation_, other->video_rotation_); - swap(audio_level_, other->audio_level_); -} - -std::string OutgoingRtpPackets::GetTypeName() const { - return "webrtc.rtclog2.OutgoingRtpPackets"; -} - - -// =================================================================== - -void IncomingRtcpPackets::InitAsDefaultInstance() { -} -class IncomingRtcpPackets::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_raw_packet(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_raw_packet_blobs(HasBits* 
has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -IncomingRtcpPackets::IncomingRtcpPackets() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.IncomingRtcpPackets) -} -IncomingRtcpPackets::IncomingRtcpPackets(const IncomingRtcpPackets& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - raw_packet_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_raw_packet()) { - raw_packet_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_); - } - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - raw_packet_blobs_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_raw_packet_blobs()) { - raw_packet_blobs_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_blobs_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.IncomingRtcpPackets) -} - -void IncomingRtcpPackets::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_IncomingRtcpPackets_rtc_5fevent_5flog2_2eproto.base); - raw_packet_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
raw_packet_blobs_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -IncomingRtcpPackets::~IncomingRtcpPackets() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.IncomingRtcpPackets) - SharedDtor(); -} - -void IncomingRtcpPackets::SharedDtor() { - raw_packet_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - raw_packet_blobs_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void IncomingRtcpPackets::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const IncomingRtcpPackets& IncomingRtcpPackets::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_IncomingRtcpPackets_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void IncomingRtcpPackets::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.IncomingRtcpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - raw_packet_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - raw_packet_blobs_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x00000018u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* 
IncomingRtcpPackets::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes raw_packet = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_raw_packet(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes raw_packet_blobs = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_raw_packet_blobs(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - 
goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool IncomingRtcpPackets::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.IncomingRtcpPackets) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes raw_packet = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_raw_packet())); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 3; - case 3: { - if (static_cast< 
::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes raw_packet_blobs = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_raw_packet_blobs())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.IncomingRtcpPackets) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.IncomingRtcpPackets) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void IncomingRtcpPackets::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.IncomingRtcpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional bytes raw_packet = 2; - if 
(cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 2, this->raw_packet(), output); - } - - // optional uint32 number_of_deltas = 3; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes raw_packet_blobs = 102; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->raw_packet_blobs(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.IncomingRtcpPackets) -} - -size_t IncomingRtcpPackets::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.IncomingRtcpPackets) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - // optional bytes raw_packet = 2; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->raw_packet()); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes raw_packet_blobs = 102; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - 
this->raw_packet_blobs()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 number_of_deltas = 3; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void IncomingRtcpPackets::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void IncomingRtcpPackets::MergeFrom(const IncomingRtcpPackets& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.IncomingRtcpPackets) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_blobs_); - } - if (cached_has_bits & 0x00000008u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000010u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void 
IncomingRtcpPackets::CopyFrom(const IncomingRtcpPackets& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.IncomingRtcpPackets) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool IncomingRtcpPackets::IsInitialized() const { - return true; -} - -void IncomingRtcpPackets::InternalSwap(IncomingRtcpPackets* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - raw_packet_.Swap(&other->raw_packet_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - raw_packet_blobs_.Swap(&other->raw_packet_blobs_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string IncomingRtcpPackets::GetTypeName() const { - return "webrtc.rtclog2.IncomingRtcpPackets"; -} - - -// =================================================================== - -void OutgoingRtcpPackets::InitAsDefaultInstance() { -} -class OutgoingRtcpPackets::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_raw_packet(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_raw_packet_blobs(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -OutgoingRtcpPackets::OutgoingRtcpPackets() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // 
@@protoc_insertion_point(constructor:webrtc.rtclog2.OutgoingRtcpPackets) -} -OutgoingRtcpPackets::OutgoingRtcpPackets(const OutgoingRtcpPackets& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - raw_packet_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_raw_packet()) { - raw_packet_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_); - } - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - raw_packet_blobs_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_raw_packet_blobs()) { - raw_packet_blobs_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_blobs_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.OutgoingRtcpPackets) -} - -void OutgoingRtcpPackets::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_OutgoingRtcpPackets_rtc_5fevent_5flog2_2eproto.base); - raw_packet_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - raw_packet_blobs_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - 
-OutgoingRtcpPackets::~OutgoingRtcpPackets() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.OutgoingRtcpPackets) - SharedDtor(); -} - -void OutgoingRtcpPackets::SharedDtor() { - raw_packet_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - raw_packet_blobs_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void OutgoingRtcpPackets::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const OutgoingRtcpPackets& OutgoingRtcpPackets::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_OutgoingRtcpPackets_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void OutgoingRtcpPackets::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.OutgoingRtcpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - raw_packet_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - raw_packet_blobs_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x00000018u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* OutgoingRtcpPackets::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - 
::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes raw_packet = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_raw_packet(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes raw_packet_blobs = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_raw_packet_blobs(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto 
success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool OutgoingRtcpPackets::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.OutgoingRtcpPackets) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes raw_packet = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_raw_packet())); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes raw_packet_blobs = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_raw_packet_blobs())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.OutgoingRtcpPackets) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.OutgoingRtcpPackets) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void OutgoingRtcpPackets::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.OutgoingRtcpPackets) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional bytes raw_packet = 2; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 2, this->raw_packet(), output); - } - - // optional uint32 number_of_deltas = 3; - if (cached_has_bits & 
0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes raw_packet_blobs = 102; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->raw_packet_blobs(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.OutgoingRtcpPackets) -} - -size_t OutgoingRtcpPackets::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.OutgoingRtcpPackets) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - // optional bytes raw_packet = 2; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->raw_packet()); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes raw_packet_blobs = 102; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->raw_packet_blobs()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - 
- // optional uint32 number_of_deltas = 3; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void OutgoingRtcpPackets::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void OutgoingRtcpPackets::MergeFrom(const OutgoingRtcpPackets& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.OutgoingRtcpPackets) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.raw_packet_blobs_); - } - if (cached_has_bits & 0x00000008u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000010u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void OutgoingRtcpPackets::CopyFrom(const OutgoingRtcpPackets& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.OutgoingRtcpPackets) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - 
-bool OutgoingRtcpPackets::IsInitialized() const { - return true; -} - -void OutgoingRtcpPackets::InternalSwap(OutgoingRtcpPackets* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - raw_packet_.Swap(&other->raw_packet_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - raw_packet_blobs_.Swap(&other->raw_packet_blobs_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string OutgoingRtcpPackets::GetTypeName() const { - return "webrtc.rtclog2.OutgoingRtcpPackets"; -} - - -// =================================================================== - -void AudioPlayoutEvents::InitAsDefaultInstance() { -} -class AudioPlayoutEvents::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_local_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_local_ssrc_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -AudioPlayoutEvents::AudioPlayoutEvents() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.AudioPlayoutEvents) -} -AudioPlayoutEvents::AudioPlayoutEvents(const AudioPlayoutEvents& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - 
_internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - local_ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_local_ssrc_deltas()) { - local_ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.local_ssrc_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.AudioPlayoutEvents) -} - -void AudioPlayoutEvents::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_AudioPlayoutEvents_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - local_ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -AudioPlayoutEvents::~AudioPlayoutEvents() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.AudioPlayoutEvents) - SharedDtor(); -} - -void AudioPlayoutEvents::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - local_ssrc_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void AudioPlayoutEvents::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioPlayoutEvents& AudioPlayoutEvents::default_instance() { - 
::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioPlayoutEvents_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void AudioPlayoutEvents::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.AudioPlayoutEvents) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - local_ssrc_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x0000001cu) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioPlayoutEvents::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 local_ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_local_ssrc(&has_bits); - local_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 
number_of_deltas = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes local_ssrc_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_local_ssrc_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioPlayoutEvents::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.AudioPlayoutEvents) - for (;;) { - 
::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 local_ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_local_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &local_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes local_ssrc_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_local_ssrc_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.AudioPlayoutEvents) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.AudioPlayoutEvents) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioPlayoutEvents::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.AudioPlayoutEvents) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 local_ssrc = 2; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->local_ssrc(), output); - } - - // optional uint32 number_of_deltas = 3; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes local_ssrc_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->local_ssrc_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), 
- static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.AudioPlayoutEvents) -} - -size_t AudioPlayoutEvents::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.AudioPlayoutEvents) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes local_ssrc_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->local_ssrc_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 local_ssrc = 2; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->local_ssrc()); - } - - // optional uint32 number_of_deltas = 3; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioPlayoutEvents::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioPlayoutEvents::MergeFrom(const AudioPlayoutEvents& from) { -// 
@@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.AudioPlayoutEvents) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - local_ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.local_ssrc_deltas_); - } - if (cached_has_bits & 0x00000004u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000008u) { - local_ssrc_ = from.local_ssrc_; - } - if (cached_has_bits & 0x00000010u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void AudioPlayoutEvents::CopyFrom(const AudioPlayoutEvents& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.AudioPlayoutEvents) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioPlayoutEvents::IsInitialized() const { - return true; -} - -void AudioPlayoutEvents::InternalSwap(AudioPlayoutEvents* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - local_ssrc_deltas_.Swap(&other->local_ssrc_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(local_ssrc_, other->local_ssrc_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string 
AudioPlayoutEvents::GetTypeName() const { - return "webrtc.rtclog2.AudioPlayoutEvents"; -} - - -// =================================================================== - -void FrameDecodedEvents::InitAsDefaultInstance() { -} -class FrameDecodedEvents::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 512u; - } - static void set_has_render_time_ms(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void set_has_width(HasBits* has_bits) { - (*has_bits)[0] |= 1024u; - } - static void set_has_height(HasBits* has_bits) { - (*has_bits)[0] |= 2048u; - } - static void set_has_codec(HasBits* has_bits) { - (*has_bits)[0] |= 4096u; - } - static void set_has_qp(HasBits* has_bits) { - (*has_bits)[0] |= 8192u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16384u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_ssrc_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_render_time_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_width_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_height_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_codec_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_qp_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } -}; - -FrameDecodedEvents::FrameDecodedEvents() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.FrameDecodedEvents) -} -FrameDecodedEvents::FrameDecodedEvents(const FrameDecodedEvents& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - 
_internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_ssrc_deltas()) { - ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.ssrc_deltas_); - } - render_time_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_render_time_ms_deltas()) { - render_time_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.render_time_ms_deltas_); - } - width_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_width_deltas()) { - width_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.width_deltas_); - } - height_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_height_deltas()) { - height_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.height_deltas_); - } - codec_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_codec_deltas()) { - codec_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.codec_deltas_); - } - qp_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_qp_deltas()) { - qp_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.qp_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - 
static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.FrameDecodedEvents) -} - -void FrameDecodedEvents::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_FrameDecodedEvents_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ssrc_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - render_time_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - width_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - height_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - codec_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - qp_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -FrameDecodedEvents::~FrameDecodedEvents() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.FrameDecodedEvents) - SharedDtor(); -} - -void FrameDecodedEvents::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ssrc_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - render_time_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - width_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - height_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
codec_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - qp_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void FrameDecodedEvents::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const FrameDecodedEvents& FrameDecodedEvents::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_FrameDecodedEvents_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void FrameDecodedEvents::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.FrameDecodedEvents) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000007fu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - ssrc_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - render_time_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000008u) { - width_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000010u) { - height_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000020u) { - codec_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000040u) { - qp_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - timestamp_ms_ = PROTOBUF_LONGLONG(0); - if (cached_has_bits & 0x00007f00u) { - ::memset(&render_time_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(&render_time_ms_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* FrameDecodedEvents::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if 
(PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional fixed32 ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 21)) { - _Internal::set_has_ssrc(&has_bits); - ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint32>(ptr); - ptr += sizeof(::PROTOBUF_NAMESPACE_ID::uint32); - } else goto handle_unusual; - continue; - // optional int64 render_time_ms = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_render_time_ms(&has_bits); - render_time_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 width = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_width(&has_bits); - width_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 height = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_height(&has_bits); - height_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.FrameDecodedEvents.Codec codec = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 48)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = 
::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::FrameDecodedEvents_Codec_IsValid(val))) { - set_codec(static_cast<::webrtc::rtclog2::FrameDecodedEvents_Codec>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(6, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional uint32 qp = 7; - case 7: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 56)) { - _Internal::set_has_qp(&has_bits); - qp_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 15; - case 15: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 120)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes ssrc_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_ssrc_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes render_time_ms_deltas = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_render_time_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes width_deltas = 104; - case 104: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 66)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_width_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes height_deltas = 105; - case 105: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 74)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_height_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes codec_deltas = 106; - case 106: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 82)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_codec_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes qp_deltas = 107; - case 107: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 90)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_qp_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool FrameDecodedEvents::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - 
::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.FrameDecodedEvents) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional fixed32 ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (21 & 0xFF)) { - _Internal::set_has_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_FIXED32>( - input, &ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int64 render_time_ms = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_render_time_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, &render_time_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 width = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_width(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &width_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 height = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_height(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &height_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.FrameDecodedEvents.Codec codec = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (48 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::FrameDecodedEvents_Codec_IsValid(value)) { - set_codec(static_cast< ::webrtc::rtclog2::FrameDecodedEvents_Codec >(value)); - } else { - unknown_fields_stream.WriteVarint32(48u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 qp = 7; - case 7: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (56 & 0xFF)) { - _Internal::set_has_qp(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &qp_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 15; - case 15: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (120 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, 
&number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes ssrc_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_ssrc_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes render_time_ms_deltas = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_render_time_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes width_deltas = 104; - case 104: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (834 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_width_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes height_deltas = 105; - case 105: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (842 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_height_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes codec_deltas = 106; - case 106: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (850 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_codec_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes qp_deltas = 107; - case 107: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (858 & 0xFF)) { - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_qp_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.FrameDecodedEvents) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.FrameDecodedEvents) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void FrameDecodedEvents::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.FrameDecodedEvents) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000080u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional fixed32 ssrc = 2; - if (cached_has_bits & 0x00000200u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteFixed32(2, this->ssrc(), output); - } - - // optional int64 render_time_ms = 3; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(3, this->render_time_ms(), output); - } - - // optional int32 width = 4; - if (cached_has_bits & 0x00000400u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(4, this->width(), output); - } - - // optional int32 height = 5; - if (cached_has_bits & 0x00000800u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(5, this->height(), output); - } - - // optional .webrtc.rtclog2.FrameDecodedEvents.Codec codec = 6; - if (cached_has_bits & 0x00001000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 6, 
this->codec(), output); - } - - // optional uint32 qp = 7; - if (cached_has_bits & 0x00002000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(7, this->qp(), output); - } - - // optional uint32 number_of_deltas = 15; - if (cached_has_bits & 0x00004000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(15, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes ssrc_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->ssrc_deltas(), output); - } - - // optional bytes render_time_ms_deltas = 103; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 103, this->render_time_ms_deltas(), output); - } - - // optional bytes width_deltas = 104; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 104, this->width_deltas(), output); - } - - // optional bytes height_deltas = 105; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 105, this->height_deltas(), output); - } - - // optional bytes codec_deltas = 106; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 106, this->codec_deltas(), output); - } - - // optional bytes qp_deltas = 107; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 107, this->qp_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // 
@@protoc_insertion_point(serialize_end:webrtc.rtclog2.FrameDecodedEvents) -} - -size_t FrameDecodedEvents::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.FrameDecodedEvents) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes ssrc_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->ssrc_deltas()); - } - - // optional bytes render_time_ms_deltas = 103; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->render_time_ms_deltas()); - } - - // optional bytes width_deltas = 104; - if (cached_has_bits & 0x00000008u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->width_deltas()); - } - - // optional bytes height_deltas = 105; - if (cached_has_bits & 0x00000010u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->height_deltas()); - } - - // optional bytes codec_deltas = 106; - if (cached_has_bits & 0x00000020u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->codec_deltas()); - } - - // optional bytes qp_deltas = 107; - if (cached_has_bits & 0x00000040u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->qp_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000080u) { - 
total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - } - if (cached_has_bits & 0x00007f00u) { - // optional int64 render_time_ms = 3; - if (cached_has_bits & 0x00000100u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->render_time_ms()); - } - - // optional fixed32 ssrc = 2; - if (cached_has_bits & 0x00000200u) { - total_size += 1 + 4; - } - - // optional int32 width = 4; - if (cached_has_bits & 0x00000400u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->width()); - } - - // optional int32 height = 5; - if (cached_has_bits & 0x00000800u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->height()); - } - - // optional .webrtc.rtclog2.FrameDecodedEvents.Codec codec = 6; - if (cached_has_bits & 0x00001000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->codec()); - } - - // optional uint32 qp = 7; - if (cached_has_bits & 0x00002000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->qp()); - } - - // optional uint32 number_of_deltas = 15; - if (cached_has_bits & 0x00004000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void FrameDecodedEvents::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void FrameDecodedEvents::MergeFrom(const FrameDecodedEvents& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.FrameDecodedEvents) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - 
::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - ssrc_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.ssrc_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - render_time_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.render_time_ms_deltas_); - } - if (cached_has_bits & 0x00000008u) { - _has_bits_[0] |= 0x00000008u; - width_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.width_deltas_); - } - if (cached_has_bits & 0x00000010u) { - _has_bits_[0] |= 0x00000010u; - height_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.height_deltas_); - } - if (cached_has_bits & 0x00000020u) { - _has_bits_[0] |= 0x00000020u; - codec_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.codec_deltas_); - } - if (cached_has_bits & 0x00000040u) { - _has_bits_[0] |= 0x00000040u; - qp_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.qp_deltas_); - } - if (cached_has_bits & 0x00000080u) { - timestamp_ms_ = from.timestamp_ms_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00007f00u) { - if (cached_has_bits & 0x00000100u) { - render_time_ms_ = from.render_time_ms_; - } - if (cached_has_bits & 0x00000200u) { - ssrc_ = from.ssrc_; - } - if (cached_has_bits & 0x00000400u) { - width_ = from.width_; - } - if (cached_has_bits & 0x00000800u) { - height_ = 
from.height_; - } - if (cached_has_bits & 0x00001000u) { - codec_ = from.codec_; - } - if (cached_has_bits & 0x00002000u) { - qp_ = from.qp_; - } - if (cached_has_bits & 0x00004000u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void FrameDecodedEvents::CopyFrom(const FrameDecodedEvents& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.FrameDecodedEvents) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool FrameDecodedEvents::IsInitialized() const { - return true; -} - -void FrameDecodedEvents::InternalSwap(FrameDecodedEvents* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - ssrc_deltas_.Swap(&other->ssrc_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - render_time_ms_deltas_.Swap(&other->render_time_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - width_deltas_.Swap(&other->width_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - height_deltas_.Swap(&other->height_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - codec_deltas_.Swap(&other->codec_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - qp_deltas_.Swap(&other->qp_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(render_time_ms_, other->render_time_ms_); - swap(ssrc_, other->ssrc_); - swap(width_, other->width_); - swap(height_, other->height_); - swap(codec_, other->codec_); - swap(qp_, other->qp_); - 
swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string FrameDecodedEvents::GetTypeName() const { - return "webrtc.rtclog2.FrameDecodedEvents"; -} - - -// =================================================================== - -void BeginLogEvent::InitAsDefaultInstance() { -} -class BeginLogEvent::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_version(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_utc_time_ms(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -BeginLogEvent::BeginLogEvent() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.BeginLogEvent) -} -BeginLogEvent::BeginLogEvent(const BeginLogEvent& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&version_) - - reinterpret_cast(×tamp_ms_)) + sizeof(version_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.BeginLogEvent) -} - -void BeginLogEvent::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&version_) - - reinterpret_cast(×tamp_ms_)) + sizeof(version_)); -} - -BeginLogEvent::~BeginLogEvent() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.BeginLogEvent) - SharedDtor(); -} - -void BeginLogEvent::SharedDtor() { -} - -void BeginLogEvent::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const BeginLogEvent& BeginLogEvent::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_BeginLogEvent_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void BeginLogEvent::Clear() { -// 
@@protoc_insertion_point(message_clear_start:webrtc.rtclog2.BeginLogEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&version_) - - reinterpret_cast(×tamp_ms_)) + sizeof(version_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* BeginLogEvent::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 version = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_version(&has_bits); - version_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int64 utc_time_ms = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_utc_time_ms(&has_bits); - utc_time_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr 
!= nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool BeginLogEvent::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.BeginLogEvent) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 version = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_version(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &version_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int64 utc_time_ms = 3; - case 3: { - if (static_cast< 
::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_utc_time_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, &utc_time_ms_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.BeginLogEvent) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.BeginLogEvent) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void BeginLogEvent::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.BeginLogEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 version = 2; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->version(), output); - } - - // optional int64 utc_time_ms = 3; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(3, this->utc_time_ms(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.BeginLogEvent) -} - -size_t BeginLogEvent::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.BeginLogEvent) - 
size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional int64 utc_time_ms = 3; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->utc_time_ms()); - } - - // optional uint32 version = 2; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->version()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void BeginLogEvent::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void BeginLogEvent::MergeFrom(const BeginLogEvent& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.BeginLogEvent) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - utc_time_ms_ = from.utc_time_ms_; - } - if (cached_has_bits & 0x00000004u) { - version_ = from.version_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void BeginLogEvent::CopyFrom(const BeginLogEvent& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.BeginLogEvent) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool BeginLogEvent::IsInitialized() const { - return true; -} - -void BeginLogEvent::InternalSwap(BeginLogEvent* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(utc_time_ms_, other->utc_time_ms_); - swap(version_, other->version_); -} - -std::string BeginLogEvent::GetTypeName() const { - return "webrtc.rtclog2.BeginLogEvent"; -} - - -// =================================================================== - -void EndLogEvent::InitAsDefaultInstance() { -} -class EndLogEvent::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -EndLogEvent::EndLogEvent() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.EndLogEvent) -} -EndLogEvent::EndLogEvent(const EndLogEvent& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_ = from.timestamp_ms_; - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.EndLogEvent) -} - -void EndLogEvent::SharedCtor() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); -} - -EndLogEvent::~EndLogEvent() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.EndLogEvent) - SharedDtor(); -} - -void EndLogEvent::SharedDtor() { -} - -void EndLogEvent::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const EndLogEvent& EndLogEvent::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_EndLogEvent_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void 
EndLogEvent::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.EndLogEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* EndLogEvent::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool EndLogEvent::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - 
&unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.EndLogEvent) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.EndLogEvent) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.EndLogEvent) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void EndLogEvent::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.EndLogEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.EndLogEvent) -} - -size_t EndLogEvent::ByteSizeLong() const { -// 
@@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.EndLogEvent) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // optional int64 timestamp_ms = 1; - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void EndLogEvent::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void EndLogEvent::MergeFrom(const EndLogEvent& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.EndLogEvent) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_timestamp_ms()) { - set_timestamp_ms(from.timestamp_ms()); - } -} - -void EndLogEvent::CopyFrom(const EndLogEvent& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.EndLogEvent) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool EndLogEvent::IsInitialized() const { - return true; -} - -void EndLogEvent::InternalSwap(EndLogEvent* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); -} - -std::string EndLogEvent::GetTypeName() const { - return "webrtc.rtclog2.EndLogEvent"; -} - - -// =================================================================== - -void LossBasedBweUpdates::InitAsDefaultInstance() { -} -class 
LossBasedBweUpdates::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_fraction_loss(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_total_packets(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_bitrate_bps_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_fraction_loss_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_total_packets_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } -}; - -LossBasedBweUpdates::LossBasedBweUpdates() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.LossBasedBweUpdates) -} -LossBasedBweUpdates::LossBasedBweUpdates(const LossBasedBweUpdates& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - bitrate_bps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_bitrate_bps_deltas()) { - bitrate_bps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.bitrate_bps_deltas_); - } - 
fraction_loss_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_fraction_loss_deltas()) { - fraction_loss_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.fraction_loss_deltas_); - } - total_packets_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_total_packets_deltas()) { - total_packets_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.total_packets_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.LossBasedBweUpdates) -} - -void LossBasedBweUpdates::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_LossBasedBweUpdates_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - bitrate_bps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - fraction_loss_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - total_packets_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -LossBasedBweUpdates::~LossBasedBweUpdates() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.LossBasedBweUpdates) - SharedDtor(); -} - -void LossBasedBweUpdates::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - bitrate_bps_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
fraction_loss_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - total_packets_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void LossBasedBweUpdates::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const LossBasedBweUpdates& LossBasedBweUpdates::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_LossBasedBweUpdates_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void LossBasedBweUpdates::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.LossBasedBweUpdates) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - bitrate_bps_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - fraction_loss_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000008u) { - total_packets_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x000000f0u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&total_packets_) - - reinterpret_cast(×tamp_ms_)) + sizeof(total_packets_)); - } - number_of_deltas_ = 0u; - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* LossBasedBweUpdates::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 
timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 bitrate_bps = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 fraction_loss = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_fraction_loss(&has_bits); - fraction_loss_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 total_packets = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_total_packets(&has_bits); - total_packets_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes bitrate_bps_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = 
::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_bitrate_bps_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes fraction_loss_deltas = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_fraction_loss_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes total_packets_deltas = 104; - case 104: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 66)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_total_packets_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool LossBasedBweUpdates::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.LossBasedBweUpdates) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch 
(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 bitrate_bps = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 fraction_loss = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_fraction_loss(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &fraction_loss_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 total_packets = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_total_packets(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &total_packets_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - 
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes bitrate_bps_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_bitrate_bps_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes fraction_loss_deltas = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_fraction_loss_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes total_packets_deltas = 104; - case 104: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (834 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_total_packets_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.LossBasedBweUpdates) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.LossBasedBweUpdates) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void 
LossBasedBweUpdates::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.LossBasedBweUpdates) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 bitrate_bps = 2; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->bitrate_bps(), output); - } - - // optional uint32 fraction_loss = 3; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->fraction_loss(), output); - } - - // optional uint32 total_packets = 4; - if (cached_has_bits & 0x00000080u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->total_packets(), output); - } - - // optional uint32 number_of_deltas = 5; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(5, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes bitrate_bps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->bitrate_bps_deltas(), output); - } - - // optional bytes fraction_loss_deltas = 103; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 103, this->fraction_loss_deltas(), output); - } - - // optional bytes total_packets_deltas = 104; - if (cached_has_bits & 0x00000008u) { - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 104, this->total_packets_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.LossBasedBweUpdates) -} - -size_t LossBasedBweUpdates::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.LossBasedBweUpdates) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes bitrate_bps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->bitrate_bps_deltas()); - } - - // optional bytes fraction_loss_deltas = 103; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->fraction_loss_deltas()); - } - - // optional bytes total_packets_deltas = 104; - if (cached_has_bits & 0x00000008u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->total_packets_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 bitrate_bps = 2; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - 
this->bitrate_bps()); - } - - // optional uint32 fraction_loss = 3; - if (cached_has_bits & 0x00000040u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->fraction_loss()); - } - - // optional uint32 total_packets = 4; - if (cached_has_bits & 0x00000080u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->total_packets()); - } - - } - // optional uint32 number_of_deltas = 5; - if (cached_has_bits & 0x00000100u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void LossBasedBweUpdates::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void LossBasedBweUpdates::MergeFrom(const LossBasedBweUpdates& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.LossBasedBweUpdates) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.bitrate_bps_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - fraction_loss_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.fraction_loss_deltas_); - } - 
if (cached_has_bits & 0x00000008u) { - _has_bits_[0] |= 0x00000008u; - total_packets_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.total_packets_deltas_); - } - if (cached_has_bits & 0x00000010u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000020u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000040u) { - fraction_loss_ = from.fraction_loss_; - } - if (cached_has_bits & 0x00000080u) { - total_packets_ = from.total_packets_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00000100u) { - set_number_of_deltas(from.number_of_deltas()); - } -} - -void LossBasedBweUpdates::CopyFrom(const LossBasedBweUpdates& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.LossBasedBweUpdates) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool LossBasedBweUpdates::IsInitialized() const { - return true; -} - -void LossBasedBweUpdates::InternalSwap(LossBasedBweUpdates* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - bitrate_bps_deltas_.Swap(&other->bitrate_bps_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - fraction_loss_deltas_.Swap(&other->fraction_loss_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - total_packets_deltas_.Swap(&other->total_packets_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(bitrate_bps_, other->bitrate_bps_); - swap(fraction_loss_, other->fraction_loss_); - swap(total_packets_, other->total_packets_); - swap(number_of_deltas_, other->number_of_deltas_); 
-} - -std::string LossBasedBweUpdates::GetTypeName() const { - return "webrtc.rtclog2.LossBasedBweUpdates"; -} - - -// =================================================================== - -void DelayBasedBweUpdates::InitAsDefaultInstance() { -} -class DelayBasedBweUpdates::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_detector_state(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_bitrate_bps_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_detector_state_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -DelayBasedBweUpdates::DelayBasedBweUpdates() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.DelayBasedBweUpdates) -} -DelayBasedBweUpdates::DelayBasedBweUpdates(const DelayBasedBweUpdates& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - bitrate_bps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_bitrate_bps_deltas()) { - bitrate_bps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), 
from.bitrate_bps_deltas_); - } - detector_state_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_detector_state_deltas()) { - detector_state_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.detector_state_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.DelayBasedBweUpdates) -} - -void DelayBasedBweUpdates::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_DelayBasedBweUpdates_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - bitrate_bps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - detector_state_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -DelayBasedBweUpdates::~DelayBasedBweUpdates() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.DelayBasedBweUpdates) - SharedDtor(); -} - -void DelayBasedBweUpdates::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - bitrate_bps_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - detector_state_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void DelayBasedBweUpdates::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const DelayBasedBweUpdates& DelayBasedBweUpdates::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_DelayBasedBweUpdates_rtc_5fevent_5flog2_2eproto.base); - return 
*internal_default_instance(); -} - - -void DelayBasedBweUpdates::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.DelayBasedBweUpdates) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - bitrate_bps_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - detector_state_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x00000078u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DelayBasedBweUpdates::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 bitrate_bps = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional 
.webrtc.rtclog2.DelayBasedBweUpdates.DetectorState detector_state = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState_IsValid(val))) { - set_detector_state(static_cast<::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(3, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes bitrate_bps_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_bitrate_bps_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes detector_state_deltas = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_detector_state_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = 
UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DelayBasedBweUpdates::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.DelayBasedBweUpdates) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 bitrate_bps = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - 
} - break; - } - - // optional .webrtc.rtclog2.DelayBasedBweUpdates.DetectorState detector_state = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState_IsValid(value)) { - set_detector_state(static_cast< ::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState >(value)); - } else { - unknown_fields_stream.WriteVarint32(24u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes bitrate_bps_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_bitrate_bps_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes detector_state_deltas = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_detector_state_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.DelayBasedBweUpdates) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.DelayBasedBweUpdates) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void DelayBasedBweUpdates::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.DelayBasedBweUpdates) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 bitrate_bps = 2; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->bitrate_bps(), output); - } - - // optional .webrtc.rtclog2.DelayBasedBweUpdates.DetectorState detector_state = 3; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 3, this->detector_state(), output); - } - - // optional uint32 number_of_deltas = 4; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes 
bitrate_bps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->bitrate_bps_deltas(), output); - } - - // optional bytes detector_state_deltas = 103; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 103, this->detector_state_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.DelayBasedBweUpdates) -} - -size_t DelayBasedBweUpdates::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.DelayBasedBweUpdates) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000007fu) { - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes bitrate_bps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->bitrate_bps_deltas()); - } - - // optional bytes detector_state_deltas = 103; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->detector_state_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 bitrate_bps = 2; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->bitrate_bps()); - } - - // optional .webrtc.rtclog2.DelayBasedBweUpdates.DetectorState detector_state = 3; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->detector_state()); - } - - // optional uint32 number_of_deltas = 4; - if (cached_has_bits & 0x00000040u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void DelayBasedBweUpdates::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void DelayBasedBweUpdates::MergeFrom(const DelayBasedBweUpdates& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.DelayBasedBweUpdates) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000007fu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.bitrate_bps_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - detector_state_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.detector_state_deltas_); - } - if (cached_has_bits & 0x00000008u) { - timestamp_ms_ = from.timestamp_ms_; - } - if 
(cached_has_bits & 0x00000010u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000020u) { - detector_state_ = from.detector_state_; - } - if (cached_has_bits & 0x00000040u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void DelayBasedBweUpdates::CopyFrom(const DelayBasedBweUpdates& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.DelayBasedBweUpdates) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool DelayBasedBweUpdates::IsInitialized() const { - return true; -} - -void DelayBasedBweUpdates::InternalSwap(DelayBasedBweUpdates* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - bitrate_bps_deltas_.Swap(&other->bitrate_bps_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - detector_state_deltas_.Swap(&other->detector_state_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(bitrate_bps_, other->bitrate_bps_); - swap(detector_state_, other->detector_state_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string DelayBasedBweUpdates::GetTypeName() const { - return "webrtc.rtclog2.DelayBasedBweUpdates"; -} - - -// =================================================================== - -void RtpHeaderExtensionConfig::InitAsDefaultInstance() { -} -class RtpHeaderExtensionConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_transmission_time_offset_id(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_absolute_send_time_id(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void 
set_has_transport_sequence_number_id(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_video_rotation_id(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_audio_level_id(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } -}; - -RtpHeaderExtensionConfig::RtpHeaderExtensionConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.RtpHeaderExtensionConfig) -} -RtpHeaderExtensionConfig::RtpHeaderExtensionConfig(const RtpHeaderExtensionConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&transmission_time_offset_id_, &from.transmission_time_offset_id_, - static_cast(reinterpret_cast(&audio_level_id_) - - reinterpret_cast(&transmission_time_offset_id_)) + sizeof(audio_level_id_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.RtpHeaderExtensionConfig) -} - -void RtpHeaderExtensionConfig::SharedCtor() { - ::memset(&transmission_time_offset_id_, 0, static_cast( - reinterpret_cast(&audio_level_id_) - - reinterpret_cast(&transmission_time_offset_id_)) + sizeof(audio_level_id_)); -} - -RtpHeaderExtensionConfig::~RtpHeaderExtensionConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.RtpHeaderExtensionConfig) - SharedDtor(); -} - -void RtpHeaderExtensionConfig::SharedDtor() { -} - -void RtpHeaderExtensionConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RtpHeaderExtensionConfig& RtpHeaderExtensionConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RtpHeaderExtensionConfig_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void RtpHeaderExtensionConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.RtpHeaderExtensionConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 
cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - ::memset(&transmission_time_offset_id_, 0, static_cast( - reinterpret_cast(&audio_level_id_) - - reinterpret_cast(&transmission_time_offset_id_)) + sizeof(audio_level_id_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RtpHeaderExtensionConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int32 transmission_time_offset_id = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_transmission_time_offset_id(&has_bits); - transmission_time_offset_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 absolute_send_time_id = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_absolute_send_time_id(&has_bits); - absolute_send_time_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 transport_sequence_number_id = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_transport_sequence_number_id(&has_bits); - transport_sequence_number_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 video_rotation_id = 4; - case 4: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_video_rotation_id(&has_bits); - video_rotation_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 audio_level_id = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_audio_level_id(&has_bits); - audio_level_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RtpHeaderExtensionConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.RtpHeaderExtensionConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int32 transmission_time_offset_id = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 
(8 & 0xFF)) { - _Internal::set_has_transmission_time_offset_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &transmission_time_offset_id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 absolute_send_time_id = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_absolute_send_time_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &absolute_send_time_id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 transport_sequence_number_id = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_transport_sequence_number_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &transport_sequence_number_id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 video_rotation_id = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_video_rotation_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &video_rotation_id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 audio_level_id = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_audio_level_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &audio_level_id_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.RtpHeaderExtensionConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.RtpHeaderExtensionConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RtpHeaderExtensionConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.RtpHeaderExtensionConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int32 transmission_time_offset_id = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(1, this->transmission_time_offset_id(), output); - } - - // optional int32 absolute_send_time_id = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->absolute_send_time_id(), output); - } - - // optional int32 transport_sequence_number_id = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(3, this->transport_sequence_number_id(), output); - } - - // optional int32 video_rotation_id = 4; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(4, this->video_rotation_id(), output); - } - - // optional int32 audio_level_id = 5; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(5, this->audio_level_id(), output); - } - - 
output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.RtpHeaderExtensionConfig) -} - -size_t RtpHeaderExtensionConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.RtpHeaderExtensionConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - // optional int32 transmission_time_offset_id = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->transmission_time_offset_id()); - } - - // optional int32 absolute_send_time_id = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->absolute_send_time_id()); - } - - // optional int32 transport_sequence_number_id = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->transport_sequence_number_id()); - } - - // optional int32 video_rotation_id = 4; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->video_rotation_id()); - } - - // optional int32 audio_level_id = 5; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->audio_level_id()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RtpHeaderExtensionConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - 
MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RtpHeaderExtensionConfig::MergeFrom(const RtpHeaderExtensionConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.RtpHeaderExtensionConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - transmission_time_offset_id_ = from.transmission_time_offset_id_; - } - if (cached_has_bits & 0x00000002u) { - absolute_send_time_id_ = from.absolute_send_time_id_; - } - if (cached_has_bits & 0x00000004u) { - transport_sequence_number_id_ = from.transport_sequence_number_id_; - } - if (cached_has_bits & 0x00000008u) { - video_rotation_id_ = from.video_rotation_id_; - } - if (cached_has_bits & 0x00000010u) { - audio_level_id_ = from.audio_level_id_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RtpHeaderExtensionConfig::CopyFrom(const RtpHeaderExtensionConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.RtpHeaderExtensionConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RtpHeaderExtensionConfig::IsInitialized() const { - return true; -} - -void RtpHeaderExtensionConfig::InternalSwap(RtpHeaderExtensionConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(transmission_time_offset_id_, other->transmission_time_offset_id_); - swap(absolute_send_time_id_, other->absolute_send_time_id_); - swap(transport_sequence_number_id_, other->transport_sequence_number_id_); - swap(video_rotation_id_, other->video_rotation_id_); - swap(audio_level_id_, other->audio_level_id_); -} - -std::string RtpHeaderExtensionConfig::GetTypeName() const { - return 
"webrtc.rtclog2.RtpHeaderExtensionConfig"; -} - - -// =================================================================== - -void VideoRecvStreamConfig::InitAsDefaultInstance() { - ::webrtc::rtclog2::_VideoRecvStreamConfig_default_instance_._instance.get_mutable()->header_extensions_ = const_cast< ::webrtc::rtclog2::RtpHeaderExtensionConfig*>( - ::webrtc::rtclog2::RtpHeaderExtensionConfig::internal_default_instance()); -} -class VideoRecvStreamConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_remote_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_local_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_rtx_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions(const VideoRecvStreamConfig* msg); - static void set_has_header_extensions(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -const ::webrtc::rtclog2::RtpHeaderExtensionConfig& -VideoRecvStreamConfig::_Internal::header_extensions(const VideoRecvStreamConfig* msg) { - return *msg->header_extensions_; -} -VideoRecvStreamConfig::VideoRecvStreamConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.VideoRecvStreamConfig) -} -VideoRecvStreamConfig::VideoRecvStreamConfig(const VideoRecvStreamConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_header_extensions()) { - header_extensions_ = new ::webrtc::rtclog2::RtpHeaderExtensionConfig(*from.header_extensions_); - } else { - header_extensions_ = nullptr; - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&rtx_ssrc_) - - 
reinterpret_cast(×tamp_ms_)) + sizeof(rtx_ssrc_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.VideoRecvStreamConfig) -} - -void VideoRecvStreamConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_VideoRecvStreamConfig_rtc_5fevent_5flog2_2eproto.base); - ::memset(&header_extensions_, 0, static_cast( - reinterpret_cast(&rtx_ssrc_) - - reinterpret_cast(&header_extensions_)) + sizeof(rtx_ssrc_)); -} - -VideoRecvStreamConfig::~VideoRecvStreamConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.VideoRecvStreamConfig) - SharedDtor(); -} - -void VideoRecvStreamConfig::SharedDtor() { - if (this != internal_default_instance()) delete header_extensions_; -} - -void VideoRecvStreamConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const VideoRecvStreamConfig& VideoRecvStreamConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_VideoRecvStreamConfig_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void VideoRecvStreamConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.VideoRecvStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - GOOGLE_DCHECK(header_extensions_ != nullptr); - header_extensions_->Clear(); - } - if (cached_has_bits & 0x0000001eu) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&rtx_ssrc_) - - reinterpret_cast(×tamp_ms_)) + sizeof(rtx_ssrc_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* VideoRecvStreamConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - 
::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 remote_ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_remote_ssrc(&has_bits); - remote_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 local_ssrc = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_local_ssrc(&has_bits); - local_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 rtx_ssrc = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_rtx_ssrc(&has_bits); - rtx_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ctx->ParseMessage(mutable_header_extensions(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool VideoRecvStreamConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.VideoRecvStreamConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 remote_ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_remote_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &remote_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 local_ssrc = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_local_ssrc(&_has_bits_); - 
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &local_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 rtx_ssrc = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_rtx_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &rtx_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (42 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.VideoRecvStreamConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.VideoRecvStreamConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void VideoRecvStreamConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.VideoRecvStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional 
uint32 remote_ssrc = 2; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->remote_ssrc(), output); - } - - // optional uint32 local_ssrc = 3; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->local_ssrc(), output); - } - - // optional uint32 rtx_ssrc = 4; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->rtx_ssrc(), output); - } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 5, _Internal::header_extensions(this), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.VideoRecvStreamConfig) -} - -size_t VideoRecvStreamConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.VideoRecvStreamConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *header_extensions_); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 remote_ssrc = 2; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->remote_ssrc()); - } - - // optional uint32 local_ssrc = 3; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->local_ssrc()); - } - - // optional uint32 rtx_ssrc = 4; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->rtx_ssrc()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void VideoRecvStreamConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void VideoRecvStreamConfig::MergeFrom(const VideoRecvStreamConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.VideoRecvStreamConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - mutable_header_extensions()->::webrtc::rtclog2::RtpHeaderExtensionConfig::MergeFrom(from.header_extensions()); - } - if (cached_has_bits & 0x00000002u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000004u) { - remote_ssrc_ = from.remote_ssrc_; - } - if (cached_has_bits & 0x00000008u) { - local_ssrc_ = from.local_ssrc_; - } - if (cached_has_bits & 0x00000010u) { - rtx_ssrc_ = from.rtx_ssrc_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void VideoRecvStreamConfig::CopyFrom(const VideoRecvStreamConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.VideoRecvStreamConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool 
VideoRecvStreamConfig::IsInitialized() const { - return true; -} - -void VideoRecvStreamConfig::InternalSwap(VideoRecvStreamConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(header_extensions_, other->header_extensions_); - swap(timestamp_ms_, other->timestamp_ms_); - swap(remote_ssrc_, other->remote_ssrc_); - swap(local_ssrc_, other->local_ssrc_); - swap(rtx_ssrc_, other->rtx_ssrc_); -} - -std::string VideoRecvStreamConfig::GetTypeName() const { - return "webrtc.rtclog2.VideoRecvStreamConfig"; -} - - -// =================================================================== - -void VideoSendStreamConfig::InitAsDefaultInstance() { - ::webrtc::rtclog2::_VideoSendStreamConfig_default_instance_._instance.get_mutable()->header_extensions_ = const_cast< ::webrtc::rtclog2::RtpHeaderExtensionConfig*>( - ::webrtc::rtclog2::RtpHeaderExtensionConfig::internal_default_instance()); -} -class VideoSendStreamConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_rtx_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions(const VideoSendStreamConfig* msg); - static void set_has_header_extensions(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -const ::webrtc::rtclog2::RtpHeaderExtensionConfig& -VideoSendStreamConfig::_Internal::header_extensions(const VideoSendStreamConfig* msg) { - return *msg->header_extensions_; -} -VideoSendStreamConfig::VideoSendStreamConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.VideoSendStreamConfig) -} -VideoSendStreamConfig::VideoSendStreamConfig(const 
VideoSendStreamConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_header_extensions()) { - header_extensions_ = new ::webrtc::rtclog2::RtpHeaderExtensionConfig(*from.header_extensions_); - } else { - header_extensions_ = nullptr; - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&rtx_ssrc_) - - reinterpret_cast(×tamp_ms_)) + sizeof(rtx_ssrc_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.VideoSendStreamConfig) -} - -void VideoSendStreamConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_VideoSendStreamConfig_rtc_5fevent_5flog2_2eproto.base); - ::memset(&header_extensions_, 0, static_cast( - reinterpret_cast(&rtx_ssrc_) - - reinterpret_cast(&header_extensions_)) + sizeof(rtx_ssrc_)); -} - -VideoSendStreamConfig::~VideoSendStreamConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.VideoSendStreamConfig) - SharedDtor(); -} - -void VideoSendStreamConfig::SharedDtor() { - if (this != internal_default_instance()) delete header_extensions_; -} - -void VideoSendStreamConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const VideoSendStreamConfig& VideoSendStreamConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_VideoSendStreamConfig_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void VideoSendStreamConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.VideoSendStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - GOOGLE_DCHECK(header_extensions_ != nullptr); - header_extensions_->Clear(); - } - if (cached_has_bits & 0x0000000eu) { - 
::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&rtx_ssrc_) - - reinterpret_cast(×tamp_ms_)) + sizeof(rtx_ssrc_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* VideoSendStreamConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_ssrc(&has_bits); - ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 rtx_ssrc = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_rtx_ssrc(&has_bits); - rtx_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 34)) { - ptr = ctx->ParseMessage(mutable_header_extensions(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - 
continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool VideoSendStreamConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.VideoSendStreamConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 rtx_ssrc = 3; - case 3: { - if (static_cast< 
::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_rtx_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &rtx_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (34 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.VideoSendStreamConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.VideoSendStreamConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void VideoSendStreamConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.VideoSendStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 ssrc = 2; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->ssrc(), output); - } - - // optional uint32 rtx_ssrc = 3; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->rtx_ssrc(), output); 
- } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 4, _Internal::header_extensions(this), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.VideoSendStreamConfig) -} - -size_t VideoSendStreamConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.VideoSendStreamConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *header_extensions_); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 ssrc = 2; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->ssrc()); - } - - // optional uint32 rtx_ssrc = 3; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->rtx_ssrc()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void VideoSendStreamConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - 
MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void VideoSendStreamConfig::MergeFrom(const VideoSendStreamConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.VideoSendStreamConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - if (cached_has_bits & 0x00000001u) { - mutable_header_extensions()->::webrtc::rtclog2::RtpHeaderExtensionConfig::MergeFrom(from.header_extensions()); - } - if (cached_has_bits & 0x00000002u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000004u) { - ssrc_ = from.ssrc_; - } - if (cached_has_bits & 0x00000008u) { - rtx_ssrc_ = from.rtx_ssrc_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void VideoSendStreamConfig::CopyFrom(const VideoSendStreamConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.VideoSendStreamConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool VideoSendStreamConfig::IsInitialized() const { - return true; -} - -void VideoSendStreamConfig::InternalSwap(VideoSendStreamConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(header_extensions_, other->header_extensions_); - swap(timestamp_ms_, other->timestamp_ms_); - swap(ssrc_, other->ssrc_); - swap(rtx_ssrc_, other->rtx_ssrc_); -} - -std::string VideoSendStreamConfig::GetTypeName() const { - return "webrtc.rtclog2.VideoSendStreamConfig"; -} - - -// =================================================================== - -void AudioRecvStreamConfig::InitAsDefaultInstance() { - ::webrtc::rtclog2::_AudioRecvStreamConfig_default_instance_._instance.get_mutable()->header_extensions_ = const_cast< 
::webrtc::rtclog2::RtpHeaderExtensionConfig*>( - ::webrtc::rtclog2::RtpHeaderExtensionConfig::internal_default_instance()); -} -class AudioRecvStreamConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_remote_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_local_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions(const AudioRecvStreamConfig* msg); - static void set_has_header_extensions(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -const ::webrtc::rtclog2::RtpHeaderExtensionConfig& -AudioRecvStreamConfig::_Internal::header_extensions(const AudioRecvStreamConfig* msg) { - return *msg->header_extensions_; -} -AudioRecvStreamConfig::AudioRecvStreamConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.AudioRecvStreamConfig) -} -AudioRecvStreamConfig::AudioRecvStreamConfig(const AudioRecvStreamConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_header_extensions()) { - header_extensions_ = new ::webrtc::rtclog2::RtpHeaderExtensionConfig(*from.header_extensions_); - } else { - header_extensions_ = nullptr; - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&local_ssrc_) - - reinterpret_cast(×tamp_ms_)) + sizeof(local_ssrc_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.AudioRecvStreamConfig) -} - -void AudioRecvStreamConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_AudioRecvStreamConfig_rtc_5fevent_5flog2_2eproto.base); - ::memset(&header_extensions_, 0, static_cast( - reinterpret_cast(&local_ssrc_) - - 
reinterpret_cast(&header_extensions_)) + sizeof(local_ssrc_)); -} - -AudioRecvStreamConfig::~AudioRecvStreamConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.AudioRecvStreamConfig) - SharedDtor(); -} - -void AudioRecvStreamConfig::SharedDtor() { - if (this != internal_default_instance()) delete header_extensions_; -} - -void AudioRecvStreamConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioRecvStreamConfig& AudioRecvStreamConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioRecvStreamConfig_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void AudioRecvStreamConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.AudioRecvStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - GOOGLE_DCHECK(header_extensions_ != nullptr); - header_extensions_->Clear(); - } - if (cached_has_bits & 0x0000000eu) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&local_ssrc_) - - reinterpret_cast(×tamp_ms_)) + sizeof(local_ssrc_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioRecvStreamConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = 
::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 remote_ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_remote_ssrc(&has_bits); - remote_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 local_ssrc = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_local_ssrc(&has_bits); - local_ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ctx->ParseMessage(mutable_header_extensions(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioRecvStreamConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // 
@@protoc_insertion_point(parse_start:webrtc.rtclog2.AudioRecvStreamConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 remote_ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_remote_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &remote_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 local_ssrc = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_local_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &local_ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (42 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.AudioRecvStreamConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.AudioRecvStreamConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioRecvStreamConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.AudioRecvStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 remote_ssrc = 2; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->remote_ssrc(), output); - } - - // optional uint32 local_ssrc = 3; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->local_ssrc(), output); - } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 5, _Internal::header_extensions(this), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.AudioRecvStreamConfig) -} - -size_t AudioRecvStreamConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.AudioRecvStreamConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 
cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *header_extensions_); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 remote_ssrc = 2; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->remote_ssrc()); - } - - // optional uint32 local_ssrc = 3; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->local_ssrc()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioRecvStreamConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioRecvStreamConfig::MergeFrom(const AudioRecvStreamConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.AudioRecvStreamConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - if (cached_has_bits & 0x00000001u) { - mutable_header_extensions()->::webrtc::rtclog2::RtpHeaderExtensionConfig::MergeFrom(from.header_extensions()); - } - if (cached_has_bits & 0x00000002u) { - timestamp_ms_ = from.timestamp_ms_; - } - if 
(cached_has_bits & 0x00000004u) { - remote_ssrc_ = from.remote_ssrc_; - } - if (cached_has_bits & 0x00000008u) { - local_ssrc_ = from.local_ssrc_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void AudioRecvStreamConfig::CopyFrom(const AudioRecvStreamConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.AudioRecvStreamConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioRecvStreamConfig::IsInitialized() const { - return true; -} - -void AudioRecvStreamConfig::InternalSwap(AudioRecvStreamConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(header_extensions_, other->header_extensions_); - swap(timestamp_ms_, other->timestamp_ms_); - swap(remote_ssrc_, other->remote_ssrc_); - swap(local_ssrc_, other->local_ssrc_); -} - -std::string AudioRecvStreamConfig::GetTypeName() const { - return "webrtc.rtclog2.AudioRecvStreamConfig"; -} - - -// =================================================================== - -void AudioSendStreamConfig::InitAsDefaultInstance() { - ::webrtc::rtclog2::_AudioSendStreamConfig_default_instance_._instance.get_mutable()->header_extensions_ = const_cast< ::webrtc::rtclog2::RtpHeaderExtensionConfig*>( - ::webrtc::rtclog2::RtpHeaderExtensionConfig::internal_default_instance()); -} -class AudioSendStreamConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_ssrc(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions(const AudioSendStreamConfig* msg); - static void set_has_header_extensions(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } -}; - -const ::webrtc::rtclog2::RtpHeaderExtensionConfig& -AudioSendStreamConfig::_Internal::header_extensions(const AudioSendStreamConfig* msg) { - 
return *msg->header_extensions_; -} -AudioSendStreamConfig::AudioSendStreamConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.AudioSendStreamConfig) -} -AudioSendStreamConfig::AudioSendStreamConfig(const AudioSendStreamConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_header_extensions()) { - header_extensions_ = new ::webrtc::rtclog2::RtpHeaderExtensionConfig(*from.header_extensions_); - } else { - header_extensions_ = nullptr; - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&ssrc_) - - reinterpret_cast(×tamp_ms_)) + sizeof(ssrc_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.AudioSendStreamConfig) -} - -void AudioSendStreamConfig::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_AudioSendStreamConfig_rtc_5fevent_5flog2_2eproto.base); - ::memset(&header_extensions_, 0, static_cast( - reinterpret_cast(&ssrc_) - - reinterpret_cast(&header_extensions_)) + sizeof(ssrc_)); -} - -AudioSendStreamConfig::~AudioSendStreamConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.AudioSendStreamConfig) - SharedDtor(); -} - -void AudioSendStreamConfig::SharedDtor() { - if (this != internal_default_instance()) delete header_extensions_; -} - -void AudioSendStreamConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioSendStreamConfig& AudioSendStreamConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioSendStreamConfig_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void AudioSendStreamConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.AudioSendStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent 
compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000001u) { - GOOGLE_DCHECK(header_extensions_ != nullptr); - header_extensions_->Clear(); - } - if (cached_has_bits & 0x00000006u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&ssrc_) - - reinterpret_cast(×tamp_ms_)) + sizeof(ssrc_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioSendStreamConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 ssrc = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_ssrc(&has_bits); - ssrc_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 34)) { - ptr = ctx->ParseMessage(mutable_header_extensions(), ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch 
- } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioSendStreamConfig::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.AudioSendStreamConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 ssrc = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_ssrc(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &ssrc_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - case 4: { - if (static_cast< 
::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (34 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( - input, mutable_header_extensions())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.AudioSendStreamConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.AudioSendStreamConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioSendStreamConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.AudioSendStreamConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 ssrc = 2; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->ssrc(), output); - } - - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessage( - 4, _Internal::header_extensions(this), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.AudioSendStreamConfig) -} - -size_t AudioSendStreamConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.AudioSendStreamConfig) - size_t total_size = 0; - - 
total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( - *header_extensions_); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 ssrc = 2; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->ssrc()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioSendStreamConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioSendStreamConfig::MergeFrom(const AudioSendStreamConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.AudioSendStreamConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - mutable_header_extensions()->::webrtc::rtclog2::RtpHeaderExtensionConfig::MergeFrom(from.header_extensions()); - } - if (cached_has_bits & 0x00000002u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000004u) { - ssrc_ = from.ssrc_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void 
AudioSendStreamConfig::CopyFrom(const AudioSendStreamConfig& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.AudioSendStreamConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioSendStreamConfig::IsInitialized() const { - return true; -} - -void AudioSendStreamConfig::InternalSwap(AudioSendStreamConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(header_extensions_, other->header_extensions_); - swap(timestamp_ms_, other->timestamp_ms_); - swap(ssrc_, other->ssrc_); -} - -std::string AudioSendStreamConfig::GetTypeName() const { - return "webrtc.rtclog2.AudioSendStreamConfig"; -} - - -// =================================================================== - -void AudioNetworkAdaptations::InitAsDefaultInstance() { -} -class AudioNetworkAdaptations::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void set_has_frame_length_ms(HasBits* has_bits) { - (*has_bits)[0] |= 512u; - } - static void set_has_uplink_packet_loss_fraction(HasBits* has_bits) { - (*has_bits)[0] |= 1024u; - } - static void set_has_enable_fec(HasBits* has_bits) { - (*has_bits)[0] |= 2048u; - } - static void set_has_enable_dtx(HasBits* has_bits) { - (*has_bits)[0] |= 4096u; - } - static void set_has_num_channels(HasBits* has_bits) { - (*has_bits)[0] |= 8192u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16384u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_bitrate_bps_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_frame_length_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void 
set_has_uplink_packet_loss_fraction_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_enable_fec_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_enable_dtx_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_num_channels_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } -}; - -AudioNetworkAdaptations::AudioNetworkAdaptations() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.AudioNetworkAdaptations) -} -AudioNetworkAdaptations::AudioNetworkAdaptations(const AudioNetworkAdaptations& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - bitrate_bps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_bitrate_bps_deltas()) { - bitrate_bps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.bitrate_bps_deltas_); - } - frame_length_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_frame_length_ms_deltas()) { - frame_length_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.frame_length_ms_deltas_); - } - uplink_packet_loss_fraction_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_uplink_packet_loss_fraction_deltas()) { - 
uplink_packet_loss_fraction_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.uplink_packet_loss_fraction_deltas_); - } - enable_fec_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_enable_fec_deltas()) { - enable_fec_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.enable_fec_deltas_); - } - enable_dtx_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_enable_dtx_deltas()) { - enable_dtx_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.enable_dtx_deltas_); - } - num_channels_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_num_channels_deltas()) { - num_channels_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.num_channels_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.AudioNetworkAdaptations) -} - -void AudioNetworkAdaptations::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_AudioNetworkAdaptations_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - bitrate_bps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - frame_length_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - uplink_packet_loss_fraction_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - enable_fec_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
enable_dtx_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - num_channels_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -AudioNetworkAdaptations::~AudioNetworkAdaptations() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.AudioNetworkAdaptations) - SharedDtor(); -} - -void AudioNetworkAdaptations::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - bitrate_bps_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - frame_length_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - uplink_packet_loss_fraction_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - enable_fec_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - enable_dtx_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - num_channels_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void AudioNetworkAdaptations::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AudioNetworkAdaptations& AudioNetworkAdaptations::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AudioNetworkAdaptations_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void AudioNetworkAdaptations::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.AudioNetworkAdaptations) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 
0x0000007fu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - bitrate_bps_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - frame_length_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000008u) { - uplink_packet_loss_fraction_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000010u) { - enable_fec_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000020u) { - enable_dtx_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000040u) { - num_channels_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - timestamp_ms_ = PROTOBUF_LONGLONG(0); - if (cached_has_bits & 0x00007f00u) { - ::memset(&bitrate_bps_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(&bitrate_bps_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AudioNetworkAdaptations::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 bitrate_bps = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = 
::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional int32 frame_length_ms = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_frame_length_ms(&has_bits); - frame_length_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 uplink_packet_loss_fraction = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_uplink_packet_loss_fraction(&has_bits); - uplink_packet_loss_fraction_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool enable_fec = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_enable_fec(&has_bits); - enable_fec_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool enable_dtx = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 48)) { - _Internal::set_has_enable_dtx(&has_bits); - enable_dtx_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 num_channels = 7; - case 7: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 56)) { - _Internal::set_has_num_channels(&has_bits); - num_channels_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 8; - case 8: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 64)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - 
continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes bitrate_bps_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_bitrate_bps_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes frame_length_ms_deltas = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_frame_length_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes uplink_packet_loss_fraction_deltas = 104; - case 104: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 66)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_uplink_packet_loss_fraction_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes enable_fec_deltas = 105; - case 105: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 74)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_enable_fec_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes enable_dtx_deltas = 106; - case 106: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 82)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_enable_dtx_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes num_channels_deltas = 107; - case 107: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 90)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_num_channels_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AudioNetworkAdaptations::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.AudioNetworkAdaptations) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 bitrate_bps = 
2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional int32 frame_length_ms = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_frame_length_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>( - input, &frame_length_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 uplink_packet_loss_fraction = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_uplink_packet_loss_fraction(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &uplink_packet_loss_fraction_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool enable_fec = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_enable_fec(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &enable_fec_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool enable_dtx = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (48 & 0xFF)) { - _Internal::set_has_enable_dtx(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - 
input, &enable_dtx_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 num_channels = 7; - case 7: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (56 & 0xFF)) { - _Internal::set_has_num_channels(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &num_channels_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 8; - case 8: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (64 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes bitrate_bps_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_bitrate_bps_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes frame_length_ms_deltas = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_frame_length_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes uplink_packet_loss_fraction_deltas = 104; - case 104: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 
(834 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_uplink_packet_loss_fraction_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes enable_fec_deltas = 105; - case 105: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (842 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_enable_fec_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes enable_dtx_deltas = 106; - case 106: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (850 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_enable_dtx_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes num_channels_deltas = 107; - case 107: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (858 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_num_channels_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.AudioNetworkAdaptations) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.AudioNetworkAdaptations) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AudioNetworkAdaptations::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.AudioNetworkAdaptations) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000080u) { 
- ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional int32 bitrate_bps = 2; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(2, this->bitrate_bps(), output); - } - - // optional int32 frame_length_ms = 3; - if (cached_has_bits & 0x00000200u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32(3, this->frame_length_ms(), output); - } - - // optional uint32 uplink_packet_loss_fraction = 4; - if (cached_has_bits & 0x00000400u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->uplink_packet_loss_fraction(), output); - } - - // optional bool enable_fec = 5; - if (cached_has_bits & 0x00000800u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(5, this->enable_fec(), output); - } - - // optional bool enable_dtx = 6; - if (cached_has_bits & 0x00001000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(6, this->enable_dtx(), output); - } - - // optional uint32 num_channels = 7; - if (cached_has_bits & 0x00002000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(7, this->num_channels(), output); - } - - // optional uint32 number_of_deltas = 8; - if (cached_has_bits & 0x00004000u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(8, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes bitrate_bps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->bitrate_bps_deltas(), output); - } - - // optional bytes frame_length_ms_deltas = 103; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( 
- 103, this->frame_length_ms_deltas(), output); - } - - // optional bytes uplink_packet_loss_fraction_deltas = 104; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 104, this->uplink_packet_loss_fraction_deltas(), output); - } - - // optional bytes enable_fec_deltas = 105; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 105, this->enable_fec_deltas(), output); - } - - // optional bytes enable_dtx_deltas = 106; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 106, this->enable_dtx_deltas(), output); - } - - // optional bytes num_channels_deltas = 107; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 107, this->num_channels_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.AudioNetworkAdaptations) -} - -size_t AudioNetworkAdaptations::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.AudioNetworkAdaptations) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes bitrate_bps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - 
this->bitrate_bps_deltas()); - } - - // optional bytes frame_length_ms_deltas = 103; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->frame_length_ms_deltas()); - } - - // optional bytes uplink_packet_loss_fraction_deltas = 104; - if (cached_has_bits & 0x00000008u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->uplink_packet_loss_fraction_deltas()); - } - - // optional bytes enable_fec_deltas = 105; - if (cached_has_bits & 0x00000010u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->enable_fec_deltas()); - } - - // optional bytes enable_dtx_deltas = 106; - if (cached_has_bits & 0x00000020u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->enable_dtx_deltas()); - } - - // optional bytes num_channels_deltas = 107; - if (cached_has_bits & 0x00000040u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->num_channels_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000080u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - } - if (cached_has_bits & 0x00007f00u) { - // optional int32 bitrate_bps = 2; - if (cached_has_bits & 0x00000100u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->bitrate_bps()); - } - - // optional int32 frame_length_ms = 3; - if (cached_has_bits & 0x00000200u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size( - this->frame_length_ms()); - } - - // optional uint32 uplink_packet_loss_fraction = 4; - if (cached_has_bits & 0x00000400u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->uplink_packet_loss_fraction()); - } - - // optional bool enable_fec = 5; - if 
(cached_has_bits & 0x00000800u) { - total_size += 1 + 1; - } - - // optional bool enable_dtx = 6; - if (cached_has_bits & 0x00001000u) { - total_size += 1 + 1; - } - - // optional uint32 num_channels = 7; - if (cached_has_bits & 0x00002000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->num_channels()); - } - - // optional uint32 number_of_deltas = 8; - if (cached_has_bits & 0x00004000u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AudioNetworkAdaptations::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AudioNetworkAdaptations::MergeFrom(const AudioNetworkAdaptations& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.AudioNetworkAdaptations) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.bitrate_bps_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - frame_length_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.frame_length_ms_deltas_); - } - if (cached_has_bits & 0x00000008u) { - 
_has_bits_[0] |= 0x00000008u; - uplink_packet_loss_fraction_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.uplink_packet_loss_fraction_deltas_); - } - if (cached_has_bits & 0x00000010u) { - _has_bits_[0] |= 0x00000010u; - enable_fec_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.enable_fec_deltas_); - } - if (cached_has_bits & 0x00000020u) { - _has_bits_[0] |= 0x00000020u; - enable_dtx_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.enable_dtx_deltas_); - } - if (cached_has_bits & 0x00000040u) { - _has_bits_[0] |= 0x00000040u; - num_channels_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.num_channels_deltas_); - } - if (cached_has_bits & 0x00000080u) { - timestamp_ms_ = from.timestamp_ms_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00007f00u) { - if (cached_has_bits & 0x00000100u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000200u) { - frame_length_ms_ = from.frame_length_ms_; - } - if (cached_has_bits & 0x00000400u) { - uplink_packet_loss_fraction_ = from.uplink_packet_loss_fraction_; - } - if (cached_has_bits & 0x00000800u) { - enable_fec_ = from.enable_fec_; - } - if (cached_has_bits & 0x00001000u) { - enable_dtx_ = from.enable_dtx_; - } - if (cached_has_bits & 0x00002000u) { - num_channels_ = from.num_channels_; - } - if (cached_has_bits & 0x00004000u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void AudioNetworkAdaptations::CopyFrom(const AudioNetworkAdaptations& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.AudioNetworkAdaptations) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AudioNetworkAdaptations::IsInitialized() const { - return true; -} - -void 
AudioNetworkAdaptations::InternalSwap(AudioNetworkAdaptations* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - bitrate_bps_deltas_.Swap(&other->bitrate_bps_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - frame_length_ms_deltas_.Swap(&other->frame_length_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - uplink_packet_loss_fraction_deltas_.Swap(&other->uplink_packet_loss_fraction_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - enable_fec_deltas_.Swap(&other->enable_fec_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - enable_dtx_deltas_.Swap(&other->enable_dtx_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - num_channels_deltas_.Swap(&other->num_channels_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(bitrate_bps_, other->bitrate_bps_); - swap(frame_length_ms_, other->frame_length_ms_); - swap(uplink_packet_loss_fraction_, other->uplink_packet_loss_fraction_); - swap(enable_fec_, other->enable_fec_); - swap(enable_dtx_, other->enable_dtx_); - swap(num_channels_, other->num_channels_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string AudioNetworkAdaptations::GetTypeName() const { - return "webrtc.rtclog2.AudioNetworkAdaptations"; -} - - -// =================================================================== - -void BweProbeCluster::InitAsDefaultInstance() { -} -class BweProbeCluster::_Internal { - public: - using HasBits = 
decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_id(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_min_packets(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_min_bytes(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } -}; - -BweProbeCluster::BweProbeCluster() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.BweProbeCluster) -} -BweProbeCluster::BweProbeCluster(const BweProbeCluster& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&min_bytes_) - - reinterpret_cast(×tamp_ms_)) + sizeof(min_bytes_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.BweProbeCluster) -} - -void BweProbeCluster::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&min_bytes_) - - reinterpret_cast(×tamp_ms_)) + sizeof(min_bytes_)); -} - -BweProbeCluster::~BweProbeCluster() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.BweProbeCluster) - SharedDtor(); -} - -void BweProbeCluster::SharedDtor() { -} - -void BweProbeCluster::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const BweProbeCluster& BweProbeCluster::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_BweProbeCluster_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void BweProbeCluster::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.BweProbeCluster) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) 
cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&min_bytes_) - - reinterpret_cast(×tamp_ms_)) + sizeof(min_bytes_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* BweProbeCluster::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 id = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_id(&has_bits); - id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 bitrate_bps = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 min_packets = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_min_packets(&has_bits); - min_packets_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 min_bytes = 5; - case 5: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - _Internal::set_has_min_bytes(&has_bits); - min_bytes_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool BweProbeCluster::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.BweProbeCluster) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 id = 2; - case 2: { - if 
(static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 bitrate_bps = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 min_packets = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_min_packets(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &min_packets_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 min_bytes = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - _Internal::set_has_min_bytes(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &min_bytes_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.BweProbeCluster) - return true; -failure: - // 
@@protoc_insertion_point(parse_failure:webrtc.rtclog2.BweProbeCluster) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void BweProbeCluster::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.BweProbeCluster) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 id = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->id(), output); - } - - // optional uint32 bitrate_bps = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->bitrate_bps(), output); - } - - // optional uint32 min_packets = 4; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->min_packets(), output); - } - - // optional uint32 min_bytes = 5; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(5, this->min_bytes(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.BweProbeCluster) -} - -size_t BweProbeCluster::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.BweProbeCluster) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 
0x0000001fu) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 id = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->id()); - } - - // optional uint32 bitrate_bps = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->bitrate_bps()); - } - - // optional uint32 min_packets = 4; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->min_packets()); - } - - // optional uint32 min_bytes = 5; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->min_bytes()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void BweProbeCluster::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void BweProbeCluster::MergeFrom(const BweProbeCluster& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.BweProbeCluster) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000001fu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - id_ = from.id_; - } - if (cached_has_bits & 0x00000004u) { - bitrate_bps_ = from.bitrate_bps_; - } - if (cached_has_bits & 0x00000008u) { - min_packets_ = from.min_packets_; - } - if 
(cached_has_bits & 0x00000010u) { - min_bytes_ = from.min_bytes_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void BweProbeCluster::CopyFrom(const BweProbeCluster& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.BweProbeCluster) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool BweProbeCluster::IsInitialized() const { - return true; -} - -void BweProbeCluster::InternalSwap(BweProbeCluster* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(id_, other->id_); - swap(bitrate_bps_, other->bitrate_bps_); - swap(min_packets_, other->min_packets_); - swap(min_bytes_, other->min_bytes_); -} - -std::string BweProbeCluster::GetTypeName() const { - return "webrtc.rtclog2.BweProbeCluster"; -} - - -// =================================================================== - -void BweProbeResultSuccess::InitAsDefaultInstance() { -} -class BweProbeResultSuccess::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_id(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_bitrate_bps(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -BweProbeResultSuccess::BweProbeResultSuccess() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.BweProbeResultSuccess) -} -BweProbeResultSuccess::BweProbeResultSuccess(const BweProbeResultSuccess& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&bitrate_bps_) - - reinterpret_cast(×tamp_ms_)) + sizeof(bitrate_bps_)); - // 
@@protoc_insertion_point(copy_constructor:webrtc.rtclog2.BweProbeResultSuccess) -} - -void BweProbeResultSuccess::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&bitrate_bps_) - - reinterpret_cast(×tamp_ms_)) + sizeof(bitrate_bps_)); -} - -BweProbeResultSuccess::~BweProbeResultSuccess() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.BweProbeResultSuccess) - SharedDtor(); -} - -void BweProbeResultSuccess::SharedDtor() { -} - -void BweProbeResultSuccess::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const BweProbeResultSuccess& BweProbeResultSuccess::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_BweProbeResultSuccess_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void BweProbeResultSuccess::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.BweProbeResultSuccess) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&bitrate_bps_) - - reinterpret_cast(×tamp_ms_)) + sizeof(bitrate_bps_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* BweProbeResultSuccess::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = 
::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 id = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_id(&has_bits); - id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 bitrate_bps = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_bitrate_bps(&has_bits); - bitrate_bps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool BweProbeResultSuccess::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.BweProbeResultSuccess) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional 
int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 id = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 bitrate_bps = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_bitrate_bps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &bitrate_bps_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.BweProbeResultSuccess) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.BweProbeResultSuccess) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void BweProbeResultSuccess::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.BweProbeResultSuccess) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - 
cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 id = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->id(), output); - } - - // optional uint32 bitrate_bps = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->bitrate_bps(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.BweProbeResultSuccess) -} - -size_t BweProbeResultSuccess::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.BweProbeResultSuccess) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 id = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->id()); - } - - // optional uint32 bitrate_bps = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->bitrate_bps()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void BweProbeResultSuccess::CheckTypeAndMergeFrom( - const 
::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void BweProbeResultSuccess::MergeFrom(const BweProbeResultSuccess& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.BweProbeResultSuccess) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - id_ = from.id_; - } - if (cached_has_bits & 0x00000004u) { - bitrate_bps_ = from.bitrate_bps_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void BweProbeResultSuccess::CopyFrom(const BweProbeResultSuccess& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.BweProbeResultSuccess) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool BweProbeResultSuccess::IsInitialized() const { - return true; -} - -void BweProbeResultSuccess::InternalSwap(BweProbeResultSuccess* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(id_, other->id_); - swap(bitrate_bps_, other->bitrate_bps_); -} - -std::string BweProbeResultSuccess::GetTypeName() const { - return "webrtc.rtclog2.BweProbeResultSuccess"; -} - - -// =================================================================== - -void BweProbeResultFailure::InitAsDefaultInstance() { -} -class BweProbeResultFailure::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_id(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_failure(HasBits* has_bits) { 
- (*has_bits)[0] |= 4u; - } -}; - -BweProbeResultFailure::BweProbeResultFailure() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.BweProbeResultFailure) -} -BweProbeResultFailure::BweProbeResultFailure(const BweProbeResultFailure& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&failure_) - - reinterpret_cast(×tamp_ms_)) + sizeof(failure_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.BweProbeResultFailure) -} - -void BweProbeResultFailure::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&failure_) - - reinterpret_cast(×tamp_ms_)) + sizeof(failure_)); -} - -BweProbeResultFailure::~BweProbeResultFailure() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.BweProbeResultFailure) - SharedDtor(); -} - -void BweProbeResultFailure::SharedDtor() { -} - -void BweProbeResultFailure::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const BweProbeResultFailure& BweProbeResultFailure::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_BweProbeResultFailure_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void BweProbeResultFailure::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.BweProbeResultFailure) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&failure_) - - reinterpret_cast(×tamp_ms_)) + sizeof(failure_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* BweProbeResultFailure::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 id = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_id(&has_bits); - id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.BweProbeResultFailure.FailureReason failure = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::BweProbeResultFailure_FailureReason_IsValid(val))) { - set_failure(static_cast<::webrtc::rtclog2::BweProbeResultFailure_FailureReason>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(3, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool BweProbeResultFailure::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.BweProbeResultFailure) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 id = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.BweProbeResultFailure.FailureReason failure = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - int value = 0; - 
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::BweProbeResultFailure_FailureReason_IsValid(value)) { - set_failure(static_cast< ::webrtc::rtclog2::BweProbeResultFailure_FailureReason >(value)); - } else { - unknown_fields_stream.WriteVarint32(24u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.BweProbeResultFailure) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.BweProbeResultFailure) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void BweProbeResultFailure::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.BweProbeResultFailure) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 id = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->id(), output); - } - - // optional .webrtc.rtclog2.BweProbeResultFailure.FailureReason failure = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 3, this->failure(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - 
static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.BweProbeResultFailure) -} - -size_t BweProbeResultFailure::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.BweProbeResultFailure) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 id = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->id()); - } - - // optional .webrtc.rtclog2.BweProbeResultFailure.FailureReason failure = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->failure()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void BweProbeResultFailure::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void BweProbeResultFailure::MergeFrom(const BweProbeResultFailure& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.BweProbeResultFailure) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ 
= from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - id_ = from.id_; - } - if (cached_has_bits & 0x00000004u) { - failure_ = from.failure_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void BweProbeResultFailure::CopyFrom(const BweProbeResultFailure& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.BweProbeResultFailure) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool BweProbeResultFailure::IsInitialized() const { - return true; -} - -void BweProbeResultFailure::InternalSwap(BweProbeResultFailure* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(id_, other->id_); - swap(failure_, other->failure_); -} - -std::string BweProbeResultFailure::GetTypeName() const { - return "webrtc.rtclog2.BweProbeResultFailure"; -} - - -// =================================================================== - -void AlrState::InitAsDefaultInstance() { -} -class AlrState::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_in_alr(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -AlrState::AlrState() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.AlrState) -} -AlrState::AlrState(const AlrState& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&in_alr_) - - reinterpret_cast(×tamp_ms_)) + sizeof(in_alr_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.AlrState) -} - -void AlrState::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - 
reinterpret_cast(&in_alr_) - - reinterpret_cast(×tamp_ms_)) + sizeof(in_alr_)); -} - -AlrState::~AlrState() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.AlrState) - SharedDtor(); -} - -void AlrState::SharedDtor() { -} - -void AlrState::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AlrState& AlrState::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_AlrState_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void AlrState::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.AlrState) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&in_alr_) - - reinterpret_cast(×tamp_ms_)) + sizeof(in_alr_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AlrState::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool in_alr = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_in_alr(&has_bits); - in_alr_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto 
handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AlrState::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.AlrState) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool in_alr = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_in_alr(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &in_alr_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.AlrState) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.AlrState) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AlrState::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.AlrState) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional bool in_alr = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(2, this->in_alr(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.AlrState) -} - -size_t AlrState::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.AlrState) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional bool in_alr = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + 1; - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AlrState::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void AlrState::MergeFrom(const AlrState& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.AlrState) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - in_alr_ = from.in_alr_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void AlrState::CopyFrom(const AlrState& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.AlrState) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AlrState::IsInitialized() const { - return true; -} - -void AlrState::InternalSwap(AlrState* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(in_alr_, other->in_alr_); -} - -std::string AlrState::GetTypeName() const { - return "webrtc.rtclog2.AlrState"; -} - - -// =================================================================== - -void IceCandidatePairConfig::InitAsDefaultInstance() { -} -class IceCandidatePairConfig::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 
1u; - } - static void set_has_config_type(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_candidate_pair_id(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_local_candidate_type(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_local_relay_protocol(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_local_network_type(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_local_address_family(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_remote_candidate_type(HasBits* has_bits) { - (*has_bits)[0] |= 128u; - } - static void set_has_remote_address_family(HasBits* has_bits) { - (*has_bits)[0] |= 256u; - } - static void set_has_candidate_pair_protocol(HasBits* has_bits) { - (*has_bits)[0] |= 512u; - } -}; - -IceCandidatePairConfig::IceCandidatePairConfig() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.IceCandidatePairConfig) -} -IceCandidatePairConfig::IceCandidatePairConfig(const IceCandidatePairConfig& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&candidate_pair_protocol_) - - reinterpret_cast(×tamp_ms_)) + sizeof(candidate_pair_protocol_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.IceCandidatePairConfig) -} - -void IceCandidatePairConfig::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&candidate_pair_protocol_) - - reinterpret_cast(×tamp_ms_)) + sizeof(candidate_pair_protocol_)); -} - -IceCandidatePairConfig::~IceCandidatePairConfig() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.IceCandidatePairConfig) - SharedDtor(); -} - -void IceCandidatePairConfig::SharedDtor() { -} - -void 
IceCandidatePairConfig::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const IceCandidatePairConfig& IceCandidatePairConfig::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_IceCandidatePairConfig_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void IceCandidatePairConfig::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.IceCandidatePairConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&remote_candidate_type_) - - reinterpret_cast(×tamp_ms_)) + sizeof(remote_candidate_type_)); - } - if (cached_has_bits & 0x00000300u) { - ::memset(&remote_address_family_, 0, static_cast( - reinterpret_cast(&candidate_pair_protocol_) - - reinterpret_cast(&remote_address_family_)) + sizeof(candidate_pair_protocol_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* IceCandidatePairConfig::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 2; - case 2: - if 
(PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(val))) { - set_config_type(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional uint32 candidate_pair_id = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_candidate_pair_id(&has_bits); - candidate_pair_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType local_candidate_type = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType_IsValid(val))) { - set_local_candidate_type(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(4, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol local_relay_protocol = 5; - case 5: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 40)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_Protocol_IsValid(val))) { - 
set_local_relay_protocol(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_Protocol>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(5, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.NetworkType local_network_type = 6; - case 6: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 48)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_NetworkType_IsValid(val))) { - set_local_network_type(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_NetworkType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(6, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily local_address_family = 7; - case 7: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 56)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily_IsValid(val))) { - set_local_address_family(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(7, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 8; - case 8: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 64)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType_IsValid(val))) { - 
set_remote_candidate_type(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(8, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily remote_address_family = 9; - case 9: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 72)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily_IsValid(val))) { - set_remote_address_family(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(9, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol candidate_pair_protocol = 10; - case 10: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 80)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairConfig_Protocol_IsValid(val))) { - set_candidate_pair_protocol(static_cast<::webrtc::rtclog2::IceCandidatePairConfig_Protocol>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(10, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool IceCandidatePairConfig::MergePartialFromCodedStream( - 
::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.IceCandidatePairConfig) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(value)) { - set_config_type(static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto 
handle_unusual; - } - break; - } - - // optional uint32 candidate_pair_id = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_candidate_pair_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &candidate_pair_id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType local_candidate_type = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType_IsValid(value)) { - set_local_candidate_type(static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType >(value)); - } else { - unknown_fields_stream.WriteVarint32(32u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol local_relay_protocol = 5; - case 5: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (40 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_Protocol_IsValid(value)) { - set_local_relay_protocol(static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_Protocol >(value)); - } else { - unknown_fields_stream.WriteVarint32(40u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional 
.webrtc.rtclog2.IceCandidatePairConfig.NetworkType local_network_type = 6; - case 6: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (48 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_NetworkType_IsValid(value)) { - set_local_network_type(static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_NetworkType >(value)); - } else { - unknown_fields_stream.WriteVarint32(48u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily local_address_family = 7; - case 7: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (56 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily_IsValid(value)) { - set_local_address_family(static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily >(value)); - } else { - unknown_fields_stream.WriteVarint32(56u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 8; - case 8: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (64 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType_IsValid(value)) { - set_remote_candidate_type(static_cast< 
::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType >(value)); - } else { - unknown_fields_stream.WriteVarint32(64u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily remote_address_family = 9; - case 9: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (72 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily_IsValid(value)) { - set_remote_address_family(static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily >(value)); - } else { - unknown_fields_stream.WriteVarint32(72u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol candidate_pair_protocol = 10; - case 10: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (80 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairConfig_Protocol_IsValid(value)) { - set_candidate_pair_protocol(static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_Protocol >(value)); - } else { - unknown_fields_stream.WriteVarint32(80u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // 
@@protoc_insertion_point(parse_success:webrtc.rtclog2.IceCandidatePairConfig) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.IceCandidatePairConfig) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void IceCandidatePairConfig::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.IceCandidatePairConfig) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 2, this->config_type(), output); - } - - // optional uint32 candidate_pair_id = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->candidate_pair_id(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType local_candidate_type = 4; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 4, this->local_candidate_type(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol local_relay_protocol = 5; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 5, this->local_relay_protocol(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.NetworkType local_network_type = 6; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 6, this->local_network_type(), output); - } - - // optional 
.webrtc.rtclog2.IceCandidatePairConfig.AddressFamily local_address_family = 7; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 7, this->local_address_family(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 8; - if (cached_has_bits & 0x00000080u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 8, this->remote_candidate_type(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily remote_address_family = 9; - if (cached_has_bits & 0x00000100u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 9, this->remote_address_family(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol candidate_pair_protocol = 10; - if (cached_has_bits & 0x00000200u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 10, this->candidate_pair_protocol(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.IceCandidatePairConfig) -} - -size_t IceCandidatePairConfig::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.IceCandidatePairConfig) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 
+ - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->config_type()); - } - - // optional uint32 candidate_pair_id = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->candidate_pair_id()); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType local_candidate_type = 4; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_candidate_type()); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol local_relay_protocol = 5; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_relay_protocol()); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.NetworkType local_network_type = 6; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_network_type()); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily local_address_family = 7; - if (cached_has_bits & 0x00000040u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->local_address_family()); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 8; - if (cached_has_bits & 0x00000080u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->remote_candidate_type()); - } - - } - if (cached_has_bits & 0x00000300u) { - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily remote_address_family = 9; - if (cached_has_bits & 0x00000100u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->remote_address_family()); - } - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol candidate_pair_protocol = 10; - if (cached_has_bits & 0x00000200u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->candidate_pair_protocol()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void IceCandidatePairConfig::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void IceCandidatePairConfig::MergeFrom(const IceCandidatePairConfig& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.IceCandidatePairConfig) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x000000ffu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - config_type_ = from.config_type_; - } - if (cached_has_bits & 0x00000004u) { - candidate_pair_id_ = from.candidate_pair_id_; - } - if (cached_has_bits & 0x00000008u) { - local_candidate_type_ = from.local_candidate_type_; - } - if (cached_has_bits & 0x00000010u) { - local_relay_protocol_ = from.local_relay_protocol_; - } - if (cached_has_bits & 0x00000020u) { - local_network_type_ = from.local_network_type_; - } - if (cached_has_bits & 0x00000040u) { - local_address_family_ = from.local_address_family_; - } - if (cached_has_bits & 0x00000080u) { - remote_candidate_type_ = from.remote_candidate_type_; - } - _has_bits_[0] |= cached_has_bits; - } - if (cached_has_bits & 0x00000300u) { - if (cached_has_bits & 0x00000100u) { - remote_address_family_ = from.remote_address_family_; - } - if (cached_has_bits & 0x00000200u) { - candidate_pair_protocol_ = from.candidate_pair_protocol_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void IceCandidatePairConfig::CopyFrom(const IceCandidatePairConfig& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.IceCandidatePairConfig) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool IceCandidatePairConfig::IsInitialized() const { - return true; -} - -void IceCandidatePairConfig::InternalSwap(IceCandidatePairConfig* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(config_type_, other->config_type_); - swap(candidate_pair_id_, other->candidate_pair_id_); - swap(local_candidate_type_, other->local_candidate_type_); - swap(local_relay_protocol_, other->local_relay_protocol_); - swap(local_network_type_, other->local_network_type_); - swap(local_address_family_, other->local_address_family_); - swap(remote_candidate_type_, other->remote_candidate_type_); - swap(remote_address_family_, other->remote_address_family_); - swap(candidate_pair_protocol_, other->candidate_pair_protocol_); -} - -std::string IceCandidatePairConfig::GetTypeName() const { - return "webrtc.rtclog2.IceCandidatePairConfig"; -} - - -// =================================================================== - -void IceCandidatePairEvent::InitAsDefaultInstance() { -} -class IceCandidatePairEvent::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_event_type(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_candidate_pair_id(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } - static void set_has_transaction_id(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } -}; - -IceCandidatePairEvent::IceCandidatePairEvent() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.IceCandidatePairEvent) -} -IceCandidatePairEvent::IceCandidatePairEvent(const 
IceCandidatePairEvent& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&transaction_id_) - - reinterpret_cast(×tamp_ms_)) + sizeof(transaction_id_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.IceCandidatePairEvent) -} - -void IceCandidatePairEvent::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&transaction_id_) - - reinterpret_cast(×tamp_ms_)) + sizeof(transaction_id_)); -} - -IceCandidatePairEvent::~IceCandidatePairEvent() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.IceCandidatePairEvent) - SharedDtor(); -} - -void IceCandidatePairEvent::SharedDtor() { -} - -void IceCandidatePairEvent::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const IceCandidatePairEvent& IceCandidatePairEvent::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_IceCandidatePairEvent_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void IceCandidatePairEvent::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.IceCandidatePairEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&transaction_id_) - - reinterpret_cast(×tamp_ms_)) + sizeof(transaction_id_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* IceCandidatePairEvent::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - 
::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.IceCandidatePairEvent.IceCandidatePairEventType event_type = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType_IsValid(val))) { - set_event_type(static_cast<::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType>(val)); - } else { - ::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - // optional uint32 candidate_pair_id = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_candidate_pair_id(&has_bits); - candidate_pair_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 transaction_id = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_transaction_id(&has_bits); - transaction_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - 
_has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool IceCandidatePairEvent::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.IceCandidatePairEvent) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.IceCandidatePairEvent.IceCandidatePairEventType event_type = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType_IsValid(value)) { - set_event_type(static_cast< 
::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 candidate_pair_id = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_candidate_pair_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &candidate_pair_id_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 transaction_id = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_transaction_id(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &transaction_id_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.IceCandidatePairEvent) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.IceCandidatePairEvent) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void IceCandidatePairEvent::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.IceCandidatePairEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms 
= 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional .webrtc.rtclog2.IceCandidatePairEvent.IceCandidatePairEventType event_type = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 2, this->event_type(), output); - } - - // optional uint32 candidate_pair_id = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->candidate_pair_id(), output); - } - - // optional uint32 transaction_id = 4; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->transaction_id(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.IceCandidatePairEvent) -} - -size_t IceCandidatePairEvent::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.IceCandidatePairEvent) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional .webrtc.rtclog2.IceCandidatePairEvent.IceCandidatePairEventType event_type = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->event_type()); - } - - // optional uint32 candidate_pair_id = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->candidate_pair_id()); - } - - // optional uint32 transaction_id = 4; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->transaction_id()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void IceCandidatePairEvent::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void IceCandidatePairEvent::MergeFrom(const IceCandidatePairEvent& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.IceCandidatePairEvent) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000000fu) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - event_type_ = from.event_type_; - } - if (cached_has_bits & 0x00000004u) { - candidate_pair_id_ = from.candidate_pair_id_; - } - if (cached_has_bits & 0x00000008u) { - transaction_id_ = from.transaction_id_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void IceCandidatePairEvent::CopyFrom(const IceCandidatePairEvent& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.IceCandidatePairEvent) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool IceCandidatePairEvent::IsInitialized() const { - return true; -} - -void IceCandidatePairEvent::InternalSwap(IceCandidatePairEvent* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(event_type_, 
other->event_type_); - swap(candidate_pair_id_, other->candidate_pair_id_); - swap(transaction_id_, other->transaction_id_); -} - -std::string IceCandidatePairEvent::GetTypeName() const { - return "webrtc.rtclog2.IceCandidatePairEvent"; -} - - -// =================================================================== - -void DtlsTransportStateEvent::InitAsDefaultInstance() { -} -class DtlsTransportStateEvent::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_dtls_transport_state(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -DtlsTransportStateEvent::DtlsTransportStateEvent() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.DtlsTransportStateEvent) -} -DtlsTransportStateEvent::DtlsTransportStateEvent(const DtlsTransportStateEvent& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&dtls_transport_state_) - - reinterpret_cast(×tamp_ms_)) + sizeof(dtls_transport_state_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.DtlsTransportStateEvent) -} - -void DtlsTransportStateEvent::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&dtls_transport_state_) - - reinterpret_cast(×tamp_ms_)) + sizeof(dtls_transport_state_)); -} - -DtlsTransportStateEvent::~DtlsTransportStateEvent() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.DtlsTransportStateEvent) - SharedDtor(); -} - -void DtlsTransportStateEvent::SharedDtor() { -} - -void DtlsTransportStateEvent::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const DtlsTransportStateEvent& DtlsTransportStateEvent::default_instance() { - 
::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_DtlsTransportStateEvent_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void DtlsTransportStateEvent::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.DtlsTransportStateEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&dtls_transport_state_) - - reinterpret_cast(×tamp_ms_)) + sizeof(dtls_transport_state_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DtlsTransportStateEvent::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional .webrtc.rtclog2.DtlsTransportStateEvent.DtlsTransportState dtls_transport_state = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - if (PROTOBUF_PREDICT_TRUE(::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState_IsValid(val))) { - set_dtls_transport_state(static_cast<::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState>(val)); - } else { - 
::PROTOBUF_NAMESPACE_ID::internal::WriteVarint(2, val, mutable_unknown_fields()); - } - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DtlsTransportStateEvent::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.DtlsTransportStateEvent) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional .webrtc.rtclog2.DtlsTransportStateEvent.DtlsTransportState dtls_transport_state = 2; - case 2: { - if (static_cast< 
::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - if (::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState_IsValid(value)) { - set_dtls_transport_state(static_cast< ::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState >(value)); - } else { - unknown_fields_stream.WriteVarint32(16u); - unknown_fields_stream.WriteVarint32( - static_cast<::PROTOBUF_NAMESPACE_ID::uint32>(value)); - } - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.DtlsTransportStateEvent) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.DtlsTransportStateEvent) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void DtlsTransportStateEvent::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.DtlsTransportStateEvent) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional .webrtc.rtclog2.DtlsTransportStateEvent.DtlsTransportState dtls_transport_state = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( - 2, this->dtls_transport_state(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - 
static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.DtlsTransportStateEvent) -} - -size_t DtlsTransportStateEvent::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.DtlsTransportStateEvent) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional .webrtc.rtclog2.DtlsTransportStateEvent.DtlsTransportState dtls_transport_state = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->dtls_transport_state()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void DtlsTransportStateEvent::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void DtlsTransportStateEvent::MergeFrom(const DtlsTransportStateEvent& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.DtlsTransportStateEvent) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - dtls_transport_state_ = from.dtls_transport_state_; - } - 
_has_bits_[0] |= cached_has_bits; - } -} - -void DtlsTransportStateEvent::CopyFrom(const DtlsTransportStateEvent& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.DtlsTransportStateEvent) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool DtlsTransportStateEvent::IsInitialized() const { - return true; -} - -void DtlsTransportStateEvent::InternalSwap(DtlsTransportStateEvent* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(dtls_transport_state_, other->dtls_transport_state_); -} - -std::string DtlsTransportStateEvent::GetTypeName() const { - return "webrtc.rtclog2.DtlsTransportStateEvent"; -} - - -// =================================================================== - -void DtlsWritableState::InitAsDefaultInstance() { -} -class DtlsWritableState::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_writable(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } -}; - -DtlsWritableState::DtlsWritableState() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.DtlsWritableState) -} -DtlsWritableState::DtlsWritableState(const DtlsWritableState& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&writable_) - - reinterpret_cast(×tamp_ms_)) + sizeof(writable_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.DtlsWritableState) -} - -void DtlsWritableState::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&writable_) - - 
reinterpret_cast(×tamp_ms_)) + sizeof(writable_)); -} - -DtlsWritableState::~DtlsWritableState() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.DtlsWritableState) - SharedDtor(); -} - -void DtlsWritableState::SharedDtor() { -} - -void DtlsWritableState::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const DtlsWritableState& DtlsWritableState::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_DtlsWritableState_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void DtlsWritableState::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.DtlsWritableState) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&writable_) - - reinterpret_cast(×tamp_ms_)) + sizeof(writable_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DtlsWritableState::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool writable = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_writable(&has_bits); - writable_ = 
::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DtlsWritableState::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.DtlsWritableState) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool writable = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_writable(&_has_bits_); - 
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &writable_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.DtlsWritableState) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.DtlsWritableState) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void DtlsWritableState::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.DtlsWritableState) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional bool writable = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(2, this->writable(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.DtlsWritableState) -} - -size_t DtlsWritableState::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.DtlsWritableState) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if 
(cached_has_bits & 0x00000003u) { - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional bool writable = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + 1; - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void DtlsWritableState::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void DtlsWritableState::MergeFrom(const DtlsWritableState& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.DtlsWritableState) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000003u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - writable_ = from.writable_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void DtlsWritableState::CopyFrom(const DtlsWritableState& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.DtlsWritableState) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool DtlsWritableState::IsInitialized() const { - return true; -} - -void DtlsWritableState::InternalSwap(DtlsWritableState* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(writable_, other->writable_); -} - -std::string DtlsWritableState::GetTypeName() const { - return "webrtc.rtclog2.DtlsWritableState"; -} - - -// 
=================================================================== - -void RouteChange::InitAsDefaultInstance() { -} -class RouteChange::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_connected(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_overhead(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -RouteChange::RouteChange() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.RouteChange) -} -RouteChange::RouteChange(const RouteChange& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&overhead_) - - reinterpret_cast(×tamp_ms_)) + sizeof(overhead_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.RouteChange) -} - -void RouteChange::SharedCtor() { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&overhead_) - - reinterpret_cast(×tamp_ms_)) + sizeof(overhead_)); -} - -RouteChange::~RouteChange() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.RouteChange) - SharedDtor(); -} - -void RouteChange::SharedDtor() { -} - -void RouteChange::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RouteChange& RouteChange::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RouteChange_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void RouteChange::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.RouteChange) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if 
(cached_has_bits & 0x00000007u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&overhead_) - - reinterpret_cast(×tamp_ms_)) + sizeof(overhead_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RouteChange::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bool connected = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - _Internal::set_has_connected(&has_bits); - connected_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 overhead = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_overhead(&has_bits); - overhead_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RouteChange::MergePartialFromCodedStream( - 
::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.RouteChange) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bool connected = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_connected(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - bool, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_BOOL>( - input, &connected_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 overhead = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_overhead(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &overhead_))); - } else { - 
goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.RouteChange) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.RouteChange) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RouteChange::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.RouteChange) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional bool connected = 2; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBool(2, this->connected(), output); - } - - // optional uint32 overhead = 3; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->overhead(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.RouteChange) -} - -size_t RouteChange::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.RouteChange) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - // optional int64 
timestamp_ms = 1; - if (cached_has_bits & 0x00000001u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional bool connected = 2; - if (cached_has_bits & 0x00000002u) { - total_size += 1 + 1; - } - - // optional uint32 overhead = 3; - if (cached_has_bits & 0x00000004u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->overhead()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RouteChange::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RouteChange::MergeFrom(const RouteChange& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.RouteChange) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000002u) { - connected_ = from.connected_; - } - if (cached_has_bits & 0x00000004u) { - overhead_ = from.overhead_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RouteChange::CopyFrom(const RouteChange& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.RouteChange) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RouteChange::IsInitialized() const { - return true; -} - -void RouteChange::InternalSwap(RouteChange* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - swap(timestamp_ms_, other->timestamp_ms_); - swap(connected_, other->connected_); - swap(overhead_, 
other->overhead_); -} - -std::string RouteChange::GetTypeName() const { - return "webrtc.rtclog2.RouteChange"; -} - - -// =================================================================== - -void RemoteEstimates::InitAsDefaultInstance() { -} -class RemoteEstimates::_Internal { - public: - using HasBits = decltype(std::declval()._has_bits_); - static void set_has_timestamp_ms(HasBits* has_bits) { - (*has_bits)[0] |= 8u; - } - static void set_has_link_capacity_lower_kbps(HasBits* has_bits) { - (*has_bits)[0] |= 16u; - } - static void set_has_link_capacity_upper_kbps(HasBits* has_bits) { - (*has_bits)[0] |= 32u; - } - static void set_has_number_of_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 64u; - } - static void set_has_timestamp_ms_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 1u; - } - static void set_has_link_capacity_lower_kbps_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 2u; - } - static void set_has_link_capacity_upper_kbps_deltas(HasBits* has_bits) { - (*has_bits)[0] |= 4u; - } -}; - -RemoteEstimates::RemoteEstimates() - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:webrtc.rtclog2.RemoteEstimates) -} -RemoteEstimates::RemoteEstimates(const RemoteEstimates& from) - : ::PROTOBUF_NAMESPACE_ID::MessageLite(), - _internal_metadata_(nullptr), - _has_bits_(from._has_bits_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_timestamp_ms_deltas()) { - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - link_capacity_lower_kbps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_link_capacity_lower_kbps_deltas()) { - 
link_capacity_lower_kbps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.link_capacity_lower_kbps_deltas_); - } - link_capacity_upper_kbps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - if (from.has_link_capacity_upper_kbps_deltas()) { - link_capacity_upper_kbps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.link_capacity_upper_kbps_deltas_); - } - ::memcpy(×tamp_ms_, &from.timestamp_ms_, - static_cast(reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - // @@protoc_insertion_point(copy_constructor:webrtc.rtclog2.RemoteEstimates) -} - -void RemoteEstimates::SharedCtor() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_RemoteEstimates_rtc_5fevent_5flog2_2eproto.base); - timestamp_ms_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - link_capacity_lower_kbps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - link_capacity_upper_kbps_deltas_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); -} - -RemoteEstimates::~RemoteEstimates() { - // @@protoc_insertion_point(destructor:webrtc.rtclog2.RemoteEstimates) - SharedDtor(); -} - -void RemoteEstimates::SharedDtor() { - timestamp_ms_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - link_capacity_lower_kbps_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - link_capacity_upper_kbps_deltas_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} - -void RemoteEstimates::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const 
RemoteEstimates& RemoteEstimates::default_instance() { - ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_RemoteEstimates_rtc_5fevent_5flog2_2eproto.base); - return *internal_default_instance(); -} - - -void RemoteEstimates::Clear() { -// @@protoc_insertion_point(message_clear_start:webrtc.rtclog2.RemoteEstimates) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x00000007u) { - if (cached_has_bits & 0x00000001u) { - timestamp_ms_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000002u) { - link_capacity_lower_kbps_deltas_.ClearNonDefaultToEmptyNoArena(); - } - if (cached_has_bits & 0x00000004u) { - link_capacity_upper_kbps_deltas_.ClearNonDefaultToEmptyNoArena(); - } - } - if (cached_has_bits & 0x00000078u) { - ::memset(×tamp_ms_, 0, static_cast( - reinterpret_cast(&number_of_deltas_) - - reinterpret_cast(×tamp_ms_)) + sizeof(number_of_deltas_)); - } - _has_bits_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RemoteEstimates::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { -#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure - _Internal::HasBits has_bits{}; - while (!ctx->Done(&ptr)) { - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); - CHK_(ptr); - switch (tag >> 3) { - // optional int64 timestamp_ms = 1; - case 1: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { - _Internal::set_has_timestamp_ms(&has_bits); - timestamp_ms_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 link_capacity_lower_kbps = 2; - case 2: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { - 
_Internal::set_has_link_capacity_lower_kbps(&has_bits); - link_capacity_lower_kbps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 link_capacity_upper_kbps = 3; - case 3: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 24)) { - _Internal::set_has_link_capacity_upper_kbps(&has_bits); - link_capacity_upper_kbps_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional uint32 number_of_deltas = 4; - case 4: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32)) { - _Internal::set_has_number_of_deltas(&has_bits); - number_of_deltas_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes timestamp_ms_deltas = 101; - case 101: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_timestamp_ms_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes link_capacity_lower_kbps_deltas = 102; - case 102: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 50)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_link_capacity_lower_kbps_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - // optional bytes link_capacity_upper_kbps_deltas = 103; - case 103: - if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 58)) { - ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_link_capacity_upper_kbps_deltas(), ptr, ctx); - CHK_(ptr); - } else goto handle_unusual; - continue; - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->SetLastTag(tag); - goto success; - } - ptr = UnknownFieldParse(tag, 
&_internal_metadata_, ptr, ctx); - CHK_(ptr != nullptr); - continue; - } - } // switch - } // while -success: - _has_bits_.Or(has_bits); - return ptr; -failure: - ptr = nullptr; - goto success; -#undef CHK_ -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RemoteEstimates::MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::PROTOBUF_NAMESPACE_ID::uint32 tag; - ::PROTOBUF_NAMESPACE_ID::internal::LiteUnknownFieldSetter unknown_fields_setter( - &_internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::io::StringOutputStream unknown_fields_output( - unknown_fields_setter.buffer()); - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream unknown_fields_stream( - &unknown_fields_output, false); - // @@protoc_insertion_point(parse_start:webrtc.rtclog2.RemoteEstimates) - for (;;) { - ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // optional int64 timestamp_ms = 1; - case 1: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { - _Internal::set_has_timestamp_ms(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::int64, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT64>( - input, ×tamp_ms_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 link_capacity_lower_kbps = 2; - case 2: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { - _Internal::set_has_link_capacity_lower_kbps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &link_capacity_lower_kbps_))); - } else { - goto 
handle_unusual; - } - break; - } - - // optional uint32 link_capacity_upper_kbps = 3; - case 3: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { - _Internal::set_has_link_capacity_upper_kbps(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &link_capacity_upper_kbps_))); - } else { - goto handle_unusual; - } - break; - } - - // optional uint32 number_of_deltas = 4; - case 4: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) { - _Internal::set_has_number_of_deltas(&_has_bits_); - DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< - ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( - input, &number_of_deltas_))); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes timestamp_ms_deltas = 101; - case 101: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (810 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_timestamp_ms_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes link_capacity_lower_kbps_deltas = 102; - case 102: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (818 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_link_capacity_lower_kbps_deltas())); - } else { - goto handle_unusual; - } - break; - } - - // optional bytes link_capacity_upper_kbps_deltas = 103; - case 103: { - if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (826 & 0xFF)) { - DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( - input, this->mutable_link_capacity_upper_kbps_deltas())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SkipField( - input, tag, &unknown_fields_stream)); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:webrtc.rtclog2.RemoteEstimates) - return true; -failure: - // @@protoc_insertion_point(parse_failure:webrtc.rtclog2.RemoteEstimates) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RemoteEstimates::SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:webrtc.rtclog2.RemoteEstimates) - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt64(1, this->timestamp_ms(), output); - } - - // optional uint32 link_capacity_lower_kbps = 2; - if (cached_has_bits & 0x00000010u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->link_capacity_lower_kbps(), output); - } - - // optional uint32 link_capacity_upper_kbps = 3; - if (cached_has_bits & 0x00000020u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->link_capacity_upper_kbps(), output); - } - - // optional uint32 number_of_deltas = 4; - if (cached_has_bits & 0x00000040u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(4, this->number_of_deltas(), output); - } - - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 101, this->timestamp_ms_deltas(), output); - } - - // optional bytes link_capacity_lower_kbps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 102, this->link_capacity_lower_kbps_deltas(), output); - } - - // optional bytes 
link_capacity_upper_kbps_deltas = 103; - if (cached_has_bits & 0x00000004u) { - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( - 103, this->link_capacity_upper_kbps_deltas(), output); - } - - output->WriteRaw(_internal_metadata_.unknown_fields().data(), - static_cast(_internal_metadata_.unknown_fields().size())); - // @@protoc_insertion_point(serialize_end:webrtc.rtclog2.RemoteEstimates) -} - -size_t RemoteEstimates::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:webrtc.rtclog2.RemoteEstimates) - size_t total_size = 0; - - total_size += _internal_metadata_.unknown_fields().size(); - - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - cached_has_bits = _has_bits_[0]; - if (cached_has_bits & 0x0000007fu) { - // optional bytes timestamp_ms_deltas = 101; - if (cached_has_bits & 0x00000001u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->timestamp_ms_deltas()); - } - - // optional bytes link_capacity_lower_kbps_deltas = 102; - if (cached_has_bits & 0x00000002u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->link_capacity_lower_kbps_deltas()); - } - - // optional bytes link_capacity_upper_kbps_deltas = 103; - if (cached_has_bits & 0x00000004u) { - total_size += 2 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( - this->link_capacity_upper_kbps_deltas()); - } - - // optional int64 timestamp_ms = 1; - if (cached_has_bits & 0x00000008u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int64Size( - this->timestamp_ms()); - } - - // optional uint32 link_capacity_lower_kbps = 2; - if (cached_has_bits & 0x00000010u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->link_capacity_lower_kbps()); - } - - // optional uint32 link_capacity_upper_kbps = 
3; - if (cached_has_bits & 0x00000020u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->link_capacity_upper_kbps()); - } - - // optional uint32 number_of_deltas = 4; - if (cached_has_bits & 0x00000040u) { - total_size += 1 + - ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( - this->number_of_deltas()); - } - - } - int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RemoteEstimates::CheckTypeAndMergeFrom( - const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) { - MergeFrom(*::PROTOBUF_NAMESPACE_ID::internal::DownCast( - &from)); -} - -void RemoteEstimates::MergeFrom(const RemoteEstimates& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:webrtc.rtclog2.RemoteEstimates) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - cached_has_bits = from._has_bits_[0]; - if (cached_has_bits & 0x0000007fu) { - if (cached_has_bits & 0x00000001u) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.timestamp_ms_deltas_); - } - if (cached_has_bits & 0x00000002u) { - _has_bits_[0] |= 0x00000002u; - link_capacity_lower_kbps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.link_capacity_lower_kbps_deltas_); - } - if (cached_has_bits & 0x00000004u) { - _has_bits_[0] |= 0x00000004u; - link_capacity_upper_kbps_deltas_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.link_capacity_upper_kbps_deltas_); - } - if (cached_has_bits & 0x00000008u) { - timestamp_ms_ = from.timestamp_ms_; - } - if (cached_has_bits & 0x00000010u) { - link_capacity_lower_kbps_ = from.link_capacity_lower_kbps_; - } - if (cached_has_bits & 
0x00000020u) { - link_capacity_upper_kbps_ = from.link_capacity_upper_kbps_; - } - if (cached_has_bits & 0x00000040u) { - number_of_deltas_ = from.number_of_deltas_; - } - _has_bits_[0] |= cached_has_bits; - } -} - -void RemoteEstimates::CopyFrom(const RemoteEstimates& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:webrtc.rtclog2.RemoteEstimates) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RemoteEstimates::IsInitialized() const { - return true; -} - -void RemoteEstimates::InternalSwap(RemoteEstimates* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(_has_bits_[0], other->_has_bits_[0]); - timestamp_ms_deltas_.Swap(&other->timestamp_ms_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - link_capacity_lower_kbps_deltas_.Swap(&other->link_capacity_lower_kbps_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - link_capacity_upper_kbps_deltas_.Swap(&other->link_capacity_upper_kbps_deltas_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(timestamp_ms_, other->timestamp_ms_); - swap(link_capacity_lower_kbps_, other->link_capacity_lower_kbps_); - swap(link_capacity_upper_kbps_, other->link_capacity_upper_kbps_); - swap(number_of_deltas_, other->number_of_deltas_); -} - -std::string RemoteEstimates::GetTypeName() const { - return "webrtc.rtclog2.RemoteEstimates"; -} - - -// @@protoc_insertion_point(namespace_scope) -} // namespace rtclog2 -} // namespace webrtc -PROTOBUF_NAMESPACE_OPEN -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::EventStream* Arena::CreateMaybeMessage< ::webrtc::rtclog2::EventStream >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::EventStream >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::Event* Arena::CreateMaybeMessage< ::webrtc::rtclog2::Event >(Arena* arena) { - return 
Arena::CreateInternal< ::webrtc::rtclog2::Event >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::GenericPacketReceived* Arena::CreateMaybeMessage< ::webrtc::rtclog2::GenericPacketReceived >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::GenericPacketReceived >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::GenericPacketSent* Arena::CreateMaybeMessage< ::webrtc::rtclog2::GenericPacketSent >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::GenericPacketSent >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::GenericAckReceived* Arena::CreateMaybeMessage< ::webrtc::rtclog2::GenericAckReceived >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::GenericAckReceived >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::IncomingRtpPackets* Arena::CreateMaybeMessage< ::webrtc::rtclog2::IncomingRtpPackets >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::IncomingRtpPackets >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::OutgoingRtpPackets* Arena::CreateMaybeMessage< ::webrtc::rtclog2::OutgoingRtpPackets >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::OutgoingRtpPackets >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::IncomingRtcpPackets* Arena::CreateMaybeMessage< ::webrtc::rtclog2::IncomingRtcpPackets >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::IncomingRtcpPackets >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::OutgoingRtcpPackets* Arena::CreateMaybeMessage< ::webrtc::rtclog2::OutgoingRtcpPackets >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::OutgoingRtcpPackets >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::AudioPlayoutEvents* Arena::CreateMaybeMessage< ::webrtc::rtclog2::AudioPlayoutEvents >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::AudioPlayoutEvents >(arena); -} -template<> PROTOBUF_NOINLINE 
::webrtc::rtclog2::FrameDecodedEvents* Arena::CreateMaybeMessage< ::webrtc::rtclog2::FrameDecodedEvents >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::FrameDecodedEvents >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::BeginLogEvent* Arena::CreateMaybeMessage< ::webrtc::rtclog2::BeginLogEvent >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::BeginLogEvent >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::EndLogEvent* Arena::CreateMaybeMessage< ::webrtc::rtclog2::EndLogEvent >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::EndLogEvent >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::LossBasedBweUpdates* Arena::CreateMaybeMessage< ::webrtc::rtclog2::LossBasedBweUpdates >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::LossBasedBweUpdates >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::DelayBasedBweUpdates* Arena::CreateMaybeMessage< ::webrtc::rtclog2::DelayBasedBweUpdates >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::DelayBasedBweUpdates >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::RtpHeaderExtensionConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog2::RtpHeaderExtensionConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::RtpHeaderExtensionConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::VideoRecvStreamConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog2::VideoRecvStreamConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::VideoRecvStreamConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::VideoSendStreamConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog2::VideoSendStreamConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::VideoSendStreamConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::AudioRecvStreamConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog2::AudioRecvStreamConfig 
>(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::AudioRecvStreamConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::AudioSendStreamConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog2::AudioSendStreamConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::AudioSendStreamConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::AudioNetworkAdaptations* Arena::CreateMaybeMessage< ::webrtc::rtclog2::AudioNetworkAdaptations >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::AudioNetworkAdaptations >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::BweProbeCluster* Arena::CreateMaybeMessage< ::webrtc::rtclog2::BweProbeCluster >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::BweProbeCluster >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::BweProbeResultSuccess* Arena::CreateMaybeMessage< ::webrtc::rtclog2::BweProbeResultSuccess >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::BweProbeResultSuccess >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::BweProbeResultFailure* Arena::CreateMaybeMessage< ::webrtc::rtclog2::BweProbeResultFailure >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::BweProbeResultFailure >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::AlrState* Arena::CreateMaybeMessage< ::webrtc::rtclog2::AlrState >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::AlrState >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::IceCandidatePairConfig* Arena::CreateMaybeMessage< ::webrtc::rtclog2::IceCandidatePairConfig >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::IceCandidatePairConfig >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::IceCandidatePairEvent* Arena::CreateMaybeMessage< ::webrtc::rtclog2::IceCandidatePairEvent >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::IceCandidatePairEvent 
>(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::DtlsTransportStateEvent* Arena::CreateMaybeMessage< ::webrtc::rtclog2::DtlsTransportStateEvent >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::DtlsTransportStateEvent >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::DtlsWritableState* Arena::CreateMaybeMessage< ::webrtc::rtclog2::DtlsWritableState >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::DtlsWritableState >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::RouteChange* Arena::CreateMaybeMessage< ::webrtc::rtclog2::RouteChange >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::RouteChange >(arena); -} -template<> PROTOBUF_NOINLINE ::webrtc::rtclog2::RemoteEstimates* Arena::CreateMaybeMessage< ::webrtc::rtclog2::RemoteEstimates >(Arena* arena) { - return Arena::CreateInternal< ::webrtc::rtclog2::RemoteEstimates >(arena); -} -PROTOBUF_NAMESPACE_CLOSE - -// @@protoc_insertion_point(global_scope) -#include diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2.pb.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2.pb.h deleted file mode 100644 index a6291e618..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2.pb.h +++ /dev/null @@ -1,14921 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: rtc_event_log2.proto - -#ifndef GOOGLE_PROTOBUF_INCLUDED_rtc_5fevent_5flog2_2eproto -#define GOOGLE_PROTOBUF_INCLUDED_rtc_5fevent_5flog2_2eproto - -#include -#include - -#include -#if PROTOBUF_VERSION < 3009000 -#error This file was generated by a newer version of protoc which is -#error incompatible with your Protocol Buffer headers. Please update -#error your headers. -#endif -#if 3009000 < PROTOBUF_MIN_PROTOC_VERSION -#error This file was generated by an older version of protoc which is -#error incompatible with your Protocol Buffer headers. 
Please -#error regenerate this file with a newer version of protoc. -#endif - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include // IWYU pragma: export -#include // IWYU pragma: export -#include -// @@protoc_insertion_point(includes) -#include -#define PROTOBUF_INTERNAL_EXPORT_rtc_5fevent_5flog2_2eproto -PROTOBUF_NAMESPACE_OPEN -namespace internal { -class AnyMetadata; -} // namespace internal -PROTOBUF_NAMESPACE_CLOSE - -// Internal implementation detail -- do not use these members. -struct TableStruct_rtc_5fevent_5flog2_2eproto { - static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[31] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[]; - static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[]; - static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[]; -}; -namespace webrtc { -namespace rtclog2 { -class AlrState; -class AlrStateDefaultTypeInternal; -extern AlrStateDefaultTypeInternal _AlrState_default_instance_; -class AudioNetworkAdaptations; -class AudioNetworkAdaptationsDefaultTypeInternal; -extern AudioNetworkAdaptationsDefaultTypeInternal _AudioNetworkAdaptations_default_instance_; -class AudioPlayoutEvents; -class AudioPlayoutEventsDefaultTypeInternal; -extern AudioPlayoutEventsDefaultTypeInternal _AudioPlayoutEvents_default_instance_; -class AudioRecvStreamConfig; -class AudioRecvStreamConfigDefaultTypeInternal; -extern AudioRecvStreamConfigDefaultTypeInternal _AudioRecvStreamConfig_default_instance_; -class AudioSendStreamConfig; -class AudioSendStreamConfigDefaultTypeInternal; -extern AudioSendStreamConfigDefaultTypeInternal 
_AudioSendStreamConfig_default_instance_; -class BeginLogEvent; -class BeginLogEventDefaultTypeInternal; -extern BeginLogEventDefaultTypeInternal _BeginLogEvent_default_instance_; -class BweProbeCluster; -class BweProbeClusterDefaultTypeInternal; -extern BweProbeClusterDefaultTypeInternal _BweProbeCluster_default_instance_; -class BweProbeResultFailure; -class BweProbeResultFailureDefaultTypeInternal; -extern BweProbeResultFailureDefaultTypeInternal _BweProbeResultFailure_default_instance_; -class BweProbeResultSuccess; -class BweProbeResultSuccessDefaultTypeInternal; -extern BweProbeResultSuccessDefaultTypeInternal _BweProbeResultSuccess_default_instance_; -class DelayBasedBweUpdates; -class DelayBasedBweUpdatesDefaultTypeInternal; -extern DelayBasedBweUpdatesDefaultTypeInternal _DelayBasedBweUpdates_default_instance_; -class DtlsTransportStateEvent; -class DtlsTransportStateEventDefaultTypeInternal; -extern DtlsTransportStateEventDefaultTypeInternal _DtlsTransportStateEvent_default_instance_; -class DtlsWritableState; -class DtlsWritableStateDefaultTypeInternal; -extern DtlsWritableStateDefaultTypeInternal _DtlsWritableState_default_instance_; -class EndLogEvent; -class EndLogEventDefaultTypeInternal; -extern EndLogEventDefaultTypeInternal _EndLogEvent_default_instance_; -class Event; -class EventDefaultTypeInternal; -extern EventDefaultTypeInternal _Event_default_instance_; -class EventStream; -class EventStreamDefaultTypeInternal; -extern EventStreamDefaultTypeInternal _EventStream_default_instance_; -class FrameDecodedEvents; -class FrameDecodedEventsDefaultTypeInternal; -extern FrameDecodedEventsDefaultTypeInternal _FrameDecodedEvents_default_instance_; -class GenericAckReceived; -class GenericAckReceivedDefaultTypeInternal; -extern GenericAckReceivedDefaultTypeInternal _GenericAckReceived_default_instance_; -class GenericPacketReceived; -class GenericPacketReceivedDefaultTypeInternal; -extern GenericPacketReceivedDefaultTypeInternal 
_GenericPacketReceived_default_instance_; -class GenericPacketSent; -class GenericPacketSentDefaultTypeInternal; -extern GenericPacketSentDefaultTypeInternal _GenericPacketSent_default_instance_; -class IceCandidatePairConfig; -class IceCandidatePairConfigDefaultTypeInternal; -extern IceCandidatePairConfigDefaultTypeInternal _IceCandidatePairConfig_default_instance_; -class IceCandidatePairEvent; -class IceCandidatePairEventDefaultTypeInternal; -extern IceCandidatePairEventDefaultTypeInternal _IceCandidatePairEvent_default_instance_; -class IncomingRtcpPackets; -class IncomingRtcpPacketsDefaultTypeInternal; -extern IncomingRtcpPacketsDefaultTypeInternal _IncomingRtcpPackets_default_instance_; -class IncomingRtpPackets; -class IncomingRtpPacketsDefaultTypeInternal; -extern IncomingRtpPacketsDefaultTypeInternal _IncomingRtpPackets_default_instance_; -class LossBasedBweUpdates; -class LossBasedBweUpdatesDefaultTypeInternal; -extern LossBasedBweUpdatesDefaultTypeInternal _LossBasedBweUpdates_default_instance_; -class OutgoingRtcpPackets; -class OutgoingRtcpPacketsDefaultTypeInternal; -extern OutgoingRtcpPacketsDefaultTypeInternal _OutgoingRtcpPackets_default_instance_; -class OutgoingRtpPackets; -class OutgoingRtpPacketsDefaultTypeInternal; -extern OutgoingRtpPacketsDefaultTypeInternal _OutgoingRtpPackets_default_instance_; -class RemoteEstimates; -class RemoteEstimatesDefaultTypeInternal; -extern RemoteEstimatesDefaultTypeInternal _RemoteEstimates_default_instance_; -class RouteChange; -class RouteChangeDefaultTypeInternal; -extern RouteChangeDefaultTypeInternal _RouteChange_default_instance_; -class RtpHeaderExtensionConfig; -class RtpHeaderExtensionConfigDefaultTypeInternal; -extern RtpHeaderExtensionConfigDefaultTypeInternal _RtpHeaderExtensionConfig_default_instance_; -class VideoRecvStreamConfig; -class VideoRecvStreamConfigDefaultTypeInternal; -extern VideoRecvStreamConfigDefaultTypeInternal _VideoRecvStreamConfig_default_instance_; -class VideoSendStreamConfig; 
-class VideoSendStreamConfigDefaultTypeInternal; -extern VideoSendStreamConfigDefaultTypeInternal _VideoSendStreamConfig_default_instance_; -} // namespace rtclog2 -} // namespace webrtc -PROTOBUF_NAMESPACE_OPEN -template<> ::webrtc::rtclog2::AlrState* Arena::CreateMaybeMessage<::webrtc::rtclog2::AlrState>(Arena*); -template<> ::webrtc::rtclog2::AudioNetworkAdaptations* Arena::CreateMaybeMessage<::webrtc::rtclog2::AudioNetworkAdaptations>(Arena*); -template<> ::webrtc::rtclog2::AudioPlayoutEvents* Arena::CreateMaybeMessage<::webrtc::rtclog2::AudioPlayoutEvents>(Arena*); -template<> ::webrtc::rtclog2::AudioRecvStreamConfig* Arena::CreateMaybeMessage<::webrtc::rtclog2::AudioRecvStreamConfig>(Arena*); -template<> ::webrtc::rtclog2::AudioSendStreamConfig* Arena::CreateMaybeMessage<::webrtc::rtclog2::AudioSendStreamConfig>(Arena*); -template<> ::webrtc::rtclog2::BeginLogEvent* Arena::CreateMaybeMessage<::webrtc::rtclog2::BeginLogEvent>(Arena*); -template<> ::webrtc::rtclog2::BweProbeCluster* Arena::CreateMaybeMessage<::webrtc::rtclog2::BweProbeCluster>(Arena*); -template<> ::webrtc::rtclog2::BweProbeResultFailure* Arena::CreateMaybeMessage<::webrtc::rtclog2::BweProbeResultFailure>(Arena*); -template<> ::webrtc::rtclog2::BweProbeResultSuccess* Arena::CreateMaybeMessage<::webrtc::rtclog2::BweProbeResultSuccess>(Arena*); -template<> ::webrtc::rtclog2::DelayBasedBweUpdates* Arena::CreateMaybeMessage<::webrtc::rtclog2::DelayBasedBweUpdates>(Arena*); -template<> ::webrtc::rtclog2::DtlsTransportStateEvent* Arena::CreateMaybeMessage<::webrtc::rtclog2::DtlsTransportStateEvent>(Arena*); -template<> ::webrtc::rtclog2::DtlsWritableState* Arena::CreateMaybeMessage<::webrtc::rtclog2::DtlsWritableState>(Arena*); -template<> ::webrtc::rtclog2::EndLogEvent* Arena::CreateMaybeMessage<::webrtc::rtclog2::EndLogEvent>(Arena*); -template<> ::webrtc::rtclog2::Event* Arena::CreateMaybeMessage<::webrtc::rtclog2::Event>(Arena*); -template<> ::webrtc::rtclog2::EventStream* 
Arena::CreateMaybeMessage<::webrtc::rtclog2::EventStream>(Arena*); -template<> ::webrtc::rtclog2::FrameDecodedEvents* Arena::CreateMaybeMessage<::webrtc::rtclog2::FrameDecodedEvents>(Arena*); -template<> ::webrtc::rtclog2::GenericAckReceived* Arena::CreateMaybeMessage<::webrtc::rtclog2::GenericAckReceived>(Arena*); -template<> ::webrtc::rtclog2::GenericPacketReceived* Arena::CreateMaybeMessage<::webrtc::rtclog2::GenericPacketReceived>(Arena*); -template<> ::webrtc::rtclog2::GenericPacketSent* Arena::CreateMaybeMessage<::webrtc::rtclog2::GenericPacketSent>(Arena*); -template<> ::webrtc::rtclog2::IceCandidatePairConfig* Arena::CreateMaybeMessage<::webrtc::rtclog2::IceCandidatePairConfig>(Arena*); -template<> ::webrtc::rtclog2::IceCandidatePairEvent* Arena::CreateMaybeMessage<::webrtc::rtclog2::IceCandidatePairEvent>(Arena*); -template<> ::webrtc::rtclog2::IncomingRtcpPackets* Arena::CreateMaybeMessage<::webrtc::rtclog2::IncomingRtcpPackets>(Arena*); -template<> ::webrtc::rtclog2::IncomingRtpPackets* Arena::CreateMaybeMessage<::webrtc::rtclog2::IncomingRtpPackets>(Arena*); -template<> ::webrtc::rtclog2::LossBasedBweUpdates* Arena::CreateMaybeMessage<::webrtc::rtclog2::LossBasedBweUpdates>(Arena*); -template<> ::webrtc::rtclog2::OutgoingRtcpPackets* Arena::CreateMaybeMessage<::webrtc::rtclog2::OutgoingRtcpPackets>(Arena*); -template<> ::webrtc::rtclog2::OutgoingRtpPackets* Arena::CreateMaybeMessage<::webrtc::rtclog2::OutgoingRtpPackets>(Arena*); -template<> ::webrtc::rtclog2::RemoteEstimates* Arena::CreateMaybeMessage<::webrtc::rtclog2::RemoteEstimates>(Arena*); -template<> ::webrtc::rtclog2::RouteChange* Arena::CreateMaybeMessage<::webrtc::rtclog2::RouteChange>(Arena*); -template<> ::webrtc::rtclog2::RtpHeaderExtensionConfig* Arena::CreateMaybeMessage<::webrtc::rtclog2::RtpHeaderExtensionConfig>(Arena*); -template<> ::webrtc::rtclog2::VideoRecvStreamConfig* Arena::CreateMaybeMessage<::webrtc::rtclog2::VideoRecvStreamConfig>(Arena*); -template<> 
::webrtc::rtclog2::VideoSendStreamConfig* Arena::CreateMaybeMessage<::webrtc::rtclog2::VideoSendStreamConfig>(Arena*); -PROTOBUF_NAMESPACE_CLOSE -namespace webrtc { -namespace rtclog2 { - -enum FrameDecodedEvents_Codec : int { - FrameDecodedEvents_Codec_CODEC_UNKNOWN = 0, - FrameDecodedEvents_Codec_CODEC_GENERIC = 1, - FrameDecodedEvents_Codec_CODEC_VP8 = 2, - FrameDecodedEvents_Codec_CODEC_VP9 = 3, - FrameDecodedEvents_Codec_CODEC_AV1 = 4, - FrameDecodedEvents_Codec_CODEC_H264 = 5, - FrameDecodedEvents_Codec_CODEC_H265 = 6 -}; -bool FrameDecodedEvents_Codec_IsValid(int value); -constexpr FrameDecodedEvents_Codec FrameDecodedEvents_Codec_Codec_MIN = FrameDecodedEvents_Codec_CODEC_UNKNOWN; -constexpr FrameDecodedEvents_Codec FrameDecodedEvents_Codec_Codec_MAX = FrameDecodedEvents_Codec_CODEC_H265; -constexpr int FrameDecodedEvents_Codec_Codec_ARRAYSIZE = FrameDecodedEvents_Codec_Codec_MAX + 1; - -const std::string& FrameDecodedEvents_Codec_Name(FrameDecodedEvents_Codec value); -template -inline const std::string& FrameDecodedEvents_Codec_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function FrameDecodedEvents_Codec_Name."); - return FrameDecodedEvents_Codec_Name(static_cast(enum_t_value)); -} -bool FrameDecodedEvents_Codec_Parse( - const std::string& name, FrameDecodedEvents_Codec* value); -enum DelayBasedBweUpdates_DetectorState : int { - DelayBasedBweUpdates_DetectorState_BWE_UNKNOWN_STATE = 0, - DelayBasedBweUpdates_DetectorState_BWE_NORMAL = 1, - DelayBasedBweUpdates_DetectorState_BWE_UNDERUSING = 2, - DelayBasedBweUpdates_DetectorState_BWE_OVERUSING = 3 -}; -bool DelayBasedBweUpdates_DetectorState_IsValid(int value); -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates_DetectorState_DetectorState_MIN = DelayBasedBweUpdates_DetectorState_BWE_UNKNOWN_STATE; -constexpr DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates_DetectorState_DetectorState_MAX = 
DelayBasedBweUpdates_DetectorState_BWE_OVERUSING; -constexpr int DelayBasedBweUpdates_DetectorState_DetectorState_ARRAYSIZE = DelayBasedBweUpdates_DetectorState_DetectorState_MAX + 1; - -const std::string& DelayBasedBweUpdates_DetectorState_Name(DelayBasedBweUpdates_DetectorState value); -template -inline const std::string& DelayBasedBweUpdates_DetectorState_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function DelayBasedBweUpdates_DetectorState_Name."); - return DelayBasedBweUpdates_DetectorState_Name(static_cast(enum_t_value)); -} -bool DelayBasedBweUpdates_DetectorState_Parse( - const std::string& name, DelayBasedBweUpdates_DetectorState* value); -enum BweProbeResultFailure_FailureReason : int { - BweProbeResultFailure_FailureReason_UNKNOWN = 0, - BweProbeResultFailure_FailureReason_INVALID_SEND_RECEIVE_INTERVAL = 1, - BweProbeResultFailure_FailureReason_INVALID_SEND_RECEIVE_RATIO = 2, - BweProbeResultFailure_FailureReason_TIMEOUT = 3 -}; -bool BweProbeResultFailure_FailureReason_IsValid(int value); -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure_FailureReason_FailureReason_MIN = BweProbeResultFailure_FailureReason_UNKNOWN; -constexpr BweProbeResultFailure_FailureReason BweProbeResultFailure_FailureReason_FailureReason_MAX = BweProbeResultFailure_FailureReason_TIMEOUT; -constexpr int BweProbeResultFailure_FailureReason_FailureReason_ARRAYSIZE = BweProbeResultFailure_FailureReason_FailureReason_MAX + 1; - -const std::string& BweProbeResultFailure_FailureReason_Name(BweProbeResultFailure_FailureReason value); -template -inline const std::string& BweProbeResultFailure_FailureReason_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function BweProbeResultFailure_FailureReason_Name."); - return BweProbeResultFailure_FailureReason_Name(static_cast(enum_t_value)); -} -bool 
BweProbeResultFailure_FailureReason_Parse( - const std::string& name, BweProbeResultFailure_FailureReason* value); -enum IceCandidatePairConfig_IceCandidatePairConfigType : int { - IceCandidatePairConfig_IceCandidatePairConfigType_UNKNOWN_CONFIG_TYPE = 0, - IceCandidatePairConfig_IceCandidatePairConfigType_ADDED = 1, - IceCandidatePairConfig_IceCandidatePairConfigType_UPDATED = 2, - IceCandidatePairConfig_IceCandidatePairConfigType_DESTROYED = 3, - IceCandidatePairConfig_IceCandidatePairConfigType_SELECTED = 4 -}; -bool IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(int value); -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MIN = IceCandidatePairConfig_IceCandidatePairConfigType_UNKNOWN_CONFIG_TYPE; -constexpr IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MAX = IceCandidatePairConfig_IceCandidatePairConfigType_SELECTED; -constexpr int IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_ARRAYSIZE = IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MAX + 1; - -const std::string& IceCandidatePairConfig_IceCandidatePairConfigType_Name(IceCandidatePairConfig_IceCandidatePairConfigType value); -template -inline const std::string& IceCandidatePairConfig_IceCandidatePairConfigType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_IceCandidatePairConfigType_Name."); - return IceCandidatePairConfig_IceCandidatePairConfigType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_IceCandidatePairConfigType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidatePairConfigType* value); -enum IceCandidatePairConfig_IceCandidateType : int { - IceCandidatePairConfig_IceCandidateType_UNKNOWN_CANDIDATE_TYPE = 0, - 
IceCandidatePairConfig_IceCandidateType_LOCAL = 1, - IceCandidatePairConfig_IceCandidateType_STUN = 2, - IceCandidatePairConfig_IceCandidateType_PRFLX = 3, - IceCandidatePairConfig_IceCandidateType_RELAY = 4 -}; -bool IceCandidatePairConfig_IceCandidateType_IsValid(int value); -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig_IceCandidateType_IceCandidateType_MIN = IceCandidatePairConfig_IceCandidateType_UNKNOWN_CANDIDATE_TYPE; -constexpr IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig_IceCandidateType_IceCandidateType_MAX = IceCandidatePairConfig_IceCandidateType_RELAY; -constexpr int IceCandidatePairConfig_IceCandidateType_IceCandidateType_ARRAYSIZE = IceCandidatePairConfig_IceCandidateType_IceCandidateType_MAX + 1; - -const std::string& IceCandidatePairConfig_IceCandidateType_Name(IceCandidatePairConfig_IceCandidateType value); -template -inline const std::string& IceCandidatePairConfig_IceCandidateType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_IceCandidateType_Name."); - return IceCandidatePairConfig_IceCandidateType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_IceCandidateType_Parse( - const std::string& name, IceCandidatePairConfig_IceCandidateType* value); -enum IceCandidatePairConfig_Protocol : int { - IceCandidatePairConfig_Protocol_UNKNOWN_PROTOCOL = 0, - IceCandidatePairConfig_Protocol_UDP = 1, - IceCandidatePairConfig_Protocol_TCP = 2, - IceCandidatePairConfig_Protocol_SSLTCP = 3, - IceCandidatePairConfig_Protocol_TLS = 4 -}; -bool IceCandidatePairConfig_Protocol_IsValid(int value); -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig_Protocol_Protocol_MIN = IceCandidatePairConfig_Protocol_UNKNOWN_PROTOCOL; -constexpr IceCandidatePairConfig_Protocol IceCandidatePairConfig_Protocol_Protocol_MAX = IceCandidatePairConfig_Protocol_TLS; -constexpr int 
IceCandidatePairConfig_Protocol_Protocol_ARRAYSIZE = IceCandidatePairConfig_Protocol_Protocol_MAX + 1; - -const std::string& IceCandidatePairConfig_Protocol_Name(IceCandidatePairConfig_Protocol value); -template -inline const std::string& IceCandidatePairConfig_Protocol_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_Protocol_Name."); - return IceCandidatePairConfig_Protocol_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_Protocol_Parse( - const std::string& name, IceCandidatePairConfig_Protocol* value); -enum IceCandidatePairConfig_AddressFamily : int { - IceCandidatePairConfig_AddressFamily_UNKNOWN_ADDRESS_FAMILY = 0, - IceCandidatePairConfig_AddressFamily_IPV4 = 1, - IceCandidatePairConfig_AddressFamily_IPV6 = 2 -}; -bool IceCandidatePairConfig_AddressFamily_IsValid(int value); -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig_AddressFamily_AddressFamily_MIN = IceCandidatePairConfig_AddressFamily_UNKNOWN_ADDRESS_FAMILY; -constexpr IceCandidatePairConfig_AddressFamily IceCandidatePairConfig_AddressFamily_AddressFamily_MAX = IceCandidatePairConfig_AddressFamily_IPV6; -constexpr int IceCandidatePairConfig_AddressFamily_AddressFamily_ARRAYSIZE = IceCandidatePairConfig_AddressFamily_AddressFamily_MAX + 1; - -const std::string& IceCandidatePairConfig_AddressFamily_Name(IceCandidatePairConfig_AddressFamily value); -template -inline const std::string& IceCandidatePairConfig_AddressFamily_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_AddressFamily_Name."); - return IceCandidatePairConfig_AddressFamily_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_AddressFamily_Parse( - const std::string& name, IceCandidatePairConfig_AddressFamily* value); -enum IceCandidatePairConfig_NetworkType : int { - 
IceCandidatePairConfig_NetworkType_UNKNOWN_NETWORK_TYPE = 0, - IceCandidatePairConfig_NetworkType_ETHERNET = 1, - IceCandidatePairConfig_NetworkType_WIFI = 2, - IceCandidatePairConfig_NetworkType_CELLULAR = 3, - IceCandidatePairConfig_NetworkType_VPN = 4, - IceCandidatePairConfig_NetworkType_LOOPBACK = 5 -}; -bool IceCandidatePairConfig_NetworkType_IsValid(int value); -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig_NetworkType_NetworkType_MIN = IceCandidatePairConfig_NetworkType_UNKNOWN_NETWORK_TYPE; -constexpr IceCandidatePairConfig_NetworkType IceCandidatePairConfig_NetworkType_NetworkType_MAX = IceCandidatePairConfig_NetworkType_LOOPBACK; -constexpr int IceCandidatePairConfig_NetworkType_NetworkType_ARRAYSIZE = IceCandidatePairConfig_NetworkType_NetworkType_MAX + 1; - -const std::string& IceCandidatePairConfig_NetworkType_Name(IceCandidatePairConfig_NetworkType value); -template -inline const std::string& IceCandidatePairConfig_NetworkType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfig_NetworkType_Name."); - return IceCandidatePairConfig_NetworkType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairConfig_NetworkType_Parse( - const std::string& name, IceCandidatePairConfig_NetworkType* value); -enum IceCandidatePairEvent_IceCandidatePairEventType : int { - IceCandidatePairEvent_IceCandidatePairEventType_UNKNOWN_CHECK_TYPE = 0, - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_SENT = 1, - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RECEIVED = 2, - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_SENT = 3, - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_RECEIVED = 4 -}; -bool IceCandidatePairEvent_IceCandidatePairEventType_IsValid(int value); -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MIN = 
IceCandidatePairEvent_IceCandidatePairEventType_UNKNOWN_CHECK_TYPE; -constexpr IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MAX = IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_RECEIVED; -constexpr int IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_ARRAYSIZE = IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MAX + 1; - -const std::string& IceCandidatePairEvent_IceCandidatePairEventType_Name(IceCandidatePairEvent_IceCandidatePairEventType value); -template -inline const std::string& IceCandidatePairEvent_IceCandidatePairEventType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairEvent_IceCandidatePairEventType_Name."); - return IceCandidatePairEvent_IceCandidatePairEventType_Name(static_cast(enum_t_value)); -} -bool IceCandidatePairEvent_IceCandidatePairEventType_Parse( - const std::string& name, IceCandidatePairEvent_IceCandidatePairEventType* value); -enum DtlsTransportStateEvent_DtlsTransportState : int { - DtlsTransportStateEvent_DtlsTransportState_UNKNOWN_DTLS_TRANSPORT_STATE = 0, - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_NEW = 1, - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_CONNECTING = 2, - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_CONNECTED = 3, - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_CLOSED = 4, - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_FAILED = 5 -}; -bool DtlsTransportStateEvent_DtlsTransportState_IsValid(int value); -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent_DtlsTransportState_DtlsTransportState_MIN = DtlsTransportStateEvent_DtlsTransportState_UNKNOWN_DTLS_TRANSPORT_STATE; -constexpr DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent_DtlsTransportState_DtlsTransportState_MAX = 
DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_FAILED; -constexpr int DtlsTransportStateEvent_DtlsTransportState_DtlsTransportState_ARRAYSIZE = DtlsTransportStateEvent_DtlsTransportState_DtlsTransportState_MAX + 1; - -const std::string& DtlsTransportStateEvent_DtlsTransportState_Name(DtlsTransportStateEvent_DtlsTransportState value); -template -inline const std::string& DtlsTransportStateEvent_DtlsTransportState_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function DtlsTransportStateEvent_DtlsTransportState_Name."); - return DtlsTransportStateEvent_DtlsTransportState_Name(static_cast(enum_t_value)); -} -bool DtlsTransportStateEvent_DtlsTransportState_Parse( - const std::string& name, DtlsTransportStateEvent_DtlsTransportState* value); -// =================================================================== - -class EventStream : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.EventStream) */ { - public: - EventStream(); - virtual ~EventStream(); - - EventStream(const EventStream& from); - EventStream(EventStream&& from) noexcept - : EventStream() { - *this = ::std::move(from); - } - - inline EventStream& operator=(const EventStream& from) { - CopyFrom(from); - return *this; - } - inline EventStream& operator=(EventStream&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const EventStream& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const EventStream* internal_default_instance() { - return reinterpret_cast( - 
&_EventStream_default_instance_); - } - static constexpr int kIndexInFileMessages = - 0; - - friend void swap(EventStream& a, EventStream& b) { - a.Swap(&b); - } - inline void Swap(EventStream* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline EventStream* New() const final { - return CreateMaybeMessage(nullptr); - } - - EventStream* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const EventStream& from); - void MergeFrom(const EventStream& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(EventStream* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.EventStream"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors 
------------------------------------------------------- - - enum : int { - kStreamFieldNumber = 1, - kIncomingRtpPacketsFieldNumber = 2, - kOutgoingRtpPacketsFieldNumber = 3, - kIncomingRtcpPacketsFieldNumber = 4, - kOutgoingRtcpPacketsFieldNumber = 5, - kAudioPlayoutEventsFieldNumber = 6, - kFrameDecodedEventsFieldNumber = 7, - kBeginLogEventsFieldNumber = 16, - kEndLogEventsFieldNumber = 17, - kLossBasedBweUpdatesFieldNumber = 18, - kDelayBasedBweUpdatesFieldNumber = 19, - kAudioNetworkAdaptationsFieldNumber = 20, - kProbeClustersFieldNumber = 21, - kProbeSuccessFieldNumber = 22, - kProbeFailureFieldNumber = 23, - kAlrStatesFieldNumber = 24, - kIceCandidateConfigsFieldNumber = 25, - kIceCandidateEventsFieldNumber = 26, - kDtlsTransportStateEventsFieldNumber = 27, - kDtlsWritableStatesFieldNumber = 28, - kGenericPacketsSentFieldNumber = 29, - kGenericPacketsReceivedFieldNumber = 30, - kGenericAcksReceivedFieldNumber = 31, - kRouteChangesFieldNumber = 32, - kRemoteEstimatesFieldNumber = 33, - kAudioRecvStreamConfigsFieldNumber = 101, - kAudioSendStreamConfigsFieldNumber = 102, - kVideoRecvStreamConfigsFieldNumber = 103, - kVideoSendStreamConfigsFieldNumber = 104, - }; - // repeated .webrtc.rtclog2.Event stream = 1 [deprecated = true]; - PROTOBUF_DEPRECATED int stream_size() const; - PROTOBUF_DEPRECATED void clear_stream(); - PROTOBUF_DEPRECATED ::webrtc::rtclog2::Event* mutable_stream(int index); - PROTOBUF_DEPRECATED ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::Event >* - mutable_stream(); - PROTOBUF_DEPRECATED const ::webrtc::rtclog2::Event& stream(int index) const; - PROTOBUF_DEPRECATED ::webrtc::rtclog2::Event* add_stream(); - PROTOBUF_DEPRECATED const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::Event >& - stream() const; - - // repeated .webrtc.rtclog2.IncomingRtpPackets incoming_rtp_packets = 2; - int incoming_rtp_packets_size() const; - void clear_incoming_rtp_packets(); - ::webrtc::rtclog2::IncomingRtpPackets* 
mutable_incoming_rtp_packets(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtpPackets >* - mutable_incoming_rtp_packets(); - const ::webrtc::rtclog2::IncomingRtpPackets& incoming_rtp_packets(int index) const; - ::webrtc::rtclog2::IncomingRtpPackets* add_incoming_rtp_packets(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtpPackets >& - incoming_rtp_packets() const; - - // repeated .webrtc.rtclog2.OutgoingRtpPackets outgoing_rtp_packets = 3; - int outgoing_rtp_packets_size() const; - void clear_outgoing_rtp_packets(); - ::webrtc::rtclog2::OutgoingRtpPackets* mutable_outgoing_rtp_packets(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtpPackets >* - mutable_outgoing_rtp_packets(); - const ::webrtc::rtclog2::OutgoingRtpPackets& outgoing_rtp_packets(int index) const; - ::webrtc::rtclog2::OutgoingRtpPackets* add_outgoing_rtp_packets(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtpPackets >& - outgoing_rtp_packets() const; - - // repeated .webrtc.rtclog2.IncomingRtcpPackets incoming_rtcp_packets = 4; - int incoming_rtcp_packets_size() const; - void clear_incoming_rtcp_packets(); - ::webrtc::rtclog2::IncomingRtcpPackets* mutable_incoming_rtcp_packets(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtcpPackets >* - mutable_incoming_rtcp_packets(); - const ::webrtc::rtclog2::IncomingRtcpPackets& incoming_rtcp_packets(int index) const; - ::webrtc::rtclog2::IncomingRtcpPackets* add_incoming_rtcp_packets(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtcpPackets >& - incoming_rtcp_packets() const; - - // repeated .webrtc.rtclog2.OutgoingRtcpPackets outgoing_rtcp_packets = 5; - int outgoing_rtcp_packets_size() const; - void clear_outgoing_rtcp_packets(); - ::webrtc::rtclog2::OutgoingRtcpPackets* mutable_outgoing_rtcp_packets(int index); - 
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtcpPackets >* - mutable_outgoing_rtcp_packets(); - const ::webrtc::rtclog2::OutgoingRtcpPackets& outgoing_rtcp_packets(int index) const; - ::webrtc::rtclog2::OutgoingRtcpPackets* add_outgoing_rtcp_packets(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtcpPackets >& - outgoing_rtcp_packets() const; - - // repeated .webrtc.rtclog2.AudioPlayoutEvents audio_playout_events = 6; - int audio_playout_events_size() const; - void clear_audio_playout_events(); - ::webrtc::rtclog2::AudioPlayoutEvents* mutable_audio_playout_events(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioPlayoutEvents >* - mutable_audio_playout_events(); - const ::webrtc::rtclog2::AudioPlayoutEvents& audio_playout_events(int index) const; - ::webrtc::rtclog2::AudioPlayoutEvents* add_audio_playout_events(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioPlayoutEvents >& - audio_playout_events() const; - - // repeated .webrtc.rtclog2.FrameDecodedEvents frame_decoded_events = 7; - int frame_decoded_events_size() const; - void clear_frame_decoded_events(); - ::webrtc::rtclog2::FrameDecodedEvents* mutable_frame_decoded_events(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::FrameDecodedEvents >* - mutable_frame_decoded_events(); - const ::webrtc::rtclog2::FrameDecodedEvents& frame_decoded_events(int index) const; - ::webrtc::rtclog2::FrameDecodedEvents* add_frame_decoded_events(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::FrameDecodedEvents >& - frame_decoded_events() const; - - // repeated .webrtc.rtclog2.BeginLogEvent begin_log_events = 16; - int begin_log_events_size() const; - void clear_begin_log_events(); - ::webrtc::rtclog2::BeginLogEvent* mutable_begin_log_events(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BeginLogEvent >* - mutable_begin_log_events(); 
- const ::webrtc::rtclog2::BeginLogEvent& begin_log_events(int index) const; - ::webrtc::rtclog2::BeginLogEvent* add_begin_log_events(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BeginLogEvent >& - begin_log_events() const; - - // repeated .webrtc.rtclog2.EndLogEvent end_log_events = 17; - int end_log_events_size() const; - void clear_end_log_events(); - ::webrtc::rtclog2::EndLogEvent* mutable_end_log_events(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::EndLogEvent >* - mutable_end_log_events(); - const ::webrtc::rtclog2::EndLogEvent& end_log_events(int index) const; - ::webrtc::rtclog2::EndLogEvent* add_end_log_events(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::EndLogEvent >& - end_log_events() const; - - // repeated .webrtc.rtclog2.LossBasedBweUpdates loss_based_bwe_updates = 18; - int loss_based_bwe_updates_size() const; - void clear_loss_based_bwe_updates(); - ::webrtc::rtclog2::LossBasedBweUpdates* mutable_loss_based_bwe_updates(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::LossBasedBweUpdates >* - mutable_loss_based_bwe_updates(); - const ::webrtc::rtclog2::LossBasedBweUpdates& loss_based_bwe_updates(int index) const; - ::webrtc::rtclog2::LossBasedBweUpdates* add_loss_based_bwe_updates(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::LossBasedBweUpdates >& - loss_based_bwe_updates() const; - - // repeated .webrtc.rtclog2.DelayBasedBweUpdates delay_based_bwe_updates = 19; - int delay_based_bwe_updates_size() const; - void clear_delay_based_bwe_updates(); - ::webrtc::rtclog2::DelayBasedBweUpdates* mutable_delay_based_bwe_updates(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DelayBasedBweUpdates >* - mutable_delay_based_bwe_updates(); - const ::webrtc::rtclog2::DelayBasedBweUpdates& delay_based_bwe_updates(int index) const; - ::webrtc::rtclog2::DelayBasedBweUpdates* 
add_delay_based_bwe_updates(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DelayBasedBweUpdates >& - delay_based_bwe_updates() const; - - // repeated .webrtc.rtclog2.AudioNetworkAdaptations audio_network_adaptations = 20; - int audio_network_adaptations_size() const; - void clear_audio_network_adaptations(); - ::webrtc::rtclog2::AudioNetworkAdaptations* mutable_audio_network_adaptations(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioNetworkAdaptations >* - mutable_audio_network_adaptations(); - const ::webrtc::rtclog2::AudioNetworkAdaptations& audio_network_adaptations(int index) const; - ::webrtc::rtclog2::AudioNetworkAdaptations* add_audio_network_adaptations(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioNetworkAdaptations >& - audio_network_adaptations() const; - - // repeated .webrtc.rtclog2.BweProbeCluster probe_clusters = 21; - int probe_clusters_size() const; - void clear_probe_clusters(); - ::webrtc::rtclog2::BweProbeCluster* mutable_probe_clusters(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeCluster >* - mutable_probe_clusters(); - const ::webrtc::rtclog2::BweProbeCluster& probe_clusters(int index) const; - ::webrtc::rtclog2::BweProbeCluster* add_probe_clusters(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeCluster >& - probe_clusters() const; - - // repeated .webrtc.rtclog2.BweProbeResultSuccess probe_success = 22; - int probe_success_size() const; - void clear_probe_success(); - ::webrtc::rtclog2::BweProbeResultSuccess* mutable_probe_success(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultSuccess >* - mutable_probe_success(); - const ::webrtc::rtclog2::BweProbeResultSuccess& probe_success(int index) const; - ::webrtc::rtclog2::BweProbeResultSuccess* add_probe_success(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< 
::webrtc::rtclog2::BweProbeResultSuccess >& - probe_success() const; - - // repeated .webrtc.rtclog2.BweProbeResultFailure probe_failure = 23; - int probe_failure_size() const; - void clear_probe_failure(); - ::webrtc::rtclog2::BweProbeResultFailure* mutable_probe_failure(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultFailure >* - mutable_probe_failure(); - const ::webrtc::rtclog2::BweProbeResultFailure& probe_failure(int index) const; - ::webrtc::rtclog2::BweProbeResultFailure* add_probe_failure(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultFailure >& - probe_failure() const; - - // repeated .webrtc.rtclog2.AlrState alr_states = 24; - int alr_states_size() const; - void clear_alr_states(); - ::webrtc::rtclog2::AlrState* mutable_alr_states(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AlrState >* - mutable_alr_states(); - const ::webrtc::rtclog2::AlrState& alr_states(int index) const; - ::webrtc::rtclog2::AlrState* add_alr_states(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AlrState >& - alr_states() const; - - // repeated .webrtc.rtclog2.IceCandidatePairConfig ice_candidate_configs = 25; - int ice_candidate_configs_size() const; - void clear_ice_candidate_configs(); - ::webrtc::rtclog2::IceCandidatePairConfig* mutable_ice_candidate_configs(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairConfig >* - mutable_ice_candidate_configs(); - const ::webrtc::rtclog2::IceCandidatePairConfig& ice_candidate_configs(int index) const; - ::webrtc::rtclog2::IceCandidatePairConfig* add_ice_candidate_configs(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairConfig >& - ice_candidate_configs() const; - - // repeated .webrtc.rtclog2.IceCandidatePairEvent ice_candidate_events = 26; - int ice_candidate_events_size() const; - void clear_ice_candidate_events(); - 
::webrtc::rtclog2::IceCandidatePairEvent* mutable_ice_candidate_events(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairEvent >* - mutable_ice_candidate_events(); - const ::webrtc::rtclog2::IceCandidatePairEvent& ice_candidate_events(int index) const; - ::webrtc::rtclog2::IceCandidatePairEvent* add_ice_candidate_events(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairEvent >& - ice_candidate_events() const; - - // repeated .webrtc.rtclog2.DtlsTransportStateEvent dtls_transport_state_events = 27; - int dtls_transport_state_events_size() const; - void clear_dtls_transport_state_events(); - ::webrtc::rtclog2::DtlsTransportStateEvent* mutable_dtls_transport_state_events(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsTransportStateEvent >* - mutable_dtls_transport_state_events(); - const ::webrtc::rtclog2::DtlsTransportStateEvent& dtls_transport_state_events(int index) const; - ::webrtc::rtclog2::DtlsTransportStateEvent* add_dtls_transport_state_events(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsTransportStateEvent >& - dtls_transport_state_events() const; - - // repeated .webrtc.rtclog2.DtlsWritableState dtls_writable_states = 28; - int dtls_writable_states_size() const; - void clear_dtls_writable_states(); - ::webrtc::rtclog2::DtlsWritableState* mutable_dtls_writable_states(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsWritableState >* - mutable_dtls_writable_states(); - const ::webrtc::rtclog2::DtlsWritableState& dtls_writable_states(int index) const; - ::webrtc::rtclog2::DtlsWritableState* add_dtls_writable_states(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsWritableState >& - dtls_writable_states() const; - - // repeated .webrtc.rtclog2.GenericPacketSent generic_packets_sent = 29; - int generic_packets_sent_size() const; - void 
clear_generic_packets_sent(); - ::webrtc::rtclog2::GenericPacketSent* mutable_generic_packets_sent(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketSent >* - mutable_generic_packets_sent(); - const ::webrtc::rtclog2::GenericPacketSent& generic_packets_sent(int index) const; - ::webrtc::rtclog2::GenericPacketSent* add_generic_packets_sent(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketSent >& - generic_packets_sent() const; - - // repeated .webrtc.rtclog2.GenericPacketReceived generic_packets_received = 30; - int generic_packets_received_size() const; - void clear_generic_packets_received(); - ::webrtc::rtclog2::GenericPacketReceived* mutable_generic_packets_received(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketReceived >* - mutable_generic_packets_received(); - const ::webrtc::rtclog2::GenericPacketReceived& generic_packets_received(int index) const; - ::webrtc::rtclog2::GenericPacketReceived* add_generic_packets_received(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketReceived >& - generic_packets_received() const; - - // repeated .webrtc.rtclog2.GenericAckReceived generic_acks_received = 31; - int generic_acks_received_size() const; - void clear_generic_acks_received(); - ::webrtc::rtclog2::GenericAckReceived* mutable_generic_acks_received(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericAckReceived >* - mutable_generic_acks_received(); - const ::webrtc::rtclog2::GenericAckReceived& generic_acks_received(int index) const; - ::webrtc::rtclog2::GenericAckReceived* add_generic_acks_received(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericAckReceived >& - generic_acks_received() const; - - // repeated .webrtc.rtclog2.RouteChange route_changes = 32; - int route_changes_size() const; - void clear_route_changes(); - ::webrtc::rtclog2::RouteChange* 
mutable_route_changes(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RouteChange >* - mutable_route_changes(); - const ::webrtc::rtclog2::RouteChange& route_changes(int index) const; - ::webrtc::rtclog2::RouteChange* add_route_changes(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RouteChange >& - route_changes() const; - - // repeated .webrtc.rtclog2.RemoteEstimates remote_estimates = 33; - int remote_estimates_size() const; - void clear_remote_estimates(); - ::webrtc::rtclog2::RemoteEstimates* mutable_remote_estimates(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RemoteEstimates >* - mutable_remote_estimates(); - const ::webrtc::rtclog2::RemoteEstimates& remote_estimates(int index) const; - ::webrtc::rtclog2::RemoteEstimates* add_remote_estimates(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RemoteEstimates >& - remote_estimates() const; - - // repeated .webrtc.rtclog2.AudioRecvStreamConfig audio_recv_stream_configs = 101; - int audio_recv_stream_configs_size() const; - void clear_audio_recv_stream_configs(); - ::webrtc::rtclog2::AudioRecvStreamConfig* mutable_audio_recv_stream_configs(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioRecvStreamConfig >* - mutable_audio_recv_stream_configs(); - const ::webrtc::rtclog2::AudioRecvStreamConfig& audio_recv_stream_configs(int index) const; - ::webrtc::rtclog2::AudioRecvStreamConfig* add_audio_recv_stream_configs(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioRecvStreamConfig >& - audio_recv_stream_configs() const; - - // repeated .webrtc.rtclog2.AudioSendStreamConfig audio_send_stream_configs = 102; - int audio_send_stream_configs_size() const; - void clear_audio_send_stream_configs(); - ::webrtc::rtclog2::AudioSendStreamConfig* mutable_audio_send_stream_configs(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< 
::webrtc::rtclog2::AudioSendStreamConfig >* - mutable_audio_send_stream_configs(); - const ::webrtc::rtclog2::AudioSendStreamConfig& audio_send_stream_configs(int index) const; - ::webrtc::rtclog2::AudioSendStreamConfig* add_audio_send_stream_configs(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioSendStreamConfig >& - audio_send_stream_configs() const; - - // repeated .webrtc.rtclog2.VideoRecvStreamConfig video_recv_stream_configs = 103; - int video_recv_stream_configs_size() const; - void clear_video_recv_stream_configs(); - ::webrtc::rtclog2::VideoRecvStreamConfig* mutable_video_recv_stream_configs(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoRecvStreamConfig >* - mutable_video_recv_stream_configs(); - const ::webrtc::rtclog2::VideoRecvStreamConfig& video_recv_stream_configs(int index) const; - ::webrtc::rtclog2::VideoRecvStreamConfig* add_video_recv_stream_configs(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoRecvStreamConfig >& - video_recv_stream_configs() const; - - // repeated .webrtc.rtclog2.VideoSendStreamConfig video_send_stream_configs = 104; - int video_send_stream_configs_size() const; - void clear_video_send_stream_configs(); - ::webrtc::rtclog2::VideoSendStreamConfig* mutable_video_send_stream_configs(int index); - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoSendStreamConfig >* - mutable_video_send_stream_configs(); - const ::webrtc::rtclog2::VideoSendStreamConfig& video_send_stream_configs(int index) const; - ::webrtc::rtclog2::VideoSendStreamConfig* add_video_send_stream_configs(); - const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoSendStreamConfig >& - video_send_stream_configs() const; - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.EventStream) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - 
::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::Event > stream_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtpPackets > incoming_rtp_packets_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtpPackets > outgoing_rtp_packets_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtcpPackets > incoming_rtcp_packets_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtcpPackets > outgoing_rtcp_packets_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioPlayoutEvents > audio_playout_events_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::FrameDecodedEvents > frame_decoded_events_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BeginLogEvent > begin_log_events_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::EndLogEvent > end_log_events_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::LossBasedBweUpdates > loss_based_bwe_updates_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DelayBasedBweUpdates > delay_based_bwe_updates_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioNetworkAdaptations > audio_network_adaptations_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeCluster > probe_clusters_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultSuccess > probe_success_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultFailure > probe_failure_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AlrState > alr_states_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairConfig > ice_candidate_configs_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairEvent > 
ice_candidate_events_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsTransportStateEvent > dtls_transport_state_events_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsWritableState > dtls_writable_states_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketSent > generic_packets_sent_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketReceived > generic_packets_received_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericAckReceived > generic_acks_received_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RouteChange > route_changes_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RemoteEstimates > remote_estimates_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioRecvStreamConfig > audio_recv_stream_configs_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioSendStreamConfig > audio_send_stream_configs_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoRecvStreamConfig > video_recv_stream_configs_; - ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoSendStreamConfig > video_send_stream_configs_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class Event : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.Event) */ { - public: - Event(); - virtual ~Event(); - - Event(const Event& from); - Event(Event&& from) noexcept - : Event() { - *this = ::std::move(from); - } - - inline Event& operator=(const Event& from) { - CopyFrom(from); - return *this; - } - inline Event& operator=(Event&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() 
const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const Event& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const Event* internal_default_instance() { - return reinterpret_cast( - &_Event_default_instance_); - } - static constexpr int kIndexInFileMessages = - 1; - - friend void swap(Event& a, Event& b) { - a.Swap(&b); - } - inline void Swap(Event* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline Event* New() const final { - return CreateMaybeMessage(nullptr); - } - - Event* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const Event& from); - void MergeFrom(const Event& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(Event* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.Event"; - } - private: - inline 
::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.Event) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class GenericPacketReceived : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.GenericPacketReceived) */ { - public: - GenericPacketReceived(); - virtual ~GenericPacketReceived(); - - GenericPacketReceived(const GenericPacketReceived& from); - GenericPacketReceived(GenericPacketReceived&& from) noexcept - : GenericPacketReceived() { - *this = ::std::move(from); - } - - inline GenericPacketReceived& operator=(const GenericPacketReceived& from) { - CopyFrom(from); - return *this; - } - inline GenericPacketReceived& operator=(GenericPacketReceived&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const GenericPacketReceived& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const GenericPacketReceived* internal_default_instance() { - return 
reinterpret_cast( - &_GenericPacketReceived_default_instance_); - } - static constexpr int kIndexInFileMessages = - 2; - - friend void swap(GenericPacketReceived& a, GenericPacketReceived& b) { - a.Swap(&b); - } - inline void Swap(GenericPacketReceived* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline GenericPacketReceived* New() const final { - return CreateMaybeMessage(nullptr); - } - - GenericPacketReceived* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const GenericPacketReceived& from); - void MergeFrom(const GenericPacketReceived& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(GenericPacketReceived* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.GenericPacketReceived"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() 
const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 17, - kPacketNumberDeltasFieldNumber = 18, - kPacketLengthDeltasFieldNumber = 19, - kTimestampMsFieldNumber = 1, - kPacketNumberFieldNumber = 2, - kPacketLengthFieldNumber = 3, - kNumberOfDeltasFieldNumber = 16, - }; - // optional bytes timestamp_ms_deltas = 17; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes packet_number_deltas = 18; - bool has_packet_number_deltas() const; - void clear_packet_number_deltas(); - const std::string& packet_number_deltas() const; - void set_packet_number_deltas(const std::string& value); - void set_packet_number_deltas(std::string&& value); - void set_packet_number_deltas(const char* value); - void set_packet_number_deltas(const void* value, size_t size); - std::string* mutable_packet_number_deltas(); - std::string* release_packet_number_deltas(); - void set_allocated_packet_number_deltas(std::string* packet_number_deltas); - - // optional bytes packet_length_deltas = 19; - bool has_packet_length_deltas() const; - void clear_packet_length_deltas(); - const std::string& packet_length_deltas() const; - void set_packet_length_deltas(const std::string& value); - void set_packet_length_deltas(std::string&& value); - void set_packet_length_deltas(const char* value); - void set_packet_length_deltas(const void* value, size_t size); - std::string* 
mutable_packet_length_deltas(); - std::string* release_packet_length_deltas(); - void set_allocated_packet_length_deltas(std::string* packet_length_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int64 packet_number = 2; - bool has_packet_number() const; - void clear_packet_number(); - ::PROTOBUF_NAMESPACE_ID::int64 packet_number() const; - void set_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int32 packet_length = 3; - bool has_packet_length() const; - void clear_packet_length(); - ::PROTOBUF_NAMESPACE_ID::int32 packet_length() const; - void set_packet_length(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional uint32 number_of_deltas = 16; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.GenericPacketReceived) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr packet_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr packet_length_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::int64 packet_number_; - ::PROTOBUF_NAMESPACE_ID::int32 packet_length_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class GenericPacketSent : - public 
::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.GenericPacketSent) */ { - public: - GenericPacketSent(); - virtual ~GenericPacketSent(); - - GenericPacketSent(const GenericPacketSent& from); - GenericPacketSent(GenericPacketSent&& from) noexcept - : GenericPacketSent() { - *this = ::std::move(from); - } - - inline GenericPacketSent& operator=(const GenericPacketSent& from) { - CopyFrom(from); - return *this; - } - inline GenericPacketSent& operator=(GenericPacketSent&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const GenericPacketSent& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const GenericPacketSent* internal_default_instance() { - return reinterpret_cast( - &_GenericPacketSent_default_instance_); - } - static constexpr int kIndexInFileMessages = - 3; - - friend void swap(GenericPacketSent& a, GenericPacketSent& b) { - a.Swap(&b); - } - inline void Swap(GenericPacketSent* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline GenericPacketSent* New() const final { - return CreateMaybeMessage(nullptr); - } - - GenericPacketSent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const GenericPacketSent& from); - void MergeFrom(const GenericPacketSent& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() 
const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(GenericPacketSent* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.GenericPacketSent"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 17, - kPacketNumberDeltasFieldNumber = 18, - kOverheadLengthDeltasFieldNumber = 19, - kPayloadLengthDeltasFieldNumber = 20, - kPaddingLengthDeltasFieldNumber = 21, - kTimestampMsFieldNumber = 1, - kPacketNumberFieldNumber = 2, - kOverheadLengthFieldNumber = 3, - kPayloadLengthFieldNumber = 4, - kPaddingLengthFieldNumber = 5, - kNumberOfDeltasFieldNumber = 16, - }; - // optional bytes timestamp_ms_deltas = 17; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void 
set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes packet_number_deltas = 18; - bool has_packet_number_deltas() const; - void clear_packet_number_deltas(); - const std::string& packet_number_deltas() const; - void set_packet_number_deltas(const std::string& value); - void set_packet_number_deltas(std::string&& value); - void set_packet_number_deltas(const char* value); - void set_packet_number_deltas(const void* value, size_t size); - std::string* mutable_packet_number_deltas(); - std::string* release_packet_number_deltas(); - void set_allocated_packet_number_deltas(std::string* packet_number_deltas); - - // optional bytes overhead_length_deltas = 19; - bool has_overhead_length_deltas() const; - void clear_overhead_length_deltas(); - const std::string& overhead_length_deltas() const; - void set_overhead_length_deltas(const std::string& value); - void set_overhead_length_deltas(std::string&& value); - void set_overhead_length_deltas(const char* value); - void set_overhead_length_deltas(const void* value, size_t size); - std::string* mutable_overhead_length_deltas(); - std::string* release_overhead_length_deltas(); - void set_allocated_overhead_length_deltas(std::string* overhead_length_deltas); - - // optional bytes payload_length_deltas = 20; - bool has_payload_length_deltas() const; - void clear_payload_length_deltas(); - const std::string& payload_length_deltas() const; - void set_payload_length_deltas(const std::string& value); - void set_payload_length_deltas(std::string&& value); - void set_payload_length_deltas(const char* value); - void set_payload_length_deltas(const void* value, size_t size); - std::string* mutable_payload_length_deltas(); - std::string* release_payload_length_deltas(); - void set_allocated_payload_length_deltas(std::string* 
payload_length_deltas); - - // optional bytes padding_length_deltas = 21; - bool has_padding_length_deltas() const; - void clear_padding_length_deltas(); - const std::string& padding_length_deltas() const; - void set_padding_length_deltas(const std::string& value); - void set_padding_length_deltas(std::string&& value); - void set_padding_length_deltas(const char* value); - void set_padding_length_deltas(const void* value, size_t size); - std::string* mutable_padding_length_deltas(); - std::string* release_padding_length_deltas(); - void set_allocated_padding_length_deltas(std::string* padding_length_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int64 packet_number = 2; - bool has_packet_number() const; - void clear_packet_number(); - ::PROTOBUF_NAMESPACE_ID::int64 packet_number() const; - void set_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int32 overhead_length = 3; - bool has_overhead_length() const; - void clear_overhead_length(); - ::PROTOBUF_NAMESPACE_ID::int32 overhead_length() const; - void set_overhead_length(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 payload_length = 4; - bool has_payload_length() const; - void clear_payload_length(); - ::PROTOBUF_NAMESPACE_ID::int32 payload_length() const; - void set_payload_length(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 padding_length = 5; - bool has_padding_length() const; - void clear_padding_length(); - ::PROTOBUF_NAMESPACE_ID::int32 padding_length() const; - void set_padding_length(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional uint32 number_of_deltas = 16; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); 
- - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.GenericPacketSent) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr packet_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr overhead_length_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr payload_length_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr padding_length_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::int64 packet_number_; - ::PROTOBUF_NAMESPACE_ID::int32 overhead_length_; - ::PROTOBUF_NAMESPACE_ID::int32 payload_length_; - ::PROTOBUF_NAMESPACE_ID::int32 padding_length_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class GenericAckReceived : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.GenericAckReceived) */ { - public: - GenericAckReceived(); - virtual ~GenericAckReceived(); - - GenericAckReceived(const GenericAckReceived& from); - GenericAckReceived(GenericAckReceived&& from) noexcept - : GenericAckReceived() { - *this = ::std::move(from); - } - - inline GenericAckReceived& operator=(const GenericAckReceived& from) { - CopyFrom(from); - return *this; - } - inline GenericAckReceived& operator=(GenericAckReceived&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline 
std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const GenericAckReceived& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const GenericAckReceived* internal_default_instance() { - return reinterpret_cast( - &_GenericAckReceived_default_instance_); - } - static constexpr int kIndexInFileMessages = - 4; - - friend void swap(GenericAckReceived& a, GenericAckReceived& b) { - a.Swap(&b); - } - inline void Swap(GenericAckReceived* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline GenericAckReceived* New() const final { - return CreateMaybeMessage(nullptr); - } - - GenericAckReceived* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const GenericAckReceived& from); - void MergeFrom(const GenericAckReceived& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(GenericAckReceived* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece 
FullMessageName() { - return "webrtc.rtclog2.GenericAckReceived"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 17, - kPacketNumberDeltasFieldNumber = 18, - kAckedPacketNumberDeltasFieldNumber = 19, - kReceiveAckedPacketTimeMsDeltasFieldNumber = 20, - kTimestampMsFieldNumber = 1, - kPacketNumberFieldNumber = 2, - kAckedPacketNumberFieldNumber = 3, - kReceiveAckedPacketTimeMsFieldNumber = 4, - kNumberOfDeltasFieldNumber = 16, - }; - // optional bytes timestamp_ms_deltas = 17; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes packet_number_deltas = 18; - bool has_packet_number_deltas() const; - void clear_packet_number_deltas(); - const std::string& packet_number_deltas() const; - void set_packet_number_deltas(const std::string& value); - void set_packet_number_deltas(std::string&& value); - void set_packet_number_deltas(const char* value); - void set_packet_number_deltas(const void* value, size_t size); - std::string* mutable_packet_number_deltas(); - std::string* release_packet_number_deltas(); - void set_allocated_packet_number_deltas(std::string* packet_number_deltas); - - // optional bytes acked_packet_number_deltas 
= 19; - bool has_acked_packet_number_deltas() const; - void clear_acked_packet_number_deltas(); - const std::string& acked_packet_number_deltas() const; - void set_acked_packet_number_deltas(const std::string& value); - void set_acked_packet_number_deltas(std::string&& value); - void set_acked_packet_number_deltas(const char* value); - void set_acked_packet_number_deltas(const void* value, size_t size); - std::string* mutable_acked_packet_number_deltas(); - std::string* release_acked_packet_number_deltas(); - void set_allocated_acked_packet_number_deltas(std::string* acked_packet_number_deltas); - - // optional bytes receive_acked_packet_time_ms_deltas = 20; - bool has_receive_acked_packet_time_ms_deltas() const; - void clear_receive_acked_packet_time_ms_deltas(); - const std::string& receive_acked_packet_time_ms_deltas() const; - void set_receive_acked_packet_time_ms_deltas(const std::string& value); - void set_receive_acked_packet_time_ms_deltas(std::string&& value); - void set_receive_acked_packet_time_ms_deltas(const char* value); - void set_receive_acked_packet_time_ms_deltas(const void* value, size_t size); - std::string* mutable_receive_acked_packet_time_ms_deltas(); - std::string* release_receive_acked_packet_time_ms_deltas(); - void set_allocated_receive_acked_packet_time_ms_deltas(std::string* receive_acked_packet_time_ms_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int64 packet_number = 2; - bool has_packet_number() const; - void clear_packet_number(); - ::PROTOBUF_NAMESPACE_ID::int64 packet_number() const; - void set_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int64 acked_packet_number = 3; - bool has_acked_packet_number() const; - void clear_acked_packet_number(); - ::PROTOBUF_NAMESPACE_ID::int64 acked_packet_number() const; - void 
set_acked_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int64 receive_acked_packet_time_ms = 4; - bool has_receive_acked_packet_time_ms() const; - void clear_receive_acked_packet_time_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 receive_acked_packet_time_ms() const; - void set_receive_acked_packet_time_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 number_of_deltas = 16; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.GenericAckReceived) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr packet_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr acked_packet_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr receive_acked_packet_time_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::int64 packet_number_; - ::PROTOBUF_NAMESPACE_ID::int64 acked_packet_number_; - ::PROTOBUF_NAMESPACE_ID::int64 receive_acked_packet_time_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class IncomingRtpPackets : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.IncomingRtpPackets) */ { - public: - IncomingRtpPackets(); - virtual ~IncomingRtpPackets(); - - IncomingRtpPackets(const IncomingRtpPackets& from); - IncomingRtpPackets(IncomingRtpPackets&& from) 
noexcept - : IncomingRtpPackets() { - *this = ::std::move(from); - } - - inline IncomingRtpPackets& operator=(const IncomingRtpPackets& from) { - CopyFrom(from); - return *this; - } - inline IncomingRtpPackets& operator=(IncomingRtpPackets&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const IncomingRtpPackets& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const IncomingRtpPackets* internal_default_instance() { - return reinterpret_cast( - &_IncomingRtpPackets_default_instance_); - } - static constexpr int kIndexInFileMessages = - 5; - - friend void swap(IncomingRtpPackets& a, IncomingRtpPackets& b) { - a.Swap(&b); - } - inline void Swap(IncomingRtpPackets* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline IncomingRtpPackets* New() const final { - return CreateMaybeMessage(nullptr); - } - - IncomingRtpPackets* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const IncomingRtpPackets& from); - void MergeFrom(const IncomingRtpPackets& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) 
final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(IncomingRtpPackets* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.IncomingRtpPackets"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 101, - kMarkerDeltasFieldNumber = 102, - kPayloadTypeDeltasFieldNumber = 103, - kSequenceNumberDeltasFieldNumber = 104, - kRtpTimestampDeltasFieldNumber = 105, - kSsrcDeltasFieldNumber = 106, - kPayloadSizeDeltasFieldNumber = 108, - kHeaderSizeDeltasFieldNumber = 109, - kPaddingSizeDeltasFieldNumber = 110, - kTransportSequenceNumberDeltasFieldNumber = 115, - kTransmissionTimeOffsetDeltasFieldNumber = 116, - kAbsoluteSendTimeDeltasFieldNumber = 117, - kVideoRotationDeltasFieldNumber = 118, - kAudioLevelDeltasFieldNumber = 119, - kVoiceActivityDeltasFieldNumber = 120, - kTimestampMsFieldNumber = 1, - kPayloadTypeFieldNumber = 3, - kSequenceNumberFieldNumber = 4, - kRtpTimestampFieldNumber = 5, - kSsrcFieldNumber = 6, - kPayloadSizeFieldNumber = 8, - kHeaderSizeFieldNumber = 9, - kPaddingSizeFieldNumber = 10, - kNumberOfDeltasFieldNumber = 11, - kMarkerFieldNumber = 2, - kVoiceActivityFieldNumber = 20, - kTransportSequenceNumberFieldNumber = 15, - 
kTransmissionTimeOffsetFieldNumber = 16, - kAbsoluteSendTimeFieldNumber = 17, - kVideoRotationFieldNumber = 18, - kAudioLevelFieldNumber = 19, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes marker_deltas = 102; - bool has_marker_deltas() const; - void clear_marker_deltas(); - const std::string& marker_deltas() const; - void set_marker_deltas(const std::string& value); - void set_marker_deltas(std::string&& value); - void set_marker_deltas(const char* value); - void set_marker_deltas(const void* value, size_t size); - std::string* mutable_marker_deltas(); - std::string* release_marker_deltas(); - void set_allocated_marker_deltas(std::string* marker_deltas); - - // optional bytes payload_type_deltas = 103; - bool has_payload_type_deltas() const; - void clear_payload_type_deltas(); - const std::string& payload_type_deltas() const; - void set_payload_type_deltas(const std::string& value); - void set_payload_type_deltas(std::string&& value); - void set_payload_type_deltas(const char* value); - void set_payload_type_deltas(const void* value, size_t size); - std::string* mutable_payload_type_deltas(); - std::string* release_payload_type_deltas(); - void set_allocated_payload_type_deltas(std::string* payload_type_deltas); - - // optional bytes sequence_number_deltas = 104; - bool has_sequence_number_deltas() const; - void clear_sequence_number_deltas(); - const std::string& sequence_number_deltas() const; - void 
set_sequence_number_deltas(const std::string& value); - void set_sequence_number_deltas(std::string&& value); - void set_sequence_number_deltas(const char* value); - void set_sequence_number_deltas(const void* value, size_t size); - std::string* mutable_sequence_number_deltas(); - std::string* release_sequence_number_deltas(); - void set_allocated_sequence_number_deltas(std::string* sequence_number_deltas); - - // optional bytes rtp_timestamp_deltas = 105; - bool has_rtp_timestamp_deltas() const; - void clear_rtp_timestamp_deltas(); - const std::string& rtp_timestamp_deltas() const; - void set_rtp_timestamp_deltas(const std::string& value); - void set_rtp_timestamp_deltas(std::string&& value); - void set_rtp_timestamp_deltas(const char* value); - void set_rtp_timestamp_deltas(const void* value, size_t size); - std::string* mutable_rtp_timestamp_deltas(); - std::string* release_rtp_timestamp_deltas(); - void set_allocated_rtp_timestamp_deltas(std::string* rtp_timestamp_deltas); - - // optional bytes ssrc_deltas = 106; - bool has_ssrc_deltas() const; - void clear_ssrc_deltas(); - const std::string& ssrc_deltas() const; - void set_ssrc_deltas(const std::string& value); - void set_ssrc_deltas(std::string&& value); - void set_ssrc_deltas(const char* value); - void set_ssrc_deltas(const void* value, size_t size); - std::string* mutable_ssrc_deltas(); - std::string* release_ssrc_deltas(); - void set_allocated_ssrc_deltas(std::string* ssrc_deltas); - - // optional bytes payload_size_deltas = 108; - bool has_payload_size_deltas() const; - void clear_payload_size_deltas(); - const std::string& payload_size_deltas() const; - void set_payload_size_deltas(const std::string& value); - void set_payload_size_deltas(std::string&& value); - void set_payload_size_deltas(const char* value); - void set_payload_size_deltas(const void* value, size_t size); - std::string* mutable_payload_size_deltas(); - std::string* release_payload_size_deltas(); - void 
set_allocated_payload_size_deltas(std::string* payload_size_deltas); - - // optional bytes header_size_deltas = 109; - bool has_header_size_deltas() const; - void clear_header_size_deltas(); - const std::string& header_size_deltas() const; - void set_header_size_deltas(const std::string& value); - void set_header_size_deltas(std::string&& value); - void set_header_size_deltas(const char* value); - void set_header_size_deltas(const void* value, size_t size); - std::string* mutable_header_size_deltas(); - std::string* release_header_size_deltas(); - void set_allocated_header_size_deltas(std::string* header_size_deltas); - - // optional bytes padding_size_deltas = 110; - bool has_padding_size_deltas() const; - void clear_padding_size_deltas(); - const std::string& padding_size_deltas() const; - void set_padding_size_deltas(const std::string& value); - void set_padding_size_deltas(std::string&& value); - void set_padding_size_deltas(const char* value); - void set_padding_size_deltas(const void* value, size_t size); - std::string* mutable_padding_size_deltas(); - std::string* release_padding_size_deltas(); - void set_allocated_padding_size_deltas(std::string* padding_size_deltas); - - // optional bytes transport_sequence_number_deltas = 115; - bool has_transport_sequence_number_deltas() const; - void clear_transport_sequence_number_deltas(); - const std::string& transport_sequence_number_deltas() const; - void set_transport_sequence_number_deltas(const std::string& value); - void set_transport_sequence_number_deltas(std::string&& value); - void set_transport_sequence_number_deltas(const char* value); - void set_transport_sequence_number_deltas(const void* value, size_t size); - std::string* mutable_transport_sequence_number_deltas(); - std::string* release_transport_sequence_number_deltas(); - void set_allocated_transport_sequence_number_deltas(std::string* transport_sequence_number_deltas); - - // optional bytes transmission_time_offset_deltas = 116; - bool 
has_transmission_time_offset_deltas() const; - void clear_transmission_time_offset_deltas(); - const std::string& transmission_time_offset_deltas() const; - void set_transmission_time_offset_deltas(const std::string& value); - void set_transmission_time_offset_deltas(std::string&& value); - void set_transmission_time_offset_deltas(const char* value); - void set_transmission_time_offset_deltas(const void* value, size_t size); - std::string* mutable_transmission_time_offset_deltas(); - std::string* release_transmission_time_offset_deltas(); - void set_allocated_transmission_time_offset_deltas(std::string* transmission_time_offset_deltas); - - // optional bytes absolute_send_time_deltas = 117; - bool has_absolute_send_time_deltas() const; - void clear_absolute_send_time_deltas(); - const std::string& absolute_send_time_deltas() const; - void set_absolute_send_time_deltas(const std::string& value); - void set_absolute_send_time_deltas(std::string&& value); - void set_absolute_send_time_deltas(const char* value); - void set_absolute_send_time_deltas(const void* value, size_t size); - std::string* mutable_absolute_send_time_deltas(); - std::string* release_absolute_send_time_deltas(); - void set_allocated_absolute_send_time_deltas(std::string* absolute_send_time_deltas); - - // optional bytes video_rotation_deltas = 118; - bool has_video_rotation_deltas() const; - void clear_video_rotation_deltas(); - const std::string& video_rotation_deltas() const; - void set_video_rotation_deltas(const std::string& value); - void set_video_rotation_deltas(std::string&& value); - void set_video_rotation_deltas(const char* value); - void set_video_rotation_deltas(const void* value, size_t size); - std::string* mutable_video_rotation_deltas(); - std::string* release_video_rotation_deltas(); - void set_allocated_video_rotation_deltas(std::string* video_rotation_deltas); - - // optional bytes audio_level_deltas = 119; - bool has_audio_level_deltas() const; - void 
clear_audio_level_deltas(); - const std::string& audio_level_deltas() const; - void set_audio_level_deltas(const std::string& value); - void set_audio_level_deltas(std::string&& value); - void set_audio_level_deltas(const char* value); - void set_audio_level_deltas(const void* value, size_t size); - std::string* mutable_audio_level_deltas(); - std::string* release_audio_level_deltas(); - void set_allocated_audio_level_deltas(std::string* audio_level_deltas); - - // optional bytes voice_activity_deltas = 120; - bool has_voice_activity_deltas() const; - void clear_voice_activity_deltas(); - const std::string& voice_activity_deltas() const; - void set_voice_activity_deltas(const std::string& value); - void set_voice_activity_deltas(std::string&& value); - void set_voice_activity_deltas(const char* value); - void set_voice_activity_deltas(const void* value, size_t size); - std::string* mutable_voice_activity_deltas(); - std::string* release_voice_activity_deltas(); - void set_allocated_voice_activity_deltas(std::string* voice_activity_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 payload_type = 3; - bool has_payload_type() const; - void clear_payload_type(); - ::PROTOBUF_NAMESPACE_ID::uint32 payload_type() const; - void set_payload_type(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 sequence_number = 4; - bool has_sequence_number() const; - void clear_sequence_number(); - ::PROTOBUF_NAMESPACE_ID::uint32 sequence_number() const; - void set_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional fixed32 rtp_timestamp = 5; - bool has_rtp_timestamp() const; - void clear_rtp_timestamp(); - ::PROTOBUF_NAMESPACE_ID::uint32 rtp_timestamp() const; - void set_rtp_timestamp(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional fixed32 ssrc = 6; 
- bool has_ssrc() const; - void clear_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc() const; - void set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 payload_size = 8; - bool has_payload_size() const; - void clear_payload_size(); - ::PROTOBUF_NAMESPACE_ID::uint32 payload_size() const; - void set_payload_size(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 header_size = 9; - bool has_header_size() const; - void clear_header_size(); - ::PROTOBUF_NAMESPACE_ID::uint32 header_size() const; - void set_header_size(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 padding_size = 10; - bool has_padding_size() const; - void clear_padding_size(); - ::PROTOBUF_NAMESPACE_ID::uint32 padding_size() const; - void set_padding_size(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 number_of_deltas = 11; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional bool marker = 2; - bool has_marker() const; - void clear_marker(); - bool marker() const; - void set_marker(bool value); - - // optional bool voice_activity = 20; - bool has_voice_activity() const; - void clear_voice_activity(); - bool voice_activity() const; - void set_voice_activity(bool value); - - // optional uint32 transport_sequence_number = 15; - bool has_transport_sequence_number() const; - void clear_transport_sequence_number(); - ::PROTOBUF_NAMESPACE_ID::uint32 transport_sequence_number() const; - void set_transport_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional int32 transmission_time_offset = 16; - bool has_transmission_time_offset() const; - void clear_transmission_time_offset(); - ::PROTOBUF_NAMESPACE_ID::int32 transmission_time_offset() const; - void set_transmission_time_offset(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional uint32 absolute_send_time = 17; - bool 
has_absolute_send_time() const; - void clear_absolute_send_time(); - ::PROTOBUF_NAMESPACE_ID::uint32 absolute_send_time() const; - void set_absolute_send_time(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 video_rotation = 18; - bool has_video_rotation() const; - void clear_video_rotation(); - ::PROTOBUF_NAMESPACE_ID::uint32 video_rotation() const; - void set_video_rotation(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 audio_level = 19; - bool has_audio_level() const; - void clear_audio_level(); - ::PROTOBUF_NAMESPACE_ID::uint32 audio_level() const; - void set_audio_level(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.IncomingRtpPackets) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr marker_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr payload_type_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr sequence_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr rtp_timestamp_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr ssrc_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr payload_size_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr header_size_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr padding_size_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr transport_sequence_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr transmission_time_offset_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr absolute_send_time_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr video_rotation_deltas_; - 
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr audio_level_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr voice_activity_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 payload_type_; - ::PROTOBUF_NAMESPACE_ID::uint32 sequence_number_; - ::PROTOBUF_NAMESPACE_ID::uint32 rtp_timestamp_; - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 payload_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 header_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 padding_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - bool marker_; - bool voice_activity_; - ::PROTOBUF_NAMESPACE_ID::uint32 transport_sequence_number_; - ::PROTOBUF_NAMESPACE_ID::int32 transmission_time_offset_; - ::PROTOBUF_NAMESPACE_ID::uint32 absolute_send_time_; - ::PROTOBUF_NAMESPACE_ID::uint32 video_rotation_; - ::PROTOBUF_NAMESPACE_ID::uint32 audio_level_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class OutgoingRtpPackets : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.OutgoingRtpPackets) */ { - public: - OutgoingRtpPackets(); - virtual ~OutgoingRtpPackets(); - - OutgoingRtpPackets(const OutgoingRtpPackets& from); - OutgoingRtpPackets(OutgoingRtpPackets&& from) noexcept - : OutgoingRtpPackets() { - *this = ::std::move(from); - } - - inline OutgoingRtpPackets& operator=(const OutgoingRtpPackets& from) { - CopyFrom(from); - return *this; - } - inline OutgoingRtpPackets& operator=(OutgoingRtpPackets&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - 
static const OutgoingRtpPackets& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const OutgoingRtpPackets* internal_default_instance() { - return reinterpret_cast( - &_OutgoingRtpPackets_default_instance_); - } - static constexpr int kIndexInFileMessages = - 6; - - friend void swap(OutgoingRtpPackets& a, OutgoingRtpPackets& b) { - a.Swap(&b); - } - inline void Swap(OutgoingRtpPackets* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline OutgoingRtpPackets* New() const final { - return CreateMaybeMessage(nullptr); - } - - OutgoingRtpPackets* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const OutgoingRtpPackets& from); - void MergeFrom(const OutgoingRtpPackets& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(OutgoingRtpPackets* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.OutgoingRtpPackets"; - } - private: - inline 
::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 101, - kMarkerDeltasFieldNumber = 102, - kPayloadTypeDeltasFieldNumber = 103, - kSequenceNumberDeltasFieldNumber = 104, - kRtpTimestampDeltasFieldNumber = 105, - kSsrcDeltasFieldNumber = 106, - kPayloadSizeDeltasFieldNumber = 108, - kHeaderSizeDeltasFieldNumber = 109, - kPaddingSizeDeltasFieldNumber = 110, - kTransportSequenceNumberDeltasFieldNumber = 115, - kTransmissionTimeOffsetDeltasFieldNumber = 116, - kAbsoluteSendTimeDeltasFieldNumber = 117, - kVideoRotationDeltasFieldNumber = 118, - kAudioLevelDeltasFieldNumber = 119, - kVoiceActivityDeltasFieldNumber = 120, - kTimestampMsFieldNumber = 1, - kPayloadTypeFieldNumber = 3, - kSequenceNumberFieldNumber = 4, - kRtpTimestampFieldNumber = 5, - kSsrcFieldNumber = 6, - kPayloadSizeFieldNumber = 8, - kHeaderSizeFieldNumber = 9, - kPaddingSizeFieldNumber = 10, - kNumberOfDeltasFieldNumber = 11, - kMarkerFieldNumber = 2, - kVoiceActivityFieldNumber = 20, - kTransportSequenceNumberFieldNumber = 15, - kTransmissionTimeOffsetFieldNumber = 16, - kAbsoluteSendTimeFieldNumber = 17, - kVideoRotationFieldNumber = 18, - kAudioLevelFieldNumber = 19, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void 
set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes marker_deltas = 102; - bool has_marker_deltas() const; - void clear_marker_deltas(); - const std::string& marker_deltas() const; - void set_marker_deltas(const std::string& value); - void set_marker_deltas(std::string&& value); - void set_marker_deltas(const char* value); - void set_marker_deltas(const void* value, size_t size); - std::string* mutable_marker_deltas(); - std::string* release_marker_deltas(); - void set_allocated_marker_deltas(std::string* marker_deltas); - - // optional bytes payload_type_deltas = 103; - bool has_payload_type_deltas() const; - void clear_payload_type_deltas(); - const std::string& payload_type_deltas() const; - void set_payload_type_deltas(const std::string& value); - void set_payload_type_deltas(std::string&& value); - void set_payload_type_deltas(const char* value); - void set_payload_type_deltas(const void* value, size_t size); - std::string* mutable_payload_type_deltas(); - std::string* release_payload_type_deltas(); - void set_allocated_payload_type_deltas(std::string* payload_type_deltas); - - // optional bytes sequence_number_deltas = 104; - bool has_sequence_number_deltas() const; - void clear_sequence_number_deltas(); - const std::string& sequence_number_deltas() const; - void set_sequence_number_deltas(const std::string& value); - void set_sequence_number_deltas(std::string&& value); - void set_sequence_number_deltas(const char* value); - void set_sequence_number_deltas(const void* value, size_t size); - std::string* mutable_sequence_number_deltas(); - std::string* release_sequence_number_deltas(); - void set_allocated_sequence_number_deltas(std::string* sequence_number_deltas); - - // optional bytes rtp_timestamp_deltas = 105; - bool has_rtp_timestamp_deltas() const; - void clear_rtp_timestamp_deltas(); - const std::string& rtp_timestamp_deltas() const; - void set_rtp_timestamp_deltas(const std::string& value); - void 
set_rtp_timestamp_deltas(std::string&& value); - void set_rtp_timestamp_deltas(const char* value); - void set_rtp_timestamp_deltas(const void* value, size_t size); - std::string* mutable_rtp_timestamp_deltas(); - std::string* release_rtp_timestamp_deltas(); - void set_allocated_rtp_timestamp_deltas(std::string* rtp_timestamp_deltas); - - // optional bytes ssrc_deltas = 106; - bool has_ssrc_deltas() const; - void clear_ssrc_deltas(); - const std::string& ssrc_deltas() const; - void set_ssrc_deltas(const std::string& value); - void set_ssrc_deltas(std::string&& value); - void set_ssrc_deltas(const char* value); - void set_ssrc_deltas(const void* value, size_t size); - std::string* mutable_ssrc_deltas(); - std::string* release_ssrc_deltas(); - void set_allocated_ssrc_deltas(std::string* ssrc_deltas); - - // optional bytes payload_size_deltas = 108; - bool has_payload_size_deltas() const; - void clear_payload_size_deltas(); - const std::string& payload_size_deltas() const; - void set_payload_size_deltas(const std::string& value); - void set_payload_size_deltas(std::string&& value); - void set_payload_size_deltas(const char* value); - void set_payload_size_deltas(const void* value, size_t size); - std::string* mutable_payload_size_deltas(); - std::string* release_payload_size_deltas(); - void set_allocated_payload_size_deltas(std::string* payload_size_deltas); - - // optional bytes header_size_deltas = 109; - bool has_header_size_deltas() const; - void clear_header_size_deltas(); - const std::string& header_size_deltas() const; - void set_header_size_deltas(const std::string& value); - void set_header_size_deltas(std::string&& value); - void set_header_size_deltas(const char* value); - void set_header_size_deltas(const void* value, size_t size); - std::string* mutable_header_size_deltas(); - std::string* release_header_size_deltas(); - void set_allocated_header_size_deltas(std::string* header_size_deltas); - - // optional bytes padding_size_deltas = 110; - bool 
has_padding_size_deltas() const; - void clear_padding_size_deltas(); - const std::string& padding_size_deltas() const; - void set_padding_size_deltas(const std::string& value); - void set_padding_size_deltas(std::string&& value); - void set_padding_size_deltas(const char* value); - void set_padding_size_deltas(const void* value, size_t size); - std::string* mutable_padding_size_deltas(); - std::string* release_padding_size_deltas(); - void set_allocated_padding_size_deltas(std::string* padding_size_deltas); - - // optional bytes transport_sequence_number_deltas = 115; - bool has_transport_sequence_number_deltas() const; - void clear_transport_sequence_number_deltas(); - const std::string& transport_sequence_number_deltas() const; - void set_transport_sequence_number_deltas(const std::string& value); - void set_transport_sequence_number_deltas(std::string&& value); - void set_transport_sequence_number_deltas(const char* value); - void set_transport_sequence_number_deltas(const void* value, size_t size); - std::string* mutable_transport_sequence_number_deltas(); - std::string* release_transport_sequence_number_deltas(); - void set_allocated_transport_sequence_number_deltas(std::string* transport_sequence_number_deltas); - - // optional bytes transmission_time_offset_deltas = 116; - bool has_transmission_time_offset_deltas() const; - void clear_transmission_time_offset_deltas(); - const std::string& transmission_time_offset_deltas() const; - void set_transmission_time_offset_deltas(const std::string& value); - void set_transmission_time_offset_deltas(std::string&& value); - void set_transmission_time_offset_deltas(const char* value); - void set_transmission_time_offset_deltas(const void* value, size_t size); - std::string* mutable_transmission_time_offset_deltas(); - std::string* release_transmission_time_offset_deltas(); - void set_allocated_transmission_time_offset_deltas(std::string* transmission_time_offset_deltas); - - // optional bytes absolute_send_time_deltas 
= 117; - bool has_absolute_send_time_deltas() const; - void clear_absolute_send_time_deltas(); - const std::string& absolute_send_time_deltas() const; - void set_absolute_send_time_deltas(const std::string& value); - void set_absolute_send_time_deltas(std::string&& value); - void set_absolute_send_time_deltas(const char* value); - void set_absolute_send_time_deltas(const void* value, size_t size); - std::string* mutable_absolute_send_time_deltas(); - std::string* release_absolute_send_time_deltas(); - void set_allocated_absolute_send_time_deltas(std::string* absolute_send_time_deltas); - - // optional bytes video_rotation_deltas = 118; - bool has_video_rotation_deltas() const; - void clear_video_rotation_deltas(); - const std::string& video_rotation_deltas() const; - void set_video_rotation_deltas(const std::string& value); - void set_video_rotation_deltas(std::string&& value); - void set_video_rotation_deltas(const char* value); - void set_video_rotation_deltas(const void* value, size_t size); - std::string* mutable_video_rotation_deltas(); - std::string* release_video_rotation_deltas(); - void set_allocated_video_rotation_deltas(std::string* video_rotation_deltas); - - // optional bytes audio_level_deltas = 119; - bool has_audio_level_deltas() const; - void clear_audio_level_deltas(); - const std::string& audio_level_deltas() const; - void set_audio_level_deltas(const std::string& value); - void set_audio_level_deltas(std::string&& value); - void set_audio_level_deltas(const char* value); - void set_audio_level_deltas(const void* value, size_t size); - std::string* mutable_audio_level_deltas(); - std::string* release_audio_level_deltas(); - void set_allocated_audio_level_deltas(std::string* audio_level_deltas); - - // optional bytes voice_activity_deltas = 120; - bool has_voice_activity_deltas() const; - void clear_voice_activity_deltas(); - const std::string& voice_activity_deltas() const; - void set_voice_activity_deltas(const std::string& value); - void 
set_voice_activity_deltas(std::string&& value); - void set_voice_activity_deltas(const char* value); - void set_voice_activity_deltas(const void* value, size_t size); - std::string* mutable_voice_activity_deltas(); - std::string* release_voice_activity_deltas(); - void set_allocated_voice_activity_deltas(std::string* voice_activity_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 payload_type = 3; - bool has_payload_type() const; - void clear_payload_type(); - ::PROTOBUF_NAMESPACE_ID::uint32 payload_type() const; - void set_payload_type(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 sequence_number = 4; - bool has_sequence_number() const; - void clear_sequence_number(); - ::PROTOBUF_NAMESPACE_ID::uint32 sequence_number() const; - void set_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional fixed32 rtp_timestamp = 5; - bool has_rtp_timestamp() const; - void clear_rtp_timestamp(); - ::PROTOBUF_NAMESPACE_ID::uint32 rtp_timestamp() const; - void set_rtp_timestamp(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional fixed32 ssrc = 6; - bool has_ssrc() const; - void clear_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc() const; - void set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 payload_size = 8; - bool has_payload_size() const; - void clear_payload_size(); - ::PROTOBUF_NAMESPACE_ID::uint32 payload_size() const; - void set_payload_size(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 header_size = 9; - bool has_header_size() const; - void clear_header_size(); - ::PROTOBUF_NAMESPACE_ID::uint32 header_size() const; - void set_header_size(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 padding_size = 10; - bool has_padding_size() const; - void clear_padding_size(); - 
::PROTOBUF_NAMESPACE_ID::uint32 padding_size() const; - void set_padding_size(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 number_of_deltas = 11; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional bool marker = 2; - bool has_marker() const; - void clear_marker(); - bool marker() const; - void set_marker(bool value); - - // optional bool voice_activity = 20; - bool has_voice_activity() const; - void clear_voice_activity(); - bool voice_activity() const; - void set_voice_activity(bool value); - - // optional uint32 transport_sequence_number = 15; - bool has_transport_sequence_number() const; - void clear_transport_sequence_number(); - ::PROTOBUF_NAMESPACE_ID::uint32 transport_sequence_number() const; - void set_transport_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional int32 transmission_time_offset = 16; - bool has_transmission_time_offset() const; - void clear_transmission_time_offset(); - ::PROTOBUF_NAMESPACE_ID::int32 transmission_time_offset() const; - void set_transmission_time_offset(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional uint32 absolute_send_time = 17; - bool has_absolute_send_time() const; - void clear_absolute_send_time(); - ::PROTOBUF_NAMESPACE_ID::uint32 absolute_send_time() const; - void set_absolute_send_time(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 video_rotation = 18; - bool has_video_rotation() const; - void clear_video_rotation(); - ::PROTOBUF_NAMESPACE_ID::uint32 video_rotation() const; - void set_video_rotation(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 audio_level = 19; - bool has_audio_level() const; - void clear_audio_level(); - ::PROTOBUF_NAMESPACE_ID::uint32 audio_level() const; - void set_audio_level(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // 
@@protoc_insertion_point(class_scope:webrtc.rtclog2.OutgoingRtpPackets) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr marker_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr payload_type_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr sequence_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr rtp_timestamp_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr ssrc_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr payload_size_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr header_size_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr padding_size_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr transport_sequence_number_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr transmission_time_offset_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr absolute_send_time_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr video_rotation_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr audio_level_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr voice_activity_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 payload_type_; - ::PROTOBUF_NAMESPACE_ID::uint32 sequence_number_; - ::PROTOBUF_NAMESPACE_ID::uint32 rtp_timestamp_; - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 payload_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 header_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 padding_size_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - bool marker_; - bool voice_activity_; - ::PROTOBUF_NAMESPACE_ID::uint32 transport_sequence_number_; - 
::PROTOBUF_NAMESPACE_ID::int32 transmission_time_offset_; - ::PROTOBUF_NAMESPACE_ID::uint32 absolute_send_time_; - ::PROTOBUF_NAMESPACE_ID::uint32 video_rotation_; - ::PROTOBUF_NAMESPACE_ID::uint32 audio_level_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class IncomingRtcpPackets : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.IncomingRtcpPackets) */ { - public: - IncomingRtcpPackets(); - virtual ~IncomingRtcpPackets(); - - IncomingRtcpPackets(const IncomingRtcpPackets& from); - IncomingRtcpPackets(IncomingRtcpPackets&& from) noexcept - : IncomingRtcpPackets() { - *this = ::std::move(from); - } - - inline IncomingRtcpPackets& operator=(const IncomingRtcpPackets& from) { - CopyFrom(from); - return *this; - } - inline IncomingRtcpPackets& operator=(IncomingRtcpPackets&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const IncomingRtcpPackets& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const IncomingRtcpPackets* internal_default_instance() { - return reinterpret_cast( - &_IncomingRtcpPackets_default_instance_); - } - static constexpr int kIndexInFileMessages = - 7; - - friend void swap(IncomingRtcpPackets& a, IncomingRtcpPackets& b) { - a.Swap(&b); - } - inline void Swap(IncomingRtcpPackets* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline IncomingRtcpPackets* New() const final { - return CreateMaybeMessage(nullptr); - } 
- - IncomingRtcpPackets* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const IncomingRtcpPackets& from); - void MergeFrom(const IncomingRtcpPackets& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(IncomingRtcpPackets* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.IncomingRtcpPackets"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kRawPacketFieldNumber = 2, - kTimestampMsDeltasFieldNumber = 101, - kRawPacketBlobsFieldNumber = 102, - kTimestampMsFieldNumber = 1, - kNumberOfDeltasFieldNumber = 3, - }; - // optional bytes raw_packet = 2; - bool has_raw_packet() const; - void clear_raw_packet(); - const std::string& raw_packet() const; - void 
set_raw_packet(const std::string& value); - void set_raw_packet(std::string&& value); - void set_raw_packet(const char* value); - void set_raw_packet(const void* value, size_t size); - std::string* mutable_raw_packet(); - std::string* release_raw_packet(); - void set_allocated_raw_packet(std::string* raw_packet); - - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes raw_packet_blobs = 102; - bool has_raw_packet_blobs() const; - void clear_raw_packet_blobs(); - const std::string& raw_packet_blobs() const; - void set_raw_packet_blobs(const std::string& value); - void set_raw_packet_blobs(std::string&& value); - void set_raw_packet_blobs(const char* value); - void set_raw_packet_blobs(const void* value, size_t size); - std::string* mutable_raw_packet_blobs(); - std::string* release_raw_packet_blobs(); - void set_allocated_raw_packet_blobs(std::string* raw_packet_blobs); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 number_of_deltas = 3; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.IncomingRtcpPackets) - private: - class _Internal; - - 
::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr raw_packet_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr raw_packet_blobs_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class OutgoingRtcpPackets : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.OutgoingRtcpPackets) */ { - public: - OutgoingRtcpPackets(); - virtual ~OutgoingRtcpPackets(); - - OutgoingRtcpPackets(const OutgoingRtcpPackets& from); - OutgoingRtcpPackets(OutgoingRtcpPackets&& from) noexcept - : OutgoingRtcpPackets() { - *this = ::std::move(from); - } - - inline OutgoingRtcpPackets& operator=(const OutgoingRtcpPackets& from) { - CopyFrom(from); - return *this; - } - inline OutgoingRtcpPackets& operator=(OutgoingRtcpPackets&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const OutgoingRtcpPackets& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const OutgoingRtcpPackets* internal_default_instance() { - return reinterpret_cast( - &_OutgoingRtcpPackets_default_instance_); - } - static constexpr int kIndexInFileMessages = - 8; - - friend void swap(OutgoingRtcpPackets& a, 
OutgoingRtcpPackets& b) { - a.Swap(&b); - } - inline void Swap(OutgoingRtcpPackets* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline OutgoingRtcpPackets* New() const final { - return CreateMaybeMessage(nullptr); - } - - OutgoingRtcpPackets* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const OutgoingRtcpPackets& from); - void MergeFrom(const OutgoingRtcpPackets& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(OutgoingRtcpPackets* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.OutgoingRtcpPackets"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - 
kRawPacketFieldNumber = 2, - kTimestampMsDeltasFieldNumber = 101, - kRawPacketBlobsFieldNumber = 102, - kTimestampMsFieldNumber = 1, - kNumberOfDeltasFieldNumber = 3, - }; - // optional bytes raw_packet = 2; - bool has_raw_packet() const; - void clear_raw_packet(); - const std::string& raw_packet() const; - void set_raw_packet(const std::string& value); - void set_raw_packet(std::string&& value); - void set_raw_packet(const char* value); - void set_raw_packet(const void* value, size_t size); - std::string* mutable_raw_packet(); - std::string* release_raw_packet(); - void set_allocated_raw_packet(std::string* raw_packet); - - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes raw_packet_blobs = 102; - bool has_raw_packet_blobs() const; - void clear_raw_packet_blobs(); - const std::string& raw_packet_blobs() const; - void set_raw_packet_blobs(const std::string& value); - void set_raw_packet_blobs(std::string&& value); - void set_raw_packet_blobs(const char* value); - void set_raw_packet_blobs(const void* value, size_t size); - std::string* mutable_raw_packet_blobs(); - std::string* release_raw_packet_blobs(); - void set_allocated_raw_packet_blobs(std::string* raw_packet_blobs); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 number_of_deltas = 3; - 
bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.OutgoingRtcpPackets) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr raw_packet_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr raw_packet_blobs_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioPlayoutEvents : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.AudioPlayoutEvents) */ { - public: - AudioPlayoutEvents(); - virtual ~AudioPlayoutEvents(); - - AudioPlayoutEvents(const AudioPlayoutEvents& from); - AudioPlayoutEvents(AudioPlayoutEvents&& from) noexcept - : AudioPlayoutEvents() { - *this = ::std::move(from); - } - - inline AudioPlayoutEvents& operator=(const AudioPlayoutEvents& from) { - CopyFrom(from); - return *this; - } - inline AudioPlayoutEvents& operator=(AudioPlayoutEvents&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioPlayoutEvents& default_instance(); - - static 
void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioPlayoutEvents* internal_default_instance() { - return reinterpret_cast( - &_AudioPlayoutEvents_default_instance_); - } - static constexpr int kIndexInFileMessages = - 9; - - friend void swap(AudioPlayoutEvents& a, AudioPlayoutEvents& b) { - a.Swap(&b); - } - inline void Swap(AudioPlayoutEvents* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioPlayoutEvents* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioPlayoutEvents* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioPlayoutEvents& from); - void MergeFrom(const AudioPlayoutEvents& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioPlayoutEvents* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.AudioPlayoutEvents"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - 
} - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 101, - kLocalSsrcDeltasFieldNumber = 102, - kTimestampMsFieldNumber = 1, - kLocalSsrcFieldNumber = 2, - kNumberOfDeltasFieldNumber = 3, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes local_ssrc_deltas = 102; - bool has_local_ssrc_deltas() const; - void clear_local_ssrc_deltas(); - const std::string& local_ssrc_deltas() const; - void set_local_ssrc_deltas(const std::string& value); - void set_local_ssrc_deltas(std::string&& value); - void set_local_ssrc_deltas(const char* value); - void set_local_ssrc_deltas(const void* value, size_t size); - std::string* mutable_local_ssrc_deltas(); - std::string* release_local_ssrc_deltas(); - void set_allocated_local_ssrc_deltas(std::string* local_ssrc_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 local_ssrc = 2; - bool has_local_ssrc() const; - void clear_local_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc() const; - void set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); 
- - // optional uint32 number_of_deltas = 3; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.AudioPlayoutEvents) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr local_ssrc_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class FrameDecodedEvents : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.FrameDecodedEvents) */ { - public: - FrameDecodedEvents(); - virtual ~FrameDecodedEvents(); - - FrameDecodedEvents(const FrameDecodedEvents& from); - FrameDecodedEvents(FrameDecodedEvents&& from) noexcept - : FrameDecodedEvents() { - *this = ::std::move(from); - } - - inline FrameDecodedEvents& operator=(const FrameDecodedEvents& from) { - CopyFrom(from); - return *this; - } - inline FrameDecodedEvents& operator=(FrameDecodedEvents&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const FrameDecodedEvents& 
default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const FrameDecodedEvents* internal_default_instance() { - return reinterpret_cast( - &_FrameDecodedEvents_default_instance_); - } - static constexpr int kIndexInFileMessages = - 10; - - friend void swap(FrameDecodedEvents& a, FrameDecodedEvents& b) { - a.Swap(&b); - } - inline void Swap(FrameDecodedEvents* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline FrameDecodedEvents* New() const final { - return CreateMaybeMessage(nullptr); - } - - FrameDecodedEvents* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const FrameDecodedEvents& from); - void MergeFrom(const FrameDecodedEvents& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(FrameDecodedEvents* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.FrameDecodedEvents"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* 
GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef FrameDecodedEvents_Codec Codec; - static constexpr Codec CODEC_UNKNOWN = - FrameDecodedEvents_Codec_CODEC_UNKNOWN; - static constexpr Codec CODEC_GENERIC = - FrameDecodedEvents_Codec_CODEC_GENERIC; - static constexpr Codec CODEC_VP8 = - FrameDecodedEvents_Codec_CODEC_VP8; - static constexpr Codec CODEC_VP9 = - FrameDecodedEvents_Codec_CODEC_VP9; - static constexpr Codec CODEC_AV1 = - FrameDecodedEvents_Codec_CODEC_AV1; - static constexpr Codec CODEC_H264 = - FrameDecodedEvents_Codec_CODEC_H264; - static constexpr Codec CODEC_H265 = - FrameDecodedEvents_Codec_CODEC_H265; - static inline bool Codec_IsValid(int value) { - return FrameDecodedEvents_Codec_IsValid(value); - } - static constexpr Codec Codec_MIN = - FrameDecodedEvents_Codec_Codec_MIN; - static constexpr Codec Codec_MAX = - FrameDecodedEvents_Codec_Codec_MAX; - static constexpr int Codec_ARRAYSIZE = - FrameDecodedEvents_Codec_Codec_ARRAYSIZE; - template - static inline const std::string& Codec_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function Codec_Name."); - return FrameDecodedEvents_Codec_Name(enum_t_value); - } - static inline bool Codec_Parse(const std::string& name, - Codec* value) { - return FrameDecodedEvents_Codec_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 101, - kSsrcDeltasFieldNumber = 102, - kRenderTimeMsDeltasFieldNumber = 103, - kWidthDeltasFieldNumber = 104, - kHeightDeltasFieldNumber = 105, - kCodecDeltasFieldNumber = 106, - kQpDeltasFieldNumber = 107, - kTimestampMsFieldNumber = 1, - kRenderTimeMsFieldNumber = 3, - kSsrcFieldNumber = 2, - kWidthFieldNumber = 4, - 
kHeightFieldNumber = 5, - kCodecFieldNumber = 6, - kQpFieldNumber = 7, - kNumberOfDeltasFieldNumber = 15, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes ssrc_deltas = 102; - bool has_ssrc_deltas() const; - void clear_ssrc_deltas(); - const std::string& ssrc_deltas() const; - void set_ssrc_deltas(const std::string& value); - void set_ssrc_deltas(std::string&& value); - void set_ssrc_deltas(const char* value); - void set_ssrc_deltas(const void* value, size_t size); - std::string* mutable_ssrc_deltas(); - std::string* release_ssrc_deltas(); - void set_allocated_ssrc_deltas(std::string* ssrc_deltas); - - // optional bytes render_time_ms_deltas = 103; - bool has_render_time_ms_deltas() const; - void clear_render_time_ms_deltas(); - const std::string& render_time_ms_deltas() const; - void set_render_time_ms_deltas(const std::string& value); - void set_render_time_ms_deltas(std::string&& value); - void set_render_time_ms_deltas(const char* value); - void set_render_time_ms_deltas(const void* value, size_t size); - std::string* mutable_render_time_ms_deltas(); - std::string* release_render_time_ms_deltas(); - void set_allocated_render_time_ms_deltas(std::string* render_time_ms_deltas); - - // optional bytes width_deltas = 104; - bool has_width_deltas() const; - void clear_width_deltas(); - const std::string& width_deltas() const; - void set_width_deltas(const std::string& value); - void set_width_deltas(std::string&& value); - void 
set_width_deltas(const char* value); - void set_width_deltas(const void* value, size_t size); - std::string* mutable_width_deltas(); - std::string* release_width_deltas(); - void set_allocated_width_deltas(std::string* width_deltas); - - // optional bytes height_deltas = 105; - bool has_height_deltas() const; - void clear_height_deltas(); - const std::string& height_deltas() const; - void set_height_deltas(const std::string& value); - void set_height_deltas(std::string&& value); - void set_height_deltas(const char* value); - void set_height_deltas(const void* value, size_t size); - std::string* mutable_height_deltas(); - std::string* release_height_deltas(); - void set_allocated_height_deltas(std::string* height_deltas); - - // optional bytes codec_deltas = 106; - bool has_codec_deltas() const; - void clear_codec_deltas(); - const std::string& codec_deltas() const; - void set_codec_deltas(const std::string& value); - void set_codec_deltas(std::string&& value); - void set_codec_deltas(const char* value); - void set_codec_deltas(const void* value, size_t size); - std::string* mutable_codec_deltas(); - std::string* release_codec_deltas(); - void set_allocated_codec_deltas(std::string* codec_deltas); - - // optional bytes qp_deltas = 107; - bool has_qp_deltas() const; - void clear_qp_deltas(); - const std::string& qp_deltas() const; - void set_qp_deltas(const std::string& value); - void set_qp_deltas(std::string&& value); - void set_qp_deltas(const char* value); - void set_qp_deltas(const void* value, size_t size); - std::string* mutable_qp_deltas(); - std::string* release_qp_deltas(); - void set_allocated_qp_deltas(std::string* qp_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int64 render_time_ms = 3; - bool has_render_time_ms() const; - void clear_render_time_ms(); - 
::PROTOBUF_NAMESPACE_ID::int64 render_time_ms() const; - void set_render_time_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional fixed32 ssrc = 2; - bool has_ssrc() const; - void clear_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc() const; - void set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional int32 width = 4; - bool has_width() const; - void clear_width(); - ::PROTOBUF_NAMESPACE_ID::int32 width() const; - void set_width(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 height = 5; - bool has_height() const; - void clear_height(); - ::PROTOBUF_NAMESPACE_ID::int32 height() const; - void set_height(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional .webrtc.rtclog2.FrameDecodedEvents.Codec codec = 6; - bool has_codec() const; - void clear_codec(); - ::webrtc::rtclog2::FrameDecodedEvents_Codec codec() const; - void set_codec(::webrtc::rtclog2::FrameDecodedEvents_Codec value); - - // optional uint32 qp = 7; - bool has_qp() const; - void clear_qp(); - ::PROTOBUF_NAMESPACE_ID::uint32 qp() const; - void set_qp(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 number_of_deltas = 15; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.FrameDecodedEvents) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr ssrc_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr render_time_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr width_deltas_; - 
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr height_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr codec_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr qp_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::int64 render_time_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc_; - ::PROTOBUF_NAMESPACE_ID::int32 width_; - ::PROTOBUF_NAMESPACE_ID::int32 height_; - int codec_; - ::PROTOBUF_NAMESPACE_ID::uint32 qp_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class BeginLogEvent : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.BeginLogEvent) */ { - public: - BeginLogEvent(); - virtual ~BeginLogEvent(); - - BeginLogEvent(const BeginLogEvent& from); - BeginLogEvent(BeginLogEvent&& from) noexcept - : BeginLogEvent() { - *this = ::std::move(from); - } - - inline BeginLogEvent& operator=(const BeginLogEvent& from) { - CopyFrom(from); - return *this; - } - inline BeginLogEvent& operator=(BeginLogEvent&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const BeginLogEvent& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const BeginLogEvent* internal_default_instance() { - return reinterpret_cast( - &_BeginLogEvent_default_instance_); - } - static constexpr int kIndexInFileMessages = - 11; - - friend void swap(BeginLogEvent& a, BeginLogEvent& b) { - a.Swap(&b); - } - inline void Swap(BeginLogEvent* other) { - if (other == this) 
return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline BeginLogEvent* New() const final { - return CreateMaybeMessage(nullptr); - } - - BeginLogEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const BeginLogEvent& from); - void MergeFrom(const BeginLogEvent& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(BeginLogEvent* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.BeginLogEvent"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kUtcTimeMsFieldNumber = 3, - kVersionFieldNumber = 2, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() 
const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int64 utc_time_ms = 3; - bool has_utc_time_ms() const; - void clear_utc_time_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 utc_time_ms() const; - void set_utc_time_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 version = 2; - bool has_version() const; - void clear_version(); - ::PROTOBUF_NAMESPACE_ID::uint32 version() const; - void set_version(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.BeginLogEvent) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::int64 utc_time_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 version_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class EndLogEvent : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.EndLogEvent) */ { - public: - EndLogEvent(); - virtual ~EndLogEvent(); - - EndLogEvent(const EndLogEvent& from); - EndLogEvent(EndLogEvent&& from) noexcept - : EndLogEvent() { - *this = ::std::move(from); - } - - inline EndLogEvent& operator=(const EndLogEvent& from) { - CopyFrom(from); - return *this; - } - inline EndLogEvent& operator=(EndLogEvent&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return 
_internal_metadata_.mutable_unknown_fields(); - } - - static const EndLogEvent& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const EndLogEvent* internal_default_instance() { - return reinterpret_cast( - &_EndLogEvent_default_instance_); - } - static constexpr int kIndexInFileMessages = - 12; - - friend void swap(EndLogEvent& a, EndLogEvent& b) { - a.Swap(&b); - } - inline void Swap(EndLogEvent* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline EndLogEvent* New() const final { - return CreateMaybeMessage(nullptr); - } - - EndLogEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const EndLogEvent& from); - void MergeFrom(const EndLogEvent& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(EndLogEvent* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.EndLogEvent"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* 
GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.EndLogEvent) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class LossBasedBweUpdates : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.LossBasedBweUpdates) */ { - public: - LossBasedBweUpdates(); - virtual ~LossBasedBweUpdates(); - - LossBasedBweUpdates(const LossBasedBweUpdates& from); - LossBasedBweUpdates(LossBasedBweUpdates&& from) noexcept - : LossBasedBweUpdates() { - *this = ::std::move(from); - } - - inline LossBasedBweUpdates& operator=(const LossBasedBweUpdates& from) { - CopyFrom(from); - return *this; - } - inline LossBasedBweUpdates& operator=(LossBasedBweUpdates&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - 
return _internal_metadata_.mutable_unknown_fields(); - } - - static const LossBasedBweUpdates& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const LossBasedBweUpdates* internal_default_instance() { - return reinterpret_cast( - &_LossBasedBweUpdates_default_instance_); - } - static constexpr int kIndexInFileMessages = - 13; - - friend void swap(LossBasedBweUpdates& a, LossBasedBweUpdates& b) { - a.Swap(&b); - } - inline void Swap(LossBasedBweUpdates* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline LossBasedBweUpdates* New() const final { - return CreateMaybeMessage(nullptr); - } - - LossBasedBweUpdates* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const LossBasedBweUpdates& from); - void MergeFrom(const LossBasedBweUpdates& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(LossBasedBweUpdates* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return 
"webrtc.rtclog2.LossBasedBweUpdates"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 101, - kBitrateBpsDeltasFieldNumber = 102, - kFractionLossDeltasFieldNumber = 103, - kTotalPacketsDeltasFieldNumber = 104, - kTimestampMsFieldNumber = 1, - kBitrateBpsFieldNumber = 2, - kFractionLossFieldNumber = 3, - kTotalPacketsFieldNumber = 4, - kNumberOfDeltasFieldNumber = 5, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes bitrate_bps_deltas = 102; - bool has_bitrate_bps_deltas() const; - void clear_bitrate_bps_deltas(); - const std::string& bitrate_bps_deltas() const; - void set_bitrate_bps_deltas(const std::string& value); - void set_bitrate_bps_deltas(std::string&& value); - void set_bitrate_bps_deltas(const char* value); - void set_bitrate_bps_deltas(const void* value, size_t size); - std::string* mutable_bitrate_bps_deltas(); - std::string* release_bitrate_bps_deltas(); - void set_allocated_bitrate_bps_deltas(std::string* bitrate_bps_deltas); - - // optional bytes fraction_loss_deltas = 103; - bool has_fraction_loss_deltas() const; - void clear_fraction_loss_deltas(); - 
const std::string& fraction_loss_deltas() const; - void set_fraction_loss_deltas(const std::string& value); - void set_fraction_loss_deltas(std::string&& value); - void set_fraction_loss_deltas(const char* value); - void set_fraction_loss_deltas(const void* value, size_t size); - std::string* mutable_fraction_loss_deltas(); - std::string* release_fraction_loss_deltas(); - void set_allocated_fraction_loss_deltas(std::string* fraction_loss_deltas); - - // optional bytes total_packets_deltas = 104; - bool has_total_packets_deltas() const; - void clear_total_packets_deltas(); - const std::string& total_packets_deltas() const; - void set_total_packets_deltas(const std::string& value); - void set_total_packets_deltas(std::string&& value); - void set_total_packets_deltas(const char* value); - void set_total_packets_deltas(const void* value, size_t size); - std::string* mutable_total_packets_deltas(); - std::string* release_total_packets_deltas(); - void set_allocated_total_packets_deltas(std::string* total_packets_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 bitrate_bps = 2; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 fraction_loss = 3; - bool has_fraction_loss() const; - void clear_fraction_loss(); - ::PROTOBUF_NAMESPACE_ID::uint32 fraction_loss() const; - void set_fraction_loss(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 total_packets = 4; - bool has_total_packets() const; - void clear_total_packets(); - ::PROTOBUF_NAMESPACE_ID::uint32 total_packets() const; - void set_total_packets(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 number_of_deltas = 5; - bool 
has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.LossBasedBweUpdates) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr bitrate_bps_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr fraction_loss_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr total_packets_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps_; - ::PROTOBUF_NAMESPACE_ID::uint32 fraction_loss_; - ::PROTOBUF_NAMESPACE_ID::uint32 total_packets_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class DelayBasedBweUpdates : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.DelayBasedBweUpdates) */ { - public: - DelayBasedBweUpdates(); - virtual ~DelayBasedBweUpdates(); - - DelayBasedBweUpdates(const DelayBasedBweUpdates& from); - DelayBasedBweUpdates(DelayBasedBweUpdates&& from) noexcept - : DelayBasedBweUpdates() { - *this = ::std::move(from); - } - - inline DelayBasedBweUpdates& operator=(const DelayBasedBweUpdates& from) { - CopyFrom(from); - return *this; - } - inline DelayBasedBweUpdates& operator=(DelayBasedBweUpdates&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const 
std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const DelayBasedBweUpdates& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DelayBasedBweUpdates* internal_default_instance() { - return reinterpret_cast( - &_DelayBasedBweUpdates_default_instance_); - } - static constexpr int kIndexInFileMessages = - 14; - - friend void swap(DelayBasedBweUpdates& a, DelayBasedBweUpdates& b) { - a.Swap(&b); - } - inline void Swap(DelayBasedBweUpdates* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline DelayBasedBweUpdates* New() const final { - return CreateMaybeMessage(nullptr); - } - - DelayBasedBweUpdates* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const DelayBasedBweUpdates& from); - void MergeFrom(const DelayBasedBweUpdates& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void 
InternalSwap(DelayBasedBweUpdates* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.DelayBasedBweUpdates"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef DelayBasedBweUpdates_DetectorState DetectorState; - static constexpr DetectorState BWE_UNKNOWN_STATE = - DelayBasedBweUpdates_DetectorState_BWE_UNKNOWN_STATE; - static constexpr DetectorState BWE_NORMAL = - DelayBasedBweUpdates_DetectorState_BWE_NORMAL; - static constexpr DetectorState BWE_UNDERUSING = - DelayBasedBweUpdates_DetectorState_BWE_UNDERUSING; - static constexpr DetectorState BWE_OVERUSING = - DelayBasedBweUpdates_DetectorState_BWE_OVERUSING; - static inline bool DetectorState_IsValid(int value) { - return DelayBasedBweUpdates_DetectorState_IsValid(value); - } - static constexpr DetectorState DetectorState_MIN = - DelayBasedBweUpdates_DetectorState_DetectorState_MIN; - static constexpr DetectorState DetectorState_MAX = - DelayBasedBweUpdates_DetectorState_DetectorState_MAX; - static constexpr int DetectorState_ARRAYSIZE = - DelayBasedBweUpdates_DetectorState_DetectorState_ARRAYSIZE; - template - static inline const std::string& DetectorState_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function DetectorState_Name."); - return DelayBasedBweUpdates_DetectorState_Name(enum_t_value); - } - static inline bool DetectorState_Parse(const std::string& name, - DetectorState* value) { - return DelayBasedBweUpdates_DetectorState_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 
101, - kBitrateBpsDeltasFieldNumber = 102, - kDetectorStateDeltasFieldNumber = 103, - kTimestampMsFieldNumber = 1, - kBitrateBpsFieldNumber = 2, - kDetectorStateFieldNumber = 3, - kNumberOfDeltasFieldNumber = 4, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes bitrate_bps_deltas = 102; - bool has_bitrate_bps_deltas() const; - void clear_bitrate_bps_deltas(); - const std::string& bitrate_bps_deltas() const; - void set_bitrate_bps_deltas(const std::string& value); - void set_bitrate_bps_deltas(std::string&& value); - void set_bitrate_bps_deltas(const char* value); - void set_bitrate_bps_deltas(const void* value, size_t size); - std::string* mutable_bitrate_bps_deltas(); - std::string* release_bitrate_bps_deltas(); - void set_allocated_bitrate_bps_deltas(std::string* bitrate_bps_deltas); - - // optional bytes detector_state_deltas = 103; - bool has_detector_state_deltas() const; - void clear_detector_state_deltas(); - const std::string& detector_state_deltas() const; - void set_detector_state_deltas(const std::string& value); - void set_detector_state_deltas(std::string&& value); - void set_detector_state_deltas(const char* value); - void set_detector_state_deltas(const void* value, size_t size); - std::string* mutable_detector_state_deltas(); - std::string* release_detector_state_deltas(); - void set_allocated_detector_state_deltas(std::string* detector_state_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() 
const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 bitrate_bps = 2; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional .webrtc.rtclog2.DelayBasedBweUpdates.DetectorState detector_state = 3; - bool has_detector_state() const; - void clear_detector_state(); - ::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState detector_state() const; - void set_detector_state(::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState value); - - // optional uint32 number_of_deltas = 4; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.DelayBasedBweUpdates) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr bitrate_bps_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr detector_state_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps_; - int detector_state_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class RtpHeaderExtensionConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.RtpHeaderExtensionConfig) */ { - public: - 
RtpHeaderExtensionConfig(); - virtual ~RtpHeaderExtensionConfig(); - - RtpHeaderExtensionConfig(const RtpHeaderExtensionConfig& from); - RtpHeaderExtensionConfig(RtpHeaderExtensionConfig&& from) noexcept - : RtpHeaderExtensionConfig() { - *this = ::std::move(from); - } - - inline RtpHeaderExtensionConfig& operator=(const RtpHeaderExtensionConfig& from) { - CopyFrom(from); - return *this; - } - inline RtpHeaderExtensionConfig& operator=(RtpHeaderExtensionConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RtpHeaderExtensionConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RtpHeaderExtensionConfig* internal_default_instance() { - return reinterpret_cast( - &_RtpHeaderExtensionConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 15; - - friend void swap(RtpHeaderExtensionConfig& a, RtpHeaderExtensionConfig& b) { - a.Swap(&b); - } - inline void Swap(RtpHeaderExtensionConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RtpHeaderExtensionConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - RtpHeaderExtensionConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RtpHeaderExtensionConfig& from); - void MergeFrom(const RtpHeaderExtensionConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t 
ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(RtpHeaderExtensionConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.RtpHeaderExtensionConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTransmissionTimeOffsetIdFieldNumber = 1, - kAbsoluteSendTimeIdFieldNumber = 2, - kTransportSequenceNumberIdFieldNumber = 3, - kVideoRotationIdFieldNumber = 4, - kAudioLevelIdFieldNumber = 5, - }; - // optional int32 transmission_time_offset_id = 1; - bool has_transmission_time_offset_id() const; - void clear_transmission_time_offset_id(); - ::PROTOBUF_NAMESPACE_ID::int32 transmission_time_offset_id() const; - void set_transmission_time_offset_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 absolute_send_time_id = 2; - bool has_absolute_send_time_id() const; - void clear_absolute_send_time_id(); - ::PROTOBUF_NAMESPACE_ID::int32 absolute_send_time_id() const; - void 
set_absolute_send_time_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 transport_sequence_number_id = 3; - bool has_transport_sequence_number_id() const; - void clear_transport_sequence_number_id(); - ::PROTOBUF_NAMESPACE_ID::int32 transport_sequence_number_id() const; - void set_transport_sequence_number_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 video_rotation_id = 4; - bool has_video_rotation_id() const; - void clear_video_rotation_id(); - ::PROTOBUF_NAMESPACE_ID::int32 video_rotation_id() const; - void set_video_rotation_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 audio_level_id = 5; - bool has_audio_level_id() const; - void clear_audio_level_id(); - ::PROTOBUF_NAMESPACE_ID::int32 audio_level_id() const; - void set_audio_level_id(::PROTOBUF_NAMESPACE_ID::int32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.RtpHeaderExtensionConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int32 transmission_time_offset_id_; - ::PROTOBUF_NAMESPACE_ID::int32 absolute_send_time_id_; - ::PROTOBUF_NAMESPACE_ID::int32 transport_sequence_number_id_; - ::PROTOBUF_NAMESPACE_ID::int32 video_rotation_id_; - ::PROTOBUF_NAMESPACE_ID::int32 audio_level_id_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class VideoRecvStreamConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.VideoRecvStreamConfig) */ { - public: - VideoRecvStreamConfig(); - virtual ~VideoRecvStreamConfig(); - - VideoRecvStreamConfig(const VideoRecvStreamConfig& from); - VideoRecvStreamConfig(VideoRecvStreamConfig&& from) noexcept - : VideoRecvStreamConfig() { - *this 
= ::std::move(from); - } - - inline VideoRecvStreamConfig& operator=(const VideoRecvStreamConfig& from) { - CopyFrom(from); - return *this; - } - inline VideoRecvStreamConfig& operator=(VideoRecvStreamConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const VideoRecvStreamConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const VideoRecvStreamConfig* internal_default_instance() { - return reinterpret_cast( - &_VideoRecvStreamConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 16; - - friend void swap(VideoRecvStreamConfig& a, VideoRecvStreamConfig& b) { - a.Swap(&b); - } - inline void Swap(VideoRecvStreamConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline VideoRecvStreamConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - VideoRecvStreamConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const VideoRecvStreamConfig& from); - void MergeFrom(const VideoRecvStreamConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) 
final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(VideoRecvStreamConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.VideoRecvStreamConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kHeaderExtensionsFieldNumber = 5, - kTimestampMsFieldNumber = 1, - kRemoteSsrcFieldNumber = 2, - kLocalSsrcFieldNumber = 3, - kRtxSsrcFieldNumber = 4, - }; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - bool has_header_extensions() const; - void clear_header_extensions(); - const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions() const; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* release_header_extensions(); - ::webrtc::rtclog2::RtpHeaderExtensionConfig* mutable_header_extensions(); - void set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 remote_ssrc = 2; - bool has_remote_ssrc() const; - void clear_remote_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc() 
const; - void set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 local_ssrc = 3; - bool has_local_ssrc() const; - void clear_local_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc() const; - void set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 rtx_ssrc = 4; - bool has_rtx_ssrc() const; - void clear_rtx_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 rtx_ssrc() const; - void set_rtx_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.VideoRecvStreamConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 rtx_ssrc_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class VideoSendStreamConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.VideoSendStreamConfig) */ { - public: - VideoSendStreamConfig(); - virtual ~VideoSendStreamConfig(); - - VideoSendStreamConfig(const VideoSendStreamConfig& from); - VideoSendStreamConfig(VideoSendStreamConfig&& from) noexcept - : VideoSendStreamConfig() { - *this = ::std::move(from); - } - - inline VideoSendStreamConfig& operator=(const VideoSendStreamConfig& from) { - CopyFrom(from); - return *this; - } - inline VideoSendStreamConfig& operator=(VideoSendStreamConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - 
inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const VideoSendStreamConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const VideoSendStreamConfig* internal_default_instance() { - return reinterpret_cast( - &_VideoSendStreamConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 17; - - friend void swap(VideoSendStreamConfig& a, VideoSendStreamConfig& b) { - a.Swap(&b); - } - inline void Swap(VideoSendStreamConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline VideoSendStreamConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - VideoSendStreamConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const VideoSendStreamConfig& from); - void MergeFrom(const VideoSendStreamConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void 
InternalSwap(VideoSendStreamConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.VideoSendStreamConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kHeaderExtensionsFieldNumber = 4, - kTimestampMsFieldNumber = 1, - kSsrcFieldNumber = 2, - kRtxSsrcFieldNumber = 3, - }; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - bool has_header_extensions() const; - void clear_header_extensions(); - const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions() const; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* release_header_extensions(); - ::webrtc::rtclog2::RtpHeaderExtensionConfig* mutable_header_extensions(); - void set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 ssrc = 2; - bool has_ssrc() const; - void clear_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc() const; - void set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 rtx_ssrc = 3; - bool has_rtx_ssrc() const; - void clear_rtx_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 rtx_ssrc() const; - void set_rtx_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.VideoSendStreamConfig) - private: - class _Internal; - - 
::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 rtx_ssrc_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioRecvStreamConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.AudioRecvStreamConfig) */ { - public: - AudioRecvStreamConfig(); - virtual ~AudioRecvStreamConfig(); - - AudioRecvStreamConfig(const AudioRecvStreamConfig& from); - AudioRecvStreamConfig(AudioRecvStreamConfig&& from) noexcept - : AudioRecvStreamConfig() { - *this = ::std::move(from); - } - - inline AudioRecvStreamConfig& operator=(const AudioRecvStreamConfig& from) { - CopyFrom(from); - return *this; - } - inline AudioRecvStreamConfig& operator=(AudioRecvStreamConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioRecvStreamConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioRecvStreamConfig* internal_default_instance() { - return reinterpret_cast( - &_AudioRecvStreamConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 18; - - friend void swap(AudioRecvStreamConfig& a, AudioRecvStreamConfig& b) { - a.Swap(&b); - } - inline void 
Swap(AudioRecvStreamConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioRecvStreamConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioRecvStreamConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioRecvStreamConfig& from); - void MergeFrom(const AudioRecvStreamConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioRecvStreamConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.AudioRecvStreamConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kHeaderExtensionsFieldNumber = 5, - 
kTimestampMsFieldNumber = 1, - kRemoteSsrcFieldNumber = 2, - kLocalSsrcFieldNumber = 3, - }; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; - bool has_header_extensions() const; - void clear_header_extensions(); - const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions() const; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* release_header_extensions(); - ::webrtc::rtclog2::RtpHeaderExtensionConfig* mutable_header_extensions(); - void set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 remote_ssrc = 2; - bool has_remote_ssrc() const; - void clear_remote_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc() const; - void set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 local_ssrc = 3; - bool has_local_ssrc() const; - void clear_local_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc() const; - void set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.AudioRecvStreamConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 remote_ssrc_; - ::PROTOBUF_NAMESPACE_ID::uint32 local_ssrc_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioSendStreamConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* 
@@protoc_insertion_point(class_definition:webrtc.rtclog2.AudioSendStreamConfig) */ { - public: - AudioSendStreamConfig(); - virtual ~AudioSendStreamConfig(); - - AudioSendStreamConfig(const AudioSendStreamConfig& from); - AudioSendStreamConfig(AudioSendStreamConfig&& from) noexcept - : AudioSendStreamConfig() { - *this = ::std::move(from); - } - - inline AudioSendStreamConfig& operator=(const AudioSendStreamConfig& from) { - CopyFrom(from); - return *this; - } - inline AudioSendStreamConfig& operator=(AudioSendStreamConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioSendStreamConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioSendStreamConfig* internal_default_instance() { - return reinterpret_cast( - &_AudioSendStreamConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 19; - - friend void swap(AudioSendStreamConfig& a, AudioSendStreamConfig& b) { - a.Swap(&b); - } - inline void Swap(AudioSendStreamConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioSendStreamConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioSendStreamConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioSendStreamConfig& from); - void MergeFrom(const AudioSendStreamConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool 
IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioSendStreamConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.AudioSendStreamConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kHeaderExtensionsFieldNumber = 4, - kTimestampMsFieldNumber = 1, - kSsrcFieldNumber = 2, - }; - // optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; - bool has_header_extensions() const; - void clear_header_extensions(); - const ::webrtc::rtclog2::RtpHeaderExtensionConfig& header_extensions() const; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* release_header_extensions(); - ::webrtc::rtclog2::RtpHeaderExtensionConfig* mutable_header_extensions(); - void set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - 
::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 ssrc = 2; - bool has_ssrc() const; - void clear_ssrc(); - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc() const; - void set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.AudioSendStreamConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 ssrc_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class AudioNetworkAdaptations : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.AudioNetworkAdaptations) */ { - public: - AudioNetworkAdaptations(); - virtual ~AudioNetworkAdaptations(); - - AudioNetworkAdaptations(const AudioNetworkAdaptations& from); - AudioNetworkAdaptations(AudioNetworkAdaptations&& from) noexcept - : AudioNetworkAdaptations() { - *this = ::std::move(from); - } - - inline AudioNetworkAdaptations& operator=(const AudioNetworkAdaptations& from) { - CopyFrom(from); - return *this; - } - inline AudioNetworkAdaptations& operator=(AudioNetworkAdaptations&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AudioNetworkAdaptations& 
default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AudioNetworkAdaptations* internal_default_instance() { - return reinterpret_cast( - &_AudioNetworkAdaptations_default_instance_); - } - static constexpr int kIndexInFileMessages = - 20; - - friend void swap(AudioNetworkAdaptations& a, AudioNetworkAdaptations& b) { - a.Swap(&b); - } - inline void Swap(AudioNetworkAdaptations* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AudioNetworkAdaptations* New() const final { - return CreateMaybeMessage(nullptr); - } - - AudioNetworkAdaptations* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AudioNetworkAdaptations& from); - void MergeFrom(const AudioNetworkAdaptations& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AudioNetworkAdaptations* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.AudioNetworkAdaptations"; - } - private: - 
inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 101, - kBitrateBpsDeltasFieldNumber = 102, - kFrameLengthMsDeltasFieldNumber = 103, - kUplinkPacketLossFractionDeltasFieldNumber = 104, - kEnableFecDeltasFieldNumber = 105, - kEnableDtxDeltasFieldNumber = 106, - kNumChannelsDeltasFieldNumber = 107, - kTimestampMsFieldNumber = 1, - kBitrateBpsFieldNumber = 2, - kFrameLengthMsFieldNumber = 3, - kUplinkPacketLossFractionFieldNumber = 4, - kEnableFecFieldNumber = 5, - kEnableDtxFieldNumber = 6, - kNumChannelsFieldNumber = 7, - kNumberOfDeltasFieldNumber = 8, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes bitrate_bps_deltas = 102; - bool has_bitrate_bps_deltas() const; - void clear_bitrate_bps_deltas(); - const std::string& bitrate_bps_deltas() const; - void set_bitrate_bps_deltas(const std::string& value); - void set_bitrate_bps_deltas(std::string&& value); - void set_bitrate_bps_deltas(const char* value); - void set_bitrate_bps_deltas(const void* value, size_t size); - std::string* mutable_bitrate_bps_deltas(); - std::string* release_bitrate_bps_deltas(); - void 
set_allocated_bitrate_bps_deltas(std::string* bitrate_bps_deltas); - - // optional bytes frame_length_ms_deltas = 103; - bool has_frame_length_ms_deltas() const; - void clear_frame_length_ms_deltas(); - const std::string& frame_length_ms_deltas() const; - void set_frame_length_ms_deltas(const std::string& value); - void set_frame_length_ms_deltas(std::string&& value); - void set_frame_length_ms_deltas(const char* value); - void set_frame_length_ms_deltas(const void* value, size_t size); - std::string* mutable_frame_length_ms_deltas(); - std::string* release_frame_length_ms_deltas(); - void set_allocated_frame_length_ms_deltas(std::string* frame_length_ms_deltas); - - // optional bytes uplink_packet_loss_fraction_deltas = 104; - bool has_uplink_packet_loss_fraction_deltas() const; - void clear_uplink_packet_loss_fraction_deltas(); - const std::string& uplink_packet_loss_fraction_deltas() const; - void set_uplink_packet_loss_fraction_deltas(const std::string& value); - void set_uplink_packet_loss_fraction_deltas(std::string&& value); - void set_uplink_packet_loss_fraction_deltas(const char* value); - void set_uplink_packet_loss_fraction_deltas(const void* value, size_t size); - std::string* mutable_uplink_packet_loss_fraction_deltas(); - std::string* release_uplink_packet_loss_fraction_deltas(); - void set_allocated_uplink_packet_loss_fraction_deltas(std::string* uplink_packet_loss_fraction_deltas); - - // optional bytes enable_fec_deltas = 105; - bool has_enable_fec_deltas() const; - void clear_enable_fec_deltas(); - const std::string& enable_fec_deltas() const; - void set_enable_fec_deltas(const std::string& value); - void set_enable_fec_deltas(std::string&& value); - void set_enable_fec_deltas(const char* value); - void set_enable_fec_deltas(const void* value, size_t size); - std::string* mutable_enable_fec_deltas(); - std::string* release_enable_fec_deltas(); - void set_allocated_enable_fec_deltas(std::string* enable_fec_deltas); - - // optional bytes 
enable_dtx_deltas = 106; - bool has_enable_dtx_deltas() const; - void clear_enable_dtx_deltas(); - const std::string& enable_dtx_deltas() const; - void set_enable_dtx_deltas(const std::string& value); - void set_enable_dtx_deltas(std::string&& value); - void set_enable_dtx_deltas(const char* value); - void set_enable_dtx_deltas(const void* value, size_t size); - std::string* mutable_enable_dtx_deltas(); - std::string* release_enable_dtx_deltas(); - void set_allocated_enable_dtx_deltas(std::string* enable_dtx_deltas); - - // optional bytes num_channels_deltas = 107; - bool has_num_channels_deltas() const; - void clear_num_channels_deltas(); - const std::string& num_channels_deltas() const; - void set_num_channels_deltas(const std::string& value); - void set_num_channels_deltas(std::string&& value); - void set_num_channels_deltas(const char* value); - void set_num_channels_deltas(const void* value, size_t size); - std::string* mutable_num_channels_deltas(); - std::string* release_num_channels_deltas(); - void set_allocated_num_channels_deltas(std::string* num_channels_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional int32 bitrate_bps = 2; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional int32 frame_length_ms = 3; - bool has_frame_length_ms() const; - void clear_frame_length_ms(); - ::PROTOBUF_NAMESPACE_ID::int32 frame_length_ms() const; - void set_frame_length_ms(::PROTOBUF_NAMESPACE_ID::int32 value); - - // optional uint32 uplink_packet_loss_fraction = 4; - bool has_uplink_packet_loss_fraction() const; - void clear_uplink_packet_loss_fraction(); - ::PROTOBUF_NAMESPACE_ID::uint32 uplink_packet_loss_fraction() const; - void 
set_uplink_packet_loss_fraction(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional bool enable_fec = 5; - bool has_enable_fec() const; - void clear_enable_fec(); - bool enable_fec() const; - void set_enable_fec(bool value); - - // optional bool enable_dtx = 6; - bool has_enable_dtx() const; - void clear_enable_dtx(); - bool enable_dtx() const; - void set_enable_dtx(bool value); - - // optional uint32 num_channels = 7; - bool has_num_channels() const; - void clear_num_channels(); - ::PROTOBUF_NAMESPACE_ID::uint32 num_channels() const; - void set_num_channels(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 number_of_deltas = 8; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.AudioNetworkAdaptations) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr bitrate_bps_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr frame_length_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr uplink_packet_loss_fraction_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr enable_fec_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr enable_dtx_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr num_channels_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::int32 bitrate_bps_; - ::PROTOBUF_NAMESPACE_ID::int32 frame_length_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 uplink_packet_loss_fraction_; - bool enable_fec_; - bool enable_dtx_; - 
::PROTOBUF_NAMESPACE_ID::uint32 num_channels_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class BweProbeCluster : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.BweProbeCluster) */ { - public: - BweProbeCluster(); - virtual ~BweProbeCluster(); - - BweProbeCluster(const BweProbeCluster& from); - BweProbeCluster(BweProbeCluster&& from) noexcept - : BweProbeCluster() { - *this = ::std::move(from); - } - - inline BweProbeCluster& operator=(const BweProbeCluster& from) { - CopyFrom(from); - return *this; - } - inline BweProbeCluster& operator=(BweProbeCluster&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const BweProbeCluster& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const BweProbeCluster* internal_default_instance() { - return reinterpret_cast( - &_BweProbeCluster_default_instance_); - } - static constexpr int kIndexInFileMessages = - 21; - - friend void swap(BweProbeCluster& a, BweProbeCluster& b) { - a.Swap(&b); - } - inline void Swap(BweProbeCluster* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline BweProbeCluster* New() const final { - return CreateMaybeMessage(nullptr); - } - - BweProbeCluster* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& 
from) - final; - void CopyFrom(const BweProbeCluster& from); - void MergeFrom(const BweProbeCluster& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(BweProbeCluster* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.BweProbeCluster"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kIdFieldNumber = 2, - kBitrateBpsFieldNumber = 3, - kMinPacketsFieldNumber = 4, - kMinBytesFieldNumber = 5, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 id = 2; - bool has_id() const; - void clear_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 id() const; - void set_id(::PROTOBUF_NAMESPACE_ID::uint32 
value); - - // optional uint32 bitrate_bps = 3; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 min_packets = 4; - bool has_min_packets() const; - void clear_min_packets(); - ::PROTOBUF_NAMESPACE_ID::uint32 min_packets() const; - void set_min_packets(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 min_bytes = 5; - bool has_min_bytes() const; - void clear_min_bytes(); - ::PROTOBUF_NAMESPACE_ID::uint32 min_bytes() const; - void set_min_bytes(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.BweProbeCluster) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 id_; - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps_; - ::PROTOBUF_NAMESPACE_ID::uint32 min_packets_; - ::PROTOBUF_NAMESPACE_ID::uint32 min_bytes_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class BweProbeResultSuccess : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.BweProbeResultSuccess) */ { - public: - BweProbeResultSuccess(); - virtual ~BweProbeResultSuccess(); - - BweProbeResultSuccess(const BweProbeResultSuccess& from); - BweProbeResultSuccess(BweProbeResultSuccess&& from) noexcept - : BweProbeResultSuccess() { - *this = ::std::move(from); - } - - inline BweProbeResultSuccess& operator=(const BweProbeResultSuccess& from) { - CopyFrom(from); - return *this; - } - inline BweProbeResultSuccess& operator=(BweProbeResultSuccess&& from) noexcept { - if (GetArenaNoVirtual() 
== from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const BweProbeResultSuccess& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const BweProbeResultSuccess* internal_default_instance() { - return reinterpret_cast( - &_BweProbeResultSuccess_default_instance_); - } - static constexpr int kIndexInFileMessages = - 22; - - friend void swap(BweProbeResultSuccess& a, BweProbeResultSuccess& b) { - a.Swap(&b); - } - inline void Swap(BweProbeResultSuccess* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline BweProbeResultSuccess* New() const final { - return CreateMaybeMessage(nullptr); - } - - BweProbeResultSuccess* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const BweProbeResultSuccess& from); - void MergeFrom(const BweProbeResultSuccess& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return 
_cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(BweProbeResultSuccess* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.BweProbeResultSuccess"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kIdFieldNumber = 2, - kBitrateBpsFieldNumber = 3, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 id = 2; - bool has_id() const; - void clear_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 id() const; - void set_id(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 bitrate_bps = 3; - bool has_bitrate_bps() const; - void clear_bitrate_bps(); - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps() const; - void set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.BweProbeResultSuccess) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 id_; - ::PROTOBUF_NAMESPACE_ID::uint32 bitrate_bps_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// 
------------------------------------------------------------------- - -class BweProbeResultFailure : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.BweProbeResultFailure) */ { - public: - BweProbeResultFailure(); - virtual ~BweProbeResultFailure(); - - BweProbeResultFailure(const BweProbeResultFailure& from); - BweProbeResultFailure(BweProbeResultFailure&& from) noexcept - : BweProbeResultFailure() { - *this = ::std::move(from); - } - - inline BweProbeResultFailure& operator=(const BweProbeResultFailure& from) { - CopyFrom(from); - return *this; - } - inline BweProbeResultFailure& operator=(BweProbeResultFailure&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const BweProbeResultFailure& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const BweProbeResultFailure* internal_default_instance() { - return reinterpret_cast( - &_BweProbeResultFailure_default_instance_); - } - static constexpr int kIndexInFileMessages = - 23; - - friend void swap(BweProbeResultFailure& a, BweProbeResultFailure& b) { - a.Swap(&b); - } - inline void Swap(BweProbeResultFailure* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline BweProbeResultFailure* New() const final { - return CreateMaybeMessage(nullptr); - } - - BweProbeResultFailure* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const 
BweProbeResultFailure& from); - void MergeFrom(const BweProbeResultFailure& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(BweProbeResultFailure* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.BweProbeResultFailure"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef BweProbeResultFailure_FailureReason FailureReason; - static constexpr FailureReason UNKNOWN = - BweProbeResultFailure_FailureReason_UNKNOWN; - static constexpr FailureReason INVALID_SEND_RECEIVE_INTERVAL = - BweProbeResultFailure_FailureReason_INVALID_SEND_RECEIVE_INTERVAL; - static constexpr FailureReason INVALID_SEND_RECEIVE_RATIO = - BweProbeResultFailure_FailureReason_INVALID_SEND_RECEIVE_RATIO; - static constexpr FailureReason TIMEOUT = - BweProbeResultFailure_FailureReason_TIMEOUT; - static inline bool FailureReason_IsValid(int value) { - return BweProbeResultFailure_FailureReason_IsValid(value); - } - static 
constexpr FailureReason FailureReason_MIN = - BweProbeResultFailure_FailureReason_FailureReason_MIN; - static constexpr FailureReason FailureReason_MAX = - BweProbeResultFailure_FailureReason_FailureReason_MAX; - static constexpr int FailureReason_ARRAYSIZE = - BweProbeResultFailure_FailureReason_FailureReason_ARRAYSIZE; - template - static inline const std::string& FailureReason_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function FailureReason_Name."); - return BweProbeResultFailure_FailureReason_Name(enum_t_value); - } - static inline bool FailureReason_Parse(const std::string& name, - FailureReason* value) { - return BweProbeResultFailure_FailureReason_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kIdFieldNumber = 2, - kFailureFieldNumber = 3, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 id = 2; - bool has_id() const; - void clear_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 id() const; - void set_id(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional .webrtc.rtclog2.BweProbeResultFailure.FailureReason failure = 3; - bool has_failure() const; - void clear_failure(); - ::webrtc::rtclog2::BweProbeResultFailure_FailureReason failure() const; - void set_failure(::webrtc::rtclog2::BweProbeResultFailure_FailureReason value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.BweProbeResultFailure) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 
timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 id_; - int failure_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class AlrState : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.AlrState) */ { - public: - AlrState(); - virtual ~AlrState(); - - AlrState(const AlrState& from); - AlrState(AlrState&& from) noexcept - : AlrState() { - *this = ::std::move(from); - } - - inline AlrState& operator=(const AlrState& from) { - CopyFrom(from); - return *this; - } - inline AlrState& operator=(AlrState&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const AlrState& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AlrState* internal_default_instance() { - return reinterpret_cast( - &_AlrState_default_instance_); - } - static constexpr int kIndexInFileMessages = - 24; - - friend void swap(AlrState& a, AlrState& b) { - a.Swap(&b); - } - inline void Swap(AlrState* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline AlrState* New() const final { - return CreateMaybeMessage(nullptr); - } - - AlrState* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const AlrState& from); - void MergeFrom(const AlrState& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() 
const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(AlrState* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.AlrState"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kInAlrFieldNumber = 2, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional bool in_alr = 2; - bool has_in_alr() const; - void clear_in_alr(); - bool in_alr() const; - void set_in_alr(bool value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.AlrState) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; 
- ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - bool in_alr_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class IceCandidatePairConfig : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.IceCandidatePairConfig) */ { - public: - IceCandidatePairConfig(); - virtual ~IceCandidatePairConfig(); - - IceCandidatePairConfig(const IceCandidatePairConfig& from); - IceCandidatePairConfig(IceCandidatePairConfig&& from) noexcept - : IceCandidatePairConfig() { - *this = ::std::move(from); - } - - inline IceCandidatePairConfig& operator=(const IceCandidatePairConfig& from) { - CopyFrom(from); - return *this; - } - inline IceCandidatePairConfig& operator=(IceCandidatePairConfig&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const IceCandidatePairConfig& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const IceCandidatePairConfig* internal_default_instance() { - return reinterpret_cast( - &_IceCandidatePairConfig_default_instance_); - } - static constexpr int kIndexInFileMessages = - 25; - - friend void swap(IceCandidatePairConfig& a, IceCandidatePairConfig& b) { - a.Swap(&b); - } - inline void Swap(IceCandidatePairConfig* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline IceCandidatePairConfig* New() const final { - return CreateMaybeMessage(nullptr); - } - - IceCandidatePairConfig* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - 
return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const IceCandidatePairConfig& from); - void MergeFrom(const IceCandidatePairConfig& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(IceCandidatePairConfig* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.IceCandidatePairConfig"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfigType; - static constexpr IceCandidatePairConfigType UNKNOWN_CONFIG_TYPE = - IceCandidatePairConfig_IceCandidatePairConfigType_UNKNOWN_CONFIG_TYPE; - static constexpr IceCandidatePairConfigType ADDED = - IceCandidatePairConfig_IceCandidatePairConfigType_ADDED; - static constexpr IceCandidatePairConfigType UPDATED = - IceCandidatePairConfig_IceCandidatePairConfigType_UPDATED; - static constexpr 
IceCandidatePairConfigType DESTROYED = - IceCandidatePairConfig_IceCandidatePairConfigType_DESTROYED; - static constexpr IceCandidatePairConfigType SELECTED = - IceCandidatePairConfig_IceCandidatePairConfigType_SELECTED; - static inline bool IceCandidatePairConfigType_IsValid(int value) { - return IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(value); - } - static constexpr IceCandidatePairConfigType IceCandidatePairConfigType_MIN = - IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MIN; - static constexpr IceCandidatePairConfigType IceCandidatePairConfigType_MAX = - IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_MAX; - static constexpr int IceCandidatePairConfigType_ARRAYSIZE = - IceCandidatePairConfig_IceCandidatePairConfigType_IceCandidatePairConfigType_ARRAYSIZE; - template - static inline const std::string& IceCandidatePairConfigType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairConfigType_Name."); - return IceCandidatePairConfig_IceCandidatePairConfigType_Name(enum_t_value); - } - static inline bool IceCandidatePairConfigType_Parse(const std::string& name, - IceCandidatePairConfigType* value) { - return IceCandidatePairConfig_IceCandidatePairConfigType_Parse(name, value); - } - - typedef IceCandidatePairConfig_IceCandidateType IceCandidateType; - static constexpr IceCandidateType UNKNOWN_CANDIDATE_TYPE = - IceCandidatePairConfig_IceCandidateType_UNKNOWN_CANDIDATE_TYPE; - static constexpr IceCandidateType LOCAL = - IceCandidatePairConfig_IceCandidateType_LOCAL; - static constexpr IceCandidateType STUN = - IceCandidatePairConfig_IceCandidateType_STUN; - static constexpr IceCandidateType PRFLX = - IceCandidatePairConfig_IceCandidateType_PRFLX; - static constexpr IceCandidateType RELAY = - IceCandidatePairConfig_IceCandidateType_RELAY; - static inline bool IceCandidateType_IsValid(int value) { - 
return IceCandidatePairConfig_IceCandidateType_IsValid(value); - } - static constexpr IceCandidateType IceCandidateType_MIN = - IceCandidatePairConfig_IceCandidateType_IceCandidateType_MIN; - static constexpr IceCandidateType IceCandidateType_MAX = - IceCandidatePairConfig_IceCandidateType_IceCandidateType_MAX; - static constexpr int IceCandidateType_ARRAYSIZE = - IceCandidatePairConfig_IceCandidateType_IceCandidateType_ARRAYSIZE; - template - static inline const std::string& IceCandidateType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidateType_Name."); - return IceCandidatePairConfig_IceCandidateType_Name(enum_t_value); - } - static inline bool IceCandidateType_Parse(const std::string& name, - IceCandidateType* value) { - return IceCandidatePairConfig_IceCandidateType_Parse(name, value); - } - - typedef IceCandidatePairConfig_Protocol Protocol; - static constexpr Protocol UNKNOWN_PROTOCOL = - IceCandidatePairConfig_Protocol_UNKNOWN_PROTOCOL; - static constexpr Protocol UDP = - IceCandidatePairConfig_Protocol_UDP; - static constexpr Protocol TCP = - IceCandidatePairConfig_Protocol_TCP; - static constexpr Protocol SSLTCP = - IceCandidatePairConfig_Protocol_SSLTCP; - static constexpr Protocol TLS = - IceCandidatePairConfig_Protocol_TLS; - static inline bool Protocol_IsValid(int value) { - return IceCandidatePairConfig_Protocol_IsValid(value); - } - static constexpr Protocol Protocol_MIN = - IceCandidatePairConfig_Protocol_Protocol_MIN; - static constexpr Protocol Protocol_MAX = - IceCandidatePairConfig_Protocol_Protocol_MAX; - static constexpr int Protocol_ARRAYSIZE = - IceCandidatePairConfig_Protocol_Protocol_ARRAYSIZE; - template - static inline const std::string& Protocol_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function Protocol_Name."); - return 
IceCandidatePairConfig_Protocol_Name(enum_t_value); - } - static inline bool Protocol_Parse(const std::string& name, - Protocol* value) { - return IceCandidatePairConfig_Protocol_Parse(name, value); - } - - typedef IceCandidatePairConfig_AddressFamily AddressFamily; - static constexpr AddressFamily UNKNOWN_ADDRESS_FAMILY = - IceCandidatePairConfig_AddressFamily_UNKNOWN_ADDRESS_FAMILY; - static constexpr AddressFamily IPV4 = - IceCandidatePairConfig_AddressFamily_IPV4; - static constexpr AddressFamily IPV6 = - IceCandidatePairConfig_AddressFamily_IPV6; - static inline bool AddressFamily_IsValid(int value) { - return IceCandidatePairConfig_AddressFamily_IsValid(value); - } - static constexpr AddressFamily AddressFamily_MIN = - IceCandidatePairConfig_AddressFamily_AddressFamily_MIN; - static constexpr AddressFamily AddressFamily_MAX = - IceCandidatePairConfig_AddressFamily_AddressFamily_MAX; - static constexpr int AddressFamily_ARRAYSIZE = - IceCandidatePairConfig_AddressFamily_AddressFamily_ARRAYSIZE; - template - static inline const std::string& AddressFamily_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function AddressFamily_Name."); - return IceCandidatePairConfig_AddressFamily_Name(enum_t_value); - } - static inline bool AddressFamily_Parse(const std::string& name, - AddressFamily* value) { - return IceCandidatePairConfig_AddressFamily_Parse(name, value); - } - - typedef IceCandidatePairConfig_NetworkType NetworkType; - static constexpr NetworkType UNKNOWN_NETWORK_TYPE = - IceCandidatePairConfig_NetworkType_UNKNOWN_NETWORK_TYPE; - static constexpr NetworkType ETHERNET = - IceCandidatePairConfig_NetworkType_ETHERNET; - static constexpr NetworkType WIFI = - IceCandidatePairConfig_NetworkType_WIFI; - static constexpr NetworkType CELLULAR = - IceCandidatePairConfig_NetworkType_CELLULAR; - static constexpr NetworkType VPN = - IceCandidatePairConfig_NetworkType_VPN; - static constexpr 
NetworkType LOOPBACK = - IceCandidatePairConfig_NetworkType_LOOPBACK; - static inline bool NetworkType_IsValid(int value) { - return IceCandidatePairConfig_NetworkType_IsValid(value); - } - static constexpr NetworkType NetworkType_MIN = - IceCandidatePairConfig_NetworkType_NetworkType_MIN; - static constexpr NetworkType NetworkType_MAX = - IceCandidatePairConfig_NetworkType_NetworkType_MAX; - static constexpr int NetworkType_ARRAYSIZE = - IceCandidatePairConfig_NetworkType_NetworkType_ARRAYSIZE; - template - static inline const std::string& NetworkType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function NetworkType_Name."); - return IceCandidatePairConfig_NetworkType_Name(enum_t_value); - } - static inline bool NetworkType_Parse(const std::string& name, - NetworkType* value) { - return IceCandidatePairConfig_NetworkType_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kConfigTypeFieldNumber = 2, - kCandidatePairIdFieldNumber = 3, - kLocalCandidateTypeFieldNumber = 4, - kLocalRelayProtocolFieldNumber = 5, - kLocalNetworkTypeFieldNumber = 6, - kLocalAddressFamilyFieldNumber = 7, - kRemoteCandidateTypeFieldNumber = 8, - kRemoteAddressFamilyFieldNumber = 9, - kCandidatePairProtocolFieldNumber = 10, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 2; - bool has_config_type() const; - void clear_config_type(); - ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType config_type() const; - void set_config_type(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType value); - - // optional uint32 
candidate_pair_id = 3; - bool has_candidate_pair_id() const; - void clear_candidate_pair_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id() const; - void set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType local_candidate_type = 4; - bool has_local_candidate_type() const; - void clear_local_candidate_type(); - ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType local_candidate_type() const; - void set_local_candidate_type(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol local_relay_protocol = 5; - bool has_local_relay_protocol() const; - void clear_local_relay_protocol(); - ::webrtc::rtclog2::IceCandidatePairConfig_Protocol local_relay_protocol() const; - void set_local_relay_protocol(::webrtc::rtclog2::IceCandidatePairConfig_Protocol value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.NetworkType local_network_type = 6; - bool has_local_network_type() const; - void clear_local_network_type(); - ::webrtc::rtclog2::IceCandidatePairConfig_NetworkType local_network_type() const; - void set_local_network_type(::webrtc::rtclog2::IceCandidatePairConfig_NetworkType value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily local_address_family = 7; - bool has_local_address_family() const; - void clear_local_address_family(); - ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily local_address_family() const; - void set_local_address_family(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 8; - bool has_remote_candidate_type() const; - void clear_remote_candidate_type(); - ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType remote_candidate_type() const; - void set_remote_candidate_type(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType 
value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily remote_address_family = 9; - bool has_remote_address_family() const; - void clear_remote_address_family(); - ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily remote_address_family() const; - void set_remote_address_family(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily value); - - // optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol candidate_pair_protocol = 10; - bool has_candidate_pair_protocol() const; - void clear_candidate_pair_protocol(); - ::webrtc::rtclog2::IceCandidatePairConfig_Protocol candidate_pair_protocol() const; - void set_candidate_pair_protocol(::webrtc::rtclog2::IceCandidatePairConfig_Protocol value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.IceCandidatePairConfig) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - int config_type_; - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id_; - int local_candidate_type_; - int local_relay_protocol_; - int local_network_type_; - int local_address_family_; - int remote_candidate_type_; - int remote_address_family_; - int candidate_pair_protocol_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class IceCandidatePairEvent : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.IceCandidatePairEvent) */ { - public: - IceCandidatePairEvent(); - virtual ~IceCandidatePairEvent(); - - IceCandidatePairEvent(const IceCandidatePairEvent& from); - IceCandidatePairEvent(IceCandidatePairEvent&& from) noexcept - : IceCandidatePairEvent() { - *this = ::std::move(from); - } - - inline IceCandidatePairEvent& 
operator=(const IceCandidatePairEvent& from) { - CopyFrom(from); - return *this; - } - inline IceCandidatePairEvent& operator=(IceCandidatePairEvent&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const IceCandidatePairEvent& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const IceCandidatePairEvent* internal_default_instance() { - return reinterpret_cast( - &_IceCandidatePairEvent_default_instance_); - } - static constexpr int kIndexInFileMessages = - 26; - - friend void swap(IceCandidatePairEvent& a, IceCandidatePairEvent& b) { - a.Swap(&b); - } - inline void Swap(IceCandidatePairEvent* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline IceCandidatePairEvent* New() const final { - return CreateMaybeMessage(nullptr); - } - - IceCandidatePairEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const IceCandidatePairEvent& from); - void MergeFrom(const IceCandidatePairEvent& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - 
void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(IceCandidatePairEvent* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.IceCandidatePairEvent"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEventType; - static constexpr IceCandidatePairEventType UNKNOWN_CHECK_TYPE = - IceCandidatePairEvent_IceCandidatePairEventType_UNKNOWN_CHECK_TYPE; - static constexpr IceCandidatePairEventType CHECK_SENT = - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_SENT; - static constexpr IceCandidatePairEventType CHECK_RECEIVED = - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RECEIVED; - static constexpr IceCandidatePairEventType CHECK_RESPONSE_SENT = - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_SENT; - static constexpr IceCandidatePairEventType CHECK_RESPONSE_RECEIVED = - IceCandidatePairEvent_IceCandidatePairEventType_CHECK_RESPONSE_RECEIVED; - static inline bool IceCandidatePairEventType_IsValid(int value) { - return IceCandidatePairEvent_IceCandidatePairEventType_IsValid(value); - } - static constexpr IceCandidatePairEventType IceCandidatePairEventType_MIN = - IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MIN; - static constexpr IceCandidatePairEventType IceCandidatePairEventType_MAX = - 
IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_MAX; - static constexpr int IceCandidatePairEventType_ARRAYSIZE = - IceCandidatePairEvent_IceCandidatePairEventType_IceCandidatePairEventType_ARRAYSIZE; - template - static inline const std::string& IceCandidatePairEventType_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function IceCandidatePairEventType_Name."); - return IceCandidatePairEvent_IceCandidatePairEventType_Name(enum_t_value); - } - static inline bool IceCandidatePairEventType_Parse(const std::string& name, - IceCandidatePairEventType* value) { - return IceCandidatePairEvent_IceCandidatePairEventType_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kEventTypeFieldNumber = 2, - kCandidatePairIdFieldNumber = 3, - kTransactionIdFieldNumber = 4, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional .webrtc.rtclog2.IceCandidatePairEvent.IceCandidatePairEventType event_type = 2; - bool has_event_type() const; - void clear_event_type(); - ::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType event_type() const; - void set_event_type(::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType value); - - // optional uint32 candidate_pair_id = 3; - bool has_candidate_pair_id() const; - void clear_candidate_pair_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id() const; - void set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 transaction_id = 4; - bool has_transaction_id() const; - void clear_transaction_id(); - ::PROTOBUF_NAMESPACE_ID::uint32 transaction_id() const; - void set_transaction_id(::PROTOBUF_NAMESPACE_ID::uint32 
value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.IceCandidatePairEvent) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - int event_type_; - ::PROTOBUF_NAMESPACE_ID::uint32 candidate_pair_id_; - ::PROTOBUF_NAMESPACE_ID::uint32 transaction_id_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class DtlsTransportStateEvent : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.DtlsTransportStateEvent) */ { - public: - DtlsTransportStateEvent(); - virtual ~DtlsTransportStateEvent(); - - DtlsTransportStateEvent(const DtlsTransportStateEvent& from); - DtlsTransportStateEvent(DtlsTransportStateEvent&& from) noexcept - : DtlsTransportStateEvent() { - *this = ::std::move(from); - } - - inline DtlsTransportStateEvent& operator=(const DtlsTransportStateEvent& from) { - CopyFrom(from); - return *this; - } - inline DtlsTransportStateEvent& operator=(DtlsTransportStateEvent&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const DtlsTransportStateEvent& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DtlsTransportStateEvent* internal_default_instance() { - return reinterpret_cast( - &_DtlsTransportStateEvent_default_instance_); - } - static constexpr int kIndexInFileMessages = 
- 27; - - friend void swap(DtlsTransportStateEvent& a, DtlsTransportStateEvent& b) { - a.Swap(&b); - } - inline void Swap(DtlsTransportStateEvent* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline DtlsTransportStateEvent* New() const final { - return CreateMaybeMessage(nullptr); - } - - DtlsTransportStateEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const DtlsTransportStateEvent& from); - void MergeFrom(const DtlsTransportStateEvent& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(DtlsTransportStateEvent* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.DtlsTransportStateEvent"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - typedef 
DtlsTransportStateEvent_DtlsTransportState DtlsTransportState; - static constexpr DtlsTransportState UNKNOWN_DTLS_TRANSPORT_STATE = - DtlsTransportStateEvent_DtlsTransportState_UNKNOWN_DTLS_TRANSPORT_STATE; - static constexpr DtlsTransportState DTLS_TRANSPORT_NEW = - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_NEW; - static constexpr DtlsTransportState DTLS_TRANSPORT_CONNECTING = - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_CONNECTING; - static constexpr DtlsTransportState DTLS_TRANSPORT_CONNECTED = - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_CONNECTED; - static constexpr DtlsTransportState DTLS_TRANSPORT_CLOSED = - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_CLOSED; - static constexpr DtlsTransportState DTLS_TRANSPORT_FAILED = - DtlsTransportStateEvent_DtlsTransportState_DTLS_TRANSPORT_FAILED; - static inline bool DtlsTransportState_IsValid(int value) { - return DtlsTransportStateEvent_DtlsTransportState_IsValid(value); - } - static constexpr DtlsTransportState DtlsTransportState_MIN = - DtlsTransportStateEvent_DtlsTransportState_DtlsTransportState_MIN; - static constexpr DtlsTransportState DtlsTransportState_MAX = - DtlsTransportStateEvent_DtlsTransportState_DtlsTransportState_MAX; - static constexpr int DtlsTransportState_ARRAYSIZE = - DtlsTransportStateEvent_DtlsTransportState_DtlsTransportState_ARRAYSIZE; - template - static inline const std::string& DtlsTransportState_Name(T enum_t_value) { - static_assert(::std::is_same::value || - ::std::is_integral::value, - "Incorrect type passed to function DtlsTransportState_Name."); - return DtlsTransportStateEvent_DtlsTransportState_Name(enum_t_value); - } - static inline bool DtlsTransportState_Parse(const std::string& name, - DtlsTransportState* value) { - return DtlsTransportStateEvent_DtlsTransportState_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - 
kDtlsTransportStateFieldNumber = 2, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional .webrtc.rtclog2.DtlsTransportStateEvent.DtlsTransportState dtls_transport_state = 2; - bool has_dtls_transport_state() const; - void clear_dtls_transport_state(); - ::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState dtls_transport_state() const; - void set_dtls_transport_state(::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.DtlsTransportStateEvent) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - int dtls_transport_state_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class DtlsWritableState : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.DtlsWritableState) */ { - public: - DtlsWritableState(); - virtual ~DtlsWritableState(); - - DtlsWritableState(const DtlsWritableState& from); - DtlsWritableState(DtlsWritableState&& from) noexcept - : DtlsWritableState() { - *this = ::std::move(from); - } - - inline DtlsWritableState& operator=(const DtlsWritableState& from) { - CopyFrom(from); - return *this; - } - inline DtlsWritableState& operator=(DtlsWritableState&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return 
_internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const DtlsWritableState& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DtlsWritableState* internal_default_instance() { - return reinterpret_cast( - &_DtlsWritableState_default_instance_); - } - static constexpr int kIndexInFileMessages = - 28; - - friend void swap(DtlsWritableState& a, DtlsWritableState& b) { - a.Swap(&b); - } - inline void Swap(DtlsWritableState* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline DtlsWritableState* New() const final { - return CreateMaybeMessage(nullptr); - } - - DtlsWritableState* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const DtlsWritableState& from); - void MergeFrom(const DtlsWritableState& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(DtlsWritableState* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - 
static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.DtlsWritableState"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kWritableFieldNumber = 2, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional bool writable = 2; - bool has_writable() const; - void clear_writable(); - bool writable() const; - void set_writable(bool value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.DtlsWritableState) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - bool writable_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class RouteChange : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.RouteChange) */ { - public: - RouteChange(); - virtual ~RouteChange(); - - RouteChange(const RouteChange& from); - RouteChange(RouteChange&& from) noexcept - : RouteChange() { - *this = ::std::move(from); - } - - inline RouteChange& operator=(const RouteChange& from) { - CopyFrom(from); - return *this; - } - inline RouteChange& operator=(RouteChange&& from) noexcept { - if (GetArenaNoVirtual() == 
from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RouteChange& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RouteChange* internal_default_instance() { - return reinterpret_cast( - &_RouteChange_default_instance_); - } - static constexpr int kIndexInFileMessages = - 29; - - friend void swap(RouteChange& a, RouteChange& b) { - a.Swap(&b); - } - inline void Swap(RouteChange* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RouteChange* New() const final { - return CreateMaybeMessage(nullptr); - } - - RouteChange* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RouteChange& from); - void MergeFrom(const RouteChange& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int 
size) const; - void InternalSwap(RouteChange* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.RouteChange"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsFieldNumber = 1, - kConnectedFieldNumber = 2, - kOverheadFieldNumber = 3, - }; - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional bool connected = 2; - bool has_connected() const; - void clear_connected(); - bool connected() const; - void set_connected(bool value); - - // optional uint32 overhead = 3; - bool has_overhead() const; - void clear_overhead(); - ::PROTOBUF_NAMESPACE_ID::uint32 overhead() const; - void set_overhead(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.RouteChange) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - bool connected_; - ::PROTOBUF_NAMESPACE_ID::uint32 overhead_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// ------------------------------------------------------------------- - -class RemoteEstimates : - public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:webrtc.rtclog2.RemoteEstimates) */ { - 
public: - RemoteEstimates(); - virtual ~RemoteEstimates(); - - RemoteEstimates(const RemoteEstimates& from); - RemoteEstimates(RemoteEstimates&& from) noexcept - : RemoteEstimates() { - *this = ::std::move(from); - } - - inline RemoteEstimates& operator=(const RemoteEstimates& from) { - CopyFrom(from); - return *this; - } - inline RemoteEstimates& operator=(RemoteEstimates&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - - inline const std::string& unknown_fields() const { - return _internal_metadata_.unknown_fields(); - } - inline std::string* mutable_unknown_fields() { - return _internal_metadata_.mutable_unknown_fields(); - } - - static const RemoteEstimates& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RemoteEstimates* internal_default_instance() { - return reinterpret_cast( - &_RemoteEstimates_default_instance_); - } - static constexpr int kIndexInFileMessages = - 30; - - friend void swap(RemoteEstimates& a, RemoteEstimates& b) { - a.Swap(&b); - } - inline void Swap(RemoteEstimates* other) { - if (other == this) return; - InternalSwap(other); - } - - // implements Message ---------------------------------------------- - - inline RemoteEstimates* New() const final { - return CreateMaybeMessage(nullptr); - } - - RemoteEstimates* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) - final; - void CopyFrom(const RemoteEstimates& from); - void MergeFrom(const RemoteEstimates& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) 
final; - #else - bool MergePartialFromCodedStream( - ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; - void DiscardUnknownFields(); - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - inline void SharedCtor(); - inline void SharedDtor(); - void SetCachedSize(int size) const; - void InternalSwap(RemoteEstimates* other); - friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; - static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { - return "webrtc.rtclog2.RemoteEstimates"; - } - private: - inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - std::string GetTypeName() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - enum : int { - kTimestampMsDeltasFieldNumber = 101, - kLinkCapacityLowerKbpsDeltasFieldNumber = 102, - kLinkCapacityUpperKbpsDeltasFieldNumber = 103, - kTimestampMsFieldNumber = 1, - kLinkCapacityLowerKbpsFieldNumber = 2, - kLinkCapacityUpperKbpsFieldNumber = 3, - kNumberOfDeltasFieldNumber = 4, - }; - // optional bytes timestamp_ms_deltas = 101; - bool has_timestamp_ms_deltas() const; - void clear_timestamp_ms_deltas(); - const std::string& timestamp_ms_deltas() const; - void set_timestamp_ms_deltas(const std::string& value); - void set_timestamp_ms_deltas(std::string&& value); - void set_timestamp_ms_deltas(const char* value); - void set_timestamp_ms_deltas(const void* value, size_t size); - std::string* mutable_timestamp_ms_deltas(); - std::string* release_timestamp_ms_deltas(); - void set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas); - - // optional bytes link_capacity_lower_kbps_deltas = 102; - 
bool has_link_capacity_lower_kbps_deltas() const; - void clear_link_capacity_lower_kbps_deltas(); - const std::string& link_capacity_lower_kbps_deltas() const; - void set_link_capacity_lower_kbps_deltas(const std::string& value); - void set_link_capacity_lower_kbps_deltas(std::string&& value); - void set_link_capacity_lower_kbps_deltas(const char* value); - void set_link_capacity_lower_kbps_deltas(const void* value, size_t size); - std::string* mutable_link_capacity_lower_kbps_deltas(); - std::string* release_link_capacity_lower_kbps_deltas(); - void set_allocated_link_capacity_lower_kbps_deltas(std::string* link_capacity_lower_kbps_deltas); - - // optional bytes link_capacity_upper_kbps_deltas = 103; - bool has_link_capacity_upper_kbps_deltas() const; - void clear_link_capacity_upper_kbps_deltas(); - const std::string& link_capacity_upper_kbps_deltas() const; - void set_link_capacity_upper_kbps_deltas(const std::string& value); - void set_link_capacity_upper_kbps_deltas(std::string&& value); - void set_link_capacity_upper_kbps_deltas(const char* value); - void set_link_capacity_upper_kbps_deltas(const void* value, size_t size); - std::string* mutable_link_capacity_upper_kbps_deltas(); - std::string* release_link_capacity_upper_kbps_deltas(); - void set_allocated_link_capacity_upper_kbps_deltas(std::string* link_capacity_upper_kbps_deltas); - - // optional int64 timestamp_ms = 1; - bool has_timestamp_ms() const; - void clear_timestamp_ms(); - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms() const; - void set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value); - - // optional uint32 link_capacity_lower_kbps = 2; - bool has_link_capacity_lower_kbps() const; - void clear_link_capacity_lower_kbps(); - ::PROTOBUF_NAMESPACE_ID::uint32 link_capacity_lower_kbps() const; - void set_link_capacity_lower_kbps(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 link_capacity_upper_kbps = 3; - bool has_link_capacity_upper_kbps() const; - void 
clear_link_capacity_upper_kbps(); - ::PROTOBUF_NAMESPACE_ID::uint32 link_capacity_upper_kbps() const; - void set_link_capacity_upper_kbps(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // optional uint32 number_of_deltas = 4; - bool has_number_of_deltas() const; - void clear_number_of_deltas(); - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas() const; - void set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value); - - // @@protoc_insertion_point(class_scope:webrtc.rtclog2.RemoteEstimates) - private: - class _Internal; - - ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArenaLite _internal_metadata_; - ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_; - mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timestamp_ms_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr link_capacity_lower_kbps_deltas_; - ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr link_capacity_upper_kbps_deltas_; - ::PROTOBUF_NAMESPACE_ID::int64 timestamp_ms_; - ::PROTOBUF_NAMESPACE_ID::uint32 link_capacity_lower_kbps_; - ::PROTOBUF_NAMESPACE_ID::uint32 link_capacity_upper_kbps_; - ::PROTOBUF_NAMESPACE_ID::uint32 number_of_deltas_; - friend struct ::TableStruct_rtc_5fevent_5flog2_2eproto; -}; -// =================================================================== - - -// =================================================================== - -#ifdef __GNUC__ - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wstrict-aliasing" -#endif // __GNUC__ -// EventStream - -// repeated .webrtc.rtclog2.Event stream = 1 [deprecated = true]; -inline int EventStream::stream_size() const { - return stream_.size(); -} -inline void EventStream::clear_stream() { - stream_.Clear(); -} -inline ::webrtc::rtclog2::Event* EventStream::mutable_stream(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.stream) - return stream_.Mutable(index); -} -inline 
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::Event >* -EventStream::mutable_stream() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.stream) - return &stream_; -} -inline const ::webrtc::rtclog2::Event& EventStream::stream(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.stream) - return stream_.Get(index); -} -inline ::webrtc::rtclog2::Event* EventStream::add_stream() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.stream) - return stream_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::Event >& -EventStream::stream() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.stream) - return stream_; -} - -// repeated .webrtc.rtclog2.IncomingRtpPackets incoming_rtp_packets = 2; -inline int EventStream::incoming_rtp_packets_size() const { - return incoming_rtp_packets_.size(); -} -inline void EventStream::clear_incoming_rtp_packets() { - incoming_rtp_packets_.Clear(); -} -inline ::webrtc::rtclog2::IncomingRtpPackets* EventStream::mutable_incoming_rtp_packets(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.incoming_rtp_packets) - return incoming_rtp_packets_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtpPackets >* -EventStream::mutable_incoming_rtp_packets() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.incoming_rtp_packets) - return &incoming_rtp_packets_; -} -inline const ::webrtc::rtclog2::IncomingRtpPackets& EventStream::incoming_rtp_packets(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.incoming_rtp_packets) - return incoming_rtp_packets_.Get(index); -} -inline ::webrtc::rtclog2::IncomingRtpPackets* EventStream::add_incoming_rtp_packets() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.incoming_rtp_packets) - return 
incoming_rtp_packets_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtpPackets >& -EventStream::incoming_rtp_packets() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.incoming_rtp_packets) - return incoming_rtp_packets_; -} - -// repeated .webrtc.rtclog2.OutgoingRtpPackets outgoing_rtp_packets = 3; -inline int EventStream::outgoing_rtp_packets_size() const { - return outgoing_rtp_packets_.size(); -} -inline void EventStream::clear_outgoing_rtp_packets() { - outgoing_rtp_packets_.Clear(); -} -inline ::webrtc::rtclog2::OutgoingRtpPackets* EventStream::mutable_outgoing_rtp_packets(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.outgoing_rtp_packets) - return outgoing_rtp_packets_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtpPackets >* -EventStream::mutable_outgoing_rtp_packets() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.outgoing_rtp_packets) - return &outgoing_rtp_packets_; -} -inline const ::webrtc::rtclog2::OutgoingRtpPackets& EventStream::outgoing_rtp_packets(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.outgoing_rtp_packets) - return outgoing_rtp_packets_.Get(index); -} -inline ::webrtc::rtclog2::OutgoingRtpPackets* EventStream::add_outgoing_rtp_packets() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.outgoing_rtp_packets) - return outgoing_rtp_packets_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtpPackets >& -EventStream::outgoing_rtp_packets() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.outgoing_rtp_packets) - return outgoing_rtp_packets_; -} - -// repeated .webrtc.rtclog2.IncomingRtcpPackets incoming_rtcp_packets = 4; -inline int EventStream::incoming_rtcp_packets_size() const { - return incoming_rtcp_packets_.size(); 
-} -inline void EventStream::clear_incoming_rtcp_packets() { - incoming_rtcp_packets_.Clear(); -} -inline ::webrtc::rtclog2::IncomingRtcpPackets* EventStream::mutable_incoming_rtcp_packets(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.incoming_rtcp_packets) - return incoming_rtcp_packets_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtcpPackets >* -EventStream::mutable_incoming_rtcp_packets() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.incoming_rtcp_packets) - return &incoming_rtcp_packets_; -} -inline const ::webrtc::rtclog2::IncomingRtcpPackets& EventStream::incoming_rtcp_packets(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.incoming_rtcp_packets) - return incoming_rtcp_packets_.Get(index); -} -inline ::webrtc::rtclog2::IncomingRtcpPackets* EventStream::add_incoming_rtcp_packets() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.incoming_rtcp_packets) - return incoming_rtcp_packets_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IncomingRtcpPackets >& -EventStream::incoming_rtcp_packets() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.incoming_rtcp_packets) - return incoming_rtcp_packets_; -} - -// repeated .webrtc.rtclog2.OutgoingRtcpPackets outgoing_rtcp_packets = 5; -inline int EventStream::outgoing_rtcp_packets_size() const { - return outgoing_rtcp_packets_.size(); -} -inline void EventStream::clear_outgoing_rtcp_packets() { - outgoing_rtcp_packets_.Clear(); -} -inline ::webrtc::rtclog2::OutgoingRtcpPackets* EventStream::mutable_outgoing_rtcp_packets(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.outgoing_rtcp_packets) - return outgoing_rtcp_packets_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtcpPackets >* 
-EventStream::mutable_outgoing_rtcp_packets() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.outgoing_rtcp_packets) - return &outgoing_rtcp_packets_; -} -inline const ::webrtc::rtclog2::OutgoingRtcpPackets& EventStream::outgoing_rtcp_packets(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.outgoing_rtcp_packets) - return outgoing_rtcp_packets_.Get(index); -} -inline ::webrtc::rtclog2::OutgoingRtcpPackets* EventStream::add_outgoing_rtcp_packets() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.outgoing_rtcp_packets) - return outgoing_rtcp_packets_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::OutgoingRtcpPackets >& -EventStream::outgoing_rtcp_packets() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.outgoing_rtcp_packets) - return outgoing_rtcp_packets_; -} - -// repeated .webrtc.rtclog2.AudioPlayoutEvents audio_playout_events = 6; -inline int EventStream::audio_playout_events_size() const { - return audio_playout_events_.size(); -} -inline void EventStream::clear_audio_playout_events() { - audio_playout_events_.Clear(); -} -inline ::webrtc::rtclog2::AudioPlayoutEvents* EventStream::mutable_audio_playout_events(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.audio_playout_events) - return audio_playout_events_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioPlayoutEvents >* -EventStream::mutable_audio_playout_events() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.audio_playout_events) - return &audio_playout_events_; -} -inline const ::webrtc::rtclog2::AudioPlayoutEvents& EventStream::audio_playout_events(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.audio_playout_events) - return audio_playout_events_.Get(index); -} -inline 
::webrtc::rtclog2::AudioPlayoutEvents* EventStream::add_audio_playout_events() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.audio_playout_events) - return audio_playout_events_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioPlayoutEvents >& -EventStream::audio_playout_events() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.audio_playout_events) - return audio_playout_events_; -} - -// repeated .webrtc.rtclog2.FrameDecodedEvents frame_decoded_events = 7; -inline int EventStream::frame_decoded_events_size() const { - return frame_decoded_events_.size(); -} -inline void EventStream::clear_frame_decoded_events() { - frame_decoded_events_.Clear(); -} -inline ::webrtc::rtclog2::FrameDecodedEvents* EventStream::mutable_frame_decoded_events(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.frame_decoded_events) - return frame_decoded_events_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::FrameDecodedEvents >* -EventStream::mutable_frame_decoded_events() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.frame_decoded_events) - return &frame_decoded_events_; -} -inline const ::webrtc::rtclog2::FrameDecodedEvents& EventStream::frame_decoded_events(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.frame_decoded_events) - return frame_decoded_events_.Get(index); -} -inline ::webrtc::rtclog2::FrameDecodedEvents* EventStream::add_frame_decoded_events() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.frame_decoded_events) - return frame_decoded_events_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::FrameDecodedEvents >& -EventStream::frame_decoded_events() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.frame_decoded_events) - return frame_decoded_events_; -} - 
-// repeated .webrtc.rtclog2.BeginLogEvent begin_log_events = 16; -inline int EventStream::begin_log_events_size() const { - return begin_log_events_.size(); -} -inline void EventStream::clear_begin_log_events() { - begin_log_events_.Clear(); -} -inline ::webrtc::rtclog2::BeginLogEvent* EventStream::mutable_begin_log_events(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.begin_log_events) - return begin_log_events_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BeginLogEvent >* -EventStream::mutable_begin_log_events() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.begin_log_events) - return &begin_log_events_; -} -inline const ::webrtc::rtclog2::BeginLogEvent& EventStream::begin_log_events(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.begin_log_events) - return begin_log_events_.Get(index); -} -inline ::webrtc::rtclog2::BeginLogEvent* EventStream::add_begin_log_events() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.begin_log_events) - return begin_log_events_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BeginLogEvent >& -EventStream::begin_log_events() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.begin_log_events) - return begin_log_events_; -} - -// repeated .webrtc.rtclog2.EndLogEvent end_log_events = 17; -inline int EventStream::end_log_events_size() const { - return end_log_events_.size(); -} -inline void EventStream::clear_end_log_events() { - end_log_events_.Clear(); -} -inline ::webrtc::rtclog2::EndLogEvent* EventStream::mutable_end_log_events(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.end_log_events) - return end_log_events_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::EndLogEvent >* -EventStream::mutable_end_log_events() { - // 
@@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.end_log_events) - return &end_log_events_; -} -inline const ::webrtc::rtclog2::EndLogEvent& EventStream::end_log_events(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.end_log_events) - return end_log_events_.Get(index); -} -inline ::webrtc::rtclog2::EndLogEvent* EventStream::add_end_log_events() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.end_log_events) - return end_log_events_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::EndLogEvent >& -EventStream::end_log_events() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.end_log_events) - return end_log_events_; -} - -// repeated .webrtc.rtclog2.LossBasedBweUpdates loss_based_bwe_updates = 18; -inline int EventStream::loss_based_bwe_updates_size() const { - return loss_based_bwe_updates_.size(); -} -inline void EventStream::clear_loss_based_bwe_updates() { - loss_based_bwe_updates_.Clear(); -} -inline ::webrtc::rtclog2::LossBasedBweUpdates* EventStream::mutable_loss_based_bwe_updates(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.loss_based_bwe_updates) - return loss_based_bwe_updates_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::LossBasedBweUpdates >* -EventStream::mutable_loss_based_bwe_updates() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.loss_based_bwe_updates) - return &loss_based_bwe_updates_; -} -inline const ::webrtc::rtclog2::LossBasedBweUpdates& EventStream::loss_based_bwe_updates(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.loss_based_bwe_updates) - return loss_based_bwe_updates_.Get(index); -} -inline ::webrtc::rtclog2::LossBasedBweUpdates* EventStream::add_loss_based_bwe_updates() { - // 
@@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.loss_based_bwe_updates) - return loss_based_bwe_updates_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::LossBasedBweUpdates >& -EventStream::loss_based_bwe_updates() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.loss_based_bwe_updates) - return loss_based_bwe_updates_; -} - -// repeated .webrtc.rtclog2.DelayBasedBweUpdates delay_based_bwe_updates = 19; -inline int EventStream::delay_based_bwe_updates_size() const { - return delay_based_bwe_updates_.size(); -} -inline void EventStream::clear_delay_based_bwe_updates() { - delay_based_bwe_updates_.Clear(); -} -inline ::webrtc::rtclog2::DelayBasedBweUpdates* EventStream::mutable_delay_based_bwe_updates(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.delay_based_bwe_updates) - return delay_based_bwe_updates_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DelayBasedBweUpdates >* -EventStream::mutable_delay_based_bwe_updates() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.delay_based_bwe_updates) - return &delay_based_bwe_updates_; -} -inline const ::webrtc::rtclog2::DelayBasedBweUpdates& EventStream::delay_based_bwe_updates(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.delay_based_bwe_updates) - return delay_based_bwe_updates_.Get(index); -} -inline ::webrtc::rtclog2::DelayBasedBweUpdates* EventStream::add_delay_based_bwe_updates() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.delay_based_bwe_updates) - return delay_based_bwe_updates_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DelayBasedBweUpdates >& -EventStream::delay_based_bwe_updates() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.delay_based_bwe_updates) - return delay_based_bwe_updates_; -} - 
-// repeated .webrtc.rtclog2.AudioNetworkAdaptations audio_network_adaptations = 20; -inline int EventStream::audio_network_adaptations_size() const { - return audio_network_adaptations_.size(); -} -inline void EventStream::clear_audio_network_adaptations() { - audio_network_adaptations_.Clear(); -} -inline ::webrtc::rtclog2::AudioNetworkAdaptations* EventStream::mutable_audio_network_adaptations(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.audio_network_adaptations) - return audio_network_adaptations_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioNetworkAdaptations >* -EventStream::mutable_audio_network_adaptations() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.audio_network_adaptations) - return &audio_network_adaptations_; -} -inline const ::webrtc::rtclog2::AudioNetworkAdaptations& EventStream::audio_network_adaptations(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.audio_network_adaptations) - return audio_network_adaptations_.Get(index); -} -inline ::webrtc::rtclog2::AudioNetworkAdaptations* EventStream::add_audio_network_adaptations() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.audio_network_adaptations) - return audio_network_adaptations_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioNetworkAdaptations >& -EventStream::audio_network_adaptations() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.audio_network_adaptations) - return audio_network_adaptations_; -} - -// repeated .webrtc.rtclog2.BweProbeCluster probe_clusters = 21; -inline int EventStream::probe_clusters_size() const { - return probe_clusters_.size(); -} -inline void EventStream::clear_probe_clusters() { - probe_clusters_.Clear(); -} -inline ::webrtc::rtclog2::BweProbeCluster* EventStream::mutable_probe_clusters(int index) { - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.probe_clusters) - return probe_clusters_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeCluster >* -EventStream::mutable_probe_clusters() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.probe_clusters) - return &probe_clusters_; -} -inline const ::webrtc::rtclog2::BweProbeCluster& EventStream::probe_clusters(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.probe_clusters) - return probe_clusters_.Get(index); -} -inline ::webrtc::rtclog2::BweProbeCluster* EventStream::add_probe_clusters() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.probe_clusters) - return probe_clusters_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeCluster >& -EventStream::probe_clusters() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.probe_clusters) - return probe_clusters_; -} - -// repeated .webrtc.rtclog2.BweProbeResultSuccess probe_success = 22; -inline int EventStream::probe_success_size() const { - return probe_success_.size(); -} -inline void EventStream::clear_probe_success() { - probe_success_.Clear(); -} -inline ::webrtc::rtclog2::BweProbeResultSuccess* EventStream::mutable_probe_success(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.probe_success) - return probe_success_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultSuccess >* -EventStream::mutable_probe_success() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.probe_success) - return &probe_success_; -} -inline const ::webrtc::rtclog2::BweProbeResultSuccess& EventStream::probe_success(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.probe_success) - return probe_success_.Get(index); -} -inline 
::webrtc::rtclog2::BweProbeResultSuccess* EventStream::add_probe_success() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.probe_success) - return probe_success_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultSuccess >& -EventStream::probe_success() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.probe_success) - return probe_success_; -} - -// repeated .webrtc.rtclog2.BweProbeResultFailure probe_failure = 23; -inline int EventStream::probe_failure_size() const { - return probe_failure_.size(); -} -inline void EventStream::clear_probe_failure() { - probe_failure_.Clear(); -} -inline ::webrtc::rtclog2::BweProbeResultFailure* EventStream::mutable_probe_failure(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.probe_failure) - return probe_failure_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultFailure >* -EventStream::mutable_probe_failure() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.probe_failure) - return &probe_failure_; -} -inline const ::webrtc::rtclog2::BweProbeResultFailure& EventStream::probe_failure(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.probe_failure) - return probe_failure_.Get(index); -} -inline ::webrtc::rtclog2::BweProbeResultFailure* EventStream::add_probe_failure() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.probe_failure) - return probe_failure_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::BweProbeResultFailure >& -EventStream::probe_failure() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.probe_failure) - return probe_failure_; -} - -// repeated .webrtc.rtclog2.AlrState alr_states = 24; -inline int EventStream::alr_states_size() const { - return alr_states_.size(); -} -inline void 
EventStream::clear_alr_states() { - alr_states_.Clear(); -} -inline ::webrtc::rtclog2::AlrState* EventStream::mutable_alr_states(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.alr_states) - return alr_states_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AlrState >* -EventStream::mutable_alr_states() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.alr_states) - return &alr_states_; -} -inline const ::webrtc::rtclog2::AlrState& EventStream::alr_states(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.alr_states) - return alr_states_.Get(index); -} -inline ::webrtc::rtclog2::AlrState* EventStream::add_alr_states() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.alr_states) - return alr_states_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AlrState >& -EventStream::alr_states() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.alr_states) - return alr_states_; -} - -// repeated .webrtc.rtclog2.IceCandidatePairConfig ice_candidate_configs = 25; -inline int EventStream::ice_candidate_configs_size() const { - return ice_candidate_configs_.size(); -} -inline void EventStream::clear_ice_candidate_configs() { - ice_candidate_configs_.Clear(); -} -inline ::webrtc::rtclog2::IceCandidatePairConfig* EventStream::mutable_ice_candidate_configs(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.ice_candidate_configs) - return ice_candidate_configs_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairConfig >* -EventStream::mutable_ice_candidate_configs() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.ice_candidate_configs) - return &ice_candidate_configs_; -} -inline const ::webrtc::rtclog2::IceCandidatePairConfig& 
EventStream::ice_candidate_configs(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.ice_candidate_configs) - return ice_candidate_configs_.Get(index); -} -inline ::webrtc::rtclog2::IceCandidatePairConfig* EventStream::add_ice_candidate_configs() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.ice_candidate_configs) - return ice_candidate_configs_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairConfig >& -EventStream::ice_candidate_configs() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.ice_candidate_configs) - return ice_candidate_configs_; -} - -// repeated .webrtc.rtclog2.IceCandidatePairEvent ice_candidate_events = 26; -inline int EventStream::ice_candidate_events_size() const { - return ice_candidate_events_.size(); -} -inline void EventStream::clear_ice_candidate_events() { - ice_candidate_events_.Clear(); -} -inline ::webrtc::rtclog2::IceCandidatePairEvent* EventStream::mutable_ice_candidate_events(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.ice_candidate_events) - return ice_candidate_events_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairEvent >* -EventStream::mutable_ice_candidate_events() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.ice_candidate_events) - return &ice_candidate_events_; -} -inline const ::webrtc::rtclog2::IceCandidatePairEvent& EventStream::ice_candidate_events(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.ice_candidate_events) - return ice_candidate_events_.Get(index); -} -inline ::webrtc::rtclog2::IceCandidatePairEvent* EventStream::add_ice_candidate_events() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.ice_candidate_events) - return ice_candidate_events_.Add(); -} -inline const 
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::IceCandidatePairEvent >& -EventStream::ice_candidate_events() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.ice_candidate_events) - return ice_candidate_events_; -} - -// repeated .webrtc.rtclog2.DtlsTransportStateEvent dtls_transport_state_events = 27; -inline int EventStream::dtls_transport_state_events_size() const { - return dtls_transport_state_events_.size(); -} -inline void EventStream::clear_dtls_transport_state_events() { - dtls_transport_state_events_.Clear(); -} -inline ::webrtc::rtclog2::DtlsTransportStateEvent* EventStream::mutable_dtls_transport_state_events(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.dtls_transport_state_events) - return dtls_transport_state_events_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsTransportStateEvent >* -EventStream::mutable_dtls_transport_state_events() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.dtls_transport_state_events) - return &dtls_transport_state_events_; -} -inline const ::webrtc::rtclog2::DtlsTransportStateEvent& EventStream::dtls_transport_state_events(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.dtls_transport_state_events) - return dtls_transport_state_events_.Get(index); -} -inline ::webrtc::rtclog2::DtlsTransportStateEvent* EventStream::add_dtls_transport_state_events() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.dtls_transport_state_events) - return dtls_transport_state_events_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsTransportStateEvent >& -EventStream::dtls_transport_state_events() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.dtls_transport_state_events) - return dtls_transport_state_events_; -} - -// repeated .webrtc.rtclog2.DtlsWritableState 
dtls_writable_states = 28; -inline int EventStream::dtls_writable_states_size() const { - return dtls_writable_states_.size(); -} -inline void EventStream::clear_dtls_writable_states() { - dtls_writable_states_.Clear(); -} -inline ::webrtc::rtclog2::DtlsWritableState* EventStream::mutable_dtls_writable_states(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.dtls_writable_states) - return dtls_writable_states_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsWritableState >* -EventStream::mutable_dtls_writable_states() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.dtls_writable_states) - return &dtls_writable_states_; -} -inline const ::webrtc::rtclog2::DtlsWritableState& EventStream::dtls_writable_states(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.dtls_writable_states) - return dtls_writable_states_.Get(index); -} -inline ::webrtc::rtclog2::DtlsWritableState* EventStream::add_dtls_writable_states() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.dtls_writable_states) - return dtls_writable_states_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::DtlsWritableState >& -EventStream::dtls_writable_states() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.dtls_writable_states) - return dtls_writable_states_; -} - -// repeated .webrtc.rtclog2.GenericPacketSent generic_packets_sent = 29; -inline int EventStream::generic_packets_sent_size() const { - return generic_packets_sent_.size(); -} -inline void EventStream::clear_generic_packets_sent() { - generic_packets_sent_.Clear(); -} -inline ::webrtc::rtclog2::GenericPacketSent* EventStream::mutable_generic_packets_sent(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.generic_packets_sent) - return generic_packets_sent_.Mutable(index); -} -inline 
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketSent >* -EventStream::mutable_generic_packets_sent() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.generic_packets_sent) - return &generic_packets_sent_; -} -inline const ::webrtc::rtclog2::GenericPacketSent& EventStream::generic_packets_sent(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.generic_packets_sent) - return generic_packets_sent_.Get(index); -} -inline ::webrtc::rtclog2::GenericPacketSent* EventStream::add_generic_packets_sent() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.generic_packets_sent) - return generic_packets_sent_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketSent >& -EventStream::generic_packets_sent() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.generic_packets_sent) - return generic_packets_sent_; -} - -// repeated .webrtc.rtclog2.GenericPacketReceived generic_packets_received = 30; -inline int EventStream::generic_packets_received_size() const { - return generic_packets_received_.size(); -} -inline void EventStream::clear_generic_packets_received() { - generic_packets_received_.Clear(); -} -inline ::webrtc::rtclog2::GenericPacketReceived* EventStream::mutable_generic_packets_received(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.generic_packets_received) - return generic_packets_received_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketReceived >* -EventStream::mutable_generic_packets_received() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.generic_packets_received) - return &generic_packets_received_; -} -inline const ::webrtc::rtclog2::GenericPacketReceived& EventStream::generic_packets_received(int index) const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.generic_packets_received) - return generic_packets_received_.Get(index); -} -inline ::webrtc::rtclog2::GenericPacketReceived* EventStream::add_generic_packets_received() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.generic_packets_received) - return generic_packets_received_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericPacketReceived >& -EventStream::generic_packets_received() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.generic_packets_received) - return generic_packets_received_; -} - -// repeated .webrtc.rtclog2.GenericAckReceived generic_acks_received = 31; -inline int EventStream::generic_acks_received_size() const { - return generic_acks_received_.size(); -} -inline void EventStream::clear_generic_acks_received() { - generic_acks_received_.Clear(); -} -inline ::webrtc::rtclog2::GenericAckReceived* EventStream::mutable_generic_acks_received(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.generic_acks_received) - return generic_acks_received_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::GenericAckReceived >* -EventStream::mutable_generic_acks_received() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.generic_acks_received) - return &generic_acks_received_; -} -inline const ::webrtc::rtclog2::GenericAckReceived& EventStream::generic_acks_received(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.generic_acks_received) - return generic_acks_received_.Get(index); -} -inline ::webrtc::rtclog2::GenericAckReceived* EventStream::add_generic_acks_received() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.generic_acks_received) - return generic_acks_received_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< 
::webrtc::rtclog2::GenericAckReceived >& -EventStream::generic_acks_received() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.generic_acks_received) - return generic_acks_received_; -} - -// repeated .webrtc.rtclog2.RouteChange route_changes = 32; -inline int EventStream::route_changes_size() const { - return route_changes_.size(); -} -inline void EventStream::clear_route_changes() { - route_changes_.Clear(); -} -inline ::webrtc::rtclog2::RouteChange* EventStream::mutable_route_changes(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.route_changes) - return route_changes_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RouteChange >* -EventStream::mutable_route_changes() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.route_changes) - return &route_changes_; -} -inline const ::webrtc::rtclog2::RouteChange& EventStream::route_changes(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.route_changes) - return route_changes_.Get(index); -} -inline ::webrtc::rtclog2::RouteChange* EventStream::add_route_changes() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.route_changes) - return route_changes_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RouteChange >& -EventStream::route_changes() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.route_changes) - return route_changes_; -} - -// repeated .webrtc.rtclog2.RemoteEstimates remote_estimates = 33; -inline int EventStream::remote_estimates_size() const { - return remote_estimates_.size(); -} -inline void EventStream::clear_remote_estimates() { - remote_estimates_.Clear(); -} -inline ::webrtc::rtclog2::RemoteEstimates* EventStream::mutable_remote_estimates(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.remote_estimates) - return 
remote_estimates_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RemoteEstimates >* -EventStream::mutable_remote_estimates() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.remote_estimates) - return &remote_estimates_; -} -inline const ::webrtc::rtclog2::RemoteEstimates& EventStream::remote_estimates(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.remote_estimates) - return remote_estimates_.Get(index); -} -inline ::webrtc::rtclog2::RemoteEstimates* EventStream::add_remote_estimates() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.remote_estimates) - return remote_estimates_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::RemoteEstimates >& -EventStream::remote_estimates() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.remote_estimates) - return remote_estimates_; -} - -// repeated .webrtc.rtclog2.AudioRecvStreamConfig audio_recv_stream_configs = 101; -inline int EventStream::audio_recv_stream_configs_size() const { - return audio_recv_stream_configs_.size(); -} -inline void EventStream::clear_audio_recv_stream_configs() { - audio_recv_stream_configs_.Clear(); -} -inline ::webrtc::rtclog2::AudioRecvStreamConfig* EventStream::mutable_audio_recv_stream_configs(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.audio_recv_stream_configs) - return audio_recv_stream_configs_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioRecvStreamConfig >* -EventStream::mutable_audio_recv_stream_configs() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.audio_recv_stream_configs) - return &audio_recv_stream_configs_; -} -inline const ::webrtc::rtclog2::AudioRecvStreamConfig& EventStream::audio_recv_stream_configs(int index) const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.audio_recv_stream_configs) - return audio_recv_stream_configs_.Get(index); -} -inline ::webrtc::rtclog2::AudioRecvStreamConfig* EventStream::add_audio_recv_stream_configs() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.audio_recv_stream_configs) - return audio_recv_stream_configs_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioRecvStreamConfig >& -EventStream::audio_recv_stream_configs() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.audio_recv_stream_configs) - return audio_recv_stream_configs_; -} - -// repeated .webrtc.rtclog2.AudioSendStreamConfig audio_send_stream_configs = 102; -inline int EventStream::audio_send_stream_configs_size() const { - return audio_send_stream_configs_.size(); -} -inline void EventStream::clear_audio_send_stream_configs() { - audio_send_stream_configs_.Clear(); -} -inline ::webrtc::rtclog2::AudioSendStreamConfig* EventStream::mutable_audio_send_stream_configs(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.audio_send_stream_configs) - return audio_send_stream_configs_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioSendStreamConfig >* -EventStream::mutable_audio_send_stream_configs() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.audio_send_stream_configs) - return &audio_send_stream_configs_; -} -inline const ::webrtc::rtclog2::AudioSendStreamConfig& EventStream::audio_send_stream_configs(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.audio_send_stream_configs) - return audio_send_stream_configs_.Get(index); -} -inline ::webrtc::rtclog2::AudioSendStreamConfig* EventStream::add_audio_send_stream_configs() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.audio_send_stream_configs) - return 
audio_send_stream_configs_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::AudioSendStreamConfig >& -EventStream::audio_send_stream_configs() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.audio_send_stream_configs) - return audio_send_stream_configs_; -} - -// repeated .webrtc.rtclog2.VideoRecvStreamConfig video_recv_stream_configs = 103; -inline int EventStream::video_recv_stream_configs_size() const { - return video_recv_stream_configs_.size(); -} -inline void EventStream::clear_video_recv_stream_configs() { - video_recv_stream_configs_.Clear(); -} -inline ::webrtc::rtclog2::VideoRecvStreamConfig* EventStream::mutable_video_recv_stream_configs(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.video_recv_stream_configs) - return video_recv_stream_configs_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoRecvStreamConfig >* -EventStream::mutable_video_recv_stream_configs() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.video_recv_stream_configs) - return &video_recv_stream_configs_; -} -inline const ::webrtc::rtclog2::VideoRecvStreamConfig& EventStream::video_recv_stream_configs(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.video_recv_stream_configs) - return video_recv_stream_configs_.Get(index); -} -inline ::webrtc::rtclog2::VideoRecvStreamConfig* EventStream::add_video_recv_stream_configs() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.video_recv_stream_configs) - return video_recv_stream_configs_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoRecvStreamConfig >& -EventStream::video_recv_stream_configs() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.video_recv_stream_configs) - return video_recv_stream_configs_; -} - -// repeated 
.webrtc.rtclog2.VideoSendStreamConfig video_send_stream_configs = 104; -inline int EventStream::video_send_stream_configs_size() const { - return video_send_stream_configs_.size(); -} -inline void EventStream::clear_video_send_stream_configs() { - video_send_stream_configs_.Clear(); -} -inline ::webrtc::rtclog2::VideoSendStreamConfig* EventStream::mutable_video_send_stream_configs(int index) { - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.EventStream.video_send_stream_configs) - return video_send_stream_configs_.Mutable(index); -} -inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoSendStreamConfig >* -EventStream::mutable_video_send_stream_configs() { - // @@protoc_insertion_point(field_mutable_list:webrtc.rtclog2.EventStream.video_send_stream_configs) - return &video_send_stream_configs_; -} -inline const ::webrtc::rtclog2::VideoSendStreamConfig& EventStream::video_send_stream_configs(int index) const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EventStream.video_send_stream_configs) - return video_send_stream_configs_.Get(index); -} -inline ::webrtc::rtclog2::VideoSendStreamConfig* EventStream::add_video_send_stream_configs() { - // @@protoc_insertion_point(field_add:webrtc.rtclog2.EventStream.video_send_stream_configs) - return video_send_stream_configs_.Add(); -} -inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::webrtc::rtclog2::VideoSendStreamConfig >& -EventStream::video_send_stream_configs() const { - // @@protoc_insertion_point(field_list:webrtc.rtclog2.EventStream.video_send_stream_configs) - return video_send_stream_configs_; -} - -// ------------------------------------------------------------------- - -// Event - -// ------------------------------------------------------------------- - -// GenericPacketReceived - -// optional int64 timestamp_ms = 1; -inline bool GenericPacketReceived::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void 
GenericPacketReceived::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericPacketReceived::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketReceived.timestamp_ms) - return timestamp_ms_; -} -inline void GenericPacketReceived::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000008u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketReceived.timestamp_ms) -} - -// optional int64 packet_number = 2; -inline bool GenericPacketReceived::has_packet_number() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void GenericPacketReceived::clear_packet_number() { - packet_number_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericPacketReceived::packet_number() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketReceived.packet_number) - return packet_number_; -} -inline void GenericPacketReceived::set_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000010u; - packet_number_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketReceived.packet_number) -} - -// optional int32 packet_length = 3; -inline bool GenericPacketReceived::has_packet_length() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void GenericPacketReceived::clear_packet_length() { - packet_length_ = 0; - _has_bits_[0] &= ~0x00000020u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 GenericPacketReceived::packet_length() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketReceived.packet_length) - return packet_length_; -} -inline void GenericPacketReceived::set_packet_length(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000020u; - packet_length_ = value; - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketReceived.packet_length) -} - -// optional uint32 number_of_deltas = 16; -inline bool GenericPacketReceived::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void GenericPacketReceived::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000040u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 GenericPacketReceived::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketReceived.number_of_deltas) - return number_of_deltas_; -} -inline void GenericPacketReceived::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000040u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketReceived.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 17; -inline bool GenericPacketReceived::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void GenericPacketReceived::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& GenericPacketReceived::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void GenericPacketReceived::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) -} -inline void GenericPacketReceived::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), 
::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) -} -inline void GenericPacketReceived::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) -} -inline void GenericPacketReceived::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) -} -inline std::string* GenericPacketReceived::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketReceived::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericPacketReceived::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // 
@@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketReceived.timestamp_ms_deltas) -} - -// optional bytes packet_number_deltas = 18; -inline bool GenericPacketReceived::has_packet_number_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void GenericPacketReceived::clear_packet_number_deltas() { - packet_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& GenericPacketReceived::packet_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) - return packet_number_deltas_.GetNoArena(); -} -inline void GenericPacketReceived::set_packet_number_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) -} -inline void GenericPacketReceived::set_packet_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) -} -inline void GenericPacketReceived::set_packet_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) -} -inline void GenericPacketReceived::set_packet_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - 
packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) -} -inline std::string* GenericPacketReceived::mutable_packet_number_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) - return packet_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketReceived::release_packet_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) - if (!has_packet_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return packet_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericPacketReceived::set_allocated_packet_number_deltas(std::string* packet_number_deltas) { - if (packet_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - packet_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), packet_number_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketReceived.packet_number_deltas) -} - -// optional bytes packet_length_deltas = 19; -inline bool GenericPacketReceived::has_packet_length_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void GenericPacketReceived::clear_packet_length_deltas() { - packet_length_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& GenericPacketReceived::packet_length_deltas() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) - return packet_length_deltas_.GetNoArena(); -} -inline void GenericPacketReceived::set_packet_length_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - packet_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) -} -inline void GenericPacketReceived::set_packet_length_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - packet_length_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) -} -inline void GenericPacketReceived::set_packet_length_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - packet_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) -} -inline void GenericPacketReceived::set_packet_length_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - packet_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) -} -inline std::string* GenericPacketReceived::mutable_packet_length_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) - return packet_length_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketReceived::release_packet_length_deltas() 
{ - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) - if (!has_packet_length_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return packet_length_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericPacketReceived::set_allocated_packet_length_deltas(std::string* packet_length_deltas) { - if (packet_length_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - packet_length_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), packet_length_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketReceived.packet_length_deltas) -} - -// ------------------------------------------------------------------- - -// GenericPacketSent - -// optional int64 timestamp_ms = 1; -inline bool GenericPacketSent::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void GenericPacketSent::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000020u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericPacketSent::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.timestamp_ms) - return timestamp_ms_; -} -inline void GenericPacketSent::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000020u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.timestamp_ms) -} - -// optional int64 packet_number = 2; -inline bool GenericPacketSent::has_packet_number() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void GenericPacketSent::clear_packet_number() { - packet_number_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000040u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericPacketSent::packet_number() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.packet_number) - return packet_number_; -} -inline void GenericPacketSent::set_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000040u; - packet_number_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.packet_number) -} - -// optional int32 overhead_length = 3; -inline bool GenericPacketSent::has_overhead_length() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void GenericPacketSent::clear_overhead_length() { - overhead_length_ = 0; - _has_bits_[0] &= ~0x00000080u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 GenericPacketSent::overhead_length() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.overhead_length) - return overhead_length_; -} -inline void GenericPacketSent::set_overhead_length(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000080u; - overhead_length_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.overhead_length) -} - -// optional int32 payload_length = 4; -inline bool GenericPacketSent::has_payload_length() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void GenericPacketSent::clear_payload_length() { - payload_length_ = 0; - _has_bits_[0] &= ~0x00000100u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 GenericPacketSent::payload_length() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.payload_length) - return payload_length_; -} -inline void GenericPacketSent::set_payload_length(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000100u; - payload_length_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.payload_length) -} - -// optional int32 padding_length = 5; -inline bool GenericPacketSent::has_padding_length() const { - return (_has_bits_[0] & 0x00000200u) != 0; -} -inline void GenericPacketSent::clear_padding_length() { - 
padding_length_ = 0; - _has_bits_[0] &= ~0x00000200u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 GenericPacketSent::padding_length() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.padding_length) - return padding_length_; -} -inline void GenericPacketSent::set_padding_length(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000200u; - padding_length_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.padding_length) -} - -// optional uint32 number_of_deltas = 16; -inline bool GenericPacketSent::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000400u) != 0; -} -inline void GenericPacketSent::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000400u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 GenericPacketSent::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.number_of_deltas) - return number_of_deltas_; -} -inline void GenericPacketSent::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000400u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 17; -inline bool GenericPacketSent::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void GenericPacketSent::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& GenericPacketSent::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void GenericPacketSent::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - 
timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) -} -inline void GenericPacketSent::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) -} -inline void GenericPacketSent::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) -} -inline void GenericPacketSent::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) -} -inline std::string* GenericPacketSent::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketSent::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} 
-inline void GenericPacketSent::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketSent.timestamp_ms_deltas) -} - -// optional bytes packet_number_deltas = 18; -inline bool GenericPacketSent::has_packet_number_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void GenericPacketSent::clear_packet_number_deltas() { - packet_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& GenericPacketSent::packet_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) - return packet_number_deltas_.GetNoArena(); -} -inline void GenericPacketSent::set_packet_number_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) -} -inline void GenericPacketSent::set_packet_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) -} -inline void GenericPacketSent::set_packet_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) -} -inline void GenericPacketSent::set_packet_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) -} -inline std::string* GenericPacketSent::mutable_packet_number_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) - return packet_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketSent::release_packet_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) - if (!has_packet_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return packet_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericPacketSent::set_allocated_packet_number_deltas(std::string* packet_number_deltas) { - if (packet_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - packet_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), packet_number_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketSent.packet_number_deltas) -} - -// optional bytes overhead_length_deltas = 19; -inline bool GenericPacketSent::has_overhead_length_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void GenericPacketSent::clear_overhead_length_deltas() { - 
overhead_length_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& GenericPacketSent::overhead_length_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) - return overhead_length_deltas_.GetNoArena(); -} -inline void GenericPacketSent::set_overhead_length_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - overhead_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) -} -inline void GenericPacketSent::set_overhead_length_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - overhead_length_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) -} -inline void GenericPacketSent::set_overhead_length_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - overhead_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) -} -inline void GenericPacketSent::set_overhead_length_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - overhead_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) -} -inline std::string* GenericPacketSent::mutable_overhead_length_deltas() { - _has_bits_[0] |= 0x00000004u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) - return overhead_length_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketSent::release_overhead_length_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) - if (!has_overhead_length_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return overhead_length_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericPacketSent::set_allocated_overhead_length_deltas(std::string* overhead_length_deltas) { - if (overhead_length_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - overhead_length_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), overhead_length_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketSent.overhead_length_deltas) -} - -// optional bytes payload_length_deltas = 20; -inline bool GenericPacketSent::has_payload_length_deltas() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void GenericPacketSent::clear_payload_length_deltas() { - payload_length_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000008u; -} -inline const std::string& GenericPacketSent::payload_length_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) - return payload_length_deltas_.GetNoArena(); -} -inline void GenericPacketSent::set_payload_length_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000008u; - payload_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) -} -inline void GenericPacketSent::set_payload_length_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000008u; - payload_length_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) -} -inline void GenericPacketSent::set_payload_length_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000008u; - payload_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) -} -inline void GenericPacketSent::set_payload_length_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000008u; - payload_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) -} -inline std::string* GenericPacketSent::mutable_payload_length_deltas() { - _has_bits_[0] |= 0x00000008u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) - return payload_length_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketSent::release_payload_length_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) - if (!has_payload_length_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000008u; - return payload_length_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericPacketSent::set_allocated_payload_length_deltas(std::string* 
payload_length_deltas) { - if (payload_length_deltas != nullptr) { - _has_bits_[0] |= 0x00000008u; - } else { - _has_bits_[0] &= ~0x00000008u; - } - payload_length_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), payload_length_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketSent.payload_length_deltas) -} - -// optional bytes padding_length_deltas = 21; -inline bool GenericPacketSent::has_padding_length_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void GenericPacketSent::clear_padding_length_deltas() { - padding_length_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000010u; -} -inline const std::string& GenericPacketSent::padding_length_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) - return padding_length_deltas_.GetNoArena(); -} -inline void GenericPacketSent::set_padding_length_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000010u; - padding_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) -} -inline void GenericPacketSent::set_padding_length_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000010u; - padding_length_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) -} -inline void GenericPacketSent::set_padding_length_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000010u; - padding_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) -} -inline void GenericPacketSent::set_padding_length_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000010u; - padding_length_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) -} -inline std::string* GenericPacketSent::mutable_padding_length_deltas() { - _has_bits_[0] |= 0x00000010u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) - return padding_length_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericPacketSent::release_padding_length_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) - if (!has_padding_length_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000010u; - return padding_length_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericPacketSent::set_allocated_padding_length_deltas(std::string* padding_length_deltas) { - if (padding_length_deltas != nullptr) { - _has_bits_[0] |= 0x00000010u; - } else { - _has_bits_[0] &= ~0x00000010u; - } - padding_length_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), padding_length_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericPacketSent.padding_length_deltas) -} - -// ------------------------------------------------------------------- - -// GenericAckReceived - -// optional int64 timestamp_ms = 1; -inline bool GenericAckReceived::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void GenericAckReceived::clear_timestamp_ms() { - timestamp_ms_ = 
PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericAckReceived::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.timestamp_ms) - return timestamp_ms_; -} -inline void GenericAckReceived::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000010u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.timestamp_ms) -} - -// optional int64 packet_number = 2; -inline bool GenericAckReceived::has_packet_number() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void GenericAckReceived::clear_packet_number() { - packet_number_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000020u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericAckReceived::packet_number() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.packet_number) - return packet_number_; -} -inline void GenericAckReceived::set_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000020u; - packet_number_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.packet_number) -} - -// optional int64 acked_packet_number = 3; -inline bool GenericAckReceived::has_acked_packet_number() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void GenericAckReceived::clear_acked_packet_number() { - acked_packet_number_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000040u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericAckReceived::acked_packet_number() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.acked_packet_number) - return acked_packet_number_; -} -inline void GenericAckReceived::set_acked_packet_number(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000040u; - acked_packet_number_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.acked_packet_number) 
-} - -// optional int64 receive_acked_packet_time_ms = 4; -inline bool GenericAckReceived::has_receive_acked_packet_time_ms() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void GenericAckReceived::clear_receive_acked_packet_time_ms() { - receive_acked_packet_time_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000080u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 GenericAckReceived::receive_acked_packet_time_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms) - return receive_acked_packet_time_ms_; -} -inline void GenericAckReceived::set_receive_acked_packet_time_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000080u; - receive_acked_packet_time_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms) -} - -// optional uint32 number_of_deltas = 16; -inline bool GenericAckReceived::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void GenericAckReceived::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000100u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 GenericAckReceived::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.number_of_deltas) - return number_of_deltas_; -} -inline void GenericAckReceived::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000100u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 17; -inline bool GenericAckReceived::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void GenericAckReceived::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline 
const std::string& GenericAckReceived::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void GenericAckReceived::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) -} -inline void GenericAckReceived::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) -} -inline void GenericAckReceived::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) -} -inline void GenericAckReceived::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) -} -inline std::string* GenericAckReceived::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* 
GenericAckReceived::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericAckReceived::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericAckReceived.timestamp_ms_deltas) -} - -// optional bytes packet_number_deltas = 18; -inline bool GenericAckReceived::has_packet_number_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void GenericAckReceived::clear_packet_number_deltas() { - packet_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& GenericAckReceived::packet_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) - return packet_number_deltas_.GetNoArena(); -} -inline void GenericAckReceived::set_packet_number_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) -} -inline void GenericAckReceived::set_packet_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // 
@@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) -} -inline void GenericAckReceived::set_packet_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) -} -inline void GenericAckReceived::set_packet_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) -} -inline std::string* GenericAckReceived::mutable_packet_number_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) - return packet_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericAckReceived::release_packet_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) - if (!has_packet_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return packet_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericAckReceived::set_allocated_packet_number_deltas(std::string* packet_number_deltas) { - if (packet_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - packet_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), packet_number_deltas); - // 
@@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericAckReceived.packet_number_deltas) -} - -// optional bytes acked_packet_number_deltas = 19; -inline bool GenericAckReceived::has_acked_packet_number_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void GenericAckReceived::clear_acked_packet_number_deltas() { - acked_packet_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& GenericAckReceived::acked_packet_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) - return acked_packet_number_deltas_.GetNoArena(); -} -inline void GenericAckReceived::set_acked_packet_number_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - acked_packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) -} -inline void GenericAckReceived::set_acked_packet_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - acked_packet_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) -} -inline void GenericAckReceived::set_acked_packet_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - acked_packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) -} -inline void GenericAckReceived::set_acked_packet_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - 
acked_packet_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) -} -inline std::string* GenericAckReceived::mutable_acked_packet_number_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) - return acked_packet_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericAckReceived::release_acked_packet_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) - if (!has_acked_packet_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return acked_packet_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericAckReceived::set_allocated_acked_packet_number_deltas(std::string* acked_packet_number_deltas) { - if (acked_packet_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - acked_packet_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), acked_packet_number_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericAckReceived.acked_packet_number_deltas) -} - -// optional bytes receive_acked_packet_time_ms_deltas = 20; -inline bool GenericAckReceived::has_receive_acked_packet_time_ms_deltas() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void GenericAckReceived::clear_receive_acked_packet_time_ms_deltas() { - receive_acked_packet_time_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000008u; -} -inline const std::string& 
GenericAckReceived::receive_acked_packet_time_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) - return receive_acked_packet_time_ms_deltas_.GetNoArena(); -} -inline void GenericAckReceived::set_receive_acked_packet_time_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000008u; - receive_acked_packet_time_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) -} -inline void GenericAckReceived::set_receive_acked_packet_time_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000008u; - receive_acked_packet_time_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) -} -inline void GenericAckReceived::set_receive_acked_packet_time_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000008u; - receive_acked_packet_time_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) -} -inline void GenericAckReceived::set_receive_acked_packet_time_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000008u; - receive_acked_packet_time_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) -} -inline std::string* GenericAckReceived::mutable_receive_acked_packet_time_ms_deltas() { - _has_bits_[0] |= 0x00000008u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) - return receive_acked_packet_time_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* GenericAckReceived::release_receive_acked_packet_time_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) - if (!has_receive_acked_packet_time_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000008u; - return receive_acked_packet_time_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void GenericAckReceived::set_allocated_receive_acked_packet_time_ms_deltas(std::string* receive_acked_packet_time_ms_deltas) { - if (receive_acked_packet_time_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000008u; - } else { - _has_bits_[0] &= ~0x00000008u; - } - receive_acked_packet_time_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), receive_acked_packet_time_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.GenericAckReceived.receive_acked_packet_time_ms_deltas) -} - -// ------------------------------------------------------------------- - -// IncomingRtpPackets - -// optional int64 timestamp_ms = 1; -inline bool IncomingRtpPackets::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00008000u) != 0; -} -inline void IncomingRtpPackets::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00008000u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 IncomingRtpPackets::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms) - return timestamp_ms_; -} -inline void IncomingRtpPackets::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00008000u; - timestamp_ms_ = value; - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms) -} - -// optional bool marker = 2; -inline bool IncomingRtpPackets::has_marker() const { - return (_has_bits_[0] & 0x01000000u) != 0; -} -inline void IncomingRtpPackets::clear_marker() { - marker_ = false; - _has_bits_[0] &= ~0x01000000u; -} -inline bool IncomingRtpPackets::marker() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.marker) - return marker_; -} -inline void IncomingRtpPackets::set_marker(bool value) { - _has_bits_[0] |= 0x01000000u; - marker_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.marker) -} - -// optional uint32 payload_type = 3; -inline bool IncomingRtpPackets::has_payload_type() const { - return (_has_bits_[0] & 0x00010000u) != 0; -} -inline void IncomingRtpPackets::clear_payload_type() { - payload_type_ = 0u; - _has_bits_[0] &= ~0x00010000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::payload_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.payload_type) - return payload_type_; -} -inline void IncomingRtpPackets::set_payload_type(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00010000u; - payload_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.payload_type) -} - -// optional uint32 sequence_number = 4; -inline bool IncomingRtpPackets::has_sequence_number() const { - return (_has_bits_[0] & 0x00020000u) != 0; -} -inline void IncomingRtpPackets::clear_sequence_number() { - sequence_number_ = 0u; - _has_bits_[0] &= ~0x00020000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::sequence_number() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.sequence_number) - return sequence_number_; -} -inline void IncomingRtpPackets::set_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00020000u; - sequence_number_ = 
value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.sequence_number) -} - -// optional fixed32 rtp_timestamp = 5; -inline bool IncomingRtpPackets::has_rtp_timestamp() const { - return (_has_bits_[0] & 0x00040000u) != 0; -} -inline void IncomingRtpPackets::clear_rtp_timestamp() { - rtp_timestamp_ = 0u; - _has_bits_[0] &= ~0x00040000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::rtp_timestamp() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp) - return rtp_timestamp_; -} -inline void IncomingRtpPackets::set_rtp_timestamp(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00040000u; - rtp_timestamp_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp) -} - -// optional fixed32 ssrc = 6; -inline bool IncomingRtpPackets::has_ssrc() const { - return (_has_bits_[0] & 0x00080000u) != 0; -} -inline void IncomingRtpPackets::clear_ssrc() { - ssrc_ = 0u; - _has_bits_[0] &= ~0x00080000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.ssrc) - return ssrc_; -} -inline void IncomingRtpPackets::set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00080000u; - ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.ssrc) -} - -// optional uint32 payload_size = 8; -inline bool IncomingRtpPackets::has_payload_size() const { - return (_has_bits_[0] & 0x00100000u) != 0; -} -inline void IncomingRtpPackets::clear_payload_size() { - payload_size_ = 0u; - _has_bits_[0] &= ~0x00100000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::payload_size() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.payload_size) - return payload_size_; -} -inline void IncomingRtpPackets::set_payload_size(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] 
|= 0x00100000u; - payload_size_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.payload_size) -} - -// optional uint32 header_size = 9; -inline bool IncomingRtpPackets::has_header_size() const { - return (_has_bits_[0] & 0x00200000u) != 0; -} -inline void IncomingRtpPackets::clear_header_size() { - header_size_ = 0u; - _has_bits_[0] &= ~0x00200000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::header_size() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.header_size) - return header_size_; -} -inline void IncomingRtpPackets::set_header_size(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00200000u; - header_size_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.header_size) -} - -// optional uint32 padding_size = 10; -inline bool IncomingRtpPackets::has_padding_size() const { - return (_has_bits_[0] & 0x00400000u) != 0; -} -inline void IncomingRtpPackets::clear_padding_size() { - padding_size_ = 0u; - _has_bits_[0] &= ~0x00400000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::padding_size() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.padding_size) - return padding_size_; -} -inline void IncomingRtpPackets::set_padding_size(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00400000u; - padding_size_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.padding_size) -} - -// optional uint32 number_of_deltas = 11; -inline bool IncomingRtpPackets::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00800000u) != 0; -} -inline void IncomingRtpPackets::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00800000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.number_of_deltas) - return 
number_of_deltas_; -} -inline void IncomingRtpPackets::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00800000u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.number_of_deltas) -} - -// optional uint32 transport_sequence_number = 15; -inline bool IncomingRtpPackets::has_transport_sequence_number() const { - return (_has_bits_[0] & 0x04000000u) != 0; -} -inline void IncomingRtpPackets::clear_transport_sequence_number() { - transport_sequence_number_ = 0u; - _has_bits_[0] &= ~0x04000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::transport_sequence_number() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number) - return transport_sequence_number_; -} -inline void IncomingRtpPackets::set_transport_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x04000000u; - transport_sequence_number_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number) -} - -// optional int32 transmission_time_offset = 16; -inline bool IncomingRtpPackets::has_transmission_time_offset() const { - return (_has_bits_[0] & 0x08000000u) != 0; -} -inline void IncomingRtpPackets::clear_transmission_time_offset() { - transmission_time_offset_ = 0; - _has_bits_[0] &= ~0x08000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 IncomingRtpPackets::transmission_time_offset() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset) - return transmission_time_offset_; -} -inline void IncomingRtpPackets::set_transmission_time_offset(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x08000000u; - transmission_time_offset_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset) -} - -// optional uint32 absolute_send_time = 17; -inline bool 
IncomingRtpPackets::has_absolute_send_time() const { - return (_has_bits_[0] & 0x10000000u) != 0; -} -inline void IncomingRtpPackets::clear_absolute_send_time() { - absolute_send_time_ = 0u; - _has_bits_[0] &= ~0x10000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::absolute_send_time() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time) - return absolute_send_time_; -} -inline void IncomingRtpPackets::set_absolute_send_time(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x10000000u; - absolute_send_time_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time) -} - -// optional uint32 video_rotation = 18; -inline bool IncomingRtpPackets::has_video_rotation() const { - return (_has_bits_[0] & 0x20000000u) != 0; -} -inline void IncomingRtpPackets::clear_video_rotation() { - video_rotation_ = 0u; - _has_bits_[0] &= ~0x20000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::video_rotation() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.video_rotation) - return video_rotation_; -} -inline void IncomingRtpPackets::set_video_rotation(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x20000000u; - video_rotation_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.video_rotation) -} - -// optional uint32 audio_level = 19; -inline bool IncomingRtpPackets::has_audio_level() const { - return (_has_bits_[0] & 0x40000000u) != 0; -} -inline void IncomingRtpPackets::clear_audio_level() { - audio_level_ = 0u; - _has_bits_[0] &= ~0x40000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtpPackets::audio_level() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.audio_level) - return audio_level_; -} -inline void IncomingRtpPackets::set_audio_level(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x40000000u; - 
audio_level_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.audio_level) -} - -// optional bool voice_activity = 20; -inline bool IncomingRtpPackets::has_voice_activity() const { - return (_has_bits_[0] & 0x02000000u) != 0; -} -inline void IncomingRtpPackets::clear_voice_activity() { - voice_activity_ = false; - _has_bits_[0] &= ~0x02000000u; -} -inline bool IncomingRtpPackets::voice_activity() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.voice_activity) - return voice_activity_; -} -inline void IncomingRtpPackets::set_voice_activity(bool value) { - _has_bits_[0] |= 0x02000000u; - voice_activity_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.voice_activity) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool IncomingRtpPackets::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void IncomingRtpPackets::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& IncomingRtpPackets::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) -} -inline void IncomingRtpPackets::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // 
@@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) -} -inline void IncomingRtpPackets::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) -} -inline void IncomingRtpPackets::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) -} -inline std::string* IncomingRtpPackets::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // 
@@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.timestamp_ms_deltas) -} - -// optional bytes marker_deltas = 102; -inline bool IncomingRtpPackets::has_marker_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void IncomingRtpPackets::clear_marker_deltas() { - marker_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& IncomingRtpPackets::marker_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) - return marker_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_marker_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) -} -inline void IncomingRtpPackets::set_marker_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) -} -inline void IncomingRtpPackets::set_marker_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) -} -inline void IncomingRtpPackets::set_marker_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) -} 
-inline std::string* IncomingRtpPackets::mutable_marker_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) - return marker_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_marker_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) - if (!has_marker_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return marker_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_marker_deltas(std::string* marker_deltas) { - if (marker_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - marker_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), marker_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.marker_deltas) -} - -// optional bytes payload_type_deltas = 103; -inline bool IncomingRtpPackets::has_payload_type_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void IncomingRtpPackets::clear_payload_type_deltas() { - payload_type_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& IncomingRtpPackets::payload_type_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) - return payload_type_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_payload_type_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) -} -inline void IncomingRtpPackets::set_payload_type_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) -} -inline void IncomingRtpPackets::set_payload_type_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) -} -inline void IncomingRtpPackets::set_payload_type_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) -} -inline std::string* IncomingRtpPackets::mutable_payload_type_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) - return payload_type_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_payload_type_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) - if (!has_payload_type_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return payload_type_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_payload_type_deltas(std::string* payload_type_deltas) { - 
if (payload_type_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - payload_type_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), payload_type_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.payload_type_deltas) -} - -// optional bytes sequence_number_deltas = 104; -inline bool IncomingRtpPackets::has_sequence_number_deltas() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void IncomingRtpPackets::clear_sequence_number_deltas() { - sequence_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000008u; -} -inline const std::string& IncomingRtpPackets::sequence_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) - return sequence_number_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_sequence_number_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) -} -inline void IncomingRtpPackets::set_sequence_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) -} -inline void IncomingRtpPackets::set_sequence_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) -} -inline void IncomingRtpPackets::set_sequence_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) -} -inline std::string* IncomingRtpPackets::mutable_sequence_number_deltas() { - _has_bits_[0] |= 0x00000008u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) - return sequence_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_sequence_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) - if (!has_sequence_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000008u; - return sequence_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_sequence_number_deltas(std::string* sequence_number_deltas) { - if (sequence_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000008u; - } else { - _has_bits_[0] &= ~0x00000008u; - } - sequence_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), sequence_number_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.sequence_number_deltas) -} - -// optional bytes rtp_timestamp_deltas = 105; -inline bool IncomingRtpPackets::has_rtp_timestamp_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void IncomingRtpPackets::clear_rtp_timestamp_deltas() { - 
rtp_timestamp_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000010u; -} -inline const std::string& IncomingRtpPackets::rtp_timestamp_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) - return rtp_timestamp_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_rtp_timestamp_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) -} -inline void IncomingRtpPackets::set_rtp_timestamp_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) -} -inline void IncomingRtpPackets::set_rtp_timestamp_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) -} -inline void IncomingRtpPackets::set_rtp_timestamp_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) -} -inline std::string* IncomingRtpPackets::mutable_rtp_timestamp_deltas() { - _has_bits_[0] |= 0x00000010u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) - return rtp_timestamp_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_rtp_timestamp_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) - if (!has_rtp_timestamp_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000010u; - return rtp_timestamp_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_rtp_timestamp_deltas(std::string* rtp_timestamp_deltas) { - if (rtp_timestamp_deltas != nullptr) { - _has_bits_[0] |= 0x00000010u; - } else { - _has_bits_[0] &= ~0x00000010u; - } - rtp_timestamp_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), rtp_timestamp_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.rtp_timestamp_deltas) -} - -// optional bytes ssrc_deltas = 106; -inline bool IncomingRtpPackets::has_ssrc_deltas() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void IncomingRtpPackets::clear_ssrc_deltas() { - ssrc_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000020u; -} -inline const std::string& IncomingRtpPackets::ssrc_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) - return ssrc_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_ssrc_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) -} -inline void IncomingRtpPackets::set_ssrc_deltas(std::string&& value) { - _has_bits_[0] 
|= 0x00000020u; - ssrc_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) -} -inline void IncomingRtpPackets::set_ssrc_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) -} -inline void IncomingRtpPackets::set_ssrc_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) -} -inline std::string* IncomingRtpPackets::mutable_ssrc_deltas() { - _has_bits_[0] |= 0x00000020u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) - return ssrc_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_ssrc_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) - if (!has_ssrc_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000020u; - return ssrc_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_ssrc_deltas(std::string* ssrc_deltas) { - if (ssrc_deltas != nullptr) { - _has_bits_[0] |= 0x00000020u; - } else { - _has_bits_[0] &= ~0x00000020u; - } - ssrc_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ssrc_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.ssrc_deltas) 
-} - -// optional bytes payload_size_deltas = 108; -inline bool IncomingRtpPackets::has_payload_size_deltas() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void IncomingRtpPackets::clear_payload_size_deltas() { - payload_size_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000040u; -} -inline const std::string& IncomingRtpPackets::payload_size_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) - return payload_size_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_payload_size_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) -} -inline void IncomingRtpPackets::set_payload_size_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) -} -inline void IncomingRtpPackets::set_payload_size_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) -} -inline void IncomingRtpPackets::set_payload_size_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // 
@@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) -} -inline std::string* IncomingRtpPackets::mutable_payload_size_deltas() { - _has_bits_[0] |= 0x00000040u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) - return payload_size_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_payload_size_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) - if (!has_payload_size_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000040u; - return payload_size_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_payload_size_deltas(std::string* payload_size_deltas) { - if (payload_size_deltas != nullptr) { - _has_bits_[0] |= 0x00000040u; - } else { - _has_bits_[0] &= ~0x00000040u; - } - payload_size_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), payload_size_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.payload_size_deltas) -} - -// optional bytes header_size_deltas = 109; -inline bool IncomingRtpPackets::has_header_size_deltas() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void IncomingRtpPackets::clear_header_size_deltas() { - header_size_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000080u; -} -inline const std::string& IncomingRtpPackets::header_size_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) - return header_size_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_header_size_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000080u; - 
header_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) -} -inline void IncomingRtpPackets::set_header_size_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) -} -inline void IncomingRtpPackets::set_header_size_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) -} -inline void IncomingRtpPackets::set_header_size_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) -} -inline std::string* IncomingRtpPackets::mutable_header_size_deltas() { - _has_bits_[0] |= 0x00000080u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) - return header_size_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_header_size_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) - if (!has_header_size_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000080u; - return header_size_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline 
void IncomingRtpPackets::set_allocated_header_size_deltas(std::string* header_size_deltas) { - if (header_size_deltas != nullptr) { - _has_bits_[0] |= 0x00000080u; - } else { - _has_bits_[0] &= ~0x00000080u; - } - header_size_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), header_size_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.header_size_deltas) -} - -// optional bytes padding_size_deltas = 110; -inline bool IncomingRtpPackets::has_padding_size_deltas() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void IncomingRtpPackets::clear_padding_size_deltas() { - padding_size_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000100u; -} -inline const std::string& IncomingRtpPackets::padding_size_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) - return padding_size_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_padding_size_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) -} -inline void IncomingRtpPackets::set_padding_size_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) -} -inline void IncomingRtpPackets::set_padding_size_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) -} -inline void IncomingRtpPackets::set_padding_size_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) -} -inline std::string* IncomingRtpPackets::mutable_padding_size_deltas() { - _has_bits_[0] |= 0x00000100u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) - return padding_size_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_padding_size_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) - if (!has_padding_size_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000100u; - return padding_size_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_padding_size_deltas(std::string* padding_size_deltas) { - if (padding_size_deltas != nullptr) { - _has_bits_[0] |= 0x00000100u; - } else { - _has_bits_[0] &= ~0x00000100u; - } - padding_size_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), padding_size_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.padding_size_deltas) -} - -// optional bytes transport_sequence_number_deltas = 115; -inline bool IncomingRtpPackets::has_transport_sequence_number_deltas() const { - return (_has_bits_[0] & 0x00000200u) != 0; -} -inline void IncomingRtpPackets::clear_transport_sequence_number_deltas() { - 
transport_sequence_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000200u; -} -inline const std::string& IncomingRtpPackets::transport_sequence_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) - return transport_sequence_number_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_transport_sequence_number_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) -} -inline void IncomingRtpPackets::set_transport_sequence_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) -} -inline void IncomingRtpPackets::set_transport_sequence_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) -} -inline void IncomingRtpPackets::set_transport_sequence_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) -} -inline 
std::string* IncomingRtpPackets::mutable_transport_sequence_number_deltas() { - _has_bits_[0] |= 0x00000200u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) - return transport_sequence_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_transport_sequence_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) - if (!has_transport_sequence_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000200u; - return transport_sequence_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_transport_sequence_number_deltas(std::string* transport_sequence_number_deltas) { - if (transport_sequence_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000200u; - } else { - _has_bits_[0] &= ~0x00000200u; - } - transport_sequence_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), transport_sequence_number_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.transport_sequence_number_deltas) -} - -// optional bytes transmission_time_offset_deltas = 116; -inline bool IncomingRtpPackets::has_transmission_time_offset_deltas() const { - return (_has_bits_[0] & 0x00000400u) != 0; -} -inline void IncomingRtpPackets::clear_transmission_time_offset_deltas() { - transmission_time_offset_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000400u; -} -inline const std::string& IncomingRtpPackets::transmission_time_offset_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) - return 
transmission_time_offset_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_transmission_time_offset_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) -} -inline void IncomingRtpPackets::set_transmission_time_offset_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) -} -inline void IncomingRtpPackets::set_transmission_time_offset_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) -} -inline void IncomingRtpPackets::set_transmission_time_offset_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) -} -inline std::string* IncomingRtpPackets::mutable_transmission_time_offset_deltas() { - _has_bits_[0] |= 0x00000400u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) - return transmission_time_offset_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* 
IncomingRtpPackets::release_transmission_time_offset_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) - if (!has_transmission_time_offset_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000400u; - return transmission_time_offset_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_transmission_time_offset_deltas(std::string* transmission_time_offset_deltas) { - if (transmission_time_offset_deltas != nullptr) { - _has_bits_[0] |= 0x00000400u; - } else { - _has_bits_[0] &= ~0x00000400u; - } - transmission_time_offset_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), transmission_time_offset_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.transmission_time_offset_deltas) -} - -// optional bytes absolute_send_time_deltas = 117; -inline bool IncomingRtpPackets::has_absolute_send_time_deltas() const { - return (_has_bits_[0] & 0x00000800u) != 0; -} -inline void IncomingRtpPackets::clear_absolute_send_time_deltas() { - absolute_send_time_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000800u; -} -inline const std::string& IncomingRtpPackets::absolute_send_time_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) - return absolute_send_time_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_absolute_send_time_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) -} -inline void 
IncomingRtpPackets::set_absolute_send_time_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) -} -inline void IncomingRtpPackets::set_absolute_send_time_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) -} -inline void IncomingRtpPackets::set_absolute_send_time_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) -} -inline std::string* IncomingRtpPackets::mutable_absolute_send_time_deltas() { - _has_bits_[0] |= 0x00000800u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) - return absolute_send_time_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_absolute_send_time_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) - if (!has_absolute_send_time_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000800u; - return absolute_send_time_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_absolute_send_time_deltas(std::string* absolute_send_time_deltas) { 
- if (absolute_send_time_deltas != nullptr) { - _has_bits_[0] |= 0x00000800u; - } else { - _has_bits_[0] &= ~0x00000800u; - } - absolute_send_time_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), absolute_send_time_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.absolute_send_time_deltas) -} - -// optional bytes video_rotation_deltas = 118; -inline bool IncomingRtpPackets::has_video_rotation_deltas() const { - return (_has_bits_[0] & 0x00001000u) != 0; -} -inline void IncomingRtpPackets::clear_video_rotation_deltas() { - video_rotation_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00001000u; -} -inline const std::string& IncomingRtpPackets::video_rotation_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) - return video_rotation_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_video_rotation_deltas(const std::string& value) { - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) -} -inline void IncomingRtpPackets::set_video_rotation_deltas(std::string&& value) { - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) -} -inline void IncomingRtpPackets::set_video_rotation_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) -} -inline void IncomingRtpPackets::set_video_rotation_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) -} -inline std::string* IncomingRtpPackets::mutable_video_rotation_deltas() { - _has_bits_[0] |= 0x00001000u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) - return video_rotation_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_video_rotation_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) - if (!has_video_rotation_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00001000u; - return video_rotation_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_video_rotation_deltas(std::string* video_rotation_deltas) { - if (video_rotation_deltas != nullptr) { - _has_bits_[0] |= 0x00001000u; - } else { - _has_bits_[0] &= ~0x00001000u; - } - video_rotation_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), video_rotation_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.video_rotation_deltas) -} - -// optional bytes audio_level_deltas = 119; -inline bool IncomingRtpPackets::has_audio_level_deltas() const { - return (_has_bits_[0] & 0x00002000u) != 0; -} -inline void IncomingRtpPackets::clear_audio_level_deltas() { - 
audio_level_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00002000u; -} -inline const std::string& IncomingRtpPackets::audio_level_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) - return audio_level_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_audio_level_deltas(const std::string& value) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) -} -inline void IncomingRtpPackets::set_audio_level_deltas(std::string&& value) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) -} -inline void IncomingRtpPackets::set_audio_level_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) -} -inline void IncomingRtpPackets::set_audio_level_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) -} -inline std::string* IncomingRtpPackets::mutable_audio_level_deltas() { - _has_bits_[0] |= 0x00002000u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) - return 
audio_level_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_audio_level_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) - if (!has_audio_level_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00002000u; - return audio_level_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_audio_level_deltas(std::string* audio_level_deltas) { - if (audio_level_deltas != nullptr) { - _has_bits_[0] |= 0x00002000u; - } else { - _has_bits_[0] &= ~0x00002000u; - } - audio_level_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), audio_level_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.audio_level_deltas) -} - -// optional bytes voice_activity_deltas = 120; -inline bool IncomingRtpPackets::has_voice_activity_deltas() const { - return (_has_bits_[0] & 0x00004000u) != 0; -} -inline void IncomingRtpPackets::clear_voice_activity_deltas() { - voice_activity_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00004000u; -} -inline const std::string& IncomingRtpPackets::voice_activity_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) - return voice_activity_deltas_.GetNoArena(); -} -inline void IncomingRtpPackets::set_voice_activity_deltas(const std::string& value) { - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) -} -inline void IncomingRtpPackets::set_voice_activity_deltas(std::string&& value) { - _has_bits_[0] |= 0x00004000u; - 
voice_activity_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) -} -inline void IncomingRtpPackets::set_voice_activity_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) -} -inline void IncomingRtpPackets::set_voice_activity_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) -} -inline std::string* IncomingRtpPackets::mutable_voice_activity_deltas() { - _has_bits_[0] |= 0x00004000u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) - return voice_activity_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtpPackets::release_voice_activity_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) - if (!has_voice_activity_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00004000u; - return voice_activity_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtpPackets::set_allocated_voice_activity_deltas(std::string* voice_activity_deltas) { - if (voice_activity_deltas != nullptr) { - _has_bits_[0] |= 0x00004000u; - } else { - _has_bits_[0] &= ~0x00004000u; - } - 
voice_activity_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), voice_activity_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtpPackets.voice_activity_deltas) -} - -// ------------------------------------------------------------------- - -// OutgoingRtpPackets - -// optional int64 timestamp_ms = 1; -inline bool OutgoingRtpPackets::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00008000u) != 0; -} -inline void OutgoingRtpPackets::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00008000u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 OutgoingRtpPackets::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms) - return timestamp_ms_; -} -inline void OutgoingRtpPackets::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00008000u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms) -} - -// optional bool marker = 2; -inline bool OutgoingRtpPackets::has_marker() const { - return (_has_bits_[0] & 0x01000000u) != 0; -} -inline void OutgoingRtpPackets::clear_marker() { - marker_ = false; - _has_bits_[0] &= ~0x01000000u; -} -inline bool OutgoingRtpPackets::marker() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.marker) - return marker_; -} -inline void OutgoingRtpPackets::set_marker(bool value) { - _has_bits_[0] |= 0x01000000u; - marker_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.marker) -} - -// optional uint32 payload_type = 3; -inline bool OutgoingRtpPackets::has_payload_type() const { - return (_has_bits_[0] & 0x00010000u) != 0; -} -inline void OutgoingRtpPackets::clear_payload_type() { - payload_type_ = 0u; - _has_bits_[0] &= ~0x00010000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::payload_type() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.payload_type) - return payload_type_; -} -inline void OutgoingRtpPackets::set_payload_type(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00010000u; - payload_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.payload_type) -} - -// optional uint32 sequence_number = 4; -inline bool OutgoingRtpPackets::has_sequence_number() const { - return (_has_bits_[0] & 0x00020000u) != 0; -} -inline void OutgoingRtpPackets::clear_sequence_number() { - sequence_number_ = 0u; - _has_bits_[0] &= ~0x00020000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::sequence_number() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.sequence_number) - return sequence_number_; -} -inline void OutgoingRtpPackets::set_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00020000u; - sequence_number_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.sequence_number) -} - -// optional fixed32 rtp_timestamp = 5; -inline bool OutgoingRtpPackets::has_rtp_timestamp() const { - return (_has_bits_[0] & 0x00040000u) != 0; -} -inline void OutgoingRtpPackets::clear_rtp_timestamp() { - rtp_timestamp_ = 0u; - _has_bits_[0] &= ~0x00040000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::rtp_timestamp() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp) - return rtp_timestamp_; -} -inline void OutgoingRtpPackets::set_rtp_timestamp(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00040000u; - rtp_timestamp_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp) -} - -// optional fixed32 ssrc = 6; -inline bool OutgoingRtpPackets::has_ssrc() const { - return (_has_bits_[0] & 0x00080000u) != 0; -} -inline void OutgoingRtpPackets::clear_ssrc() { - ssrc_ = 0u; - _has_bits_[0] 
&= ~0x00080000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.ssrc) - return ssrc_; -} -inline void OutgoingRtpPackets::set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00080000u; - ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.ssrc) -} - -// optional uint32 payload_size = 8; -inline bool OutgoingRtpPackets::has_payload_size() const { - return (_has_bits_[0] & 0x00100000u) != 0; -} -inline void OutgoingRtpPackets::clear_payload_size() { - payload_size_ = 0u; - _has_bits_[0] &= ~0x00100000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::payload_size() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.payload_size) - return payload_size_; -} -inline void OutgoingRtpPackets::set_payload_size(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00100000u; - payload_size_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.payload_size) -} - -// optional uint32 header_size = 9; -inline bool OutgoingRtpPackets::has_header_size() const { - return (_has_bits_[0] & 0x00200000u) != 0; -} -inline void OutgoingRtpPackets::clear_header_size() { - header_size_ = 0u; - _has_bits_[0] &= ~0x00200000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::header_size() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.header_size) - return header_size_; -} -inline void OutgoingRtpPackets::set_header_size(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00200000u; - header_size_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.header_size) -} - -// optional uint32 padding_size = 10; -inline bool OutgoingRtpPackets::has_padding_size() const { - return (_has_bits_[0] & 0x00400000u) != 0; -} -inline void OutgoingRtpPackets::clear_padding_size() 
{ - padding_size_ = 0u; - _has_bits_[0] &= ~0x00400000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::padding_size() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.padding_size) - return padding_size_; -} -inline void OutgoingRtpPackets::set_padding_size(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00400000u; - padding_size_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.padding_size) -} - -// optional uint32 number_of_deltas = 11; -inline bool OutgoingRtpPackets::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00800000u) != 0; -} -inline void OutgoingRtpPackets::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00800000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.number_of_deltas) - return number_of_deltas_; -} -inline void OutgoingRtpPackets::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00800000u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.number_of_deltas) -} - -// optional uint32 transport_sequence_number = 15; -inline bool OutgoingRtpPackets::has_transport_sequence_number() const { - return (_has_bits_[0] & 0x04000000u) != 0; -} -inline void OutgoingRtpPackets::clear_transport_sequence_number() { - transport_sequence_number_ = 0u; - _has_bits_[0] &= ~0x04000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::transport_sequence_number() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number) - return transport_sequence_number_; -} -inline void OutgoingRtpPackets::set_transport_sequence_number(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x04000000u; - transport_sequence_number_ = value; - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number) -} - -// optional int32 transmission_time_offset = 16; -inline bool OutgoingRtpPackets::has_transmission_time_offset() const { - return (_has_bits_[0] & 0x08000000u) != 0; -} -inline void OutgoingRtpPackets::clear_transmission_time_offset() { - transmission_time_offset_ = 0; - _has_bits_[0] &= ~0x08000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 OutgoingRtpPackets::transmission_time_offset() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset) - return transmission_time_offset_; -} -inline void OutgoingRtpPackets::set_transmission_time_offset(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x08000000u; - transmission_time_offset_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset) -} - -// optional uint32 absolute_send_time = 17; -inline bool OutgoingRtpPackets::has_absolute_send_time() const { - return (_has_bits_[0] & 0x10000000u) != 0; -} -inline void OutgoingRtpPackets::clear_absolute_send_time() { - absolute_send_time_ = 0u; - _has_bits_[0] &= ~0x10000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::absolute_send_time() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time) - return absolute_send_time_; -} -inline void OutgoingRtpPackets::set_absolute_send_time(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x10000000u; - absolute_send_time_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time) -} - -// optional uint32 video_rotation = 18; -inline bool OutgoingRtpPackets::has_video_rotation() const { - return (_has_bits_[0] & 0x20000000u) != 0; -} -inline void OutgoingRtpPackets::clear_video_rotation() { - video_rotation_ = 0u; - _has_bits_[0] &= ~0x20000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 
OutgoingRtpPackets::video_rotation() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.video_rotation) - return video_rotation_; -} -inline void OutgoingRtpPackets::set_video_rotation(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x20000000u; - video_rotation_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.video_rotation) -} - -// optional uint32 audio_level = 19; -inline bool OutgoingRtpPackets::has_audio_level() const { - return (_has_bits_[0] & 0x40000000u) != 0; -} -inline void OutgoingRtpPackets::clear_audio_level() { - audio_level_ = 0u; - _has_bits_[0] &= ~0x40000000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtpPackets::audio_level() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.audio_level) - return audio_level_; -} -inline void OutgoingRtpPackets::set_audio_level(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x40000000u; - audio_level_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.audio_level) -} - -// optional bool voice_activity = 20; -inline bool OutgoingRtpPackets::has_voice_activity() const { - return (_has_bits_[0] & 0x02000000u) != 0; -} -inline void OutgoingRtpPackets::clear_voice_activity() { - voice_activity_ = false; - _has_bits_[0] &= ~0x02000000u; -} -inline bool OutgoingRtpPackets::voice_activity() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.voice_activity) - return voice_activity_; -} -inline void OutgoingRtpPackets::set_voice_activity(bool value) { - _has_bits_[0] |= 0x02000000u; - voice_activity_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.voice_activity) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool OutgoingRtpPackets::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void OutgoingRtpPackets::clear_timestamp_ms_deltas() { - 
timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& OutgoingRtpPackets::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) -} -inline void OutgoingRtpPackets::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) -} -inline void OutgoingRtpPackets::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) -} -inline void OutgoingRtpPackets::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) - return 
timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.timestamp_ms_deltas) -} - -// optional bytes marker_deltas = 102; -inline bool OutgoingRtpPackets::has_marker_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void OutgoingRtpPackets::clear_marker_deltas() { - marker_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& OutgoingRtpPackets::marker_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) - return marker_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_marker_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) -} -inline void OutgoingRtpPackets::set_marker_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena( - 
&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) -} -inline void OutgoingRtpPackets::set_marker_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) -} -inline void OutgoingRtpPackets::set_marker_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - marker_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_marker_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) - return marker_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_marker_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) - if (!has_marker_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return marker_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_marker_deltas(std::string* marker_deltas) { - if (marker_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - marker_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), marker_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.marker_deltas) -} - 
-// optional bytes payload_type_deltas = 103; -inline bool OutgoingRtpPackets::has_payload_type_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void OutgoingRtpPackets::clear_payload_type_deltas() { - payload_type_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& OutgoingRtpPackets::payload_type_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) - return payload_type_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_payload_type_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) -} -inline void OutgoingRtpPackets::set_payload_type_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) -} -inline void OutgoingRtpPackets::set_payload_type_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) -} -inline void OutgoingRtpPackets::set_payload_type_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - payload_type_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // 
@@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_payload_type_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) - return payload_type_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_payload_type_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) - if (!has_payload_type_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return payload_type_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_payload_type_deltas(std::string* payload_type_deltas) { - if (payload_type_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - payload_type_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), payload_type_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.payload_type_deltas) -} - -// optional bytes sequence_number_deltas = 104; -inline bool OutgoingRtpPackets::has_sequence_number_deltas() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void OutgoingRtpPackets::clear_sequence_number_deltas() { - sequence_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000008u; -} -inline const std::string& OutgoingRtpPackets::sequence_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) - return sequence_number_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_sequence_number_deltas(const std::string& value) { - _has_bits_[0] |= 
0x00000008u; - sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) -} -inline void OutgoingRtpPackets::set_sequence_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) -} -inline void OutgoingRtpPackets::set_sequence_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) -} -inline void OutgoingRtpPackets::set_sequence_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000008u; - sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_sequence_number_deltas() { - _has_bits_[0] |= 0x00000008u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) - return sequence_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_sequence_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) - if (!has_sequence_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000008u; - return 
sequence_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_sequence_number_deltas(std::string* sequence_number_deltas) { - if (sequence_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000008u; - } else { - _has_bits_[0] &= ~0x00000008u; - } - sequence_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), sequence_number_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.sequence_number_deltas) -} - -// optional bytes rtp_timestamp_deltas = 105; -inline bool OutgoingRtpPackets::has_rtp_timestamp_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void OutgoingRtpPackets::clear_rtp_timestamp_deltas() { - rtp_timestamp_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000010u; -} -inline const std::string& OutgoingRtpPackets::rtp_timestamp_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) - return rtp_timestamp_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_rtp_timestamp_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) -} -inline void OutgoingRtpPackets::set_rtp_timestamp_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) -} -inline void OutgoingRtpPackets::set_rtp_timestamp_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - 
_has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) -} -inline void OutgoingRtpPackets::set_rtp_timestamp_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000010u; - rtp_timestamp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_rtp_timestamp_deltas() { - _has_bits_[0] |= 0x00000010u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) - return rtp_timestamp_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_rtp_timestamp_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) - if (!has_rtp_timestamp_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000010u; - return rtp_timestamp_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_rtp_timestamp_deltas(std::string* rtp_timestamp_deltas) { - if (rtp_timestamp_deltas != nullptr) { - _has_bits_[0] |= 0x00000010u; - } else { - _has_bits_[0] &= ~0x00000010u; - } - rtp_timestamp_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), rtp_timestamp_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.rtp_timestamp_deltas) -} - -// optional bytes ssrc_deltas = 106; -inline bool OutgoingRtpPackets::has_ssrc_deltas() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void 
OutgoingRtpPackets::clear_ssrc_deltas() { - ssrc_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000020u; -} -inline const std::string& OutgoingRtpPackets::ssrc_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) - return ssrc_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_ssrc_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) -} -inline void OutgoingRtpPackets::set_ssrc_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) -} -inline void OutgoingRtpPackets::set_ssrc_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) -} -inline void OutgoingRtpPackets::set_ssrc_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000020u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_ssrc_deltas() { - _has_bits_[0] |= 0x00000020u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) - return ssrc_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} 
-inline std::string* OutgoingRtpPackets::release_ssrc_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) - if (!has_ssrc_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000020u; - return ssrc_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_ssrc_deltas(std::string* ssrc_deltas) { - if (ssrc_deltas != nullptr) { - _has_bits_[0] |= 0x00000020u; - } else { - _has_bits_[0] &= ~0x00000020u; - } - ssrc_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ssrc_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.ssrc_deltas) -} - -// optional bytes payload_size_deltas = 108; -inline bool OutgoingRtpPackets::has_payload_size_deltas() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void OutgoingRtpPackets::clear_payload_size_deltas() { - payload_size_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000040u; -} -inline const std::string& OutgoingRtpPackets::payload_size_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) - return payload_size_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_payload_size_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) -} -inline void OutgoingRtpPackets::set_payload_size_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // 
@@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) -} -inline void OutgoingRtpPackets::set_payload_size_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) -} -inline void OutgoingRtpPackets::set_payload_size_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000040u; - payload_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_payload_size_deltas() { - _has_bits_[0] |= 0x00000040u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) - return payload_size_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_payload_size_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) - if (!has_payload_size_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000040u; - return payload_size_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_payload_size_deltas(std::string* payload_size_deltas) { - if (payload_size_deltas != nullptr) { - _has_bits_[0] |= 0x00000040u; - } else { - _has_bits_[0] &= ~0x00000040u; - } - payload_size_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), payload_size_deltas); - // 
@@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.payload_size_deltas) -} - -// optional bytes header_size_deltas = 109; -inline bool OutgoingRtpPackets::has_header_size_deltas() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void OutgoingRtpPackets::clear_header_size_deltas() { - header_size_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000080u; -} -inline const std::string& OutgoingRtpPackets::header_size_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) - return header_size_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_header_size_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) -} -inline void OutgoingRtpPackets::set_header_size_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) -} -inline void OutgoingRtpPackets::set_header_size_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) -} -inline void OutgoingRtpPackets::set_header_size_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000080u; - header_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // 
@@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_header_size_deltas() { - _has_bits_[0] |= 0x00000080u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) - return header_size_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_header_size_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) - if (!has_header_size_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000080u; - return header_size_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_header_size_deltas(std::string* header_size_deltas) { - if (header_size_deltas != nullptr) { - _has_bits_[0] |= 0x00000080u; - } else { - _has_bits_[0] &= ~0x00000080u; - } - header_size_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), header_size_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.header_size_deltas) -} - -// optional bytes padding_size_deltas = 110; -inline bool OutgoingRtpPackets::has_padding_size_deltas() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void OutgoingRtpPackets::clear_padding_size_deltas() { - padding_size_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000100u; -} -inline const std::string& OutgoingRtpPackets::padding_size_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) - return padding_size_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_padding_size_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000100u; - 
padding_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) -} -inline void OutgoingRtpPackets::set_padding_size_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) -} -inline void OutgoingRtpPackets::set_padding_size_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) -} -inline void OutgoingRtpPackets::set_padding_size_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000100u; - padding_size_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_padding_size_deltas() { - _has_bits_[0] |= 0x00000100u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) - return padding_size_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_padding_size_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) - if (!has_padding_size_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000100u; - return 
padding_size_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_padding_size_deltas(std::string* padding_size_deltas) { - if (padding_size_deltas != nullptr) { - _has_bits_[0] |= 0x00000100u; - } else { - _has_bits_[0] &= ~0x00000100u; - } - padding_size_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), padding_size_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.padding_size_deltas) -} - -// optional bytes transport_sequence_number_deltas = 115; -inline bool OutgoingRtpPackets::has_transport_sequence_number_deltas() const { - return (_has_bits_[0] & 0x00000200u) != 0; -} -inline void OutgoingRtpPackets::clear_transport_sequence_number_deltas() { - transport_sequence_number_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000200u; -} -inline const std::string& OutgoingRtpPackets::transport_sequence_number_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) - return transport_sequence_number_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_transport_sequence_number_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) -} -inline void OutgoingRtpPackets::set_transport_sequence_number_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // 
@@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) -} -inline void OutgoingRtpPackets::set_transport_sequence_number_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) -} -inline void OutgoingRtpPackets::set_transport_sequence_number_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000200u; - transport_sequence_number_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_transport_sequence_number_deltas() { - _has_bits_[0] |= 0x00000200u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) - return transport_sequence_number_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_transport_sequence_number_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) - if (!has_transport_sequence_number_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000200u; - return transport_sequence_number_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_transport_sequence_number_deltas(std::string* transport_sequence_number_deltas) { - if (transport_sequence_number_deltas != nullptr) { - _has_bits_[0] |= 0x00000200u; - } else { - _has_bits_[0] &= ~0x00000200u; 
- } - transport_sequence_number_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), transport_sequence_number_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.transport_sequence_number_deltas) -} - -// optional bytes transmission_time_offset_deltas = 116; -inline bool OutgoingRtpPackets::has_transmission_time_offset_deltas() const { - return (_has_bits_[0] & 0x00000400u) != 0; -} -inline void OutgoingRtpPackets::clear_transmission_time_offset_deltas() { - transmission_time_offset_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000400u; -} -inline const std::string& OutgoingRtpPackets::transmission_time_offset_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) - return transmission_time_offset_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_transmission_time_offset_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) -} -inline void OutgoingRtpPackets::set_transmission_time_offset_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) -} -inline void OutgoingRtpPackets::set_transmission_time_offset_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) -} -inline void OutgoingRtpPackets::set_transmission_time_offset_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000400u; - transmission_time_offset_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_transmission_time_offset_deltas() { - _has_bits_[0] |= 0x00000400u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) - return transmission_time_offset_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_transmission_time_offset_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) - if (!has_transmission_time_offset_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000400u; - return transmission_time_offset_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_transmission_time_offset_deltas(std::string* transmission_time_offset_deltas) { - if (transmission_time_offset_deltas != nullptr) { - _has_bits_[0] |= 0x00000400u; - } else { - _has_bits_[0] &= ~0x00000400u; - } - transmission_time_offset_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), transmission_time_offset_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.transmission_time_offset_deltas) -} - -// optional bytes absolute_send_time_deltas = 117; -inline bool OutgoingRtpPackets::has_absolute_send_time_deltas() const { - return 
(_has_bits_[0] & 0x00000800u) != 0; -} -inline void OutgoingRtpPackets::clear_absolute_send_time_deltas() { - absolute_send_time_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000800u; -} -inline const std::string& OutgoingRtpPackets::absolute_send_time_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) - return absolute_send_time_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_absolute_send_time_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) -} -inline void OutgoingRtpPackets::set_absolute_send_time_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) -} -inline void OutgoingRtpPackets::set_absolute_send_time_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) -} -inline void OutgoingRtpPackets::set_absolute_send_time_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000800u; - absolute_send_time_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) -} -inline 
std::string* OutgoingRtpPackets::mutable_absolute_send_time_deltas() { - _has_bits_[0] |= 0x00000800u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) - return absolute_send_time_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_absolute_send_time_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) - if (!has_absolute_send_time_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000800u; - return absolute_send_time_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_absolute_send_time_deltas(std::string* absolute_send_time_deltas) { - if (absolute_send_time_deltas != nullptr) { - _has_bits_[0] |= 0x00000800u; - } else { - _has_bits_[0] &= ~0x00000800u; - } - absolute_send_time_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), absolute_send_time_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.absolute_send_time_deltas) -} - -// optional bytes video_rotation_deltas = 118; -inline bool OutgoingRtpPackets::has_video_rotation_deltas() const { - return (_has_bits_[0] & 0x00001000u) != 0; -} -inline void OutgoingRtpPackets::clear_video_rotation_deltas() { - video_rotation_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00001000u; -} -inline const std::string& OutgoingRtpPackets::video_rotation_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) - return video_rotation_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_video_rotation_deltas(const std::string& value) { - _has_bits_[0] |= 0x00001000u; - 
video_rotation_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) -} -inline void OutgoingRtpPackets::set_video_rotation_deltas(std::string&& value) { - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) -} -inline void OutgoingRtpPackets::set_video_rotation_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) -} -inline void OutgoingRtpPackets::set_video_rotation_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00001000u; - video_rotation_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_video_rotation_deltas() { - _has_bits_[0] |= 0x00001000u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) - return video_rotation_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_video_rotation_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) - if (!has_video_rotation_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00001000u; - return 
video_rotation_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_video_rotation_deltas(std::string* video_rotation_deltas) { - if (video_rotation_deltas != nullptr) { - _has_bits_[0] |= 0x00001000u; - } else { - _has_bits_[0] &= ~0x00001000u; - } - video_rotation_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), video_rotation_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.video_rotation_deltas) -} - -// optional bytes audio_level_deltas = 119; -inline bool OutgoingRtpPackets::has_audio_level_deltas() const { - return (_has_bits_[0] & 0x00002000u) != 0; -} -inline void OutgoingRtpPackets::clear_audio_level_deltas() { - audio_level_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00002000u; -} -inline const std::string& OutgoingRtpPackets::audio_level_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) - return audio_level_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_audio_level_deltas(const std::string& value) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) -} -inline void OutgoingRtpPackets::set_audio_level_deltas(std::string&& value) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) -} -inline void OutgoingRtpPackets::set_audio_level_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00002000u; - 
audio_level_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) -} -inline void OutgoingRtpPackets::set_audio_level_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00002000u; - audio_level_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_audio_level_deltas() { - _has_bits_[0] |= 0x00002000u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) - return audio_level_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_audio_level_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) - if (!has_audio_level_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00002000u; - return audio_level_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_audio_level_deltas(std::string* audio_level_deltas) { - if (audio_level_deltas != nullptr) { - _has_bits_[0] |= 0x00002000u; - } else { - _has_bits_[0] &= ~0x00002000u; - } - audio_level_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), audio_level_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.audio_level_deltas) -} - -// optional bytes voice_activity_deltas = 120; -inline bool OutgoingRtpPackets::has_voice_activity_deltas() const { - return (_has_bits_[0] & 0x00004000u) != 0; -} -inline void OutgoingRtpPackets::clear_voice_activity_deltas() { - 
voice_activity_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00004000u; -} -inline const std::string& OutgoingRtpPackets::voice_activity_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) - return voice_activity_deltas_.GetNoArena(); -} -inline void OutgoingRtpPackets::set_voice_activity_deltas(const std::string& value) { - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) -} -inline void OutgoingRtpPackets::set_voice_activity_deltas(std::string&& value) { - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) -} -inline void OutgoingRtpPackets::set_voice_activity_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) -} -inline void OutgoingRtpPackets::set_voice_activity_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00004000u; - voice_activity_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) -} -inline std::string* OutgoingRtpPackets::mutable_voice_activity_deltas() { - _has_bits_[0] |= 0x00004000u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) - return voice_activity_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtpPackets::release_voice_activity_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) - if (!has_voice_activity_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00004000u; - return voice_activity_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtpPackets::set_allocated_voice_activity_deltas(std::string* voice_activity_deltas) { - if (voice_activity_deltas != nullptr) { - _has_bits_[0] |= 0x00004000u; - } else { - _has_bits_[0] &= ~0x00004000u; - } - voice_activity_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), voice_activity_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtpPackets.voice_activity_deltas) -} - -// ------------------------------------------------------------------- - -// IncomingRtcpPackets - -// optional int64 timestamp_ms = 1; -inline bool IncomingRtcpPackets::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void IncomingRtcpPackets::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 IncomingRtcpPackets::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms) - return timestamp_ms_; -} -inline void IncomingRtcpPackets::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000008u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms) -} - -// optional bytes raw_packet = 2; -inline bool IncomingRtcpPackets::has_raw_packet() const 
{ - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void IncomingRtcpPackets::clear_raw_packet() { - raw_packet_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& IncomingRtcpPackets::raw_packet() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) - return raw_packet_.GetNoArena(); -} -inline void IncomingRtcpPackets::set_raw_packet(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) -} -inline void IncomingRtcpPackets::set_raw_packet(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) -} -inline void IncomingRtcpPackets::set_raw_packet(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) -} -inline void IncomingRtcpPackets::set_raw_packet(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) -} -inline std::string* IncomingRtcpPackets::mutable_raw_packet() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) - return 
raw_packet_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtcpPackets::release_raw_packet() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) - if (!has_raw_packet()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return raw_packet_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtcpPackets::set_allocated_raw_packet(std::string* raw_packet) { - if (raw_packet != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - raw_packet_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), raw_packet); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtcpPackets.raw_packet) -} - -// optional uint32 number_of_deltas = 3; -inline bool IncomingRtcpPackets::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void IncomingRtcpPackets::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IncomingRtcpPackets::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtcpPackets.number_of_deltas) - return number_of_deltas_; -} -inline void IncomingRtcpPackets::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000010u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtcpPackets.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool IncomingRtcpPackets::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void IncomingRtcpPackets::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const 
std::string& IncomingRtcpPackets::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void IncomingRtcpPackets::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) -} -inline void IncomingRtcpPackets::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) -} -inline void IncomingRtcpPackets::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) -} -inline void IncomingRtcpPackets::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) -} -inline std::string* IncomingRtcpPackets::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* 
IncomingRtcpPackets::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtcpPackets::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtcpPackets.timestamp_ms_deltas) -} - -// optional bytes raw_packet_blobs = 102; -inline bool IncomingRtcpPackets::has_raw_packet_blobs() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void IncomingRtcpPackets::clear_raw_packet_blobs() { - raw_packet_blobs_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& IncomingRtcpPackets::raw_packet_blobs() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) - return raw_packet_blobs_.GetNoArena(); -} -inline void IncomingRtcpPackets::set_raw_packet_blobs(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) -} -inline void IncomingRtcpPackets::set_raw_packet_blobs(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // 
@@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) -} -inline void IncomingRtcpPackets::set_raw_packet_blobs(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) -} -inline void IncomingRtcpPackets::set_raw_packet_blobs(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) -} -inline std::string* IncomingRtcpPackets::mutable_raw_packet_blobs() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) - return raw_packet_blobs_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* IncomingRtcpPackets::release_raw_packet_blobs() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) - if (!has_raw_packet_blobs()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return raw_packet_blobs_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void IncomingRtcpPackets::set_allocated_raw_packet_blobs(std::string* raw_packet_blobs) { - if (raw_packet_blobs != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - raw_packet_blobs_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), raw_packet_blobs); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.IncomingRtcpPackets.raw_packet_blobs) -} - -// 
------------------------------------------------------------------- - -// OutgoingRtcpPackets - -// optional int64 timestamp_ms = 1; -inline bool OutgoingRtcpPackets::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void OutgoingRtcpPackets::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 OutgoingRtcpPackets::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms) - return timestamp_ms_; -} -inline void OutgoingRtcpPackets::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000008u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms) -} - -// optional bytes raw_packet = 2; -inline bool OutgoingRtcpPackets::has_raw_packet() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void OutgoingRtcpPackets::clear_raw_packet() { - raw_packet_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& OutgoingRtcpPackets::raw_packet() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) - return raw_packet_.GetNoArena(); -} -inline void OutgoingRtcpPackets::set_raw_packet(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) -} -inline void OutgoingRtcpPackets::set_raw_packet(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) -} -inline void 
OutgoingRtcpPackets::set_raw_packet(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) -} -inline void OutgoingRtcpPackets::set_raw_packet(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - raw_packet_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) -} -inline std::string* OutgoingRtcpPackets::mutable_raw_packet() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) - return raw_packet_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtcpPackets::release_raw_packet() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) - if (!has_raw_packet()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return raw_packet_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtcpPackets::set_allocated_raw_packet(std::string* raw_packet) { - if (raw_packet != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - raw_packet_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), raw_packet); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet) -} - -// optional uint32 number_of_deltas = 3; -inline bool OutgoingRtcpPackets::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void OutgoingRtcpPackets::clear_number_of_deltas() { - number_of_deltas_ = 0u; - 
_has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 OutgoingRtcpPackets::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtcpPackets.number_of_deltas) - return number_of_deltas_; -} -inline void OutgoingRtcpPackets::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000010u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtcpPackets.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool OutgoingRtcpPackets::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void OutgoingRtcpPackets::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& OutgoingRtcpPackets::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void OutgoingRtcpPackets::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) -} -inline void OutgoingRtcpPackets::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) -} -inline void OutgoingRtcpPackets::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - 
timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) -} -inline void OutgoingRtcpPackets::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) -} -inline std::string* OutgoingRtcpPackets::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtcpPackets::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtcpPackets::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtcpPackets.timestamp_ms_deltas) -} - -// optional bytes raw_packet_blobs = 102; -inline bool OutgoingRtcpPackets::has_raw_packet_blobs() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void 
OutgoingRtcpPackets::clear_raw_packet_blobs() { - raw_packet_blobs_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& OutgoingRtcpPackets::raw_packet_blobs() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) - return raw_packet_blobs_.GetNoArena(); -} -inline void OutgoingRtcpPackets::set_raw_packet_blobs(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) -} -inline void OutgoingRtcpPackets::set_raw_packet_blobs(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) -} -inline void OutgoingRtcpPackets::set_raw_packet_blobs(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) -} -inline void OutgoingRtcpPackets::set_raw_packet_blobs(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - raw_packet_blobs_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) -} -inline std::string* OutgoingRtcpPackets::mutable_raw_packet_blobs() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) - 
return raw_packet_blobs_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* OutgoingRtcpPackets::release_raw_packet_blobs() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) - if (!has_raw_packet_blobs()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return raw_packet_blobs_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void OutgoingRtcpPackets::set_allocated_raw_packet_blobs(std::string* raw_packet_blobs) { - if (raw_packet_blobs != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - raw_packet_blobs_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), raw_packet_blobs); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.OutgoingRtcpPackets.raw_packet_blobs) -} - -// ------------------------------------------------------------------- - -// AudioPlayoutEvents - -// optional int64 timestamp_ms = 1; -inline bool AudioPlayoutEvents::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void AudioPlayoutEvents::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 AudioPlayoutEvents::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms) - return timestamp_ms_; -} -inline void AudioPlayoutEvents::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000004u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms) -} - -// optional uint32 local_ssrc = 2; -inline bool AudioPlayoutEvents::has_local_ssrc() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void AudioPlayoutEvents::clear_local_ssrc() { - local_ssrc_ = 0u; - _has_bits_[0] &= 
~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioPlayoutEvents::local_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc) - return local_ssrc_; -} -inline void AudioPlayoutEvents::set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - local_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc) -} - -// optional uint32 number_of_deltas = 3; -inline bool AudioPlayoutEvents::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void AudioPlayoutEvents::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioPlayoutEvents::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioPlayoutEvents.number_of_deltas) - return number_of_deltas_; -} -inline void AudioPlayoutEvents::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000010u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioPlayoutEvents.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool AudioPlayoutEvents::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioPlayoutEvents::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& AudioPlayoutEvents::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void AudioPlayoutEvents::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); 
- // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) -} -inline void AudioPlayoutEvents::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) -} -inline void AudioPlayoutEvents::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) -} -inline void AudioPlayoutEvents::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) -} -inline std::string* AudioPlayoutEvents::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioPlayoutEvents::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioPlayoutEvents::set_allocated_timestamp_ms_deltas(std::string* 
timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioPlayoutEvents.timestamp_ms_deltas) -} - -// optional bytes local_ssrc_deltas = 102; -inline bool AudioPlayoutEvents::has_local_ssrc_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void AudioPlayoutEvents::clear_local_ssrc_deltas() { - local_ssrc_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& AudioPlayoutEvents::local_ssrc_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) - return local_ssrc_deltas_.GetNoArena(); -} -inline void AudioPlayoutEvents::set_local_ssrc_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - local_ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) -} -inline void AudioPlayoutEvents::set_local_ssrc_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - local_ssrc_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) -} -inline void AudioPlayoutEvents::set_local_ssrc_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - local_ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) -} -inline void 
AudioPlayoutEvents::set_local_ssrc_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - local_ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) -} -inline std::string* AudioPlayoutEvents::mutable_local_ssrc_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) - return local_ssrc_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioPlayoutEvents::release_local_ssrc_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) - if (!has_local_ssrc_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return local_ssrc_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioPlayoutEvents::set_allocated_local_ssrc_deltas(std::string* local_ssrc_deltas) { - if (local_ssrc_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - local_ssrc_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), local_ssrc_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioPlayoutEvents.local_ssrc_deltas) -} - -// ------------------------------------------------------------------- - -// FrameDecodedEvents - -// optional int64 timestamp_ms = 1; -inline bool FrameDecodedEvents::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void FrameDecodedEvents::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000080u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 FrameDecodedEvents::timestamp_ms() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms) - return timestamp_ms_; -} -inline void FrameDecodedEvents::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000080u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms) -} - -// optional fixed32 ssrc = 2; -inline bool FrameDecodedEvents::has_ssrc() const { - return (_has_bits_[0] & 0x00000200u) != 0; -} -inline void FrameDecodedEvents::clear_ssrc() { - ssrc_ = 0u; - _has_bits_[0] &= ~0x00000200u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 FrameDecodedEvents::ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.ssrc) - return ssrc_; -} -inline void FrameDecodedEvents::set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000200u; - ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.ssrc) -} - -// optional int64 render_time_ms = 3; -inline bool FrameDecodedEvents::has_render_time_ms() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void FrameDecodedEvents::clear_render_time_ms() { - render_time_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000100u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 FrameDecodedEvents::render_time_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.render_time_ms) - return render_time_ms_; -} -inline void FrameDecodedEvents::set_render_time_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000100u; - render_time_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.render_time_ms) -} - -// optional int32 width = 4; -inline bool FrameDecodedEvents::has_width() const { - return (_has_bits_[0] & 0x00000400u) != 0; -} -inline void FrameDecodedEvents::clear_width() { - width_ = 0; - _has_bits_[0] &= ~0x00000400u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 
FrameDecodedEvents::width() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.width) - return width_; -} -inline void FrameDecodedEvents::set_width(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000400u; - width_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.width) -} - -// optional int32 height = 5; -inline bool FrameDecodedEvents::has_height() const { - return (_has_bits_[0] & 0x00000800u) != 0; -} -inline void FrameDecodedEvents::clear_height() { - height_ = 0; - _has_bits_[0] &= ~0x00000800u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 FrameDecodedEvents::height() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.height) - return height_; -} -inline void FrameDecodedEvents::set_height(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000800u; - height_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.height) -} - -// optional .webrtc.rtclog2.FrameDecodedEvents.Codec codec = 6; -inline bool FrameDecodedEvents::has_codec() const { - return (_has_bits_[0] & 0x00001000u) != 0; -} -inline void FrameDecodedEvents::clear_codec() { - codec_ = 0; - _has_bits_[0] &= ~0x00001000u; -} -inline ::webrtc::rtclog2::FrameDecodedEvents_Codec FrameDecodedEvents::codec() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.codec) - return static_cast< ::webrtc::rtclog2::FrameDecodedEvents_Codec >(codec_); -} -inline void FrameDecodedEvents::set_codec(::webrtc::rtclog2::FrameDecodedEvents_Codec value) { - assert(::webrtc::rtclog2::FrameDecodedEvents_Codec_IsValid(value)); - _has_bits_[0] |= 0x00001000u; - codec_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.codec) -} - -// optional uint32 qp = 7; -inline bool FrameDecodedEvents::has_qp() const { - return (_has_bits_[0] & 0x00002000u) != 0; -} -inline void FrameDecodedEvents::clear_qp() { - qp_ = 0u; - 
_has_bits_[0] &= ~0x00002000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 FrameDecodedEvents::qp() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.qp) - return qp_; -} -inline void FrameDecodedEvents::set_qp(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00002000u; - qp_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.qp) -} - -// optional uint32 number_of_deltas = 15; -inline bool FrameDecodedEvents::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00004000u) != 0; -} -inline void FrameDecodedEvents::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00004000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 FrameDecodedEvents::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.number_of_deltas) - return number_of_deltas_; -} -inline void FrameDecodedEvents::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00004000u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool FrameDecodedEvents::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void FrameDecodedEvents::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& FrameDecodedEvents::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void FrameDecodedEvents::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) -} -inline void FrameDecodedEvents::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) -} -inline void FrameDecodedEvents::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) -} -inline void FrameDecodedEvents::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) -} -inline std::string* FrameDecodedEvents::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* FrameDecodedEvents::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void FrameDecodedEvents::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - 
if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.FrameDecodedEvents.timestamp_ms_deltas) -} - -// optional bytes ssrc_deltas = 102; -inline bool FrameDecodedEvents::has_ssrc_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void FrameDecodedEvents::clear_ssrc_deltas() { - ssrc_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& FrameDecodedEvents::ssrc_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) - return ssrc_deltas_.GetNoArena(); -} -inline void FrameDecodedEvents::set_ssrc_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) -} -inline void FrameDecodedEvents::set_ssrc_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - ssrc_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) -} -inline void FrameDecodedEvents::set_ssrc_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) -} -inline void FrameDecodedEvents::set_ssrc_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - 
ssrc_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) -} -inline std::string* FrameDecodedEvents::mutable_ssrc_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) - return ssrc_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* FrameDecodedEvents::release_ssrc_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) - if (!has_ssrc_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return ssrc_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void FrameDecodedEvents::set_allocated_ssrc_deltas(std::string* ssrc_deltas) { - if (ssrc_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - ssrc_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ssrc_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.FrameDecodedEvents.ssrc_deltas) -} - -// optional bytes render_time_ms_deltas = 103; -inline bool FrameDecodedEvents::has_render_time_ms_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void FrameDecodedEvents::clear_render_time_ms_deltas() { - render_time_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& FrameDecodedEvents::render_time_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) - return render_time_ms_deltas_.GetNoArena(); -} -inline void FrameDecodedEvents::set_render_time_ms_deltas(const std::string& value) { 
- _has_bits_[0] |= 0x00000004u; - render_time_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) -} -inline void FrameDecodedEvents::set_render_time_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - render_time_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) -} -inline void FrameDecodedEvents::set_render_time_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - render_time_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) -} -inline void FrameDecodedEvents::set_render_time_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - render_time_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) -} -inline std::string* FrameDecodedEvents::mutable_render_time_ms_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) - return render_time_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* FrameDecodedEvents::release_render_time_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) - if (!has_render_time_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return 
render_time_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void FrameDecodedEvents::set_allocated_render_time_ms_deltas(std::string* render_time_ms_deltas) { - if (render_time_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - render_time_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), render_time_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.FrameDecodedEvents.render_time_ms_deltas) -} - -// optional bytes width_deltas = 104; -inline bool FrameDecodedEvents::has_width_deltas() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void FrameDecodedEvents::clear_width_deltas() { - width_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000008u; -} -inline const std::string& FrameDecodedEvents::width_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.width_deltas) - return width_deltas_.GetNoArena(); -} -inline void FrameDecodedEvents::set_width_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000008u; - width_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.width_deltas) -} -inline void FrameDecodedEvents::set_width_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000008u; - width_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.FrameDecodedEvents.width_deltas) -} -inline void FrameDecodedEvents::set_width_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000008u; - width_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), 
::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.FrameDecodedEvents.width_deltas) -} -inline void FrameDecodedEvents::set_width_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000008u; - width_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.FrameDecodedEvents.width_deltas) -} -inline std::string* FrameDecodedEvents::mutable_width_deltas() { - _has_bits_[0] |= 0x00000008u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.FrameDecodedEvents.width_deltas) - return width_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* FrameDecodedEvents::release_width_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.FrameDecodedEvents.width_deltas) - if (!has_width_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000008u; - return width_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void FrameDecodedEvents::set_allocated_width_deltas(std::string* width_deltas) { - if (width_deltas != nullptr) { - _has_bits_[0] |= 0x00000008u; - } else { - _has_bits_[0] &= ~0x00000008u; - } - width_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), width_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.FrameDecodedEvents.width_deltas) -} - -// optional bytes height_deltas = 105; -inline bool FrameDecodedEvents::has_height_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void FrameDecodedEvents::clear_height_deltas() { - height_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000010u; -} -inline const std::string& FrameDecodedEvents::height_deltas() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.height_deltas) - return height_deltas_.GetNoArena(); -} -inline void FrameDecodedEvents::set_height_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000010u; - height_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.height_deltas) -} -inline void FrameDecodedEvents::set_height_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000010u; - height_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.FrameDecodedEvents.height_deltas) -} -inline void FrameDecodedEvents::set_height_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000010u; - height_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.FrameDecodedEvents.height_deltas) -} -inline void FrameDecodedEvents::set_height_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000010u; - height_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.FrameDecodedEvents.height_deltas) -} -inline std::string* FrameDecodedEvents::mutable_height_deltas() { - _has_bits_[0] |= 0x00000010u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.FrameDecodedEvents.height_deltas) - return height_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* FrameDecodedEvents::release_height_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.FrameDecodedEvents.height_deltas) - if (!has_height_deltas()) { - return nullptr; - } - _has_bits_[0] 
&= ~0x00000010u; - return height_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void FrameDecodedEvents::set_allocated_height_deltas(std::string* height_deltas) { - if (height_deltas != nullptr) { - _has_bits_[0] |= 0x00000010u; - } else { - _has_bits_[0] &= ~0x00000010u; - } - height_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), height_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.FrameDecodedEvents.height_deltas) -} - -// optional bytes codec_deltas = 106; -inline bool FrameDecodedEvents::has_codec_deltas() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void FrameDecodedEvents::clear_codec_deltas() { - codec_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000020u; -} -inline const std::string& FrameDecodedEvents::codec_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) - return codec_deltas_.GetNoArena(); -} -inline void FrameDecodedEvents::set_codec_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000020u; - codec_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) -} -inline void FrameDecodedEvents::set_codec_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000020u; - codec_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) -} -inline void FrameDecodedEvents::set_codec_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000020u; - codec_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) -} -inline void FrameDecodedEvents::set_codec_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000020u; - codec_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) -} -inline std::string* FrameDecodedEvents::mutable_codec_deltas() { - _has_bits_[0] |= 0x00000020u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) - return codec_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* FrameDecodedEvents::release_codec_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) - if (!has_codec_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000020u; - return codec_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void FrameDecodedEvents::set_allocated_codec_deltas(std::string* codec_deltas) { - if (codec_deltas != nullptr) { - _has_bits_[0] |= 0x00000020u; - } else { - _has_bits_[0] &= ~0x00000020u; - } - codec_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), codec_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.FrameDecodedEvents.codec_deltas) -} - -// optional bytes qp_deltas = 107; -inline bool FrameDecodedEvents::has_qp_deltas() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void FrameDecodedEvents::clear_qp_deltas() { - qp_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000040u; -} -inline const std::string& FrameDecodedEvents::qp_deltas() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) - return qp_deltas_.GetNoArena(); -} -inline void FrameDecodedEvents::set_qp_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000040u; - qp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) -} -inline void FrameDecodedEvents::set_qp_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000040u; - qp_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) -} -inline void FrameDecodedEvents::set_qp_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000040u; - qp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) -} -inline void FrameDecodedEvents::set_qp_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000040u; - qp_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) -} -inline std::string* FrameDecodedEvents::mutable_qp_deltas() { - _has_bits_[0] |= 0x00000040u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) - return qp_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* FrameDecodedEvents::release_qp_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) - if (!has_qp_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000040u; - return 
qp_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void FrameDecodedEvents::set_allocated_qp_deltas(std::string* qp_deltas) { - if (qp_deltas != nullptr) { - _has_bits_[0] |= 0x00000040u; - } else { - _has_bits_[0] &= ~0x00000040u; - } - qp_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), qp_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.FrameDecodedEvents.qp_deltas) -} - -// ------------------------------------------------------------------- - -// BeginLogEvent - -// optional int64 timestamp_ms = 1; -inline bool BeginLogEvent::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void BeginLogEvent::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 BeginLogEvent::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BeginLogEvent.timestamp_ms) - return timestamp_ms_; -} -inline void BeginLogEvent::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BeginLogEvent.timestamp_ms) -} - -// optional uint32 version = 2; -inline bool BeginLogEvent::has_version() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void BeginLogEvent::clear_version() { - version_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BeginLogEvent::version() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BeginLogEvent.version) - return version_; -} -inline void BeginLogEvent::set_version(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - version_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BeginLogEvent.version) -} - -// optional int64 utc_time_ms = 3; -inline bool BeginLogEvent::has_utc_time_ms() const { 
- return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void BeginLogEvent::clear_utc_time_ms() { - utc_time_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 BeginLogEvent::utc_time_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BeginLogEvent.utc_time_ms) - return utc_time_ms_; -} -inline void BeginLogEvent::set_utc_time_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000002u; - utc_time_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BeginLogEvent.utc_time_ms) -} - -// ------------------------------------------------------------------- - -// EndLogEvent - -// optional int64 timestamp_ms = 1; -inline bool EndLogEvent::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void EndLogEvent::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 EndLogEvent::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.EndLogEvent.timestamp_ms) - return timestamp_ms_; -} -inline void EndLogEvent::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.EndLogEvent.timestamp_ms) -} - -// ------------------------------------------------------------------- - -// LossBasedBweUpdates - -// optional int64 timestamp_ms = 1; -inline bool LossBasedBweUpdates::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void LossBasedBweUpdates::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 LossBasedBweUpdates::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms) - return timestamp_ms_; -} -inline void 
LossBasedBweUpdates::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000010u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms) -} - -// optional uint32 bitrate_bps = 2; -inline bool LossBasedBweUpdates::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void LossBasedBweUpdates::clear_bitrate_bps() { - bitrate_bps_ = 0u; - _has_bits_[0] &= ~0x00000020u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 LossBasedBweUpdates::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps) - return bitrate_bps_; -} -inline void LossBasedBweUpdates::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000020u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps) -} - -// optional uint32 fraction_loss = 3; -inline bool LossBasedBweUpdates::has_fraction_loss() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void LossBasedBweUpdates::clear_fraction_loss() { - fraction_loss_ = 0u; - _has_bits_[0] &= ~0x00000040u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 LossBasedBweUpdates::fraction_loss() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss) - return fraction_loss_; -} -inline void LossBasedBweUpdates::set_fraction_loss(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000040u; - fraction_loss_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss) -} - -// optional uint32 total_packets = 4; -inline bool LossBasedBweUpdates::has_total_packets() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void LossBasedBweUpdates::clear_total_packets() { - total_packets_ = 0u; - _has_bits_[0] &= ~0x00000080u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 LossBasedBweUpdates::total_packets() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.total_packets) - return total_packets_; -} -inline void LossBasedBweUpdates::set_total_packets(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000080u; - total_packets_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.total_packets) -} - -// optional uint32 number_of_deltas = 5; -inline bool LossBasedBweUpdates::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void LossBasedBweUpdates::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000100u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 LossBasedBweUpdates::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.number_of_deltas) - return number_of_deltas_; -} -inline void LossBasedBweUpdates::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000100u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool LossBasedBweUpdates::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void LossBasedBweUpdates::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& LossBasedBweUpdates::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void LossBasedBweUpdates::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) -} -inline void LossBasedBweUpdates::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) -} -inline void LossBasedBweUpdates::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) -} -inline void LossBasedBweUpdates::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) -} -inline std::string* LossBasedBweUpdates::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* LossBasedBweUpdates::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void LossBasedBweUpdates::set_allocated_timestamp_ms_deltas(std::string* 
timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.LossBasedBweUpdates.timestamp_ms_deltas) -} - -// optional bytes bitrate_bps_deltas = 102; -inline bool LossBasedBweUpdates::has_bitrate_bps_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void LossBasedBweUpdates::clear_bitrate_bps_deltas() { - bitrate_bps_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& LossBasedBweUpdates::bitrate_bps_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) - return bitrate_bps_deltas_.GetNoArena(); -} -inline void LossBasedBweUpdates::set_bitrate_bps_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) -} -inline void LossBasedBweUpdates::set_bitrate_bps_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) -} -inline void LossBasedBweUpdates::set_bitrate_bps_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) -} -inline void LossBasedBweUpdates::set_bitrate_bps_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) -} -inline std::string* LossBasedBweUpdates::mutable_bitrate_bps_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) - return bitrate_bps_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* LossBasedBweUpdates::release_bitrate_bps_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) - if (!has_bitrate_bps_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return bitrate_bps_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void LossBasedBweUpdates::set_allocated_bitrate_bps_deltas(std::string* bitrate_bps_deltas) { - if (bitrate_bps_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - bitrate_bps_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), bitrate_bps_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.LossBasedBweUpdates.bitrate_bps_deltas) -} - -// optional bytes fraction_loss_deltas = 103; -inline bool LossBasedBweUpdates::has_fraction_loss_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void LossBasedBweUpdates::clear_fraction_loss_deltas() { - fraction_loss_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] 
&= ~0x00000004u; -} -inline const std::string& LossBasedBweUpdates::fraction_loss_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) - return fraction_loss_deltas_.GetNoArena(); -} -inline void LossBasedBweUpdates::set_fraction_loss_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - fraction_loss_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) -} -inline void LossBasedBweUpdates::set_fraction_loss_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - fraction_loss_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) -} -inline void LossBasedBweUpdates::set_fraction_loss_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - fraction_loss_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) -} -inline void LossBasedBweUpdates::set_fraction_loss_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - fraction_loss_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) -} -inline std::string* LossBasedBweUpdates::mutable_fraction_loss_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) - return 
fraction_loss_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* LossBasedBweUpdates::release_fraction_loss_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) - if (!has_fraction_loss_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return fraction_loss_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void LossBasedBweUpdates::set_allocated_fraction_loss_deltas(std::string* fraction_loss_deltas) { - if (fraction_loss_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - fraction_loss_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), fraction_loss_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.LossBasedBweUpdates.fraction_loss_deltas) -} - -// optional bytes total_packets_deltas = 104; -inline bool LossBasedBweUpdates::has_total_packets_deltas() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void LossBasedBweUpdates::clear_total_packets_deltas() { - total_packets_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000008u; -} -inline const std::string& LossBasedBweUpdates::total_packets_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) - return total_packets_deltas_.GetNoArena(); -} -inline void LossBasedBweUpdates::set_total_packets_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000008u; - total_packets_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) -} -inline void LossBasedBweUpdates::set_total_packets_deltas(std::string&& value) { - 
_has_bits_[0] |= 0x00000008u; - total_packets_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) -} -inline void LossBasedBweUpdates::set_total_packets_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000008u; - total_packets_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) -} -inline void LossBasedBweUpdates::set_total_packets_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000008u; - total_packets_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) -} -inline std::string* LossBasedBweUpdates::mutable_total_packets_deltas() { - _has_bits_[0] |= 0x00000008u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) - return total_packets_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* LossBasedBweUpdates::release_total_packets_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) - if (!has_total_packets_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000008u; - return total_packets_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void LossBasedBweUpdates::set_allocated_total_packets_deltas(std::string* total_packets_deltas) { - if (total_packets_deltas != nullptr) { - _has_bits_[0] |= 0x00000008u; - } else { - _has_bits_[0] &= ~0x00000008u; - } - 
total_packets_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), total_packets_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.LossBasedBweUpdates.total_packets_deltas) -} - -// ------------------------------------------------------------------- - -// DelayBasedBweUpdates - -// optional int64 timestamp_ms = 1; -inline bool DelayBasedBweUpdates::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void DelayBasedBweUpdates::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 DelayBasedBweUpdates::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms) - return timestamp_ms_; -} -inline void DelayBasedBweUpdates::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000008u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms) -} - -// optional uint32 bitrate_bps = 2; -inline bool DelayBasedBweUpdates::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void DelayBasedBweUpdates::clear_bitrate_bps() { - bitrate_bps_ = 0u; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 DelayBasedBweUpdates::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps) - return bitrate_bps_; -} -inline void DelayBasedBweUpdates::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000010u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps) -} - -// optional .webrtc.rtclog2.DelayBasedBweUpdates.DetectorState detector_state = 3; -inline bool DelayBasedBweUpdates::has_detector_state() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void 
DelayBasedBweUpdates::clear_detector_state() { - detector_state_ = 0; - _has_bits_[0] &= ~0x00000020u; -} -inline ::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState DelayBasedBweUpdates::detector_state() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DelayBasedBweUpdates.detector_state) - return static_cast< ::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState >(detector_state_); -} -inline void DelayBasedBweUpdates::set_detector_state(::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState value) { - assert(::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState_IsValid(value)); - _has_bits_[0] |= 0x00000020u; - detector_state_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DelayBasedBweUpdates.detector_state) -} - -// optional uint32 number_of_deltas = 4; -inline bool DelayBasedBweUpdates::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void DelayBasedBweUpdates::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000040u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 DelayBasedBweUpdates::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DelayBasedBweUpdates.number_of_deltas) - return number_of_deltas_; -} -inline void DelayBasedBweUpdates::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000040u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DelayBasedBweUpdates.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool DelayBasedBweUpdates::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void DelayBasedBweUpdates::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& DelayBasedBweUpdates::timestamp_ms_deltas() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void DelayBasedBweUpdates::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) -} -inline void DelayBasedBweUpdates::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) -} -inline void DelayBasedBweUpdates::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) -} -inline void DelayBasedBweUpdates::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) -} -inline std::string* DelayBasedBweUpdates::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* DelayBasedBweUpdates::release_timestamp_ms_deltas() { - // 
@@protoc_insertion_point(field_release:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void DelayBasedBweUpdates::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.DelayBasedBweUpdates.timestamp_ms_deltas) -} - -// optional bytes bitrate_bps_deltas = 102; -inline bool DelayBasedBweUpdates::has_bitrate_bps_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void DelayBasedBweUpdates::clear_bitrate_bps_deltas() { - bitrate_bps_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& DelayBasedBweUpdates::bitrate_bps_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) - return bitrate_bps_deltas_.GetNoArena(); -} -inline void DelayBasedBweUpdates::set_bitrate_bps_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) -} -inline void DelayBasedBweUpdates::set_bitrate_bps_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // 
@@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) -} -inline void DelayBasedBweUpdates::set_bitrate_bps_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) -} -inline void DelayBasedBweUpdates::set_bitrate_bps_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) -} -inline std::string* DelayBasedBweUpdates::mutable_bitrate_bps_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) - return bitrate_bps_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* DelayBasedBweUpdates::release_bitrate_bps_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) - if (!has_bitrate_bps_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return bitrate_bps_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void DelayBasedBweUpdates::set_allocated_bitrate_bps_deltas(std::string* bitrate_bps_deltas) { - if (bitrate_bps_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - bitrate_bps_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), bitrate_bps_deltas); - // 
@@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.DelayBasedBweUpdates.bitrate_bps_deltas) -} - -// optional bytes detector_state_deltas = 103; -inline bool DelayBasedBweUpdates::has_detector_state_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void DelayBasedBweUpdates::clear_detector_state_deltas() { - detector_state_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& DelayBasedBweUpdates::detector_state_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) - return detector_state_deltas_.GetNoArena(); -} -inline void DelayBasedBweUpdates::set_detector_state_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - detector_state_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) -} -inline void DelayBasedBweUpdates::set_detector_state_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - detector_state_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) -} -inline void DelayBasedBweUpdates::set_detector_state_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - detector_state_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) -} -inline void DelayBasedBweUpdates::set_detector_state_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - 
detector_state_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) -} -inline std::string* DelayBasedBweUpdates::mutable_detector_state_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) - return detector_state_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* DelayBasedBweUpdates::release_detector_state_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) - if (!has_detector_state_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return detector_state_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void DelayBasedBweUpdates::set_allocated_detector_state_deltas(std::string* detector_state_deltas) { - if (detector_state_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - detector_state_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), detector_state_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.DelayBasedBweUpdates.detector_state_deltas) -} - -// ------------------------------------------------------------------- - -// RtpHeaderExtensionConfig - -// optional int32 transmission_time_offset_id = 1; -inline bool RtpHeaderExtensionConfig::has_transmission_time_offset_id() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void RtpHeaderExtensionConfig::clear_transmission_time_offset_id() { - transmission_time_offset_id_ = 0; - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 
RtpHeaderExtensionConfig::transmission_time_offset_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RtpHeaderExtensionConfig.transmission_time_offset_id) - return transmission_time_offset_id_; -} -inline void RtpHeaderExtensionConfig::set_transmission_time_offset_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000001u; - transmission_time_offset_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RtpHeaderExtensionConfig.transmission_time_offset_id) -} - -// optional int32 absolute_send_time_id = 2; -inline bool RtpHeaderExtensionConfig::has_absolute_send_time_id() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RtpHeaderExtensionConfig::clear_absolute_send_time_id() { - absolute_send_time_id_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtpHeaderExtensionConfig::absolute_send_time_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RtpHeaderExtensionConfig.absolute_send_time_id) - return absolute_send_time_id_; -} -inline void RtpHeaderExtensionConfig::set_absolute_send_time_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000002u; - absolute_send_time_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RtpHeaderExtensionConfig.absolute_send_time_id) -} - -// optional int32 transport_sequence_number_id = 3; -inline bool RtpHeaderExtensionConfig::has_transport_sequence_number_id() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void RtpHeaderExtensionConfig::clear_transport_sequence_number_id() { - transport_sequence_number_id_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtpHeaderExtensionConfig::transport_sequence_number_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RtpHeaderExtensionConfig.transport_sequence_number_id) - return transport_sequence_number_id_; -} -inline void 
RtpHeaderExtensionConfig::set_transport_sequence_number_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000004u; - transport_sequence_number_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RtpHeaderExtensionConfig.transport_sequence_number_id) -} - -// optional int32 video_rotation_id = 4; -inline bool RtpHeaderExtensionConfig::has_video_rotation_id() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void RtpHeaderExtensionConfig::clear_video_rotation_id() { - video_rotation_id_ = 0; - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtpHeaderExtensionConfig::video_rotation_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RtpHeaderExtensionConfig.video_rotation_id) - return video_rotation_id_; -} -inline void RtpHeaderExtensionConfig::set_video_rotation_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000008u; - video_rotation_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RtpHeaderExtensionConfig.video_rotation_id) -} - -// optional int32 audio_level_id = 5; -inline bool RtpHeaderExtensionConfig::has_audio_level_id() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void RtpHeaderExtensionConfig::clear_audio_level_id() { - audio_level_id_ = 0; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 RtpHeaderExtensionConfig::audio_level_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RtpHeaderExtensionConfig.audio_level_id) - return audio_level_id_; -} -inline void RtpHeaderExtensionConfig::set_audio_level_id(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000010u; - audio_level_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RtpHeaderExtensionConfig.audio_level_id) -} - -// ------------------------------------------------------------------- - -// VideoRecvStreamConfig - -// optional int64 timestamp_ms = 1; -inline bool 
VideoRecvStreamConfig::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void VideoRecvStreamConfig::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 VideoRecvStreamConfig::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoRecvStreamConfig.timestamp_ms) - return timestamp_ms_; -} -inline void VideoRecvStreamConfig::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.VideoRecvStreamConfig.timestamp_ms) -} - -// optional uint32 remote_ssrc = 2; -inline bool VideoRecvStreamConfig::has_remote_ssrc() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void VideoRecvStreamConfig::clear_remote_ssrc() { - remote_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoRecvStreamConfig::remote_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoRecvStreamConfig.remote_ssrc) - return remote_ssrc_; -} -inline void VideoRecvStreamConfig::set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - remote_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.VideoRecvStreamConfig.remote_ssrc) -} - -// optional uint32 local_ssrc = 3; -inline bool VideoRecvStreamConfig::has_local_ssrc() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void VideoRecvStreamConfig::clear_local_ssrc() { - local_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoRecvStreamConfig::local_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoRecvStreamConfig.local_ssrc) - return local_ssrc_; -} -inline void VideoRecvStreamConfig::set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - local_ssrc_ = value; - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.VideoRecvStreamConfig.local_ssrc) -} - -// optional uint32 rtx_ssrc = 4; -inline bool VideoRecvStreamConfig::has_rtx_ssrc() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void VideoRecvStreamConfig::clear_rtx_ssrc() { - rtx_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoRecvStreamConfig::rtx_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoRecvStreamConfig.rtx_ssrc) - return rtx_ssrc_; -} -inline void VideoRecvStreamConfig::set_rtx_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000010u; - rtx_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.VideoRecvStreamConfig.rtx_ssrc) -} - -// optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; -inline bool VideoRecvStreamConfig::has_header_extensions() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void VideoRecvStreamConfig::clear_header_extensions() { - if (header_extensions_ != nullptr) header_extensions_->Clear(); - _has_bits_[0] &= ~0x00000001u; -} -inline const ::webrtc::rtclog2::RtpHeaderExtensionConfig& VideoRecvStreamConfig::header_extensions() const { - const ::webrtc::rtclog2::RtpHeaderExtensionConfig* p = header_extensions_; - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoRecvStreamConfig.header_extensions) - return p != nullptr ? 
*p : *reinterpret_cast( - &::webrtc::rtclog2::_RtpHeaderExtensionConfig_default_instance_); -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* VideoRecvStreamConfig::release_header_extensions() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.VideoRecvStreamConfig.header_extensions) - _has_bits_[0] &= ~0x00000001u; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* temp = header_extensions_; - header_extensions_ = nullptr; - return temp; -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* VideoRecvStreamConfig::mutable_header_extensions() { - _has_bits_[0] |= 0x00000001u; - if (header_extensions_ == nullptr) { - auto* p = CreateMaybeMessage<::webrtc::rtclog2::RtpHeaderExtensionConfig>(GetArenaNoVirtual()); - header_extensions_ = p; - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.VideoRecvStreamConfig.header_extensions) - return header_extensions_; -} -inline void VideoRecvStreamConfig::set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete header_extensions_; - } - if (header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - header_extensions = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, header_extensions, submessage_arena); - } - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - header_extensions_ = header_extensions; - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.VideoRecvStreamConfig.header_extensions) -} - -// ------------------------------------------------------------------- - -// VideoSendStreamConfig - -// optional int64 timestamp_ms = 1; -inline bool VideoSendStreamConfig::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void VideoSendStreamConfig::clear_timestamp_ms() { - timestamp_ms_ = 
PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 VideoSendStreamConfig::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoSendStreamConfig.timestamp_ms) - return timestamp_ms_; -} -inline void VideoSendStreamConfig::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.VideoSendStreamConfig.timestamp_ms) -} - -// optional uint32 ssrc = 2; -inline bool VideoSendStreamConfig::has_ssrc() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void VideoSendStreamConfig::clear_ssrc() { - ssrc_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoSendStreamConfig::ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoSendStreamConfig.ssrc) - return ssrc_; -} -inline void VideoSendStreamConfig::set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.VideoSendStreamConfig.ssrc) -} - -// optional uint32 rtx_ssrc = 3; -inline bool VideoSendStreamConfig::has_rtx_ssrc() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void VideoSendStreamConfig::clear_rtx_ssrc() { - rtx_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 VideoSendStreamConfig::rtx_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoSendStreamConfig.rtx_ssrc) - return rtx_ssrc_; -} -inline void VideoSendStreamConfig::set_rtx_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - rtx_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.VideoSendStreamConfig.rtx_ssrc) -} - -// optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; -inline bool VideoSendStreamConfig::has_header_extensions() const { - return (_has_bits_[0] & 
0x00000001u) != 0; -} -inline void VideoSendStreamConfig::clear_header_extensions() { - if (header_extensions_ != nullptr) header_extensions_->Clear(); - _has_bits_[0] &= ~0x00000001u; -} -inline const ::webrtc::rtclog2::RtpHeaderExtensionConfig& VideoSendStreamConfig::header_extensions() const { - const ::webrtc::rtclog2::RtpHeaderExtensionConfig* p = header_extensions_; - // @@protoc_insertion_point(field_get:webrtc.rtclog2.VideoSendStreamConfig.header_extensions) - return p != nullptr ? *p : *reinterpret_cast( - &::webrtc::rtclog2::_RtpHeaderExtensionConfig_default_instance_); -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* VideoSendStreamConfig::release_header_extensions() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.VideoSendStreamConfig.header_extensions) - _has_bits_[0] &= ~0x00000001u; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* temp = header_extensions_; - header_extensions_ = nullptr; - return temp; -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* VideoSendStreamConfig::mutable_header_extensions() { - _has_bits_[0] |= 0x00000001u; - if (header_extensions_ == nullptr) { - auto* p = CreateMaybeMessage<::webrtc::rtclog2::RtpHeaderExtensionConfig>(GetArenaNoVirtual()); - header_extensions_ = p; - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.VideoSendStreamConfig.header_extensions) - return header_extensions_; -} -inline void VideoSendStreamConfig::set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete header_extensions_; - } - if (header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - header_extensions = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, header_extensions, submessage_arena); - } - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= 
~0x00000001u; - } - header_extensions_ = header_extensions; - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.VideoSendStreamConfig.header_extensions) -} - -// ------------------------------------------------------------------- - -// AudioRecvStreamConfig - -// optional int64 timestamp_ms = 1; -inline bool AudioRecvStreamConfig::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void AudioRecvStreamConfig::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 AudioRecvStreamConfig::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioRecvStreamConfig.timestamp_ms) - return timestamp_ms_; -} -inline void AudioRecvStreamConfig::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioRecvStreamConfig.timestamp_ms) -} - -// optional uint32 remote_ssrc = 2; -inline bool AudioRecvStreamConfig::has_remote_ssrc() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void AudioRecvStreamConfig::clear_remote_ssrc() { - remote_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioRecvStreamConfig::remote_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioRecvStreamConfig.remote_ssrc) - return remote_ssrc_; -} -inline void AudioRecvStreamConfig::set_remote_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - remote_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioRecvStreamConfig.remote_ssrc) -} - -// optional uint32 local_ssrc = 3; -inline bool AudioRecvStreamConfig::has_local_ssrc() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void AudioRecvStreamConfig::clear_local_ssrc() { - local_ssrc_ = 0u; - _has_bits_[0] &= ~0x00000008u; -} -inline 
::PROTOBUF_NAMESPACE_ID::uint32 AudioRecvStreamConfig::local_ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioRecvStreamConfig.local_ssrc) - return local_ssrc_; -} -inline void AudioRecvStreamConfig::set_local_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - local_ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioRecvStreamConfig.local_ssrc) -} - -// optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 5; -inline bool AudioRecvStreamConfig::has_header_extensions() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioRecvStreamConfig::clear_header_extensions() { - if (header_extensions_ != nullptr) header_extensions_->Clear(); - _has_bits_[0] &= ~0x00000001u; -} -inline const ::webrtc::rtclog2::RtpHeaderExtensionConfig& AudioRecvStreamConfig::header_extensions() const { - const ::webrtc::rtclog2::RtpHeaderExtensionConfig* p = header_extensions_; - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioRecvStreamConfig.header_extensions) - return p != nullptr ? 
*p : *reinterpret_cast( - &::webrtc::rtclog2::_RtpHeaderExtensionConfig_default_instance_); -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* AudioRecvStreamConfig::release_header_extensions() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioRecvStreamConfig.header_extensions) - _has_bits_[0] &= ~0x00000001u; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* temp = header_extensions_; - header_extensions_ = nullptr; - return temp; -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* AudioRecvStreamConfig::mutable_header_extensions() { - _has_bits_[0] |= 0x00000001u; - if (header_extensions_ == nullptr) { - auto* p = CreateMaybeMessage<::webrtc::rtclog2::RtpHeaderExtensionConfig>(GetArenaNoVirtual()); - header_extensions_ = p; - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioRecvStreamConfig.header_extensions) - return header_extensions_; -} -inline void AudioRecvStreamConfig::set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete header_extensions_; - } - if (header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - header_extensions = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, header_extensions, submessage_arena); - } - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - header_extensions_ = header_extensions; - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioRecvStreamConfig.header_extensions) -} - -// ------------------------------------------------------------------- - -// AudioSendStreamConfig - -// optional int64 timestamp_ms = 1; -inline bool AudioSendStreamConfig::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void AudioSendStreamConfig::clear_timestamp_ms() { - timestamp_ms_ = 
PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 AudioSendStreamConfig::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioSendStreamConfig.timestamp_ms) - return timestamp_ms_; -} -inline void AudioSendStreamConfig::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000002u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioSendStreamConfig.timestamp_ms) -} - -// optional uint32 ssrc = 2; -inline bool AudioSendStreamConfig::has_ssrc() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void AudioSendStreamConfig::clear_ssrc() { - ssrc_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioSendStreamConfig::ssrc() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioSendStreamConfig.ssrc) - return ssrc_; -} -inline void AudioSendStreamConfig::set_ssrc(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - ssrc_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioSendStreamConfig.ssrc) -} - -// optional .webrtc.rtclog2.RtpHeaderExtensionConfig header_extensions = 4; -inline bool AudioSendStreamConfig::has_header_extensions() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioSendStreamConfig::clear_header_extensions() { - if (header_extensions_ != nullptr) header_extensions_->Clear(); - _has_bits_[0] &= ~0x00000001u; -} -inline const ::webrtc::rtclog2::RtpHeaderExtensionConfig& AudioSendStreamConfig::header_extensions() const { - const ::webrtc::rtclog2::RtpHeaderExtensionConfig* p = header_extensions_; - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioSendStreamConfig.header_extensions) - return p != nullptr ? 
*p : *reinterpret_cast( - &::webrtc::rtclog2::_RtpHeaderExtensionConfig_default_instance_); -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* AudioSendStreamConfig::release_header_extensions() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioSendStreamConfig.header_extensions) - _has_bits_[0] &= ~0x00000001u; - ::webrtc::rtclog2::RtpHeaderExtensionConfig* temp = header_extensions_; - header_extensions_ = nullptr; - return temp; -} -inline ::webrtc::rtclog2::RtpHeaderExtensionConfig* AudioSendStreamConfig::mutable_header_extensions() { - _has_bits_[0] |= 0x00000001u; - if (header_extensions_ == nullptr) { - auto* p = CreateMaybeMessage<::webrtc::rtclog2::RtpHeaderExtensionConfig>(GetArenaNoVirtual()); - header_extensions_ = p; - } - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioSendStreamConfig.header_extensions) - return header_extensions_; -} -inline void AudioSendStreamConfig::set_allocated_header_extensions(::webrtc::rtclog2::RtpHeaderExtensionConfig* header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete header_extensions_; - } - if (header_extensions) { - ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - header_extensions = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( - message_arena, header_extensions, submessage_arena); - } - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - header_extensions_ = header_extensions; - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioSendStreamConfig.header_extensions) -} - -// ------------------------------------------------------------------- - -// AudioNetworkAdaptations - -// optional int64 timestamp_ms = 1; -inline bool AudioNetworkAdaptations::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void AudioNetworkAdaptations::clear_timestamp_ms() { - timestamp_ms_ = 
PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000080u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 AudioNetworkAdaptations::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms) - return timestamp_ms_; -} -inline void AudioNetworkAdaptations::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000080u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms) -} - -// optional int32 bitrate_bps = 2; -inline bool AudioNetworkAdaptations::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void AudioNetworkAdaptations::clear_bitrate_bps() { - bitrate_bps_ = 0; - _has_bits_[0] &= ~0x00000100u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 AudioNetworkAdaptations::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps) - return bitrate_bps_; -} -inline void AudioNetworkAdaptations::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000100u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps) -} - -// optional int32 frame_length_ms = 3; -inline bool AudioNetworkAdaptations::has_frame_length_ms() const { - return (_has_bits_[0] & 0x00000200u) != 0; -} -inline void AudioNetworkAdaptations::clear_frame_length_ms() { - frame_length_ms_ = 0; - _has_bits_[0] &= ~0x00000200u; -} -inline ::PROTOBUF_NAMESPACE_ID::int32 AudioNetworkAdaptations::frame_length_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms) - return frame_length_ms_; -} -inline void AudioNetworkAdaptations::set_frame_length_ms(::PROTOBUF_NAMESPACE_ID::int32 value) { - _has_bits_[0] |= 0x00000200u; - frame_length_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms) -} - -// optional 
uint32 uplink_packet_loss_fraction = 4; -inline bool AudioNetworkAdaptations::has_uplink_packet_loss_fraction() const { - return (_has_bits_[0] & 0x00000400u) != 0; -} -inline void AudioNetworkAdaptations::clear_uplink_packet_loss_fraction() { - uplink_packet_loss_fraction_ = 0u; - _has_bits_[0] &= ~0x00000400u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioNetworkAdaptations::uplink_packet_loss_fraction() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction) - return uplink_packet_loss_fraction_; -} -inline void AudioNetworkAdaptations::set_uplink_packet_loss_fraction(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000400u; - uplink_packet_loss_fraction_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction) -} - -// optional bool enable_fec = 5; -inline bool AudioNetworkAdaptations::has_enable_fec() const { - return (_has_bits_[0] & 0x00000800u) != 0; -} -inline void AudioNetworkAdaptations::clear_enable_fec() { - enable_fec_ = false; - _has_bits_[0] &= ~0x00000800u; -} -inline bool AudioNetworkAdaptations::enable_fec() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec) - return enable_fec_; -} -inline void AudioNetworkAdaptations::set_enable_fec(bool value) { - _has_bits_[0] |= 0x00000800u; - enable_fec_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec) -} - -// optional bool enable_dtx = 6; -inline bool AudioNetworkAdaptations::has_enable_dtx() const { - return (_has_bits_[0] & 0x00001000u) != 0; -} -inline void AudioNetworkAdaptations::clear_enable_dtx() { - enable_dtx_ = false; - _has_bits_[0] &= ~0x00001000u; -} -inline bool AudioNetworkAdaptations::enable_dtx() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx) - return enable_dtx_; -} -inline void 
AudioNetworkAdaptations::set_enable_dtx(bool value) { - _has_bits_[0] |= 0x00001000u; - enable_dtx_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx) -} - -// optional uint32 num_channels = 7; -inline bool AudioNetworkAdaptations::has_num_channels() const { - return (_has_bits_[0] & 0x00002000u) != 0; -} -inline void AudioNetworkAdaptations::clear_num_channels() { - num_channels_ = 0u; - _has_bits_[0] &= ~0x00002000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioNetworkAdaptations::num_channels() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.num_channels) - return num_channels_; -} -inline void AudioNetworkAdaptations::set_num_channels(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00002000u; - num_channels_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.num_channels) -} - -// optional uint32 number_of_deltas = 8; -inline bool AudioNetworkAdaptations::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00004000u) != 0; -} -inline void AudioNetworkAdaptations::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00004000u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 AudioNetworkAdaptations::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.number_of_deltas) - return number_of_deltas_; -} -inline void AudioNetworkAdaptations::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00004000u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool AudioNetworkAdaptations::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AudioNetworkAdaptations::clear_timestamp_ms_deltas() { - 
timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& AudioNetworkAdaptations::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void AudioNetworkAdaptations::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) -} -inline void AudioNetworkAdaptations::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) -} -inline void AudioNetworkAdaptations::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) -} -inline void AudioNetworkAdaptations::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) -} -inline std::string* AudioNetworkAdaptations::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioNetworkAdaptations::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioNetworkAdaptations::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioNetworkAdaptations.timestamp_ms_deltas) -} - -// optional bytes bitrate_bps_deltas = 102; -inline bool AudioNetworkAdaptations::has_bitrate_bps_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void AudioNetworkAdaptations::clear_bitrate_bps_deltas() { - bitrate_bps_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& AudioNetworkAdaptations::bitrate_bps_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) - return bitrate_bps_deltas_.GetNoArena(); -} -inline void AudioNetworkAdaptations::set_bitrate_bps_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // 
@@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) -} -inline void AudioNetworkAdaptations::set_bitrate_bps_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) -} -inline void AudioNetworkAdaptations::set_bitrate_bps_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) -} -inline void AudioNetworkAdaptations::set_bitrate_bps_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000002u; - bitrate_bps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) -} -inline std::string* AudioNetworkAdaptations::mutable_bitrate_bps_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) - return bitrate_bps_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioNetworkAdaptations::release_bitrate_bps_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) - if (!has_bitrate_bps_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return bitrate_bps_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void 
AudioNetworkAdaptations::set_allocated_bitrate_bps_deltas(std::string* bitrate_bps_deltas) { - if (bitrate_bps_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - bitrate_bps_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), bitrate_bps_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioNetworkAdaptations.bitrate_bps_deltas) -} - -// optional bytes frame_length_ms_deltas = 103; -inline bool AudioNetworkAdaptations::has_frame_length_ms_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void AudioNetworkAdaptations::clear_frame_length_ms_deltas() { - frame_length_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000004u; -} -inline const std::string& AudioNetworkAdaptations::frame_length_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) - return frame_length_ms_deltas_.GetNoArena(); -} -inline void AudioNetworkAdaptations::set_frame_length_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - frame_length_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) -} -inline void AudioNetworkAdaptations::set_frame_length_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - frame_length_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) -} -inline void AudioNetworkAdaptations::set_frame_length_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - 
frame_length_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) -} -inline void AudioNetworkAdaptations::set_frame_length_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - frame_length_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) -} -inline std::string* AudioNetworkAdaptations::mutable_frame_length_ms_deltas() { - _has_bits_[0] |= 0x00000004u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) - return frame_length_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioNetworkAdaptations::release_frame_length_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) - if (!has_frame_length_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return frame_length_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioNetworkAdaptations::set_allocated_frame_length_ms_deltas(std::string* frame_length_ms_deltas) { - if (frame_length_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - frame_length_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), frame_length_ms_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioNetworkAdaptations.frame_length_ms_deltas) -} - -// optional bytes uplink_packet_loss_fraction_deltas = 104; -inline bool 
AudioNetworkAdaptations::has_uplink_packet_loss_fraction_deltas() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void AudioNetworkAdaptations::clear_uplink_packet_loss_fraction_deltas() { - uplink_packet_loss_fraction_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000008u; -} -inline const std::string& AudioNetworkAdaptations::uplink_packet_loss_fraction_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) - return uplink_packet_loss_fraction_deltas_.GetNoArena(); -} -inline void AudioNetworkAdaptations::set_uplink_packet_loss_fraction_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000008u; - uplink_packet_loss_fraction_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) -} -inline void AudioNetworkAdaptations::set_uplink_packet_loss_fraction_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000008u; - uplink_packet_loss_fraction_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) -} -inline void AudioNetworkAdaptations::set_uplink_packet_loss_fraction_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000008u; - uplink_packet_loss_fraction_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) -} -inline void AudioNetworkAdaptations::set_uplink_packet_loss_fraction_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000008u; - 
uplink_packet_loss_fraction_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) -} -inline std::string* AudioNetworkAdaptations::mutable_uplink_packet_loss_fraction_deltas() { - _has_bits_[0] |= 0x00000008u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) - return uplink_packet_loss_fraction_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioNetworkAdaptations::release_uplink_packet_loss_fraction_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) - if (!has_uplink_packet_loss_fraction_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000008u; - return uplink_packet_loss_fraction_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioNetworkAdaptations::set_allocated_uplink_packet_loss_fraction_deltas(std::string* uplink_packet_loss_fraction_deltas) { - if (uplink_packet_loss_fraction_deltas != nullptr) { - _has_bits_[0] |= 0x00000008u; - } else { - _has_bits_[0] &= ~0x00000008u; - } - uplink_packet_loss_fraction_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), uplink_packet_loss_fraction_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioNetworkAdaptations.uplink_packet_loss_fraction_deltas) -} - -// optional bytes enable_fec_deltas = 105; -inline bool AudioNetworkAdaptations::has_enable_fec_deltas() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void AudioNetworkAdaptations::clear_enable_fec_deltas() { - 
enable_fec_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000010u; -} -inline const std::string& AudioNetworkAdaptations::enable_fec_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) - return enable_fec_deltas_.GetNoArena(); -} -inline void AudioNetworkAdaptations::set_enable_fec_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000010u; - enable_fec_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) -} -inline void AudioNetworkAdaptations::set_enable_fec_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000010u; - enable_fec_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) -} -inline void AudioNetworkAdaptations::set_enable_fec_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000010u; - enable_fec_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) -} -inline void AudioNetworkAdaptations::set_enable_fec_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000010u; - enable_fec_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) -} -inline std::string* AudioNetworkAdaptations::mutable_enable_fec_deltas() { - _has_bits_[0] |= 0x00000010u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) - return enable_fec_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioNetworkAdaptations::release_enable_fec_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) - if (!has_enable_fec_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000010u; - return enable_fec_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioNetworkAdaptations::set_allocated_enable_fec_deltas(std::string* enable_fec_deltas) { - if (enable_fec_deltas != nullptr) { - _has_bits_[0] |= 0x00000010u; - } else { - _has_bits_[0] &= ~0x00000010u; - } - enable_fec_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), enable_fec_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioNetworkAdaptations.enable_fec_deltas) -} - -// optional bytes enable_dtx_deltas = 106; -inline bool AudioNetworkAdaptations::has_enable_dtx_deltas() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void AudioNetworkAdaptations::clear_enable_dtx_deltas() { - enable_dtx_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000020u; -} -inline const std::string& AudioNetworkAdaptations::enable_dtx_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) - return enable_dtx_deltas_.GetNoArena(); -} -inline void AudioNetworkAdaptations::set_enable_dtx_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000020u; - enable_dtx_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) -} -inline 
void AudioNetworkAdaptations::set_enable_dtx_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000020u; - enable_dtx_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) -} -inline void AudioNetworkAdaptations::set_enable_dtx_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000020u; - enable_dtx_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) -} -inline void AudioNetworkAdaptations::set_enable_dtx_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000020u; - enable_dtx_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) -} -inline std::string* AudioNetworkAdaptations::mutable_enable_dtx_deltas() { - _has_bits_[0] |= 0x00000020u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) - return enable_dtx_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioNetworkAdaptations::release_enable_dtx_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) - if (!has_enable_dtx_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000020u; - return enable_dtx_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioNetworkAdaptations::set_allocated_enable_dtx_deltas(std::string* enable_dtx_deltas) { - if (enable_dtx_deltas != nullptr) { - _has_bits_[0] |= 0x00000020u; - } else { - 
_has_bits_[0] &= ~0x00000020u; - } - enable_dtx_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), enable_dtx_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioNetworkAdaptations.enable_dtx_deltas) -} - -// optional bytes num_channels_deltas = 107; -inline bool AudioNetworkAdaptations::has_num_channels_deltas() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void AudioNetworkAdaptations::clear_num_channels_deltas() { - num_channels_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000040u; -} -inline const std::string& AudioNetworkAdaptations::num_channels_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) - return num_channels_deltas_.GetNoArena(); -} -inline void AudioNetworkAdaptations::set_num_channels_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000040u; - num_channels_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) -} -inline void AudioNetworkAdaptations::set_num_channels_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000040u; - num_channels_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) -} -inline void AudioNetworkAdaptations::set_num_channels_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000040u; - num_channels_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) -} -inline void 
AudioNetworkAdaptations::set_num_channels_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000040u; - num_channels_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) -} -inline std::string* AudioNetworkAdaptations::mutable_num_channels_deltas() { - _has_bits_[0] |= 0x00000040u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) - return num_channels_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* AudioNetworkAdaptations::release_num_channels_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) - if (!has_num_channels_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000040u; - return num_channels_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void AudioNetworkAdaptations::set_allocated_num_channels_deltas(std::string* num_channels_deltas) { - if (num_channels_deltas != nullptr) { - _has_bits_[0] |= 0x00000040u; - } else { - _has_bits_[0] &= ~0x00000040u; - } - num_channels_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), num_channels_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.AudioNetworkAdaptations.num_channels_deltas) -} - -// ------------------------------------------------------------------- - -// BweProbeCluster - -// optional int64 timestamp_ms = 1; -inline bool BweProbeCluster::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void BweProbeCluster::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 
BweProbeCluster::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeCluster.timestamp_ms) - return timestamp_ms_; -} -inline void BweProbeCluster::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeCluster.timestamp_ms) -} - -// optional uint32 id = 2; -inline bool BweProbeCluster::has_id() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void BweProbeCluster::clear_id() { - id_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeCluster::id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeCluster.id) - return id_; -} -inline void BweProbeCluster::set_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeCluster.id) -} - -// optional uint32 bitrate_bps = 3; -inline bool BweProbeCluster::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void BweProbeCluster::clear_bitrate_bps() { - bitrate_bps_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeCluster::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeCluster.bitrate_bps) - return bitrate_bps_; -} -inline void BweProbeCluster::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeCluster.bitrate_bps) -} - -// optional uint32 min_packets = 4; -inline bool BweProbeCluster::has_min_packets() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void BweProbeCluster::clear_min_packets() { - min_packets_ = 0u; - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeCluster::min_packets() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeCluster.min_packets) - return min_packets_; -} -inline void BweProbeCluster::set_min_packets(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - min_packets_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeCluster.min_packets) -} - -// optional uint32 min_bytes = 5; -inline bool BweProbeCluster::has_min_bytes() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void BweProbeCluster::clear_min_bytes() { - min_bytes_ = 0u; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeCluster::min_bytes() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeCluster.min_bytes) - return min_bytes_; -} -inline void BweProbeCluster::set_min_bytes(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000010u; - min_bytes_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeCluster.min_bytes) -} - -// ------------------------------------------------------------------- - -// BweProbeResultSuccess - -// optional int64 timestamp_ms = 1; -inline bool BweProbeResultSuccess::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void BweProbeResultSuccess::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 BweProbeResultSuccess::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeResultSuccess.timestamp_ms) - return timestamp_ms_; -} -inline void BweProbeResultSuccess::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeResultSuccess.timestamp_ms) -} - -// optional uint32 id = 2; -inline bool BweProbeResultSuccess::has_id() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void BweProbeResultSuccess::clear_id() { 
- id_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeResultSuccess::id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeResultSuccess.id) - return id_; -} -inline void BweProbeResultSuccess::set_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeResultSuccess.id) -} - -// optional uint32 bitrate_bps = 3; -inline bool BweProbeResultSuccess::has_bitrate_bps() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void BweProbeResultSuccess::clear_bitrate_bps() { - bitrate_bps_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeResultSuccess::bitrate_bps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeResultSuccess.bitrate_bps) - return bitrate_bps_; -} -inline void BweProbeResultSuccess::set_bitrate_bps(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - bitrate_bps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeResultSuccess.bitrate_bps) -} - -// ------------------------------------------------------------------- - -// BweProbeResultFailure - -// optional int64 timestamp_ms = 1; -inline bool BweProbeResultFailure::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void BweProbeResultFailure::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 BweProbeResultFailure::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeResultFailure.timestamp_ms) - return timestamp_ms_; -} -inline void BweProbeResultFailure::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeResultFailure.timestamp_ms) -} - -// optional uint32 
id = 2; -inline bool BweProbeResultFailure::has_id() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void BweProbeResultFailure::clear_id() { - id_ = 0u; - _has_bits_[0] &= ~0x00000002u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 BweProbeResultFailure::id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeResultFailure.id) - return id_; -} -inline void BweProbeResultFailure::set_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000002u; - id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeResultFailure.id) -} - -// optional .webrtc.rtclog2.BweProbeResultFailure.FailureReason failure = 3; -inline bool BweProbeResultFailure::has_failure() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void BweProbeResultFailure::clear_failure() { - failure_ = 0; - _has_bits_[0] &= ~0x00000004u; -} -inline ::webrtc::rtclog2::BweProbeResultFailure_FailureReason BweProbeResultFailure::failure() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.BweProbeResultFailure.failure) - return static_cast< ::webrtc::rtclog2::BweProbeResultFailure_FailureReason >(failure_); -} -inline void BweProbeResultFailure::set_failure(::webrtc::rtclog2::BweProbeResultFailure_FailureReason value) { - assert(::webrtc::rtclog2::BweProbeResultFailure_FailureReason_IsValid(value)); - _has_bits_[0] |= 0x00000004u; - failure_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.BweProbeResultFailure.failure) -} - -// ------------------------------------------------------------------- - -// AlrState - -// optional int64 timestamp_ms = 1; -inline bool AlrState::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void AlrState::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 AlrState::timestamp_ms() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.AlrState.timestamp_ms) - return timestamp_ms_; -} -inline void AlrState::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AlrState.timestamp_ms) -} - -// optional bool in_alr = 2; -inline bool AlrState::has_in_alr() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void AlrState::clear_in_alr() { - in_alr_ = false; - _has_bits_[0] &= ~0x00000002u; -} -inline bool AlrState::in_alr() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.AlrState.in_alr) - return in_alr_; -} -inline void AlrState::set_in_alr(bool value) { - _has_bits_[0] |= 0x00000002u; - in_alr_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.AlrState.in_alr) -} - -// ------------------------------------------------------------------- - -// IceCandidatePairConfig - -// optional int64 timestamp_ms = 1; -inline bool IceCandidatePairConfig::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void IceCandidatePairConfig::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 IceCandidatePairConfig::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.timestamp_ms) - return timestamp_ms_; -} -inline void IceCandidatePairConfig::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.timestamp_ms) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidatePairConfigType config_type = 2; -inline bool IceCandidatePairConfig::has_config_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void IceCandidatePairConfig::clear_config_type() { - config_type_ = 0; - _has_bits_[0] &= 
~0x00000002u; -} -inline ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType IceCandidatePairConfig::config_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.config_type) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType >(config_type_); -} -inline void IceCandidatePairConfig::set_config_type(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType_IsValid(value)); - _has_bits_[0] |= 0x00000002u; - config_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.config_type) -} - -// optional uint32 candidate_pair_id = 3; -inline bool IceCandidatePairConfig::has_candidate_pair_id() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void IceCandidatePairConfig::clear_candidate_pair_id() { - candidate_pair_id_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IceCandidatePairConfig::candidate_pair_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.candidate_pair_id) - return candidate_pair_id_; -} -inline void IceCandidatePairConfig::set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - candidate_pair_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.candidate_pair_id) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType local_candidate_type = 4; -inline bool IceCandidatePairConfig::has_local_candidate_type() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void IceCandidatePairConfig::clear_local_candidate_type() { - local_candidate_type_ = 0; - _has_bits_[0] &= ~0x00000008u; -} -inline ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::local_candidate_type() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.local_candidate_type) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType >(local_candidate_type_); -} -inline void IceCandidatePairConfig::set_local_candidate_type(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType_IsValid(value)); - _has_bits_[0] |= 0x00000008u; - local_candidate_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.local_candidate_type) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol local_relay_protocol = 5; -inline bool IceCandidatePairConfig::has_local_relay_protocol() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void IceCandidatePairConfig::clear_local_relay_protocol() { - local_relay_protocol_ = 0; - _has_bits_[0] &= ~0x00000010u; -} -inline ::webrtc::rtclog2::IceCandidatePairConfig_Protocol IceCandidatePairConfig::local_relay_protocol() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.local_relay_protocol) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_Protocol >(local_relay_protocol_); -} -inline void IceCandidatePairConfig::set_local_relay_protocol(::webrtc::rtclog2::IceCandidatePairConfig_Protocol value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_Protocol_IsValid(value)); - _has_bits_[0] |= 0x00000010u; - local_relay_protocol_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.local_relay_protocol) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.NetworkType local_network_type = 6; -inline bool IceCandidatePairConfig::has_local_network_type() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void IceCandidatePairConfig::clear_local_network_type() { - local_network_type_ = 0; - _has_bits_[0] &= ~0x00000020u; -} -inline 
::webrtc::rtclog2::IceCandidatePairConfig_NetworkType IceCandidatePairConfig::local_network_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.local_network_type) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_NetworkType >(local_network_type_); -} -inline void IceCandidatePairConfig::set_local_network_type(::webrtc::rtclog2::IceCandidatePairConfig_NetworkType value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_NetworkType_IsValid(value)); - _has_bits_[0] |= 0x00000020u; - local_network_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.local_network_type) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily local_address_family = 7; -inline bool IceCandidatePairConfig::has_local_address_family() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void IceCandidatePairConfig::clear_local_address_family() { - local_address_family_ = 0; - _has_bits_[0] &= ~0x00000040u; -} -inline ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::local_address_family() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.local_address_family) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily >(local_address_family_); -} -inline void IceCandidatePairConfig::set_local_address_family(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily_IsValid(value)); - _has_bits_[0] |= 0x00000040u; - local_address_family_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.local_address_family) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.IceCandidateType remote_candidate_type = 8; -inline bool IceCandidatePairConfig::has_remote_candidate_type() const { - return (_has_bits_[0] & 0x00000080u) != 0; -} -inline void 
IceCandidatePairConfig::clear_remote_candidate_type() { - remote_candidate_type_ = 0; - _has_bits_[0] &= ~0x00000080u; -} -inline ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType IceCandidatePairConfig::remote_candidate_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.remote_candidate_type) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType >(remote_candidate_type_); -} -inline void IceCandidatePairConfig::set_remote_candidate_type(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType_IsValid(value)); - _has_bits_[0] |= 0x00000080u; - remote_candidate_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.remote_candidate_type) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.AddressFamily remote_address_family = 9; -inline bool IceCandidatePairConfig::has_remote_address_family() const { - return (_has_bits_[0] & 0x00000100u) != 0; -} -inline void IceCandidatePairConfig::clear_remote_address_family() { - remote_address_family_ = 0; - _has_bits_[0] &= ~0x00000100u; -} -inline ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily IceCandidatePairConfig::remote_address_family() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.remote_address_family) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily >(remote_address_family_); -} -inline void IceCandidatePairConfig::set_remote_address_family(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily_IsValid(value)); - _has_bits_[0] |= 0x00000100u; - remote_address_family_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.remote_address_family) -} - -// optional .webrtc.rtclog2.IceCandidatePairConfig.Protocol candidate_pair_protocol = 10; 
-inline bool IceCandidatePairConfig::has_candidate_pair_protocol() const { - return (_has_bits_[0] & 0x00000200u) != 0; -} -inline void IceCandidatePairConfig::clear_candidate_pair_protocol() { - candidate_pair_protocol_ = 0; - _has_bits_[0] &= ~0x00000200u; -} -inline ::webrtc::rtclog2::IceCandidatePairConfig_Protocol IceCandidatePairConfig::candidate_pair_protocol() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairConfig.candidate_pair_protocol) - return static_cast< ::webrtc::rtclog2::IceCandidatePairConfig_Protocol >(candidate_pair_protocol_); -} -inline void IceCandidatePairConfig::set_candidate_pair_protocol(::webrtc::rtclog2::IceCandidatePairConfig_Protocol value) { - assert(::webrtc::rtclog2::IceCandidatePairConfig_Protocol_IsValid(value)); - _has_bits_[0] |= 0x00000200u; - candidate_pair_protocol_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairConfig.candidate_pair_protocol) -} - -// ------------------------------------------------------------------- - -// IceCandidatePairEvent - -// optional int64 timestamp_ms = 1; -inline bool IceCandidatePairEvent::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void IceCandidatePairEvent::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 IceCandidatePairEvent::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairEvent.timestamp_ms) - return timestamp_ms_; -} -inline void IceCandidatePairEvent::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairEvent.timestamp_ms) -} - -// optional .webrtc.rtclog2.IceCandidatePairEvent.IceCandidatePairEventType event_type = 2; -inline bool IceCandidatePairEvent::has_event_type() const { - return (_has_bits_[0] & 0x00000002u) != 0; 
-} -inline void IceCandidatePairEvent::clear_event_type() { - event_type_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType IceCandidatePairEvent::event_type() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairEvent.event_type) - return static_cast< ::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType >(event_type_); -} -inline void IceCandidatePairEvent::set_event_type(::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType value) { - assert(::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType_IsValid(value)); - _has_bits_[0] |= 0x00000002u; - event_type_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairEvent.event_type) -} - -// optional uint32 candidate_pair_id = 3; -inline bool IceCandidatePairEvent::has_candidate_pair_id() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void IceCandidatePairEvent::clear_candidate_pair_id() { - candidate_pair_id_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IceCandidatePairEvent::candidate_pair_id() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairEvent.candidate_pair_id) - return candidate_pair_id_; -} -inline void IceCandidatePairEvent::set_candidate_pair_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - candidate_pair_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairEvent.candidate_pair_id) -} - -// optional uint32 transaction_id = 4; -inline bool IceCandidatePairEvent::has_transaction_id() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void IceCandidatePairEvent::clear_transaction_id() { - transaction_id_ = 0u; - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 IceCandidatePairEvent::transaction_id() const { - // 
@@protoc_insertion_point(field_get:webrtc.rtclog2.IceCandidatePairEvent.transaction_id) - return transaction_id_; -} -inline void IceCandidatePairEvent::set_transaction_id(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000008u; - transaction_id_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.IceCandidatePairEvent.transaction_id) -} - -// ------------------------------------------------------------------- - -// DtlsTransportStateEvent - -// optional int64 timestamp_ms = 1; -inline bool DtlsTransportStateEvent::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void DtlsTransportStateEvent::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 DtlsTransportStateEvent::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DtlsTransportStateEvent.timestamp_ms) - return timestamp_ms_; -} -inline void DtlsTransportStateEvent::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DtlsTransportStateEvent.timestamp_ms) -} - -// optional .webrtc.rtclog2.DtlsTransportStateEvent.DtlsTransportState dtls_transport_state = 2; -inline bool DtlsTransportStateEvent::has_dtls_transport_state() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void DtlsTransportStateEvent::clear_dtls_transport_state() { - dtls_transport_state_ = 0; - _has_bits_[0] &= ~0x00000002u; -} -inline ::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState DtlsTransportStateEvent::dtls_transport_state() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DtlsTransportStateEvent.dtls_transport_state) - return static_cast< ::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState >(dtls_transport_state_); -} -inline void 
DtlsTransportStateEvent::set_dtls_transport_state(::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState value) { - assert(::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState_IsValid(value)); - _has_bits_[0] |= 0x00000002u; - dtls_transport_state_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DtlsTransportStateEvent.dtls_transport_state) -} - -// ------------------------------------------------------------------- - -// DtlsWritableState - -// optional int64 timestamp_ms = 1; -inline bool DtlsWritableState::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void DtlsWritableState::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 DtlsWritableState::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DtlsWritableState.timestamp_ms) - return timestamp_ms_; -} -inline void DtlsWritableState::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DtlsWritableState.timestamp_ms) -} - -// optional bool writable = 2; -inline bool DtlsWritableState::has_writable() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void DtlsWritableState::clear_writable() { - writable_ = false; - _has_bits_[0] &= ~0x00000002u; -} -inline bool DtlsWritableState::writable() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.DtlsWritableState.writable) - return writable_; -} -inline void DtlsWritableState::set_writable(bool value) { - _has_bits_[0] |= 0x00000002u; - writable_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.DtlsWritableState.writable) -} - -// ------------------------------------------------------------------- - -// RouteChange - -// optional int64 timestamp_ms = 1; -inline bool RouteChange::has_timestamp_ms() const { - return (_has_bits_[0] & 
0x00000001u) != 0; -} -inline void RouteChange::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000001u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 RouteChange::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RouteChange.timestamp_ms) - return timestamp_ms_; -} -inline void RouteChange::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RouteChange.timestamp_ms) -} - -// optional bool connected = 2; -inline bool RouteChange::has_connected() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RouteChange::clear_connected() { - connected_ = false; - _has_bits_[0] &= ~0x00000002u; -} -inline bool RouteChange::connected() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RouteChange.connected) - return connected_; -} -inline void RouteChange::set_connected(bool value) { - _has_bits_[0] |= 0x00000002u; - connected_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RouteChange.connected) -} - -// optional uint32 overhead = 3; -inline bool RouteChange::has_overhead() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void RouteChange::clear_overhead() { - overhead_ = 0u; - _has_bits_[0] &= ~0x00000004u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 RouteChange::overhead() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RouteChange.overhead) - return overhead_; -} -inline void RouteChange::set_overhead(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000004u; - overhead_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RouteChange.overhead) -} - -// ------------------------------------------------------------------- - -// RemoteEstimates - -// optional int64 timestamp_ms = 1; -inline bool RemoteEstimates::has_timestamp_ms() const { - return (_has_bits_[0] & 0x00000008u) != 0; -} -inline void 
RemoteEstimates::clear_timestamp_ms() { - timestamp_ms_ = PROTOBUF_LONGLONG(0); - _has_bits_[0] &= ~0x00000008u; -} -inline ::PROTOBUF_NAMESPACE_ID::int64 RemoteEstimates::timestamp_ms() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RemoteEstimates.timestamp_ms) - return timestamp_ms_; -} -inline void RemoteEstimates::set_timestamp_ms(::PROTOBUF_NAMESPACE_ID::int64 value) { - _has_bits_[0] |= 0x00000008u; - timestamp_ms_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RemoteEstimates.timestamp_ms) -} - -// optional uint32 link_capacity_lower_kbps = 2; -inline bool RemoteEstimates::has_link_capacity_lower_kbps() const { - return (_has_bits_[0] & 0x00000010u) != 0; -} -inline void RemoteEstimates::clear_link_capacity_lower_kbps() { - link_capacity_lower_kbps_ = 0u; - _has_bits_[0] &= ~0x00000010u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 RemoteEstimates::link_capacity_lower_kbps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps) - return link_capacity_lower_kbps_; -} -inline void RemoteEstimates::set_link_capacity_lower_kbps(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000010u; - link_capacity_lower_kbps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps) -} - -// optional uint32 link_capacity_upper_kbps = 3; -inline bool RemoteEstimates::has_link_capacity_upper_kbps() const { - return (_has_bits_[0] & 0x00000020u) != 0; -} -inline void RemoteEstimates::clear_link_capacity_upper_kbps() { - link_capacity_upper_kbps_ = 0u; - _has_bits_[0] &= ~0x00000020u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 RemoteEstimates::link_capacity_upper_kbps() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps) - return link_capacity_upper_kbps_; -} -inline void RemoteEstimates::set_link_capacity_upper_kbps(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 
0x00000020u; - link_capacity_upper_kbps_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps) -} - -// optional uint32 number_of_deltas = 4; -inline bool RemoteEstimates::has_number_of_deltas() const { - return (_has_bits_[0] & 0x00000040u) != 0; -} -inline void RemoteEstimates::clear_number_of_deltas() { - number_of_deltas_ = 0u; - _has_bits_[0] &= ~0x00000040u; -} -inline ::PROTOBUF_NAMESPACE_ID::uint32 RemoteEstimates::number_of_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RemoteEstimates.number_of_deltas) - return number_of_deltas_; -} -inline void RemoteEstimates::set_number_of_deltas(::PROTOBUF_NAMESPACE_ID::uint32 value) { - _has_bits_[0] |= 0x00000040u; - number_of_deltas_ = value; - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RemoteEstimates.number_of_deltas) -} - -// optional bytes timestamp_ms_deltas = 101; -inline bool RemoteEstimates::has_timestamp_ms_deltas() const { - return (_has_bits_[0] & 0x00000001u) != 0; -} -inline void RemoteEstimates::clear_timestamp_ms_deltas() { - timestamp_ms_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000001u; -} -inline const std::string& RemoteEstimates::timestamp_ms_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) - return timestamp_ms_deltas_.GetNoArena(); -} -inline void RemoteEstimates::set_timestamp_ms_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) -} -inline void RemoteEstimates::set_timestamp_ms_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), 
::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) -} -inline void RemoteEstimates::set_timestamp_ms_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) -} -inline void RemoteEstimates::set_timestamp_ms_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000001u; - timestamp_ms_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) -} -inline std::string* RemoteEstimates::mutable_timestamp_ms_deltas() { - _has_bits_[0] |= 0x00000001u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) - return timestamp_ms_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* RemoteEstimates::release_timestamp_ms_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) - if (!has_timestamp_ms_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000001u; - return timestamp_ms_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void RemoteEstimates::set_allocated_timestamp_ms_deltas(std::string* timestamp_ms_deltas) { - if (timestamp_ms_deltas != nullptr) { - _has_bits_[0] |= 0x00000001u; - } else { - _has_bits_[0] &= ~0x00000001u; - } - timestamp_ms_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), timestamp_ms_deltas); - // 
@@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.RemoteEstimates.timestamp_ms_deltas) -} - -// optional bytes link_capacity_lower_kbps_deltas = 102; -inline bool RemoteEstimates::has_link_capacity_lower_kbps_deltas() const { - return (_has_bits_[0] & 0x00000002u) != 0; -} -inline void RemoteEstimates::clear_link_capacity_lower_kbps_deltas() { - link_capacity_lower_kbps_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - _has_bits_[0] &= ~0x00000002u; -} -inline const std::string& RemoteEstimates::link_capacity_lower_kbps_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) - return link_capacity_lower_kbps_deltas_.GetNoArena(); -} -inline void RemoteEstimates::set_link_capacity_lower_kbps_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000002u; - link_capacity_lower_kbps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) -} -inline void RemoteEstimates::set_link_capacity_lower_kbps_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000002u; - link_capacity_lower_kbps_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) -} -inline void RemoteEstimates::set_link_capacity_lower_kbps_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000002u; - link_capacity_lower_kbps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) -} -inline void RemoteEstimates::set_link_capacity_lower_kbps_deltas(const void* value, size_t size) { - 
_has_bits_[0] |= 0x00000002u; - link_capacity_lower_kbps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) -} -inline std::string* RemoteEstimates::mutable_link_capacity_lower_kbps_deltas() { - _has_bits_[0] |= 0x00000002u; - // @@protoc_insertion_point(field_mutable:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) - return link_capacity_lower_kbps_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* RemoteEstimates::release_link_capacity_lower_kbps_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) - if (!has_link_capacity_lower_kbps_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000002u; - return link_capacity_lower_kbps_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void RemoteEstimates::set_allocated_link_capacity_lower_kbps_deltas(std::string* link_capacity_lower_kbps_deltas) { - if (link_capacity_lower_kbps_deltas != nullptr) { - _has_bits_[0] |= 0x00000002u; - } else { - _has_bits_[0] &= ~0x00000002u; - } - link_capacity_lower_kbps_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), link_capacity_lower_kbps_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.RemoteEstimates.link_capacity_lower_kbps_deltas) -} - -// optional bytes link_capacity_upper_kbps_deltas = 103; -inline bool RemoteEstimates::has_link_capacity_upper_kbps_deltas() const { - return (_has_bits_[0] & 0x00000004u) != 0; -} -inline void RemoteEstimates::clear_link_capacity_upper_kbps_deltas() { - link_capacity_upper_kbps_deltas_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); - 
_has_bits_[0] &= ~0x00000004u; -} -inline const std::string& RemoteEstimates::link_capacity_upper_kbps_deltas() const { - // @@protoc_insertion_point(field_get:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) - return link_capacity_upper_kbps_deltas_.GetNoArena(); -} -inline void RemoteEstimates::set_link_capacity_upper_kbps_deltas(const std::string& value) { - _has_bits_[0] |= 0x00000004u; - link_capacity_upper_kbps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) -} -inline void RemoteEstimates::set_link_capacity_upper_kbps_deltas(std::string&& value) { - _has_bits_[0] |= 0x00000004u; - link_capacity_upper_kbps_deltas_.SetNoArena( - &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) -} -inline void RemoteEstimates::set_link_capacity_upper_kbps_deltas(const char* value) { - GOOGLE_DCHECK(value != nullptr); - _has_bits_[0] |= 0x00000004u; - link_capacity_upper_kbps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) -} -inline void RemoteEstimates::set_link_capacity_upper_kbps_deltas(const void* value, size_t size) { - _has_bits_[0] |= 0x00000004u; - link_capacity_upper_kbps_deltas_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) -} -inline std::string* RemoteEstimates::mutable_link_capacity_upper_kbps_deltas() { - _has_bits_[0] |= 0x00000004u; - // 
@@protoc_insertion_point(field_mutable:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) - return link_capacity_upper_kbps_deltas_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline std::string* RemoteEstimates::release_link_capacity_upper_kbps_deltas() { - // @@protoc_insertion_point(field_release:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) - if (!has_link_capacity_upper_kbps_deltas()) { - return nullptr; - } - _has_bits_[0] &= ~0x00000004u; - return link_capacity_upper_kbps_deltas_.ReleaseNonDefaultNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); -} -inline void RemoteEstimates::set_allocated_link_capacity_upper_kbps_deltas(std::string* link_capacity_upper_kbps_deltas) { - if (link_capacity_upper_kbps_deltas != nullptr) { - _has_bits_[0] |= 0x00000004u; - } else { - _has_bits_[0] &= ~0x00000004u; - } - link_capacity_upper_kbps_deltas_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), link_capacity_upper_kbps_deltas); - // @@protoc_insertion_point(field_set_allocated:webrtc.rtclog2.RemoteEstimates.link_capacity_upper_kbps_deltas) -} - -#ifdef __GNUC__ - #pragma GCC diagnostic pop -#endif // __GNUC__ -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - 
-// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - - -// @@protoc_insertion_point(namespace_scope) - -} // namespace rtclog2 -} // namespace webrtc - -PROTOBUF_NAMESPACE_OPEN - -template <> struct is_proto_enum< ::webrtc::rtclog2::FrameDecodedEvents_Codec> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::DelayBasedBweUpdates_DetectorState> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::BweProbeResultFailure_FailureReason> : ::std::true_type {}; -template <> struct is_proto_enum< 
::webrtc::rtclog2::IceCandidatePairConfig_IceCandidatePairConfigType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::IceCandidatePairConfig_IceCandidateType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::IceCandidatePairConfig_Protocol> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::IceCandidatePairConfig_AddressFamily> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::IceCandidatePairConfig_NetworkType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::IceCandidatePairEvent_IceCandidatePairEventType> : ::std::true_type {}; -template <> struct is_proto_enum< ::webrtc::rtclog2::DtlsTransportStateEvent_DtlsTransportState> : ::std::true_type {}; - -PROTOBUF_NAMESPACE_CLOSE - -// @@protoc_insertion_point(global_scope) - -#include -#endif // GOOGLE_PROTOBUF_INCLUDED_GOOGLE_PROTOBUF_INCLUDED_rtc_5fevent_5flog2_2eproto diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc index 4465d44c8..4a272f08c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc @@ -39,6 +39,12 @@ constexpr size_t kMaxEventsInConfigHistory = 1000; std::unique_ptr CreateEncoder( RtcEventLog::EncodingType type) { switch (type) { + case RtcEventLog::EncodingType::Legacy: + RTC_LOG(LS_INFO) << "Creating legacy encoder for RTC event log."; + return std::make_unique(); + case RtcEventLog::EncodingType::NewFormat: + RTC_LOG(LS_INFO) << "Creating new format encoder for RTC event log."; + return std::make_unique(); default: RTC_LOG(LS_ERROR) << "Unknown RtcEventLog encoder type (" << int(type) << ")"; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc index 3ff360a4f..24d5962aa 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc @@ -680,10 +680,8 @@ ParsedRtcEventLog::ParseStatus StoreRtcpPackets( raw_packet_values[i])) { continue; } - const size_t data_size = raw_packet_values[i].size(); - const uint8_t* data = - reinterpret_cast(raw_packet_values[i].data()); - rtcp_packets->emplace_back(1000 * timestamp_ms, data, data_size); + std::string data(raw_packet_values[i]); + rtcp_packets->emplace_back(1000 * timestamp_ms, data); } return ParsedRtcEventLog::ParseStatus::Success(); } @@ -1095,8 +1093,7 @@ void ParsedRtcEventLog::Clear() { video_recv_configs_.clear(); video_send_configs_.clear(); - memset(last_incoming_rtcp_packet_, 0, IP_PACKET_SIZE); - last_incoming_rtcp_packet_length_ = 0; + last_incoming_rtcp_packet_.clear(); first_timestamp_ = std::numeric_limits::max(); last_timestamp_ = std::numeric_limits::min(); @@ -1224,7 +1221,9 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( StoreFirstAndLastTimestamp(bwe_probe_success_events()); StoreFirstAndLastTimestamp(bwe_delay_updates()); StoreFirstAndLastTimestamp(bwe_loss_updates()); - StoreFirstAndLastTimestamp(decoded_frames()); + for (const auto& frame_stream : decoded_frames()) { + StoreFirstAndLastTimestamp(frame_stream.second); + } StoreFirstAndLastTimestamp(dtls_transport_states()); StoreFirstAndLastTimestamp(dtls_writable_states()); StoreFirstAndLastTimestamp(ice_candidate_pair_configs()); @@ -1476,27 +1475,23 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent( } case rtclog::Event::RTCP_EVENT: { PacketDirection direction; - uint8_t packet[IP_PACKET_SIZE]; - size_t total_length; - auto status = GetRtcpPacket(event, &direction, packet, &total_length); + std::vector packet; + auto status = GetRtcpPacket(event, &direction, 
&packet); RTC_RETURN_IF_ERROR(status); RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us()); int64_t timestamp_us = event.timestamp_us(); - RTC_PARSE_CHECK_OR_RETURN_LE(total_length, IP_PACKET_SIZE); if (direction == kIncomingPacket) { // Currently incoming RTCP packets are logged twice, both for audio and // video. Only act on one of them. Compare against the previous parsed // incoming RTCP packet. - if (total_length == last_incoming_rtcp_packet_length_ && - memcmp(last_incoming_rtcp_packet_, packet, total_length) == 0) + if (packet == last_incoming_rtcp_packet_) break; incoming_rtcp_packets_.push_back( - LoggedRtcpPacketIncoming(timestamp_us, packet, total_length)); - last_incoming_rtcp_packet_length_ = total_length; - memcpy(last_incoming_rtcp_packet_, packet, total_length); + LoggedRtcpPacketIncoming(timestamp_us, packet)); + last_incoming_rtcp_packet_ = packet; } else { outgoing_rtcp_packets_.push_back( - LoggedRtcpPacketOutgoing(timestamp_us, packet, total_length)); + LoggedRtcpPacketOutgoing(timestamp_us, packet)); } break; } @@ -1655,12 +1650,10 @@ const RtpHeaderExtensionMap* ParsedRtcEventLog::GetRtpHeaderExtensionMap( return nullptr; } -// The packet must have space for at least IP_PACKET_SIZE bytes. ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::GetRtcpPacket( const rtclog::Event& event, PacketDirection* incoming, - uint8_t* packet, - size_t* length) const { + std::vector* packet) const { RTC_PARSE_CHECK_OR_RETURN(event.has_type()); RTC_PARSE_CHECK_OR_RETURN_EQ(event.type(), rtclog::Event::RTCP_EVENT); RTC_PARSE_CHECK_OR_RETURN(event.has_rtcp_packet()); @@ -1670,16 +1663,11 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::GetRtcpPacket( if (incoming != nullptr) { *incoming = rtcp_packet.incoming() ? kIncomingPacket : kOutgoingPacket; } - // Get packet length. - RTC_PARSE_CHECK_OR_RETURN(rtcp_packet.has_packet_data()); - if (length != nullptr) { - *length = rtcp_packet.packet_data().size(); - } // Get packet contents. 
+ RTC_PARSE_CHECK_OR_RETURN(rtcp_packet.has_packet_data()); if (packet != nullptr) { - RTC_PARSE_CHECK_OR_RETURN_LE(rtcp_packet.packet_data().size(), - static_cast(IP_PACKET_SIZE)); - memcpy(packet, rtcp_packet.packet_data().data(), + packet->resize(rtcp_packet.packet_data().size()); + memcpy(packet->data(), rtcp_packet.packet_data().data(), rtcp_packet.packet_data().size()); } return ParseStatus::Success(); @@ -2160,7 +2148,7 @@ std::vector ParsedRtcEventLog::GetPacketInfos( // Add an offset to avoid |capture_ticks| to become negative in the case // of reordering. - constexpr int64_t kStartingCaptureTimeTicks = 90 * 48 * 1000; + constexpr int64_t kStartingCaptureTimeTicks = 90 * 48 * 10000; int64_t capture_ticks = kStartingCaptureTimeTicks + stream->unwrap_capture_ticks.Unwrap(rtp.header.timestamp); @@ -2813,7 +2801,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreFrameDecodedEvents( RTC_PARSE_CHECK_OR_RETURN_LE(proto.qp(), 255); base_frame.qp = static_cast(proto.qp()); - decoded_frames_.push_back(base_frame); + decoded_frames_[base_frame.ssrc].push_back(base_frame); const size_t number_of_deltas = proto.has_number_of_deltas() ? 
proto.number_of_deltas() : 0u; @@ -2894,7 +2882,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreFrameDecodedEvents( std::numeric_limits::max()); frame.qp = static_cast(qp_values[i].value()); - decoded_frames_.push_back(frame); + decoded_frames_[frame.ssrc].push_back(frame); } return ParseStatus::Success(); } @@ -3017,13 +3005,11 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericPacketSentEvent( RTC_PARSE_CHECK_OR_RETURN_EQ(overhead_length_values.size(), number_of_deltas); std::vector> payload_length_values = DecodeDeltas( - proto.payload_length_deltas(), ToUnsigned(proto.payload_length()), - number_of_deltas); // TODO(terelius): Remove ToUnsigned + proto.payload_length_deltas(), proto.payload_length(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(payload_length_values.size(), number_of_deltas); std::vector> padding_length_values = DecodeDeltas( - proto.padding_length_deltas(), ToUnsigned(proto.padding_length()), - number_of_deltas); // TODO(terelius): Remove ToUnsigned + proto.padding_length_deltas(), proto.padding_length(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(padding_length_values.size(), number_of_deltas); for (size_t i = 0; i < number_of_deltas; i++) { @@ -3087,10 +3073,10 @@ ParsedRtcEventLog::StoreGenericPacketReceivedEvent( int64_t packet_number; RTC_PARSE_CHECK_OR_RETURN( ToSigned(packet_number_values[i].value(), &packet_number)); - int32_t packet_length; - RTC_PARSE_CHECK_OR_RETURN( - ToSigned(packet_length_values[i].value(), - &packet_length)); // TODO(terelius): Remove ToSigned + RTC_PARSE_CHECK_OR_RETURN_LE(packet_length_values[i].value(), + std::numeric_limits::max()); + int32_t packet_length = + static_cast(packet_length_values[i].value()); generic_packets_received_.push_back( {timestamp_ms * 1000, packet_number, packet_length}); } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h index 
542f1cc6e..dce075aff 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h @@ -612,7 +612,8 @@ class ParsedRtcEventLog { } // Media - const std::vector& decoded_frames() const { + const std::map>& decoded_frames() + const { return decoded_frames_; } @@ -669,8 +670,7 @@ class ParsedRtcEventLog { // NB: The packet must have space for at least IP_PACKET_SIZE bytes. ParseStatus GetRtcpPacket(const rtclog::Event& event, PacketDirection* incoming, - uint8_t* packet, - size_t* length) const; + std::vector* packet) const; ParseStatusOr GetVideoReceiveConfig( const rtclog::Event& event) const; @@ -855,7 +855,7 @@ class ParsedRtcEventLog { std::vector dtls_transport_states_; std::vector dtls_writable_states_; - std::vector decoded_frames_; + std::map> decoded_frames_; std::vector ice_candidate_pair_configs_; std::vector ice_candidate_pair_events_; @@ -872,8 +872,7 @@ class ParsedRtcEventLog { std::vector route_change_events_; std::vector remote_estimate_events_; - uint8_t last_incoming_rtcp_packet_[IP_PACKET_SIZE]; - uint8_t last_incoming_rtcp_packet_length_; + std::vector last_incoming_rtcp_packet_; int64_t first_timestamp_; int64_t last_timestamp_; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc new file mode 100644 index 000000000..1c13cc639 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc @@ -0,0 +1,1313 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "logging/rtc_event_log/rtc_event_log_unittest_helper.h" + +#include // memcmp + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "modules/rtp_rtcp/include/rtp_cvo.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/ntp_time.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace test { + +namespace { + +struct ExtensionPair { + RTPExtensionType type; + const char* name; +}; + +constexpr int kMaxCsrcs = 3; + +// Maximum serialized size of a header extension, including 1 byte ID. 
+constexpr int kMaxExtensionSizeBytes = 4; +constexpr int kMaxNumExtensions = 5; + +constexpr ExtensionPair kExtensions[kMaxNumExtensions] = { + {RTPExtensionType::kRtpExtensionTransmissionTimeOffset, + RtpExtension::kTimestampOffsetUri}, + {RTPExtensionType::kRtpExtensionAbsoluteSendTime, + RtpExtension::kAbsSendTimeUri}, + {RTPExtensionType::kRtpExtensionTransportSequenceNumber, + RtpExtension::kTransportSequenceNumberUri}, + {RTPExtensionType::kRtpExtensionAudioLevel, RtpExtension::kAudioLevelUri}, + {RTPExtensionType::kRtpExtensionVideoRotation, + RtpExtension::kVideoRotationUri}}; + +template +void ShuffleInPlace(Random* prng, rtc::ArrayView array) { + RTC_DCHECK_LE(array.size(), std::numeric_limits::max()); + for (uint32_t i = 0; i + 1 < array.size(); i++) { + uint32_t other = prng->Rand(i, static_cast(array.size() - 1)); + std::swap(array[i], array[other]); + } +} + +absl::optional GetExtensionId(const std::vector& extensions, + const std::string& uri) { + for (const auto& extension : extensions) { + if (extension.uri == uri) + return extension.id; + } + return absl::nullopt; +} + +} // namespace + +std::unique_ptr EventGenerator::NewAlrState() { + return std::make_unique(prng_.Rand()); +} + +std::unique_ptr EventGenerator::NewAudioPlayout( + uint32_t ssrc) { + return std::make_unique(ssrc); +} + +std::unique_ptr +EventGenerator::NewAudioNetworkAdaptation() { + std::unique_ptr config = + std::make_unique(); + + config->bitrate_bps = prng_.Rand(0, 3000000); + config->enable_fec = prng_.Rand(); + config->enable_dtx = prng_.Rand(); + config->frame_length_ms = prng_.Rand(10, 120); + config->num_channels = prng_.Rand(1, 2); + config->uplink_packet_loss_fraction = prng_.Rand(); + + return std::make_unique(std::move(config)); +} + +std::unique_ptr +EventGenerator::NewBweUpdateDelayBased() { + constexpr int32_t kMaxBweBps = 20000000; + int32_t bitrate_bps = prng_.Rand(0, kMaxBweBps); + BandwidthUsage state = static_cast( + 
prng_.Rand(static_cast(BandwidthUsage::kLast) - 1)); + return std::make_unique(bitrate_bps, state); +} + +std::unique_ptr +EventGenerator::NewBweUpdateLossBased() { + constexpr int32_t kMaxBweBps = 20000000; + constexpr int32_t kMaxPackets = 1000; + int32_t bitrate_bps = prng_.Rand(0, kMaxBweBps); + uint8_t fraction_lost = prng_.Rand(); + int32_t total_packets = prng_.Rand(1, kMaxPackets); + + return std::make_unique( + bitrate_bps, fraction_lost, total_packets); +} + +std::unique_ptr +EventGenerator::NewDtlsTransportState() { + DtlsTransportState state = static_cast( + prng_.Rand(static_cast(DtlsTransportState::kNumValues) - 1)); + + return std::make_unique(state); +} + +std::unique_ptr +EventGenerator::NewDtlsWritableState() { + bool writable = prng_.Rand(); + return std::make_unique(writable); +} + +std::unique_ptr EventGenerator::NewFrameDecodedEvent( + uint32_t ssrc) { + constexpr int kMinRenderDelayMs = 1; + constexpr int kMaxRenderDelayMs = 2000000; + constexpr int kMaxWidth = 15360; + constexpr int kMaxHeight = 8640; + constexpr int kMinWidth = 16; + constexpr int kMinHeight = 16; + constexpr int kNumCodecTypes = 5; + + constexpr VideoCodecType kCodecList[kNumCodecTypes] = { + kVideoCodecGeneric, kVideoCodecVP8, kVideoCodecVP9, kVideoCodecAV1, + kVideoCodecH264}; + const int64_t render_time_ms = + rtc::TimeMillis() + prng_.Rand(kMinRenderDelayMs, kMaxRenderDelayMs); + const int width = prng_.Rand(kMinWidth, kMaxWidth); + const int height = prng_.Rand(kMinHeight, kMaxHeight); + const VideoCodecType codec = kCodecList[prng_.Rand(0, kNumCodecTypes - 1)]; + const uint8_t qp = prng_.Rand(); + return std::make_unique(render_time_ms, ssrc, width, + height, codec, qp); +} + +std::unique_ptr +EventGenerator::NewProbeClusterCreated() { + constexpr int kMaxBweBps = 20000000; + constexpr int kMaxNumProbes = 10000; + int id = prng_.Rand(1, kMaxNumProbes); + int bitrate_bps = prng_.Rand(0, kMaxBweBps); + int min_probes = prng_.Rand(5, 50); + int min_bytes = 
prng_.Rand(500, 50000); + + return std::make_unique(id, bitrate_bps, + min_probes, min_bytes); +} + +std::unique_ptr +EventGenerator::NewProbeResultFailure() { + constexpr int kMaxNumProbes = 10000; + int id = prng_.Rand(1, kMaxNumProbes); + ProbeFailureReason reason = static_cast( + prng_.Rand(static_cast(ProbeFailureReason::kLast) - 1)); + + return std::make_unique(id, reason); +} + +std::unique_ptr +EventGenerator::NewProbeResultSuccess() { + constexpr int kMaxBweBps = 20000000; + constexpr int kMaxNumProbes = 10000; + int id = prng_.Rand(1, kMaxNumProbes); + int bitrate_bps = prng_.Rand(0, kMaxBweBps); + + return std::make_unique(id, bitrate_bps); +} + +std::unique_ptr +EventGenerator::NewIceCandidatePairConfig() { + IceCandidateType local_candidate_type = static_cast( + prng_.Rand(static_cast(IceCandidateType::kNumValues) - 1)); + IceCandidateNetworkType local_network_type = + static_cast(prng_.Rand( + static_cast(IceCandidateNetworkType::kNumValues) - 1)); + IceCandidatePairAddressFamily local_address_family = + static_cast(prng_.Rand( + static_cast(IceCandidatePairAddressFamily::kNumValues) - + 1)); + IceCandidateType remote_candidate_type = static_cast( + prng_.Rand(static_cast(IceCandidateType::kNumValues) - 1)); + IceCandidatePairAddressFamily remote_address_family = + static_cast(prng_.Rand( + static_cast(IceCandidatePairAddressFamily::kNumValues) - + 1)); + IceCandidatePairProtocol protocol_type = + static_cast(prng_.Rand( + static_cast(IceCandidatePairProtocol::kNumValues) - 1)); + + IceCandidatePairDescription desc; + desc.local_candidate_type = local_candidate_type; + desc.local_relay_protocol = protocol_type; + desc.local_network_type = local_network_type; + desc.local_address_family = local_address_family; + desc.remote_candidate_type = remote_candidate_type; + desc.remote_address_family = remote_address_family; + desc.candidate_pair_protocol = protocol_type; + + IceCandidatePairConfigType type = + static_cast(prng_.Rand( + 
static_cast(IceCandidatePairConfigType::kNumValues) - 1)); + uint32_t pair_id = prng_.Rand(); + return std::make_unique(type, pair_id, desc); +} + +std::unique_ptr +EventGenerator::NewIceCandidatePair() { + IceCandidatePairEventType type = + static_cast(prng_.Rand( + static_cast(IceCandidatePairEventType::kNumValues) - 1)); + uint32_t pair_id = prng_.Rand(); + uint32_t transaction_id = prng_.Rand(); + + return std::make_unique(type, pair_id, + transaction_id); +} + +rtcp::ReportBlock EventGenerator::NewReportBlock() { + rtcp::ReportBlock report_block; + report_block.SetMediaSsrc(prng_.Rand()); + report_block.SetFractionLost(prng_.Rand()); + // cumulative_lost is a 3-byte signed value. + RTC_DCHECK(report_block.SetCumulativeLost( + prng_.Rand(-(1 << 23) + 1, (1 << 23) - 1))); + report_block.SetExtHighestSeqNum(prng_.Rand()); + report_block.SetJitter(prng_.Rand()); + report_block.SetLastSr(prng_.Rand()); + report_block.SetDelayLastSr(prng_.Rand()); + return report_block; +} + +rtcp::SenderReport EventGenerator::NewSenderReport() { + rtcp::SenderReport sender_report; + sender_report.SetSenderSsrc(prng_.Rand()); + sender_report.SetNtp(NtpTime(prng_.Rand(), prng_.Rand())); + sender_report.SetRtpTimestamp(prng_.Rand()); + sender_report.SetPacketCount(prng_.Rand()); + sender_report.SetOctetCount(prng_.Rand()); + sender_report.AddReportBlock(NewReportBlock()); + return sender_report; +} + +rtcp::ReceiverReport EventGenerator::NewReceiverReport() { + rtcp::ReceiverReport receiver_report; + receiver_report.SetSenderSsrc(prng_.Rand()); + receiver_report.AddReportBlock(NewReportBlock()); + return receiver_report; +} + +rtcp::ExtendedReports EventGenerator::NewExtendedReports() { + rtcp::ExtendedReports extended_report; + extended_report.SetSenderSsrc(prng_.Rand()); + + rtcp::Rrtr rrtr; + rrtr.SetNtp(NtpTime(prng_.Rand(), prng_.Rand())); + extended_report.SetRrtr(rrtr); + + rtcp::ReceiveTimeInfo time_info( + prng_.Rand(), prng_.Rand(), prng_.Rand()); + 
extended_report.AddDlrrItem(time_info); + + rtcp::TargetBitrate target_bitrate; + target_bitrate.AddTargetBitrate(/*spatial layer*/ prng_.Rand(0, 3), + /*temporal layer*/ prng_.Rand(0, 3), + /*bitrate kbps*/ prng_.Rand(0, 50000)); + target_bitrate.AddTargetBitrate(/*spatial layer*/ prng_.Rand(4, 7), + /*temporal layer*/ prng_.Rand(4, 7), + /*bitrate kbps*/ prng_.Rand(0, 50000)); + extended_report.SetTargetBitrate(target_bitrate); + return extended_report; +} + +rtcp::Nack EventGenerator::NewNack() { + rtcp::Nack nack; + uint16_t base_seq_no = prng_.Rand(); + std::vector nack_list; + nack_list.push_back(base_seq_no); + for (uint16_t i = 1u; i < 10u; i++) { + if (prng_.Rand()) + nack_list.push_back(base_seq_no + i); + } + nack.SetPacketIds(nack_list); + return nack; +} + +rtcp::Fir EventGenerator::NewFir() { + rtcp::Fir fir; + fir.SetSenderSsrc(prng_.Rand()); + fir.AddRequestTo(/*ssrc*/ prng_.Rand(), + /*seq num*/ prng_.Rand()); + fir.AddRequestTo(/*ssrc*/ prng_.Rand(), + /*seq num*/ prng_.Rand()); + return fir; +} + +rtcp::Pli EventGenerator::NewPli() { + rtcp::Pli pli; + pli.SetSenderSsrc(prng_.Rand()); + pli.SetMediaSsrc(prng_.Rand()); + return pli; +} + +rtcp::TransportFeedback EventGenerator::NewTransportFeedback() { + rtcp::TransportFeedback transport_feedback; + uint16_t base_seq_no = prng_.Rand(); + int64_t base_time_us = prng_.Rand(); + transport_feedback.SetBase(base_seq_no, base_time_us); + transport_feedback.AddReceivedPacket(base_seq_no, base_time_us); + int64_t time_us = base_time_us; + for (uint16_t i = 1u; i < 10u; i++) { + time_us += prng_.Rand(0, 100000); + if (prng_.Rand()) { + transport_feedback.AddReceivedPacket(base_seq_no + i, time_us); + } + } + return transport_feedback; +} + +rtcp::Remb EventGenerator::NewRemb() { + rtcp::Remb remb; + // The remb bitrate is transported as a 16-bit mantissa and an 8-bit exponent. 
+ uint64_t bitrate_bps = prng_.Rand(0, (1 << 16) - 1) << prng_.Rand(7); + std::vector ssrcs{prng_.Rand(), prng_.Rand()}; + remb.SetSsrcs(ssrcs); + remb.SetBitrateBps(bitrate_bps); + return remb; +} + +rtcp::LossNotification EventGenerator::NewLossNotification() { + rtcp::LossNotification loss_notification; + const uint16_t last_decoded = prng_.Rand(); + const uint16_t last_received = + last_decoded + (prng_.Rand() & 0x7fff); + const bool decodability_flag = prng_.Rand(); + EXPECT_TRUE( + loss_notification.Set(last_decoded, last_received, decodability_flag)); + return loss_notification; +} + +std::unique_ptr EventGenerator::NewRouteChange() { + return std::make_unique(prng_.Rand(), + prng_.Rand(0, 128)); +} + +std::unique_ptr EventGenerator::NewRemoteEstimate() { + return std::make_unique( + DataRate::KilobitsPerSec(prng_.Rand(0, 100000)), + DataRate::KilobitsPerSec(prng_.Rand(0, 100000))); +} + +std::unique_ptr +EventGenerator::NewRtcpPacketIncoming() { + enum class SupportedRtcpTypes { + kSenderReport = 0, + kReceiverReport, + kExtendedReports, + kFir, + kPli, + kNack, + kRemb, + kTransportFeedback, + kNumValues + }; + SupportedRtcpTypes type = static_cast( + prng_.Rand(0, static_cast(SupportedRtcpTypes::kNumValues) - 1)); + switch (type) { + case SupportedRtcpTypes::kSenderReport: { + rtcp::SenderReport sender_report = NewSenderReport(); + rtc::Buffer buffer = sender_report.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kReceiverReport: { + rtcp::ReceiverReport receiver_report = NewReceiverReport(); + rtc::Buffer buffer = receiver_report.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kExtendedReports: { + rtcp::ExtendedReports extended_report = NewExtendedReports(); + rtc::Buffer buffer = extended_report.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kFir: { + rtcp::Fir fir = NewFir(); + rtc::Buffer buffer = fir.Build(); + return std::make_unique(buffer); + } + case 
SupportedRtcpTypes::kPli: { + rtcp::Pli pli = NewPli(); + rtc::Buffer buffer = pli.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kNack: { + rtcp::Nack nack = NewNack(); + rtc::Buffer buffer = nack.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kRemb: { + rtcp::Remb remb = NewRemb(); + rtc::Buffer buffer = remb.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kTransportFeedback: { + rtcp::TransportFeedback transport_feedback = NewTransportFeedback(); + rtc::Buffer buffer = transport_feedback.Build(); + return std::make_unique(buffer); + } + default: + RTC_NOTREACHED(); + rtc::Buffer buffer; + return std::make_unique(buffer); + } +} + +std::unique_ptr +EventGenerator::NewRtcpPacketOutgoing() { + enum class SupportedRtcpTypes { + kSenderReport = 0, + kReceiverReport, + kExtendedReports, + kFir, + kPli, + kNack, + kRemb, + kTransportFeedback, + kNumValues + }; + SupportedRtcpTypes type = static_cast( + prng_.Rand(0, static_cast(SupportedRtcpTypes::kNumValues) - 1)); + switch (type) { + case SupportedRtcpTypes::kSenderReport: { + rtcp::SenderReport sender_report = NewSenderReport(); + rtc::Buffer buffer = sender_report.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kReceiverReport: { + rtcp::ReceiverReport receiver_report = NewReceiverReport(); + rtc::Buffer buffer = receiver_report.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kExtendedReports: { + rtcp::ExtendedReports extended_report = NewExtendedReports(); + rtc::Buffer buffer = extended_report.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kFir: { + rtcp::Fir fir = NewFir(); + rtc::Buffer buffer = fir.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kPli: { + rtcp::Pli pli = NewPli(); + rtc::Buffer buffer = pli.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kNack: { + rtcp::Nack nack = NewNack(); 
+ rtc::Buffer buffer = nack.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kRemb: { + rtcp::Remb remb = NewRemb(); + rtc::Buffer buffer = remb.Build(); + return std::make_unique(buffer); + } + case SupportedRtcpTypes::kTransportFeedback: { + rtcp::TransportFeedback transport_feedback = NewTransportFeedback(); + rtc::Buffer buffer = transport_feedback.Build(); + return std::make_unique(buffer); + } + default: + RTC_NOTREACHED(); + rtc::Buffer buffer; + return std::make_unique(buffer); + } +} + +std::unique_ptr +EventGenerator::NewGenericPacketSent() { + return std::make_unique( + sent_packet_number_++, prng_.Rand(40, 50), prng_.Rand(0, 150), + prng_.Rand(0, 1000)); +} +std::unique_ptr +EventGenerator::NewGenericPacketReceived() { + return std::make_unique( + received_packet_number_++, prng_.Rand(40, 250)); +} +std::unique_ptr +EventGenerator::NewGenericAckReceived() { + absl::optional receive_timestamp = absl::nullopt; + if (prng_.Rand(0, 2) > 0) { + receive_timestamp = prng_.Rand(0, 100000); + } + AckedPacket packet = {prng_.Rand(40, 250), receive_timestamp}; + return std::move(RtcEventGenericAckReceived::CreateLogs( + received_packet_number_++, std::vector{packet})[0]); +} + +void EventGenerator::RandomizeRtpPacket( + size_t payload_size, + size_t padding_size, + uint32_t ssrc, + const RtpHeaderExtensionMap& extension_map, + RtpPacket* rtp_packet, + bool all_configured_exts) { + constexpr int kMaxPayloadType = 127; + rtp_packet->SetPayloadType(prng_.Rand(kMaxPayloadType)); + rtp_packet->SetMarker(prng_.Rand()); + rtp_packet->SetSequenceNumber(prng_.Rand()); + rtp_packet->SetSsrc(ssrc); + rtp_packet->SetTimestamp(prng_.Rand()); + + uint32_t csrcs_count = prng_.Rand(0, kMaxCsrcs); + std::vector csrcs; + for (size_t i = 0; i < csrcs_count; i++) { + csrcs.push_back(prng_.Rand()); + } + rtp_packet->SetCsrcs(csrcs); + + if (extension_map.IsRegistered(TransmissionOffset::kId) && + (all_configured_exts || prng_.Rand())) { + 
rtp_packet->SetExtension(prng_.Rand(0x00ffffff)); + } + + if (extension_map.IsRegistered(AudioLevel::kId) && + (all_configured_exts || prng_.Rand())) { + rtp_packet->SetExtension(prng_.Rand(), prng_.Rand(127)); + } + + if (extension_map.IsRegistered(AbsoluteSendTime::kId) && + (all_configured_exts || prng_.Rand())) { + rtp_packet->SetExtension(prng_.Rand(0x00ffffff)); + } + + if (extension_map.IsRegistered(VideoOrientation::kId) && + (all_configured_exts || prng_.Rand())) { + rtp_packet->SetExtension(prng_.Rand(3)); + } + + if (extension_map.IsRegistered(TransportSequenceNumber::kId) && + (all_configured_exts || prng_.Rand())) { + rtp_packet->SetExtension(prng_.Rand()); + } + + RTC_CHECK_LE(rtp_packet->headers_size() + payload_size, IP_PACKET_SIZE); + + uint8_t* payload = rtp_packet->AllocatePayload(payload_size); + RTC_DCHECK(payload != nullptr); + for (size_t i = 0; i < payload_size; i++) { + payload[i] = prng_.Rand(); + } + RTC_CHECK(rtp_packet->SetPadding(padding_size)); +} + +std::unique_ptr EventGenerator::NewRtpPacketIncoming( + uint32_t ssrc, + const RtpHeaderExtensionMap& extension_map, + bool all_configured_exts) { + constexpr size_t kMaxPaddingLength = 224; + const bool padding = prng_.Rand(0, 9) == 0; // Let padding be 10% probable. + const size_t padding_size = !padding ? 0u : prng_.Rand(0u, kMaxPaddingLength); + + // 12 bytes RTP header, 4 bytes for 0xBEDE + alignment, 4 bytes per CSRC. + constexpr size_t kMaxHeaderSize = + 16 + 4 * kMaxCsrcs + kMaxExtensionSizeBytes * kMaxNumExtensions; + + // In principle, a packet can contain both padding and other payload. + // Currently, RTC eventlog encoder-parser can only maintain padding length if + // packet is full padding. + // TODO(webrtc:9730): Remove the deterministic logic for padding_size > 0. + size_t payload_size = + padding_size > 0 ? 
0 + : prng_.Rand(0u, static_cast(IP_PACKET_SIZE - + 1 - padding_size - + kMaxHeaderSize)); + + RtpPacketReceived rtp_packet(&extension_map); + RandomizeRtpPacket(payload_size, padding_size, ssrc, extension_map, + &rtp_packet, all_configured_exts); + + return std::make_unique(rtp_packet); +} + +std::unique_ptr EventGenerator::NewRtpPacketOutgoing( + uint32_t ssrc, + const RtpHeaderExtensionMap& extension_map, + bool all_configured_exts) { + constexpr size_t kMaxPaddingLength = 224; + const bool padding = prng_.Rand(0, 9) == 0; // Let padding be 10% probable. + const size_t padding_size = !padding ? 0u : prng_.Rand(0u, kMaxPaddingLength); + + // 12 bytes RTP header, 4 bytes for 0xBEDE + alignment, 4 bytes per CSRC. + constexpr size_t kMaxHeaderSize = + 16 + 4 * kMaxCsrcs + kMaxExtensionSizeBytes * kMaxNumExtensions; + + // In principle,a packet can contain both padding and other payload. + // Currently, RTC eventlog encoder-parser can only maintain padding length if + // packet is full padding. + // TODO(webrtc:9730): Remove the deterministic logic for padding_size > 0. + size_t payload_size = + padding_size > 0 ? 
0 + : prng_.Rand(0u, static_cast(IP_PACKET_SIZE - + 1 - padding_size - + kMaxHeaderSize)); + + RtpPacketToSend rtp_packet(&extension_map, + kMaxHeaderSize + payload_size + padding_size); + RandomizeRtpPacket(payload_size, padding_size, ssrc, extension_map, + &rtp_packet, all_configured_exts); + + int probe_cluster_id = prng_.Rand(0, 100000); + return std::make_unique(rtp_packet, + probe_cluster_id); +} + +RtpHeaderExtensionMap EventGenerator::NewRtpHeaderExtensionMap( + bool configure_all) { + RtpHeaderExtensionMap extension_map; + std::vector id(RtpExtension::kOneByteHeaderExtensionMaxId - + RtpExtension::kMinId + 1); + std::iota(id.begin(), id.end(), RtpExtension::kMinId); + ShuffleInPlace(&prng_, rtc::ArrayView(id)); + + if (configure_all || prng_.Rand()) { + extension_map.Register(id[0]); + } + if (configure_all || prng_.Rand()) { + extension_map.Register(id[1]); + } + if (configure_all || prng_.Rand()) { + extension_map.Register(id[2]); + } + if (configure_all || prng_.Rand()) { + extension_map.Register(id[3]); + } + if (configure_all || prng_.Rand()) { + extension_map.Register(id[4]); + } + + return extension_map; +} + +std::unique_ptr +EventGenerator::NewAudioReceiveStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions) { + auto config = std::make_unique(); + // Add SSRCs for the stream. + config->remote_ssrc = ssrc; + config->local_ssrc = prng_.Rand(); + // Add header extensions. + for (size_t i = 0; i < kMaxNumExtensions; i++) { + uint8_t id = extensions.GetId(kExtensions[i].type); + if (id != RtpHeaderExtensionMap::kInvalidId) { + config->rtp_extensions.emplace_back(kExtensions[i].name, id); + } + } + + return std::make_unique(std::move(config)); +} + +std::unique_ptr +EventGenerator::NewAudioSendStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions) { + auto config = std::make_unique(); + // Add SSRC to the stream. + config->local_ssrc = ssrc; + // Add header extensions. 
+ for (size_t i = 0; i < kMaxNumExtensions; i++) { + uint8_t id = extensions.GetId(kExtensions[i].type); + if (id != RtpHeaderExtensionMap::kInvalidId) { + config->rtp_extensions.emplace_back(kExtensions[i].name, id); + } + } + return std::make_unique(std::move(config)); +} + +std::unique_ptr +EventGenerator::NewVideoReceiveStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions) { + auto config = std::make_unique(); + + // Add SSRCs for the stream. + config->remote_ssrc = ssrc; + config->local_ssrc = prng_.Rand(); + // Add extensions and settings for RTCP. + config->rtcp_mode = + prng_.Rand() ? RtcpMode::kCompound : RtcpMode::kReducedSize; + config->remb = prng_.Rand(); + config->rtx_ssrc = prng_.Rand(); + config->codecs.emplace_back(prng_.Rand() ? "VP8" : "H264", + prng_.Rand(127), prng_.Rand(127)); + // Add header extensions. + for (size_t i = 0; i < kMaxNumExtensions; i++) { + uint8_t id = extensions.GetId(kExtensions[i].type); + if (id != RtpHeaderExtensionMap::kInvalidId) { + config->rtp_extensions.emplace_back(kExtensions[i].name, id); + } + } + return std::make_unique(std::move(config)); +} + +std::unique_ptr +EventGenerator::NewVideoSendStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions) { + auto config = std::make_unique(); + + config->codecs.emplace_back(prng_.Rand() ? "VP8" : "H264", + prng_.Rand(127), prng_.Rand(127)); + config->local_ssrc = ssrc; + config->rtx_ssrc = prng_.Rand(); + // Add header extensions. 
+ for (size_t i = 0; i < kMaxNumExtensions; i++) { + uint8_t id = extensions.GetId(kExtensions[i].type); + if (id != RtpHeaderExtensionMap::kInvalidId) { + config->rtp_extensions.emplace_back(kExtensions[i].name, id); + } + } + return std::make_unique(std::move(config)); +} + +void EventVerifier::VerifyLoggedAlrStateEvent( + const RtcEventAlrState& original_event, + const LoggedAlrStateEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.in_alr(), logged_event.in_alr); +} + +void EventVerifier::VerifyLoggedAudioPlayoutEvent( + const RtcEventAudioPlayout& original_event, + const LoggedAudioPlayoutEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.ssrc(), logged_event.ssrc); +} + +void EventVerifier::VerifyLoggedAudioNetworkAdaptationEvent( + const RtcEventAudioNetworkAdaptation& original_event, + const LoggedAudioNetworkAdaptationEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + + EXPECT_EQ(original_event.config().bitrate_bps, + logged_event.config.bitrate_bps); + EXPECT_EQ(original_event.config().enable_dtx, logged_event.config.enable_dtx); + EXPECT_EQ(original_event.config().enable_fec, logged_event.config.enable_fec); + EXPECT_EQ(original_event.config().frame_length_ms, + logged_event.config.frame_length_ms); + EXPECT_EQ(original_event.config().num_channels, + logged_event.config.num_channels); + + // uplink_packet_loss_fraction + ASSERT_EQ(original_event.config().uplink_packet_loss_fraction.has_value(), + logged_event.config.uplink_packet_loss_fraction.has_value()); + if (original_event.config().uplink_packet_loss_fraction.has_value()) { + const float original = + original_event.config().uplink_packet_loss_fraction.value(); + const float logged = + logged_event.config.uplink_packet_loss_fraction.value(); + const float uplink_packet_loss_fraction_delta = 
std::abs(original - logged); + EXPECT_LE(uplink_packet_loss_fraction_delta, 0.0001f); + } +} + +void EventVerifier::VerifyLoggedBweDelayBasedUpdate( + const RtcEventBweUpdateDelayBased& original_event, + const LoggedBweDelayBasedUpdate& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); + EXPECT_EQ(original_event.detector_state(), logged_event.detector_state); +} + +void EventVerifier::VerifyLoggedBweLossBasedUpdate( + const RtcEventBweUpdateLossBased& original_event, + const LoggedBweLossBasedUpdate& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); + EXPECT_EQ(original_event.fraction_loss(), logged_event.fraction_lost); + EXPECT_EQ(original_event.total_packets(), logged_event.expected_packets); +} + +void EventVerifier::VerifyLoggedBweProbeClusterCreatedEvent( + const RtcEventProbeClusterCreated& original_event, + const LoggedBweProbeClusterCreatedEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.id(), logged_event.id); + EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); + EXPECT_EQ(original_event.min_probes(), logged_event.min_packets); + EXPECT_EQ(original_event.min_bytes(), logged_event.min_bytes); +} + +void EventVerifier::VerifyLoggedBweProbeFailureEvent( + const RtcEventProbeResultFailure& original_event, + const LoggedBweProbeFailureEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.id(), logged_event.id); + EXPECT_EQ(original_event.failure_reason(), logged_event.failure_reason); +} + +void EventVerifier::VerifyLoggedBweProbeSuccessEvent( + const RtcEventProbeResultSuccess& original_event, + const LoggedBweProbeSuccessEvent& logged_event) const { + 
EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.id(), logged_event.id); + EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); +} + +void EventVerifier::VerifyLoggedDtlsTransportState( + const RtcEventDtlsTransportState& original_event, + const LoggedDtlsTransportState& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.dtls_transport_state(), + logged_event.dtls_transport_state); +} + +void EventVerifier::VerifyLoggedDtlsWritableState( + const RtcEventDtlsWritableState& original_event, + const LoggedDtlsWritableState& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.writable(), logged_event.writable); +} + +void EventVerifier::VerifyLoggedFrameDecoded( + const RtcEventFrameDecoded& original_event, + const LoggedFrameDecoded& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.ssrc(), logged_event.ssrc); + EXPECT_EQ(original_event.render_time_ms(), logged_event.render_time_ms); + EXPECT_EQ(original_event.width(), logged_event.width); + EXPECT_EQ(original_event.height(), logged_event.height); + EXPECT_EQ(original_event.codec(), logged_event.codec); + EXPECT_EQ(original_event.qp(), logged_event.qp); +} + +void EventVerifier::VerifyLoggedIceCandidatePairConfig( + const RtcEventIceCandidatePairConfig& original_event, + const LoggedIceCandidatePairConfig& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + + EXPECT_EQ(original_event.type(), logged_event.type); + EXPECT_EQ(original_event.candidate_pair_id(), logged_event.candidate_pair_id); + EXPECT_EQ(original_event.candidate_pair_desc().local_candidate_type, + logged_event.local_candidate_type); + EXPECT_EQ(original_event.candidate_pair_desc().local_relay_protocol, + logged_event.local_relay_protocol); + 
EXPECT_EQ(original_event.candidate_pair_desc().local_network_type, + logged_event.local_network_type); + EXPECT_EQ(original_event.candidate_pair_desc().local_address_family, + logged_event.local_address_family); + EXPECT_EQ(original_event.candidate_pair_desc().remote_candidate_type, + logged_event.remote_candidate_type); + EXPECT_EQ(original_event.candidate_pair_desc().remote_address_family, + logged_event.remote_address_family); + EXPECT_EQ(original_event.candidate_pair_desc().candidate_pair_protocol, + logged_event.candidate_pair_protocol); +} + +void EventVerifier::VerifyLoggedIceCandidatePairEvent( + const RtcEventIceCandidatePair& original_event, + const LoggedIceCandidatePairEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + + EXPECT_EQ(original_event.type(), logged_event.type); + EXPECT_EQ(original_event.candidate_pair_id(), logged_event.candidate_pair_id); + if (encoding_type_ == RtcEventLog::EncodingType::NewFormat) { + EXPECT_EQ(original_event.transaction_id(), logged_event.transaction_id); + } +} + +void VerifyLoggedRtpHeader(const RtpPacket& original_header, + const RTPHeader& logged_header) { + // Standard RTP header. + EXPECT_EQ(original_header.Marker(), logged_header.markerBit); + EXPECT_EQ(original_header.PayloadType(), logged_header.payloadType); + EXPECT_EQ(original_header.SequenceNumber(), logged_header.sequenceNumber); + EXPECT_EQ(original_header.Timestamp(), logged_header.timestamp); + EXPECT_EQ(original_header.Ssrc(), logged_header.ssrc); + + EXPECT_EQ(original_header.headers_size(), logged_header.headerLength); + + // TransmissionOffset header extension. + ASSERT_EQ(original_header.HasExtension(), + logged_header.extension.hasTransmissionTimeOffset); + if (logged_header.extension.hasTransmissionTimeOffset) { + int32_t offset; + ASSERT_TRUE(original_header.GetExtension(&offset)); + EXPECT_EQ(offset, logged_header.extension.transmissionTimeOffset); + } + + // AbsoluteSendTime header extension. 
+ ASSERT_EQ(original_header.HasExtension(), + logged_header.extension.hasAbsoluteSendTime); + if (logged_header.extension.hasAbsoluteSendTime) { + uint32_t sendtime; + ASSERT_TRUE(original_header.GetExtension(&sendtime)); + EXPECT_EQ(sendtime, logged_header.extension.absoluteSendTime); + } + + // TransportSequenceNumber header extension. + ASSERT_EQ(original_header.HasExtension(), + logged_header.extension.hasTransportSequenceNumber); + if (logged_header.extension.hasTransportSequenceNumber) { + uint16_t seqnum; + ASSERT_TRUE(original_header.GetExtension(&seqnum)); + EXPECT_EQ(seqnum, logged_header.extension.transportSequenceNumber); + } + + // AudioLevel header extension. + ASSERT_EQ(original_header.HasExtension(), + logged_header.extension.hasAudioLevel); + if (logged_header.extension.hasAudioLevel) { + bool voice_activity; + uint8_t audio_level; + ASSERT_TRUE(original_header.GetExtension(&voice_activity, + &audio_level)); + EXPECT_EQ(voice_activity, logged_header.extension.voiceActivity); + EXPECT_EQ(audio_level, logged_header.extension.audioLevel); + } + + // VideoOrientation header extension. 
+ ASSERT_EQ(original_header.HasExtension(), + logged_header.extension.hasVideoRotation); + if (logged_header.extension.hasVideoRotation) { + uint8_t rotation; + ASSERT_TRUE(original_header.GetExtension(&rotation)); + EXPECT_EQ(ConvertCVOByteToVideoRotation(rotation), + logged_header.extension.videoRotation); + } +} + +void EventVerifier::VerifyLoggedRouteChangeEvent( + const RtcEventRouteChange& original_event, + const LoggedRouteChangeEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.connected(), logged_event.connected); + EXPECT_EQ(original_event.overhead(), logged_event.overhead); +} + +void EventVerifier::VerifyLoggedRemoteEstimateEvent( + const RtcEventRemoteEstimate& original_event, + const LoggedRemoteEstimateEvent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.link_capacity_lower_, + logged_event.link_capacity_lower); + EXPECT_EQ(original_event.link_capacity_upper_, + logged_event.link_capacity_upper); +} + +void EventVerifier::VerifyLoggedRtpPacketIncoming( + const RtcEventRtpPacketIncoming& original_event, + const LoggedRtpPacketIncoming& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + + EXPECT_EQ(original_event.header().headers_size(), + logged_event.rtp.header_length); + + EXPECT_EQ(original_event.packet_length(), logged_event.rtp.total_length); + + // Currently, RTC eventlog encoder-parser can only maintain padding length + // if packet is full padding. 
+ EXPECT_EQ(original_event.padding_length(), + logged_event.rtp.header.paddingLength); + + VerifyLoggedRtpHeader(original_event.header(), logged_event.rtp.header); +} + +void EventVerifier::VerifyLoggedRtpPacketOutgoing( + const RtcEventRtpPacketOutgoing& original_event, + const LoggedRtpPacketOutgoing& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + + EXPECT_EQ(original_event.header().headers_size(), + logged_event.rtp.header_length); + + EXPECT_EQ(original_event.packet_length(), logged_event.rtp.total_length); + + // Currently, RTC eventlog encoder-parser can only maintain padding length + // if packet is full padding. + EXPECT_EQ(original_event.padding_length(), + logged_event.rtp.header.paddingLength); + + // TODO(terelius): Probe cluster ID isn't parsed, used or tested. Unless + // someone has a strong reason to keep it, it'll be removed. + + VerifyLoggedRtpHeader(original_event.header(), logged_event.rtp.header); +} + +void EventVerifier::VerifyLoggedGenericPacketSent( + const RtcEventGenericPacketSent& original_event, + const LoggedGenericPacketSent& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.packet_number(), logged_event.packet_number); + EXPECT_EQ(original_event.overhead_length(), logged_event.overhead_length); + EXPECT_EQ(original_event.payload_length(), logged_event.payload_length); + EXPECT_EQ(original_event.padding_length(), logged_event.padding_length); +} + +void EventVerifier::VerifyLoggedGenericPacketReceived( + const RtcEventGenericPacketReceived& original_event, + const LoggedGenericPacketReceived& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.packet_number(), logged_event.packet_number); + EXPECT_EQ(static_cast(original_event.packet_length()), + logged_event.packet_length); +} + +void EventVerifier::VerifyLoggedGenericAckReceived( + const 
RtcEventGenericAckReceived& original_event, + const LoggedGenericAckReceived& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + EXPECT_EQ(original_event.packet_number(), logged_event.packet_number); + EXPECT_EQ(original_event.acked_packet_number(), + logged_event.acked_packet_number); + EXPECT_EQ(original_event.receive_acked_packet_time_ms(), + logged_event.receive_acked_packet_time_ms); +} + +void EventVerifier::VerifyLoggedRtcpPacketIncoming( + const RtcEventRtcpPacketIncoming& original_event, + const LoggedRtcpPacketIncoming& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + + ASSERT_EQ(original_event.packet().size(), logged_event.rtcp.raw_data.size()); + EXPECT_EQ( + memcmp(original_event.packet().data(), logged_event.rtcp.raw_data.data(), + original_event.packet().size()), + 0); +} + +void EventVerifier::VerifyLoggedRtcpPacketOutgoing( + const RtcEventRtcpPacketOutgoing& original_event, + const LoggedRtcpPacketOutgoing& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + + ASSERT_EQ(original_event.packet().size(), logged_event.rtcp.raw_data.size()); + EXPECT_EQ( + memcmp(original_event.packet().data(), logged_event.rtcp.raw_data.data(), + original_event.packet().size()), + 0); +} + +void EventVerifier::VerifyReportBlock( + const rtcp::ReportBlock& original_report_block, + const rtcp::ReportBlock& logged_report_block) { + EXPECT_EQ(original_report_block.source_ssrc(), + logged_report_block.source_ssrc()); + EXPECT_EQ(original_report_block.fraction_lost(), + logged_report_block.fraction_lost()); + EXPECT_EQ(original_report_block.cumulative_lost_signed(), + logged_report_block.cumulative_lost_signed()); + EXPECT_EQ(original_report_block.extended_high_seq_num(), + logged_report_block.extended_high_seq_num()); + EXPECT_EQ(original_report_block.jitter(), logged_report_block.jitter()); + EXPECT_EQ(original_report_block.last_sr(), 
logged_report_block.last_sr()); + EXPECT_EQ(original_report_block.delay_since_last_sr(), + logged_report_block.delay_since_last_sr()); +} + +void EventVerifier::VerifyLoggedSenderReport( + int64_t log_time_us, + const rtcp::SenderReport& original_sr, + const LoggedRtcpPacketSenderReport& logged_sr) { + EXPECT_EQ(log_time_us, logged_sr.log_time_us()); + EXPECT_EQ(original_sr.sender_ssrc(), logged_sr.sr.sender_ssrc()); + EXPECT_EQ(original_sr.ntp(), logged_sr.sr.ntp()); + EXPECT_EQ(original_sr.rtp_timestamp(), logged_sr.sr.rtp_timestamp()); + EXPECT_EQ(original_sr.sender_packet_count(), + logged_sr.sr.sender_packet_count()); + EXPECT_EQ(original_sr.sender_octet_count(), + logged_sr.sr.sender_octet_count()); + ASSERT_EQ(original_sr.report_blocks().size(), + logged_sr.sr.report_blocks().size()); + for (size_t i = 0; i < original_sr.report_blocks().size(); i++) { + VerifyReportBlock(original_sr.report_blocks()[i], + logged_sr.sr.report_blocks()[i]); + } +} + +void EventVerifier::VerifyLoggedReceiverReport( + int64_t log_time_us, + const rtcp::ReceiverReport& original_rr, + const LoggedRtcpPacketReceiverReport& logged_rr) { + EXPECT_EQ(log_time_us, logged_rr.log_time_us()); + EXPECT_EQ(original_rr.sender_ssrc(), logged_rr.rr.sender_ssrc()); + ASSERT_EQ(original_rr.report_blocks().size(), + logged_rr.rr.report_blocks().size()); + for (size_t i = 0; i < original_rr.report_blocks().size(); i++) { + VerifyReportBlock(original_rr.report_blocks()[i], + logged_rr.rr.report_blocks()[i]); + } +} + +void EventVerifier::VerifyLoggedExtendedReports( + int64_t log_time_us, + const rtcp::ExtendedReports& original_xr, + const LoggedRtcpPacketExtendedReports& logged_xr) { + EXPECT_EQ(original_xr.sender_ssrc(), logged_xr.xr.sender_ssrc()); + + EXPECT_EQ(original_xr.rrtr().has_value(), logged_xr.xr.rrtr().has_value()); + if (original_xr.rrtr().has_value() && logged_xr.xr.rrtr().has_value()) { + EXPECT_EQ(original_xr.rrtr()->ntp(), logged_xr.xr.rrtr()->ntp()); + } + + const auto& 
original_subblocks = original_xr.dlrr().sub_blocks(); + const auto& logged_subblocks = logged_xr.xr.dlrr().sub_blocks(); + ASSERT_EQ(original_subblocks.size(), logged_subblocks.size()); + for (size_t i = 0; i < original_subblocks.size(); i++) { + EXPECT_EQ(original_subblocks[i].ssrc, logged_subblocks[i].ssrc); + EXPECT_EQ(original_subblocks[i].last_rr, logged_subblocks[i].last_rr); + EXPECT_EQ(original_subblocks[i].delay_since_last_rr, + logged_subblocks[i].delay_since_last_rr); + } + + EXPECT_EQ(original_xr.target_bitrate().has_value(), + logged_xr.xr.target_bitrate().has_value()); + if (original_xr.target_bitrate().has_value() && + logged_xr.xr.target_bitrate().has_value()) { + const auto& original_bitrates = + original_xr.target_bitrate()->GetTargetBitrates(); + const auto& logged_bitrates = + logged_xr.xr.target_bitrate()->GetTargetBitrates(); + ASSERT_EQ(original_bitrates.size(), logged_bitrates.size()); + for (size_t i = 0; i < original_bitrates.size(); i++) { + EXPECT_EQ(original_bitrates[i].spatial_layer, + logged_bitrates[i].spatial_layer); + EXPECT_EQ(original_bitrates[i].temporal_layer, + logged_bitrates[i].temporal_layer); + EXPECT_EQ(original_bitrates[i].target_bitrate_kbps, + logged_bitrates[i].target_bitrate_kbps); + } + } +} + +void EventVerifier::VerifyLoggedFir(int64_t log_time_us, + const rtcp::Fir& original_fir, + const LoggedRtcpPacketFir& logged_fir) { + EXPECT_EQ(original_fir.sender_ssrc(), logged_fir.fir.sender_ssrc()); + + const auto& original_requests = original_fir.requests(); + const auto& logged_requests = logged_fir.fir.requests(); + ASSERT_EQ(original_requests.size(), logged_requests.size()); + for (size_t i = 0; i < original_requests.size(); i++) { + EXPECT_EQ(original_requests[i].ssrc, logged_requests[i].ssrc); + EXPECT_EQ(original_requests[i].seq_nr, logged_requests[i].seq_nr); + } +} + +void EventVerifier::VerifyLoggedPli(int64_t log_time_us, + const rtcp::Pli& original_pli, + const LoggedRtcpPacketPli& logged_pli) { + 
EXPECT_EQ(original_pli.sender_ssrc(), logged_pli.pli.sender_ssrc()); + EXPECT_EQ(original_pli.media_ssrc(), logged_pli.pli.media_ssrc()); +} + +void EventVerifier::VerifyLoggedNack(int64_t log_time_us, + const rtcp::Nack& original_nack, + const LoggedRtcpPacketNack& logged_nack) { + EXPECT_EQ(log_time_us, logged_nack.log_time_us()); + EXPECT_EQ(original_nack.packet_ids(), logged_nack.nack.packet_ids()); +} + +void EventVerifier::VerifyLoggedTransportFeedback( + int64_t log_time_us, + const rtcp::TransportFeedback& original_transport_feedback, + const LoggedRtcpPacketTransportFeedback& logged_transport_feedback) { + EXPECT_EQ(log_time_us, logged_transport_feedback.log_time_us()); + ASSERT_EQ( + original_transport_feedback.GetReceivedPackets().size(), + logged_transport_feedback.transport_feedback.GetReceivedPackets().size()); + for (size_t i = 0; + i < original_transport_feedback.GetReceivedPackets().size(); i++) { + EXPECT_EQ( + original_transport_feedback.GetReceivedPackets()[i].sequence_number(), + logged_transport_feedback.transport_feedback.GetReceivedPackets()[i] + .sequence_number()); + EXPECT_EQ( + original_transport_feedback.GetReceivedPackets()[i].delta_us(), + logged_transport_feedback.transport_feedback.GetReceivedPackets()[i] + .delta_us()); + } +} + +void EventVerifier::VerifyLoggedRemb(int64_t log_time_us, + const rtcp::Remb& original_remb, + const LoggedRtcpPacketRemb& logged_remb) { + EXPECT_EQ(log_time_us, logged_remb.log_time_us()); + EXPECT_EQ(original_remb.ssrcs(), logged_remb.remb.ssrcs()); + EXPECT_EQ(original_remb.bitrate_bps(), logged_remb.remb.bitrate_bps()); +} + +void EventVerifier::VerifyLoggedLossNotification( + int64_t log_time_us, + const rtcp::LossNotification& original_loss_notification, + const LoggedRtcpPacketLossNotification& logged_loss_notification) { + EXPECT_EQ(log_time_us, logged_loss_notification.log_time_us()); + EXPECT_EQ(original_loss_notification.last_decoded(), + 
logged_loss_notification.loss_notification.last_decoded()); + EXPECT_EQ(original_loss_notification.last_received(), + logged_loss_notification.loss_notification.last_received()); + EXPECT_EQ(original_loss_notification.decodability_flag(), + logged_loss_notification.loss_notification.decodability_flag()); +} + +void EventVerifier::VerifyLoggedStartEvent( + int64_t start_time_us, + int64_t utc_start_time_us, + const LoggedStartEvent& logged_event) const { + EXPECT_EQ(start_time_us / 1000, logged_event.log_time_ms()); + if (encoding_type_ == RtcEventLog::EncodingType::NewFormat) { + EXPECT_EQ(utc_start_time_us / 1000, logged_event.utc_start_time_ms); + } +} + +void EventVerifier::VerifyLoggedStopEvent( + int64_t stop_time_us, + const LoggedStopEvent& logged_event) const { + EXPECT_EQ(stop_time_us / 1000, logged_event.log_time_ms()); +} + +void VerifyLoggedStreamConfig(const rtclog::StreamConfig& original_config, + const rtclog::StreamConfig& logged_config) { + EXPECT_EQ(original_config.local_ssrc, logged_config.local_ssrc); + EXPECT_EQ(original_config.remote_ssrc, logged_config.remote_ssrc); + EXPECT_EQ(original_config.rtx_ssrc, logged_config.rtx_ssrc); + + EXPECT_EQ(original_config.rtp_extensions.size(), + logged_config.rtp_extensions.size()); + size_t recognized_extensions = 0; + for (size_t i = 0; i < kMaxNumExtensions; i++) { + auto original_id = + GetExtensionId(original_config.rtp_extensions, kExtensions[i].name); + auto logged_id = + GetExtensionId(logged_config.rtp_extensions, kExtensions[i].name); + EXPECT_EQ(original_id, logged_id) + << "IDs for " << kExtensions[i].name << " don't match. Original ID " + << original_id.value_or(-1) << ". 
Parsed ID " << logged_id.value_or(-1) + << "."; + if (original_id) { + recognized_extensions++; + } + } + EXPECT_EQ(recognized_extensions, original_config.rtp_extensions.size()); +} + +void EventVerifier::VerifyLoggedAudioRecvConfig( + const RtcEventAudioReceiveStreamConfig& original_event, + const LoggedAudioRecvConfig& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + VerifyLoggedStreamConfig(original_event.config(), logged_event.config); +} + +void EventVerifier::VerifyLoggedAudioSendConfig( + const RtcEventAudioSendStreamConfig& original_event, + const LoggedAudioSendConfig& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + VerifyLoggedStreamConfig(original_event.config(), logged_event.config); +} + +void EventVerifier::VerifyLoggedVideoRecvConfig( + const RtcEventVideoReceiveStreamConfig& original_event, + const LoggedVideoRecvConfig& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + VerifyLoggedStreamConfig(original_event.config(), logged_event.config); +} + +void EventVerifier::VerifyLoggedVideoSendConfig( + const RtcEventVideoSendStreamConfig& original_event, + const LoggedVideoSendConfig& logged_event) const { + EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); + VerifyLoggedStreamConfig(original_event.config(), logged_event.config); +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.h new file mode 100644 index 000000000..8f564ff9d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.h @@ -0,0 +1,325 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_UNITTEST_HELPER_H_ +#define LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_UNITTEST_HELPER_H_ + +#include +#include + +#include + +#include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" +#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" +#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" +#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include 
"logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "logging/rtc_event_log/rtc_stream_config.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtp_packet.h" +#include "rtc_base/random.h" + +namespace webrtc { + +namespace test { + +class EventGenerator { + public: + explicit EventGenerator(uint64_t seed) : prng_(seed) {} + + std::unique_ptr NewAlrState(); + std::unique_ptr NewAudioNetworkAdaptation(); + std::unique_ptr NewAudioPlayout(uint32_t ssrc); + std::unique_ptr NewBweUpdateDelayBased(); + std::unique_ptr NewBweUpdateLossBased(); + std::unique_ptr NewDtlsTransportState(); + std::unique_ptr NewDtlsWritableState(); + std::unique_ptr NewFrameDecodedEvent(uint32_t ssrc); + std::unique_ptr NewGenericAckReceived(); + std::unique_ptr NewGenericPacketReceived(); + std::unique_ptr NewGenericPacketSent(); + std::unique_ptr NewIceCandidatePair(); + std::unique_ptr NewIceCandidatePairConfig(); + std::unique_ptr 
NewProbeClusterCreated(); + std::unique_ptr NewProbeResultFailure(); + std::unique_ptr NewProbeResultSuccess(); + std::unique_ptr NewRouteChange(); + std::unique_ptr NewRemoteEstimate(); + std::unique_ptr NewRtcpPacketIncoming(); + std::unique_ptr NewRtcpPacketOutgoing(); + + rtcp::SenderReport NewSenderReport(); + rtcp::ReceiverReport NewReceiverReport(); + rtcp::ExtendedReports NewExtendedReports(); + rtcp::Nack NewNack(); + rtcp::Remb NewRemb(); + rtcp::Fir NewFir(); + rtcp::Pli NewPli(); + rtcp::TransportFeedback NewTransportFeedback(); + rtcp::LossNotification NewLossNotification(); + + // |all_configured_exts| determines whether the RTP packet exhibits all + // configured extensions, or a random subset thereof. + void RandomizeRtpPacket(size_t payload_size, + size_t padding_size, + uint32_t ssrc, + const RtpHeaderExtensionMap& extension_map, + RtpPacket* rtp_packet, + bool all_configured_exts); + + // |all_configured_exts| determines whether the RTP packet exhibits all + // configured extensions, or a random subset thereof. + std::unique_ptr NewRtpPacketIncoming( + uint32_t ssrc, + const RtpHeaderExtensionMap& extension_map, + bool all_configured_exts = true); + + // |all_configured_exts| determines whether the RTP packet exhibits all + // configured extensions, or a random subset thereof. + std::unique_ptr NewRtpPacketOutgoing( + uint32_t ssrc, + const RtpHeaderExtensionMap& extension_map, + bool all_configured_exts = true); + + // |configure_all| determines whether all supported extensions are configured, + // or a random subset. 
+ RtpHeaderExtensionMap NewRtpHeaderExtensionMap(bool configure_all = false); + + std::unique_ptr NewAudioReceiveStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions); + + std::unique_ptr NewAudioSendStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions); + + std::unique_ptr NewVideoReceiveStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions); + + std::unique_ptr NewVideoSendStreamConfig( + uint32_t ssrc, + const RtpHeaderExtensionMap& extensions); + + private: + rtcp::ReportBlock NewReportBlock(); + int sent_packet_number_ = 0; + int received_packet_number_ = 0; + + Random prng_; +}; + +class EventVerifier { + public: + explicit EventVerifier(RtcEventLog::EncodingType encoding_type) + : encoding_type_(encoding_type) {} + + void VerifyLoggedAlrStateEvent(const RtcEventAlrState& original_event, + const LoggedAlrStateEvent& logged_event) const; + + void VerifyLoggedAudioPlayoutEvent( + const RtcEventAudioPlayout& original_event, + const LoggedAudioPlayoutEvent& logged_event) const; + + void VerifyLoggedAudioNetworkAdaptationEvent( + const RtcEventAudioNetworkAdaptation& original_event, + const LoggedAudioNetworkAdaptationEvent& logged_event) const; + + void VerifyLoggedBweDelayBasedUpdate( + const RtcEventBweUpdateDelayBased& original_event, + const LoggedBweDelayBasedUpdate& logged_event) const; + + void VerifyLoggedBweLossBasedUpdate( + const RtcEventBweUpdateLossBased& original_event, + const LoggedBweLossBasedUpdate& logged_event) const; + + void VerifyLoggedBweProbeClusterCreatedEvent( + const RtcEventProbeClusterCreated& original_event, + const LoggedBweProbeClusterCreatedEvent& logged_event) const; + + void VerifyLoggedBweProbeFailureEvent( + const RtcEventProbeResultFailure& original_event, + const LoggedBweProbeFailureEvent& logged_event) const; + + void VerifyLoggedBweProbeSuccessEvent( + const RtcEventProbeResultSuccess& original_event, + const LoggedBweProbeSuccessEvent& logged_event) const; + + 
void VerifyLoggedDtlsTransportState( + const RtcEventDtlsTransportState& original_event, + const LoggedDtlsTransportState& logged_event) const; + + void VerifyLoggedDtlsWritableState( + const RtcEventDtlsWritableState& original_event, + const LoggedDtlsWritableState& logged_event) const; + + void VerifyLoggedFrameDecoded(const RtcEventFrameDecoded& original_event, + const LoggedFrameDecoded& logged_event) const; + + void VerifyLoggedIceCandidatePairConfig( + const RtcEventIceCandidatePairConfig& original_event, + const LoggedIceCandidatePairConfig& logged_event) const; + + void VerifyLoggedIceCandidatePairEvent( + const RtcEventIceCandidatePair& original_event, + const LoggedIceCandidatePairEvent& logged_event) const; + + void VerifyLoggedRouteChangeEvent( + const RtcEventRouteChange& original_event, + const LoggedRouteChangeEvent& logged_event) const; + + void VerifyLoggedRemoteEstimateEvent( + const RtcEventRemoteEstimate& original_event, + const LoggedRemoteEstimateEvent& logged_event) const; + + void VerifyLoggedRtpPacketIncoming( + const RtcEventRtpPacketIncoming& original_event, + const LoggedRtpPacketIncoming& logged_event) const; + + void VerifyLoggedRtpPacketOutgoing( + const RtcEventRtpPacketOutgoing& original_event, + const LoggedRtpPacketOutgoing& logged_event) const; + + void VerifyLoggedGenericPacketSent( + const RtcEventGenericPacketSent& original_event, + const LoggedGenericPacketSent& logged_event) const; + + void VerifyLoggedGenericPacketReceived( + const RtcEventGenericPacketReceived& original_event, + const LoggedGenericPacketReceived& logged_event) const; + + void VerifyLoggedGenericAckReceived( + const RtcEventGenericAckReceived& original_event, + const LoggedGenericAckReceived& logged_event) const; + + template + void VerifyLoggedRtpPacket(const EventType& original_event, + const ParsedType& logged_event) { + static_assert(sizeof(ParsedType) == 0, + "You have to use one of the two defined template " + "specializations of 
VerifyLoggedRtpPacket"); + } + + template + void VerifyLoggedRtpPacket(const RtcEventRtpPacketIncoming& original_event, + const LoggedRtpPacketIncoming& logged_event) { + VerifyLoggedRtpPacketIncoming(original_event, logged_event); + } + + template + void VerifyLoggedRtpPacket(const RtcEventRtpPacketOutgoing& original_event, + const LoggedRtpPacketOutgoing& logged_event) { + VerifyLoggedRtpPacketOutgoing(original_event, logged_event); + } + + void VerifyLoggedRtcpPacketIncoming( + const RtcEventRtcpPacketIncoming& original_event, + const LoggedRtcpPacketIncoming& logged_event) const; + + void VerifyLoggedRtcpPacketOutgoing( + const RtcEventRtcpPacketOutgoing& original_event, + const LoggedRtcpPacketOutgoing& logged_event) const; + + void VerifyLoggedSenderReport(int64_t log_time_us, + const rtcp::SenderReport& original_sr, + const LoggedRtcpPacketSenderReport& logged_sr); + void VerifyLoggedReceiverReport( + int64_t log_time_us, + const rtcp::ReceiverReport& original_rr, + const LoggedRtcpPacketReceiverReport& logged_rr); + void VerifyLoggedExtendedReports( + int64_t log_time_us, + const rtcp::ExtendedReports& original_xr, + const LoggedRtcpPacketExtendedReports& logged_xr); + void VerifyLoggedFir(int64_t log_time_us, + const rtcp::Fir& original_fir, + const LoggedRtcpPacketFir& logged_fir); + void VerifyLoggedPli(int64_t log_time_us, + const rtcp::Pli& original_pli, + const LoggedRtcpPacketPli& logged_pli); + void VerifyLoggedNack(int64_t log_time_us, + const rtcp::Nack& original_nack, + const LoggedRtcpPacketNack& logged_nack); + void VerifyLoggedTransportFeedback( + int64_t log_time_us, + const rtcp::TransportFeedback& original_transport_feedback, + const LoggedRtcpPacketTransportFeedback& logged_transport_feedback); + void VerifyLoggedRemb(int64_t log_time_us, + const rtcp::Remb& original_remb, + const LoggedRtcpPacketRemb& logged_remb); + void VerifyLoggedLossNotification( + int64_t log_time_us, + const rtcp::LossNotification& original_loss_notification, + 
const LoggedRtcpPacketLossNotification& logged_loss_notification); + + void VerifyLoggedStartEvent(int64_t start_time_us, + int64_t utc_start_time_us, + const LoggedStartEvent& logged_event) const; + void VerifyLoggedStopEvent(int64_t stop_time_us, + const LoggedStopEvent& logged_event) const; + + void VerifyLoggedAudioRecvConfig( + const RtcEventAudioReceiveStreamConfig& original_event, + const LoggedAudioRecvConfig& logged_event) const; + + void VerifyLoggedAudioSendConfig( + const RtcEventAudioSendStreamConfig& original_event, + const LoggedAudioSendConfig& logged_event) const; + + void VerifyLoggedVideoRecvConfig( + const RtcEventVideoReceiveStreamConfig& original_event, + const LoggedVideoRecvConfig& logged_event) const; + + void VerifyLoggedVideoSendConfig( + const RtcEventVideoSendStreamConfig& original_event, + const LoggedVideoSendConfig& logged_event) const; + + private: + void VerifyReportBlock(const rtcp::ReportBlock& original_report_block, + const rtcp::ReportBlock& logged_report_block); + + RtcEventLog::EncodingType encoding_type_; +}; + +} // namespace test +} // namespace webrtc + +#endif // LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_UNITTEST_HELPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc index 1040757f8..734a30be7 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc @@ -183,7 +183,8 @@ absl::optional FakeVoiceMediaChannel::GetBaseMinimumPlayoutDelayMs( } return absl::nullopt; } -bool FakeVoiceMediaChannel::GetStats(VoiceMediaInfo* info) { +bool FakeVoiceMediaChannel::GetStats(VoiceMediaInfo* info, + bool get_and_clear_legacy_stats) { return false; } void FakeVoiceMediaChannel::SetRawAudioSink( diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h index 338c329aa..1751dd8bf 100644 --- 
a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h @@ -349,7 +349,7 @@ class FakeVoiceMediaChannel : public RtpHelper { absl::optional GetBaseMinimumPlayoutDelayMs( uint32_t ssrc) const override; - bool GetStats(VoiceMediaInfo* info) override; + bool GetStats(VoiceMediaInfo* info, bool get_and_clear_legacy_stats) override; void SetRawAudioSink( uint32_t ssrc, diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h index eb08f69b5..3df83edce 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h @@ -28,7 +28,7 @@ namespace cricket { // Fake NetworkInterface that sends/receives RTP/RTCP packets. class FakeNetworkInterface : public MediaChannel::NetworkInterface, - public rtc::MessageHandler { + public rtc::MessageHandlerAutoCleanup { public: FakeNetworkInterface() : thread_(rtc::Thread::Current()), diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h index e8400a58a..a947b4799 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h @@ -204,7 +204,8 @@ class MediaChannel : public sigslot::has_slots<> { // ssrc must be the first SSRC of the media stream if the stream uses // multiple SSRCs. virtual bool RemoveRecvStream(uint32_t ssrc) = 0; - // Resets any cached StreamParams for an unsignaled RecvStream. + // Resets any cached StreamParams for an unsignaled RecvStream, and removes + // any existing unsignaled streams. virtual void ResetUnsignaledRecvStream() = 0; // Returns the absoulte sendtime extension id value from media channel. 
virtual int GetRtpSendTimeExtnId() const; @@ -833,7 +834,8 @@ class VoiceMediaChannel : public MediaChannel, public Delayable { // DTMF event 0-9, *, #, A-D. virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0; // Gets quality stats for the channel. - virtual bool GetStats(VoiceMediaInfo* info) = 0; + virtual bool GetStats(VoiceMediaInfo* info, + bool get_and_clear_legacy_stats) = 0; virtual void SetRawAudioSink( uint32_t ssrc, diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc index 31c9d4d1a..840da1d9b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc @@ -14,7 +14,7 @@ namespace cricket { const int kVideoCodecClockrate = 90000; const int kDataCodecClockrate = 90000; -const int kDataMaxBandwidth = 30720; // bps +const int kRtpDataMaxBandwidth = 30720; // bps const float kHighSystemCpuThreshold = 0.85f; const float kLowSystemCpuThreshold = 0.65f; @@ -107,7 +107,6 @@ const char kH264CodecName[] = "H264"; #ifndef DISABLE_H265 const char kH265CodecName[] = "H265"; #endif -const char kHEVCCodecName[] = "H265X"; // RFC 6184 RTP Payload Format for H.264 video const char kH264FmtpProfileLevelId[] = "profile-level-id"; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h index e0e0ecc81..2f5733dfb 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h @@ -21,7 +21,7 @@ namespace cricket { extern const int kVideoCodecClockrate; extern const int kDataCodecClockrate; -extern const int kDataMaxBandwidth; // bps +extern const int kRtpDataMaxBandwidth; // bps // Default CPU thresholds. 
extern const float kHighSystemCpuThreshold; @@ -133,7 +133,6 @@ RTC_EXPORT extern const char kH264CodecName[]; #ifndef DISABLE_H265 RTC_EXPORT extern const char kH265CodecName[]; #endif -RTC_EXPORT extern const char kHEVCCodecName[]; // RFC 6184 RTP Payload Format for H.264 video RTC_EXPORT extern const char kH264FmtpProfileLevelId[]; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc index 805025872..36a9694cf 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc @@ -161,11 +161,20 @@ webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( } CompositeMediaEngine::CompositeMediaEngine( - std::unique_ptr voice_engine, + std::unique_ptr trials, + std::unique_ptr audio_engine, std::unique_ptr video_engine) - : voice_engine_(std::move(voice_engine)), + : trials_(std::move(trials)), + voice_engine_(std::move(audio_engine)), video_engine_(std::move(video_engine)) {} +CompositeMediaEngine::CompositeMediaEngine( + std::unique_ptr audio_engine, + std::unique_ptr video_engine) + : CompositeMediaEngine(nullptr, + std::move(audio_engine), + std::move(video_engine)) {} + CompositeMediaEngine::~CompositeMediaEngine() = default; bool CompositeMediaEngine::Init() { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h index 4d9cc56a1..1d8917cfc 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_encoder_factory.h" #include "api/crypto/crypto_options.h" #include "api/rtp_parameters.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/video/video_bitrate_allocator_factory.h" #include "call/audio_state.h" #include "media/base/codec.h" @@ -131,8 +132,12 @@ class MediaEngineInterface { // 
CompositeMediaEngine constructs a MediaEngine from separate // voice and video engine classes. +// Optionally owns a WebRtcKeyValueConfig trials map. class CompositeMediaEngine : public MediaEngineInterface { public: + CompositeMediaEngine(std::unique_ptr trials, + std::unique_ptr audio_engine, + std::unique_ptr video_engine); CompositeMediaEngine(std::unique_ptr audio_engine, std::unique_ptr video_engine); ~CompositeMediaEngine() override; @@ -144,6 +149,7 @@ class CompositeMediaEngine : public MediaEngineInterface { const VideoEngineInterface& video() const override; private: + const std::unique_ptr trials_; std::unique_ptr voice_engine_; std::unique_ptr video_engine_; }; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.cc b/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.cc index 0303cd33d..f05dffabf 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.cc @@ -62,7 +62,7 @@ RtpDataMediaChannel::RtpDataMediaChannel(const MediaConfig& config) void RtpDataMediaChannel::Construct() { sending_ = false; receiving_ = false; - send_limiter_.reset(new rtc::DataRateLimiter(kDataMaxBandwidth / 8, 1.0)); + send_limiter_.reset(new rtc::DataRateLimiter(kRtpDataMaxBandwidth / 8, 1.0)); } RtpDataMediaChannel::~RtpDataMediaChannel() { @@ -245,7 +245,7 @@ void RtpDataMediaChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, bool RtpDataMediaChannel::SetMaxSendBandwidth(int bps) { if (bps <= 0) { - bps = kDataMaxBandwidth; + bps = kRtpDataMaxBandwidth; } send_limiter_.reset(new rtc::DataRateLimiter(bps / 8, 1.0)); RTC_LOG(LS_INFO) << "RtpDataMediaChannel::SetSendBandwidth to " << bps diff --git a/TMessagesProj/jni/voip/webrtc/media/base/rtp_utils.cc b/TMessagesProj/jni/voip/webrtc/media/base/rtp_utils.cc index 4a2b3267c..471417522 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/rtp_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/rtp_utils.cc @@ -323,6 
+323,7 @@ absl::string_view RtpPacketTypeToString(RtpPacketType packet_type) { case RtpPacketType::kUnknown: return "Unknown"; } + RTC_CHECK_NOTREACHED(); } RtpPacketType InferRtpPacketType(rtc::ArrayView packet) { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc index 73e77cc37..ddcf4cac8 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc @@ -349,4 +349,22 @@ void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) { source_resolution_alignment_, sink_wants.resolution_alignment); } +int VideoAdapter::GetTargetPixels() const { + webrtc::MutexLock lock(&mutex_); + return resolution_request_target_pixel_count_; +} + +float VideoAdapter::GetMaxFramerate() const { + webrtc::MutexLock lock(&mutex_); + // Minimum of |max_fps_| and |max_framerate_request_| is used to throttle + // frame-rate. + int framerate = std::min(max_framerate_request_, + max_fps_.value_or(max_framerate_request_)); + if (framerate == std::numeric_limits::max()) { + return std::numeric_limits::infinity(); + } else { + return max_framerate_request_; + } +} + } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h index 2c4263276..3ed58954e 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h @@ -20,6 +20,7 @@ #include "media/base/video_common.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" namespace cricket { @@ -28,7 +29,7 @@ namespace cricket { // specified input and output formats. The adaptation includes dropping frames // to reduce frame rate and scaling frames. // VideoAdapter is thread safe. 
-class VideoAdapter { +class RTC_EXPORT VideoAdapter { public: VideoAdapter(); // The source requests output frames whose width and height are divisible @@ -97,6 +98,14 @@ class VideoAdapter { void OnSinkWants(const rtc::VideoSinkWants& sink_wants) RTC_LOCKS_EXCLUDED(mutex_); + // Returns maximum image area, which shouldn't impose any adaptations. + // Can return |numeric_limits::max()| if no limit is set. + int GetTargetPixels() const; + + // Returns current frame-rate limit. + // Can return |numeric_limits::infinity()| if no limit is set. + float GetMaxFramerate() const; + private: // Determine if frame should be dropped based on input fps and requested fps. bool KeepFrame(int64_t in_timestamp_ns) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -136,7 +145,7 @@ class VideoAdapter { int max_framerate_request_ RTC_GUARDED_BY(mutex_); // The critical section to protect the above variables. - webrtc::Mutex mutex_; + mutable webrtc::Mutex mutex_; RTC_DISALLOW_COPY_AND_ASSIGN(VideoAdapter); }; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_video_codec_factory.cc b/TMessagesProj/jni/voip/webrtc/media/engine/fake_video_codec_factory.cc index 63a1d5096..6f4f796b1 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_video_codec_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_video_codec_factory.cc @@ -15,7 +15,6 @@ #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc index cb62d9fc0..e320880b2 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc +++ 
b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc @@ -100,7 +100,8 @@ void FakeAudioReceiveStream::Reconfigure( config_ = config; } -webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats() const { +webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats( + bool get_and_clear_legacy_stats) const { return stats_; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h index 97eb49c89..385bbcd76 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h @@ -24,6 +24,7 @@ #include #include +#include "api/transport/field_trial_based_config.h" #include "api/video/video_frame.h" #include "call/audio_receive_stream.h" #include "call/audio_send_stream.h" @@ -104,7 +105,8 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream { void Start() override { started_ = true; } void Stop() override { started_ = false; } - webrtc::AudioReceiveStream::Stats GetStats() const override; + webrtc::AudioReceiveStream::Stats GetStats( + bool get_and_clear_legacy_stats) const override; void SetSink(webrtc::AudioSinkInterface* sink) override; void SetGain(float gain) override; bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override { @@ -361,6 +363,10 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { webrtc::Call::Stats GetStats() const override; + const webrtc::WebRtcKeyValueConfig& trials() const override { + return trials_; + } + void SignalChannelNetworkState(webrtc::MediaType media, webrtc::NetworkState state) override; void OnAudioTransportOverheadChanged( @@ -384,6 +390,7 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { int num_created_send_streams_; int num_created_receive_streams_; + webrtc::FieldTrialBasedConfig trials_; }; } // namespace cricket diff --git 
a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc index 6e63ec6f7..f74d4adfb 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/match.h" #include "absl/types/optional.h" #include "api/video/video_codec_constants.h" #include "media/base/media_constants.h" @@ -26,7 +27,6 @@ #include "rtc_base/experiments/normalize_simulcast_size_experiment.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace cricket { @@ -103,7 +103,9 @@ constexpr const SimulcastFormat kSimulcastFormats[] = { const int kMaxScreenshareSimulcastLayers = 2; // Multiway: Number of temporal layers for each simulcast stream. -int DefaultNumberOfTemporalLayers(int simulcast_id, bool screenshare) { +int DefaultNumberOfTemporalLayers(int simulcast_id, + bool screenshare, + const webrtc::WebRtcKeyValueConfig& trials) { RTC_CHECK_GE(simulcast_id, 0); RTC_CHECK_LT(simulcast_id, webrtc::kMaxSimulcastStreams); @@ -114,10 +116,8 @@ int DefaultNumberOfTemporalLayers(int simulcast_id, bool screenshare) { : kDefaultNumTemporalLayers; const std::string group_name = - screenshare ? webrtc::field_trial::FindFullName( - "WebRTC-VP8ScreenshareTemporalLayers") - : webrtc::field_trial::FindFullName( - "WebRTC-VP8ConferenceTemporalLayers"); + screenshare ? 
trials.Lookup("WebRTC-VP8ScreenshareTemporalLayers") + : trials.Lookup("WebRTC-VP8ConferenceTemporalLayers"); if (group_name.empty()) return default_num_temporal_layers; @@ -231,9 +231,10 @@ webrtc::DataRate GetTotalMaxBitrate( size_t LimitSimulcastLayerCount(int width, int height, size_t need_layers, - size_t layer_count) { - if (!webrtc::field_trial::IsDisabled( - kUseLegacySimulcastLayerLimitFieldTrial)) { + size_t layer_count, + const webrtc::WebRtcKeyValueConfig& trials) { + if (!absl::StartsWith(trials.Lookup(kUseLegacySimulcastLayerLimitFieldTrial), + "Disabled")) { size_t adaptive_layer_count = std::max( need_layers, kSimulcastFormats[FindSimulcastFormatIndex(width, height)].max_layers); @@ -254,27 +255,28 @@ std::vector GetSimulcastConfig( double bitrate_priority, int max_qp, bool is_screenshare_with_conference_mode, - bool temporal_layers_supported) { + bool temporal_layers_supported, + const webrtc::WebRtcKeyValueConfig& trials) { RTC_DCHECK_LE(min_layers, max_layers); RTC_DCHECK(max_layers > 1 || is_screenshare_with_conference_mode); const bool base_heavy_tl3_rate_alloc = - webrtc::RateControlSettings::ParseFromFieldTrials() + webrtc::RateControlSettings::ParseFromKeyValueConfig(&trials) .Vp8BaseHeavyTl3RateAllocation(); if (is_screenshare_with_conference_mode) { return GetScreenshareLayers(max_layers, width, height, bitrate_priority, max_qp, temporal_layers_supported, - base_heavy_tl3_rate_alloc); + base_heavy_tl3_rate_alloc, trials); } else { // Some applications rely on the old behavior limiting the simulcast layer // count based on the resolution automatically, which they can get through // the WebRTC-LegacySimulcastLayerLimit field trial until they update. 
max_layers = - LimitSimulcastLayerCount(width, height, min_layers, max_layers); + LimitSimulcastLayerCount(width, height, min_layers, max_layers, trials); return GetNormalSimulcastLayers(max_layers, width, height, bitrate_priority, max_qp, temporal_layers_supported, - base_heavy_tl3_rate_alloc); + base_heavy_tl3_rate_alloc, trials); } } @@ -285,7 +287,8 @@ std::vector GetNormalSimulcastLayers( double bitrate_priority, int max_qp, bool temporal_layers_supported, - bool base_heavy_tl3_rate_alloc) { + bool base_heavy_tl3_rate_alloc, + const webrtc::WebRtcKeyValueConfig& trials) { std::vector layers(layer_count); // Format width and height has to be divisible by |2 ^ num_simulcast_layers - @@ -300,11 +303,13 @@ std::vector GetNormalSimulcastLayers( // TODO(pbos): Fill actual temporal-layer bitrate thresholds. layers[s].max_qp = max_qp; layers[s].num_temporal_layers = - temporal_layers_supported ? DefaultNumberOfTemporalLayers(s, false) : 1; + temporal_layers_supported + ? DefaultNumberOfTemporalLayers(s, false, trials) + : 1; layers[s].max_bitrate_bps = FindSimulcastMaxBitrate(width, height).bps(); layers[s].target_bitrate_bps = FindSimulcastTargetBitrate(width, height).bps(); - int num_temporal_layers = DefaultNumberOfTemporalLayers(s, false); + int num_temporal_layers = DefaultNumberOfTemporalLayers(s, false, trials); if (s == 0) { // If alternative temporal rate allocation is selected, adjust the // bitrate of the lowest simulcast stream so that absolute bitrate for @@ -356,7 +361,8 @@ std::vector GetScreenshareLayers( double bitrate_priority, int max_qp, bool temporal_layers_supported, - bool base_heavy_tl3_rate_alloc) { + bool base_heavy_tl3_rate_alloc, + const webrtc::WebRtcKeyValueConfig& trials) { auto max_screenshare_layers = kMaxScreenshareSimulcastLayers; size_t num_simulcast_layers = std::min(max_layers, max_screenshare_layers); @@ -379,7 +385,8 @@ std::vector GetScreenshareLayers( // restrictions. The base simulcast layer will still use legacy setup. 
if (num_simulcast_layers == kMaxScreenshareSimulcastLayers) { // Add optional upper simulcast layer. - const int num_temporal_layers = DefaultNumberOfTemporalLayers(1, true); + const int num_temporal_layers = + DefaultNumberOfTemporalLayers(1, true, trials); int max_bitrate_bps; bool using_boosted_bitrate = false; if (!temporal_layers_supported) { @@ -389,7 +396,7 @@ std::vector GetScreenshareLayers( kScreenshareHighStreamMaxBitrate.bps() * webrtc::SimulcastRateAllocator::GetTemporalRateAllocation( num_temporal_layers, 0, base_heavy_tl3_rate_alloc)); - } else if (DefaultNumberOfTemporalLayers(1, true) != 3 || + } else if (DefaultNumberOfTemporalLayers(1, true, trials) != 3 || base_heavy_tl3_rate_alloc) { // Experimental temporal layer mode used, use increased max bitrate. max_bitrate_bps = kScreenshareHighStreamMaxBitrate.bps(); @@ -409,18 +416,12 @@ std::vector GetScreenshareLayers( layers[1].max_qp = max_qp; layers[1].max_framerate = kDefaultVideoMaxFramerate; layers[1].num_temporal_layers = - temporal_layers_supported ? DefaultNumberOfTemporalLayers(1, true) : 1; + temporal_layers_supported + ? DefaultNumberOfTemporalLayers(1, true, trials) + : 1; layers[1].min_bitrate_bps = using_boosted_bitrate ? kScreenshareHighStreamMinBitrate.bps() : layers[0].target_bitrate_bps * 2; - - // Cap max bitrate so it isn't overly high for the given resolution. 
- int resolution_limited_bitrate = - std::max(FindSimulcastMaxBitrate(width, height).bps(), - layers[1].min_bitrate_bps); - max_bitrate_bps = - std::min(max_bitrate_bps, resolution_limited_bitrate); - layers[1].target_bitrate_bps = max_bitrate_bps; layers[1].max_bitrate_bps = max_bitrate_bps; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.h b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.h index 28b08560a..5defa525d 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.h @@ -15,6 +15,7 @@ #include +#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/video_codecs/video_encoder_config.h" @@ -41,7 +42,8 @@ std::vector GetSimulcastConfig( double bitrate_priority, int max_qp, bool is_screenshare_with_conference_mode, - bool temporal_layers_supported); + bool temporal_layers_supported, + const webrtc::WebRtcKeyValueConfig& trials); // Gets the simulcast config layers for a non-screensharing case. std::vector GetNormalSimulcastLayers( @@ -51,7 +53,8 @@ std::vector GetNormalSimulcastLayers( double bitrate_priority, int max_qp, bool temporal_layers_supported, - bool base_heavy_tl3_rate_alloc); + bool base_heavy_tl3_rate_alloc, + const webrtc::WebRtcKeyValueConfig& trials); // Gets simulcast config layers for screenshare settings. 
std::vector GetScreenshareLayers( @@ -61,7 +64,8 @@ std::vector GetScreenshareLayers( double bitrate_priority, int max_qp, bool temporal_layers_supported, - bool base_heavy_tl3_rate_alloc); + bool base_heavy_tl3_rate_alloc, + const webrtc::WebRtcKeyValueConfig& trials); } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc index 60baed9da..e0c0ff7bc 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc @@ -103,8 +103,8 @@ int VerifyCodec(const webrtc::VideoCodec* inst) { return WEBRTC_VIDEO_CODEC_OK; } -bool StreamResolutionCompare(const webrtc::SimulcastStream& a, - const webrtc::SimulcastStream& b) { +bool StreamResolutionCompare(const webrtc::SpatialLayer& a, + const webrtc::SpatialLayer& b) { return std::tie(a.height, a.width, a.maxBitrate, a.maxFramerate) < std::tie(b.height, b.width, b.maxBitrate, b.maxFramerate); } @@ -120,10 +120,9 @@ class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback { EncodedImageCallback::Result OnEncodedImage( const webrtc::EncodedImage& encoded_image, - const webrtc::CodecSpecificInfo* codec_specific_info, - const webrtc::RTPFragmentationHeader* fragmentation) override { + const webrtc::CodecSpecificInfo* codec_specific_info) override { return adapter_->OnEncodedImage(stream_idx_, encoded_image, - codec_specific_info, fragmentation); + codec_specific_info); } private: @@ -157,8 +156,6 @@ SimulcastEncoderAdapter::SimulcastEncoderAdapter( // The adapter is typically created on the worker thread, but operated on // the encoder task queue. 
encoder_queue_.Detach(); - - memset(&codec_, 0, sizeof(webrtc::VideoCodec)); } SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { @@ -243,10 +240,6 @@ int SimulcastEncoderAdapter::InitEncode( RTC_DCHECK_LT(lowest_resolution_stream_index, number_of_streams); RTC_DCHECK_LT(highest_resolution_stream_index, number_of_streams); - const SdpVideoFormat format( - codec_.codecType == webrtc::kVideoCodecVP8 ? "VP8" : "H264", - video_format_.parameters); - for (int i = 0; i < number_of_streams; ++i) { // If an existing encoder instance exists, reuse it. // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx) here, @@ -256,10 +249,10 @@ int SimulcastEncoderAdapter::InitEncode( encoder = std::move(stored_encoders_.top()); stored_encoders_.pop(); } else { - encoder = primary_encoder_factory_->CreateVideoEncoder(format); + encoder = primary_encoder_factory_->CreateVideoEncoder(video_format_); if (fallback_encoder_factory_ != nullptr) { encoder = CreateVideoEncoderSoftwareFallbackWrapper( - fallback_encoder_factory_->CreateVideoEncoder(format), + fallback_encoder_factory_->CreateVideoEncoder(video_format_), std::move(encoder), i == lowest_resolution_stream_index && prefer_temporal_support_on_base_layer_); @@ -378,7 +371,7 @@ int SimulcastEncoderAdapter::Encode( } // Temporary thay may hold the result of texture to i420 buffer conversion. 
- rtc::scoped_refptr src_buffer; + rtc::scoped_refptr src_buffer; int src_width = input_image.width(); int src_height = input_image.height(); for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { @@ -434,12 +427,14 @@ int SimulcastEncoderAdapter::Encode( } } else { if (src_buffer == nullptr) { - src_buffer = input_image.video_frame_buffer()->ToI420(); + src_buffer = input_image.video_frame_buffer(); + } + rtc::scoped_refptr dst_buffer = + src_buffer->Scale(dst_width, dst_height); + if (!dst_buffer) { + RTC_LOG(LS_ERROR) << "Failed to scale video frame"; + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; } - rtc::scoped_refptr dst_buffer = - I420Buffer::Create(dst_width, dst_height); - - dst_buffer->ScaleFrom(*src_buffer); // UpdateRect is not propagated to lower simulcast layers currently. // TODO(ilnik): Consider scaling UpdateRect together with the buffer. @@ -559,15 +554,14 @@ void SimulcastEncoderAdapter::OnLossNotification( EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( size_t stream_idx, const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codecSpecificInfo) { EncodedImage stream_image(encodedImage); CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; stream_image.SetSpatialIndex(stream_idx); - return encoded_complete_callback_->OnEncodedImage( - stream_image, &stream_codec_specific, fragmentation); + return encoded_complete_callback_->OnEncodedImage(stream_image, + &stream_codec_specific); } void SimulcastEncoderAdapter::PopulateStreamCodec( @@ -644,6 +638,7 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { VideoEncoder::EncoderInfo encoder_info; encoder_info.implementation_name = "SimulcastEncoderAdapter"; encoder_info.requested_resolution_alignment = 1; + encoder_info.apply_alignment_to_all_simulcast_layers = false; encoder_info.supports_native_handle = true; 
encoder_info.scaling_settings.thresholds = absl::nullopt; if (streaminfos_.empty()) { @@ -695,6 +690,9 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { encoder_info.requested_resolution_alignment = cricket::LeastCommonMultiple( encoder_info.requested_resolution_alignment, encoder_impl_info.requested_resolution_alignment); + if (encoder_impl_info.apply_alignment_to_all_simulcast_layers) { + encoder_info.apply_alignment_to_all_simulcast_layers = true; + } if (num_active_streams == 1 && codec_.simulcastStream[i].active) { encoder_info.scaling_settings = encoder_impl_info.scaling_settings; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h index a4cf86315..5b2c02757 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h @@ -70,8 +70,7 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { EncodedImageCallback::Result OnEncodedImage( size_t stream_idx, const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation); + const CodecSpecificInfo* codec_specific_info); EncoderInfo GetEncoderInfo() const override; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc index b026b9d7c..17176512c 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc @@ -14,8 +14,8 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/match.h" #include "media/engine/webrtc_voice_engine.h" -#include "system_wrappers/include/field_trial.h" #ifdef HAVE_WEBRTC_VIDEO #include "media/engine/webrtc_video_engine.h" @@ -27,20 +27,29 @@ namespace cricket { std::unique_ptr 
CreateMediaEngine( MediaEngineDependencies dependencies) { + // TODO(sprang): Make populating |dependencies.trials| mandatory and remove + // these fallbacks. + std::unique_ptr fallback_trials( + dependencies.trials ? nullptr : new webrtc::FieldTrialBasedConfig()); + const webrtc::WebRtcKeyValueConfig& trials = + dependencies.trials ? *dependencies.trials : *fallback_trials; auto audio_engine = std::make_unique( dependencies.task_queue_factory, std::move(dependencies.adm), std::move(dependencies.audio_encoder_factory), std::move(dependencies.audio_decoder_factory), std::move(dependencies.audio_mixer), - std::move(dependencies.audio_processing)); + std::move(dependencies.audio_processing), + std::move(dependencies.onUnknownAudioSsrc), + dependencies.audio_frame_processor, trials); #ifdef HAVE_WEBRTC_VIDEO auto video_engine = std::make_unique( std::move(dependencies.video_encoder_factory), - std::move(dependencies.video_decoder_factory)); + std::move(dependencies.video_decoder_factory), trials); #else auto video_engine = std::make_unique(); #endif - return std::make_unique(std::move(audio_engine), + return std::make_unique(std::move(fallback_trials), + std::move(audio_engine), std::move(video_engine)); } @@ -87,7 +96,8 @@ bool ValidateRtpExtensions( std::vector FilterRtpExtensions( const std::vector& extensions, bool (*supported)(absl::string_view), - bool filter_redundant_extensions) { + bool filter_redundant_extensions, + const webrtc::WebRtcKeyValueConfig& trials) { RTC_DCHECK(ValidateRtpExtensions(extensions)); RTC_DCHECK(supported); std::vector result; @@ -121,7 +131,8 @@ std::vector FilterRtpExtensions( result.erase(it, result.end()); // Keep just the highest priority extension of any in the following lists. 
- if (webrtc::field_trial::IsEnabled("WebRTC-FilterAbsSendTimeExtension")) { + if (absl::StartsWith(trials.Lookup("WebRTC-FilterAbsSendTimeExtension"), + "Enabled")) { static const char* const kBweExtensionPriorities[] = { webrtc::RtpExtension::kTransportSequenceNumberUri, webrtc::RtpExtension::kAbsSendTimeUri, diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h index dbb2a5fbb..2bfd6b29b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h @@ -15,12 +15,14 @@ #include #include +#include "api/audio/audio_frame_processor.h" #include "api/audio/audio_mixer.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" #include "api/rtp_parameters.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "media/base/codec.h" @@ -45,9 +47,14 @@ struct MediaEngineDependencies { rtc::scoped_refptr audio_decoder_factory; rtc::scoped_refptr audio_mixer; rtc::scoped_refptr audio_processing; + webrtc::AudioFrameProcessor* audio_frame_processor = nullptr; std::unique_ptr video_encoder_factory; std::unique_ptr video_decoder_factory; + + std::function onUnknownAudioSsrc = nullptr; + + const webrtc::WebRtcKeyValueConfig* trials = nullptr; }; // CreateMediaEngine may be called on any thread, though the engine is @@ -66,7 +73,8 @@ bool ValidateRtpExtensions(const std::vector& extensions); std::vector FilterRtpExtensions( const std::vector& extensions, bool (*supported)(absl::string_view), - bool filter_redundant_extensions); + bool filter_redundant_extensions, + const webrtc::WebRtcKeyValueConfig& trials); webrtc::BitrateConstraints 
GetBitrateConfigForCodec(const Codec& codec); diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc index 14e73b70c..8a916c4c7 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc @@ -35,12 +35,13 @@ #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" #include "rtc_base/experiments/min_video_bitrate_experiment.h" +#include "rtc_base/experiments/normalize_simulcast_size_experiment.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" namespace cricket { @@ -61,23 +62,27 @@ const char* StreamTypeToString( return nullptr; } -// If this field trial is enabled, we will enable sending FlexFEC and disable -// sending ULPFEC whenever the former has been negotiated in the SDPs. -bool IsFlexfecFieldTrialEnabled() { - return webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03"); +bool IsEnabled(const webrtc::WebRtcKeyValueConfig& trials, + absl::string_view name) { + return absl::StartsWith(trials.Lookup(name), "Enabled"); } -// If this field trial is enabled, the "flexfec-03" codec will be advertised -// as being supported. This means that "flexfec-03" will appear in the default -// SDP offer, and we therefore need to be ready to receive FlexFEC packets from -// the remote. It also means that FlexFEC SSRCs will be generated by -// MediaSession and added as "a=ssrc:" and "a=ssrc-group:" lines in the local -// SDP. 
-bool IsFlexfecAdvertisedFieldTrialEnabled() { - return webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03-Advertised"); +bool PowerOfTwo(int value) { + return (value > 0) && ((value & (value - 1)) == 0); } -void AddDefaultFeedbackParams(VideoCodec* codec) { +bool IsScaleFactorsPowerOfTwo(const webrtc::VideoEncoderConfig& config) { + for (const auto& layer : config.simulcast_layers) { + double scale = std::max(layer.scale_resolution_down_by, 1.0); + if (std::round(scale) != scale || !PowerOfTwo(scale)) { + return false; + } + } + return true; +} + +void AddDefaultFeedbackParams(VideoCodec* codec, + const webrtc::WebRtcKeyValueConfig& trials) { // Don't add any feedback params for RED and ULPFEC. if (codec->name == kRedCodecName || codec->name == kUlpfecCodecName) return; @@ -91,7 +96,7 @@ void AddDefaultFeedbackParams(VideoCodec* codec) { codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kParamValueEmpty)); codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kRtcpFbNackParamPli)); if (codec->name == kVp8CodecName && - webrtc::field_trial::IsEnabled("WebRTC-RtcpLossNotification")) { + IsEnabled(trials, "WebRTC-RtcpLossNotification")) { codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamLntf, kParamValueEmpty)); } } @@ -101,7 +106,8 @@ void AddDefaultFeedbackParams(VideoCodec* codec) { // codecs for recognized codecs (VP8, VP9, H264, and RED). It will also add // default feedback params to the codecs. 
std::vector AssignPayloadTypesAndDefaultCodecs( - std::vector input_formats) { + std::vector input_formats, + const webrtc::WebRtcKeyValueConfig& trials) { if (input_formats.empty()) return std::vector(); static const int kFirstDynamicPayloadType = 96; @@ -111,7 +117,7 @@ std::vector AssignPayloadTypesAndDefaultCodecs( input_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName)); input_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName)); - if (IsFlexfecAdvertisedFieldTrialEnabled()) { + if (IsEnabled(trials, "WebRTC-FlexFEC-03-Advertised")) { webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName); // This value is currently arbitrarily set to 10 seconds. (The unit // is microseconds.) This parameter MUST be present in the SDP, but @@ -125,7 +131,7 @@ std::vector AssignPayloadTypesAndDefaultCodecs( for (const webrtc::SdpVideoFormat& format : input_formats) { VideoCodec codec(format); codec.id = payload_type; - AddDefaultFeedbackParams(&codec); + AddDefaultFeedbackParams(&codec, trials); output_codecs.push_back(codec); // Increment payload type. @@ -159,7 +165,8 @@ std::vector AssignPayloadTypesAndDefaultCodecs( template std::vector GetPayloadTypesAndDefaultCodecs( const T* factory, - bool is_decoder_factory) { + bool is_decoder_factory, + const webrtc::WebRtcKeyValueConfig& trials) { if (!factory) { return {}; } @@ -170,7 +177,8 @@ std::vector GetPayloadTypesAndDefaultCodecs( AddH264ConstrainedBaselineProfileToSupportedFormats(&supported_formats); } - return AssignPayloadTypesAndDefaultCodecs(std::move(supported_formats)); + return AssignPayloadTypesAndDefaultCodecs(std::move(supported_formats), + trials); } bool IsTemporalLayersSupported(const std::string& codec_name) { @@ -245,11 +253,11 @@ static bool ValidateStreamParams(const StreamParams& sp) { } // Returns true if the given codec is disallowed from doing simulcast. 
-bool IsCodecDisabledForSimulcast(const std::string& codec_name) { - return !webrtc::field_trial::IsDisabled("WebRTC-H264Simulcast") +bool IsCodecDisabledForSimulcast(const std::string& codec_name, + const webrtc::WebRtcKeyValueConfig& trials) { + return !absl::StartsWith(trials.Lookup("WebRTC-H264Simulcast"), "Disabled") ? absl::EqualsIgnoreCase(codec_name, kVp9CodecName) : absl::EqualsIgnoreCase(codec_name, kH264CodecName) || - absl::EqualsIgnoreCase(codec_name, kH265CodecName) || absl::EqualsIgnoreCase(codec_name, kVp9CodecName); } @@ -273,9 +281,11 @@ static int GetMaxDefaultVideoBitrateKbps(int width, return max_bitrate; } -bool GetVp9LayersFromFieldTrialGroup(size_t* num_spatial_layers, - size_t* num_temporal_layers) { - std::string group = webrtc::field_trial::FindFullName("WebRTC-SupportVP9SVC"); +bool GetVp9LayersFromFieldTrialGroup( + size_t* num_spatial_layers, + size_t* num_temporal_layers, + const webrtc::WebRtcKeyValueConfig& trials) { + std::string group = trials.Lookup("WebRTC-SupportVP9SVC"); if (group.empty()) return false; @@ -294,19 +304,21 @@ bool GetVp9LayersFromFieldTrialGroup(size_t* num_spatial_layers, return true; } -absl::optional GetVp9SpatialLayersFromFieldTrial() { +absl::optional GetVp9SpatialLayersFromFieldTrial( + const webrtc::WebRtcKeyValueConfig& trials) { size_t num_sl; size_t num_tl; - if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) { + if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl, trials)) { return num_sl; } return absl::nullopt; } -absl::optional GetVp9TemporalLayersFromFieldTrial() { +absl::optional GetVp9TemporalLayersFromFieldTrial( + const webrtc::WebRtcKeyValueConfig& trials) { size_t num_sl; size_t num_tl; - if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) { + if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl, trials)) { return num_tl; } return absl::nullopt; @@ -468,14 +480,14 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( const size_t default_num_spatial_layers = 
parameters_.config.rtp.ssrcs.size(); const size_t num_spatial_layers = - GetVp9SpatialLayersFromFieldTrial().value_or( - default_num_spatial_layers); + GetVp9SpatialLayersFromFieldTrial(call_->trials()) + .value_or(default_num_spatial_layers); const size_t default_num_temporal_layers = num_spatial_layers > 1 ? kConferenceDefaultNumTemporalLayers : 1; const size_t num_temporal_layers = - GetVp9TemporalLayersFromFieldTrial().value_or( - default_num_temporal_layers); + GetVp9TemporalLayersFromFieldTrial(call_->trials()) + .value_or(default_num_temporal_layers); vp9_settings.numberOfSpatialLayers = std::min( num_spatial_layers, kConferenceMaxNumSpatialLayers); @@ -497,7 +509,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( {"onkeypic", webrtc::InterLayerPredMode::kOnKeyPic}}); webrtc::ParseFieldTrial( {&interlayer_pred_experiment_enabled, &inter_layer_pred_mode}, - webrtc::field_trial::FindFullName("WebRTC-Vp9InterLayerPred")); + call_->trials().Lookup("WebRTC-Vp9InterLayerPred")); if (interlayer_pred_experiment_enabled) { vp9_settings.interLayerPred = inter_layer_pred_mode; } else { @@ -568,9 +580,11 @@ void DefaultUnsignalledSsrcHandler::SetDefaultSink( WebRtcVideoEngine::WebRtcVideoEngine( std::unique_ptr video_encoder_factory, - std::unique_ptr video_decoder_factory) + std::unique_ptr video_decoder_factory, + const webrtc::WebRtcKeyValueConfig& trials) : decoder_factory_(std::move(video_decoder_factory)), - encoder_factory_(std::move(video_encoder_factory)) { + encoder_factory_(std::move(video_encoder_factory)), + trials_(trials) { RTC_LOG(LS_INFO) << "WebRtcVideoEngine::WebRtcVideoEngine()"; } @@ -591,12 +605,12 @@ VideoMediaChannel* WebRtcVideoEngine::CreateMediaChannel( } std::vector WebRtcVideoEngine::send_codecs() const { return GetPayloadTypesAndDefaultCodecs(encoder_factory_.get(), - /*is_decoder_factory=*/false); + /*is_decoder_factory=*/false, trials_); } std::vector WebRtcVideoEngine::recv_codecs() const { return 
GetPayloadTypesAndDefaultCodecs(decoder_factory_.get(), - /*is_decoder_factory=*/true); + /*is_decoder_factory=*/true, trials_); } std::vector @@ -615,11 +629,22 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const { webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri}) { result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv); } + result.emplace_back(webrtc::RtpExtension::kGenericFrameDescriptorUri00, id++, + IsEnabled(trials_, "WebRTC-GenericDescriptorAdvertised") + ? webrtc::RtpTransceiverDirection::kSendRecv + : webrtc::RtpTransceiverDirection::kStopped); result.emplace_back( - webrtc::RtpExtension::kGenericFrameDescriptorUri00, id, - webrtc::field_trial::IsEnabled("WebRTC-GenericDescriptorAdvertised") + webrtc::RtpExtension::kDependencyDescriptorUri, id++, + IsEnabled(trials_, "WebRTC-DependencyDescriptorAdvertised") ? webrtc::RtpTransceiverDirection::kSendRecv : webrtc::RtpTransceiverDirection::kStopped); + + result.emplace_back( + webrtc::RtpExtension::kVideoLayersAllocationUri, id++, + IsEnabled(trials_, "WebRTC-VideoLayersAllocationAdvertised") + ? webrtc::RtpTransceiverDirection::kSendRecv + : webrtc::RtpTransceiverDirection::kStopped); + return result; } @@ -641,12 +666,13 @@ WebRtcVideoChannel::WebRtcVideoChannel( bitrate_allocator_factory_(bitrate_allocator_factory), default_send_options_(options), last_stats_log_ms_(-1), - discard_unknown_ssrc_packets_(webrtc::field_trial::IsEnabled( - "WebRTC-Video-DiscardPacketsWithUnknownSsrc")), + discard_unknown_ssrc_packets_( + IsEnabled(call_->trials(), + "WebRTC-Video-DiscardPacketsWithUnknownSsrc")), crypto_options_(crypto_options), unknown_ssrc_packet_buffer_( - webrtc::field_trial::IsEnabled( - "WebRTC-Video-BufferPacketsWithUnknownSsrc") + IsEnabled(call_->trials(), + "WebRTC-Video-BufferPacketsWithUnknownSsrc") ? 
new UnhandledPacketsBuffer() : nullptr) { RTC_DCHECK(thread_checker_.IsCurrent()); @@ -654,7 +680,7 @@ WebRtcVideoChannel::WebRtcVideoChannel( rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc; sending_ = false; recv_codecs_ = MapCodecs(GetPayloadTypesAndDefaultCodecs( - decoder_factory_, /*is_decoder_factory=*/true)); + decoder_factory_, /*is_decoder_factory=*/true, call_->trials())); recv_flexfec_payload_type_ = recv_codecs_.empty() ? 0 : recv_codecs_.front().flexfec_payload_type; } @@ -748,7 +774,7 @@ bool WebRtcVideoChannel::GetChangedSendParameters( } // Never enable sending FlexFEC, unless we are in the experiment. - if (!IsFlexfecFieldTrialEnabled()) { + if (!IsEnabled(call_->trials(), "WebRTC-FlexFEC-03")) { RTC_LOG(LS_INFO) << "WebRTC-FlexFEC-03 field trial is not enabled."; for (VideoCodecSettings& codec : negotiated_codecs) codec.flexfec_payload_type = -1; @@ -768,7 +794,8 @@ bool WebRtcVideoChannel::GetChangedSendParameters( changed_params->extmap_allow_mixed = params.extmap_allow_mixed; } std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForVideo, true); + params.extensions, webrtc::RtpExtension::IsSupportedForVideo, true, + call_->trials()); if (!send_rtp_extensions_ || (*send_rtp_extensions_ != filtered_extensions)) { changed_params->rtp_header_extensions = absl::optional>(filtered_extensions); @@ -822,97 +849,85 @@ bool WebRtcVideoChannel::SetSendParameters(const VideoSendParameters& params) { } void WebRtcVideoChannel::RequestEncoderFallback() { - invoker_.AsyncInvoke( - RTC_FROM_HERE, worker_thread_, [this] { - RTC_DCHECK_RUN_ON(&thread_checker_); - if (negotiated_codecs_.size() <= 1) { - RTC_LOG(LS_WARNING) - << "Encoder failed but no fallback codec is available"; - return; - } + RTC_DCHECK_RUN_ON(&thread_checker_); + if (negotiated_codecs_.size() <= 1) { + RTC_LOG(LS_WARNING) << "Encoder failed but no fallback codec is available"; + return; + } - ChangedSendParameters params; - 
params.negotiated_codecs = negotiated_codecs_; - params.negotiated_codecs->erase(params.negotiated_codecs->begin()); - params.send_codec = params.negotiated_codecs->front(); - ApplyChangedParams(params); - }); + ChangedSendParameters params; + params.negotiated_codecs = negotiated_codecs_; + params.negotiated_codecs->erase(params.negotiated_codecs->begin()); + params.send_codec = params.negotiated_codecs->front(); + ApplyChangedParams(params); } void WebRtcVideoChannel::RequestEncoderSwitch( const EncoderSwitchRequestCallback::Config& conf) { - invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [this, conf] { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&thread_checker_); - if (!allow_codec_switching_) { - RTC_LOG(LS_INFO) << "Encoder switch requested but codec switching has" - " not been enabled yet."; - requested_encoder_switch_ = conf; - return; - } + if (!allow_codec_switching_) { + RTC_LOG(LS_INFO) << "Encoder switch requested but codec switching has" + " not been enabled yet."; + requested_encoder_switch_ = conf; + return; + } - for (const VideoCodecSettings& codec_setting : negotiated_codecs_) { - if (codec_setting.codec.name == conf.codec_name) { - if (conf.param) { - auto it = codec_setting.codec.params.find(*conf.param); + for (const VideoCodecSettings& codec_setting : negotiated_codecs_) { + if (codec_setting.codec.name == conf.codec_name) { + if (conf.param) { + auto it = codec_setting.codec.params.find(*conf.param); + if (it == codec_setting.codec.params.end()) + continue; - if (it == codec_setting.codec.params.end()) { - continue; - } + if (conf.value && it->second != *conf.value) + continue; + } - if (conf.value && it->second != *conf.value) { - continue; - } - } - - if (send_codec_ == codec_setting) { - // Already using this codec, no switch required. 
- return; - } - - ChangedSendParameters params; - params.send_codec = codec_setting; - ApplyChangedParams(params); + if (send_codec_ == codec_setting) { + // Already using this codec, no switch required. return; } - } - RTC_LOG(LS_WARNING) << "Requested encoder with codec_name:" - << conf.codec_name - << ", param:" << conf.param.value_or("none") - << " and value:" << conf.value.value_or("none") - << "not found. No switch performed."; - }); + ChangedSendParameters params; + params.send_codec = codec_setting; + ApplyChangedParams(params); + return; + } + } + + RTC_LOG(LS_WARNING) << "Requested encoder with codec_name:" << conf.codec_name + << ", param:" << conf.param.value_or("none") + << " and value:" << conf.value.value_or("none") + << "not found. No switch performed."; } void WebRtcVideoChannel::RequestEncoderSwitch( const webrtc::SdpVideoFormat& format) { - invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [this, format] { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&thread_checker_); - for (const VideoCodecSettings& codec_setting : negotiated_codecs_) { - if (IsSameCodec(format.name, format.parameters, codec_setting.codec.name, - codec_setting.codec.params)) { - VideoCodecSettings new_codec_setting = codec_setting; - for (const auto& kv : format.parameters) { - new_codec_setting.codec.params[kv.first] = kv.second; - } + for (const VideoCodecSettings& codec_setting : negotiated_codecs_) { + if (IsSameCodec(format.name, format.parameters, codec_setting.codec.name, + codec_setting.codec.params)) { + VideoCodecSettings new_codec_setting = codec_setting; + for (const auto& kv : format.parameters) { + new_codec_setting.codec.params[kv.first] = kv.second; + } - if (send_codec_ == new_codec_setting) { - // Already using this codec, no switch required. 
- return; - } - - ChangedSendParameters params; - params.send_codec = new_codec_setting; - ApplyChangedParams(params); + if (send_codec_ == new_codec_setting) { + // Already using this codec, no switch required. return; } - } - RTC_LOG(LS_WARNING) << "Encoder switch failed: SdpVideoFormat " - << format.ToString() << " not negotiated."; - }); + ChangedSendParameters params; + params.send_codec = new_codec_setting; + ApplyChangedParams(params); + return; + } + } + + RTC_LOG(LS_WARNING) << "Encoder switch failed: SdpVideoFormat " + << format.ToString() << " not negotiated."; } bool WebRtcVideoChannel::ApplyChangedParams( @@ -1120,7 +1135,8 @@ bool WebRtcVideoChannel::GetChangedRecvParameters( if (params.is_stream_active) { const std::vector local_supported_codecs = GetPayloadTypesAndDefaultCodecs(decoder_factory_, - /*is_decoder_factory=*/true); + /*is_decoder_factory=*/true, + call_->trials()); for (const VideoCodecSettings& mapped_codec : mapped_codecs) { if (!FindMatchingCodec(local_supported_codecs, mapped_codec.codec)) { RTC_LOG(LS_ERROR) @@ -1138,7 +1154,8 @@ bool WebRtcVideoChannel::GetChangedRecvParameters( // Handle RTP header extensions. 
std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForVideo, false); + params.extensions, webrtc::RtpExtension::IsSupportedForVideo, false, + call_->trials()); if (filtered_extensions != recv_rtp_extensions_) { changed_params->rtp_header_extensions = absl::optional>(filtered_extensions); @@ -1292,6 +1309,21 @@ bool WebRtcVideoChannel::AddSendStream(const StreamParams& sp) { video_config_.periodic_alr_bandwidth_probing; config.encoder_settings.experiment_cpu_load_estimator = video_config_.experiment_cpu_load_estimator; + using TargetBitrateType = + webrtc::VideoStreamEncoderSettings::BitrateAllocationCallbackType; + if (send_rtp_extensions_ && + webrtc::RtpExtension::FindHeaderExtensionByUri( + *send_rtp_extensions_, + webrtc::RtpExtension::kVideoLayersAllocationUri)) { + config.encoder_settings.allocation_cb_type = + TargetBitrateType::kVideoLayersAllocation; + } else if (IsEnabled(call_->trials(), "WebRTC-Target-Bitrate-Rtcp")) { + config.encoder_settings.allocation_cb_type = + TargetBitrateType::kVideoBitrateAllocation; + } else { + config.encoder_settings.allocation_cb_type = + TargetBitrateType::kVideoBitrateAllocationWhenScreenSharing; + } config.encoder_settings.encoder_factory = encoder_factory_; config.encoder_settings.bitrate_allocator_factory = bitrate_allocator_factory_; @@ -1467,7 +1499,7 @@ void WebRtcVideoChannel::ConfigureReceiverRtp( // TODO(brandtr): Generalize when we add support for multistream protection. 
flexfec_config->payload_type = recv_flexfec_payload_type_; - if (IsFlexfecAdvertisedFieldTrialEnabled() && + if (IsEnabled(call_->trials(), "WebRTC-FlexFEC-03-Advertised") && sp.GetFecFrSsrc(ssrc, &flexfec_config->remote_ssrc)) { flexfec_config->protected_media_ssrcs = {ssrc}; flexfec_config->local_ssrc = config->rtp.local_ssrc; @@ -1700,7 +1732,7 @@ void WebRtcVideoChannel::BackfillBufferedPackets( int delivery_packet_error_cnt = 0; webrtc::PacketReceiver* receiver = this->call_->Receiver(); unknown_ssrc_packet_buffer_->BackfillPackets( - ssrcs, [&](uint32_t ssrc, int64_t packet_time_us, + ssrcs, [&](uint32_t /*ssrc*/, int64_t packet_time_us, rtc::CopyOnWriteBuffer packet) { switch (receiver->DeliverPacket(webrtc::MediaType::VIDEO, packet, packet_time_us)) { @@ -1760,13 +1792,14 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { // The group should be a positive integer with an explicit size, in // which case that is used as UDP recevie buffer size. All other values shall // result in the default value being used. 
- const std::string group_name = - webrtc::field_trial::FindFullName("WebRTC-IncreasedReceivebuffers"); + const std::string group_name_recv_buf_size = + call_->trials().Lookup("WebRTC-IncreasedReceivebuffers"); int recv_buffer_size = kVideoRtpRecvBufferSize; - if (!group_name.empty() && - (sscanf(group_name.c_str(), "%d", &recv_buffer_size) != 1 || + if (!group_name_recv_buf_size.empty() && + (sscanf(group_name_recv_buf_size.c_str(), "%d", &recv_buffer_size) != 1 || recv_buffer_size <= 0)) { - RTC_LOG(LS_WARNING) << "Invalid receive buffer size: " << group_name; + RTC_LOG(LS_WARNING) << "Invalid receive buffer size: " + << group_name_recv_buf_size; recv_buffer_size = kVideoRtpRecvBufferSize; } @@ -1777,8 +1810,19 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { // In b/15152257, we are seeing a significant number of packets discarded // due to lack of socket buffer space, although it's not yet clear what the // ideal value should be. + const std::string group_name_send_buf_size = + call_->trials().Lookup("WebRTC-SendBufferSizeBytes"); + int send_buffer_size = kVideoRtpSendBufferSize; + if (!group_name_send_buf_size.empty() && + (sscanf(group_name_send_buf_size.c_str(), "%d", &send_buffer_size) != 1 || + send_buffer_size <= 0)) { + RTC_LOG(LS_WARNING) << "Invalid send buffer size: " + << group_name_send_buf_size; + send_buffer_size = kVideoRtpSendBufferSize; + } + MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_SNDBUF, - kVideoRtpSendBufferSize); + send_buffer_size); } void WebRtcVideoChannel::SetFrameDecryptor( @@ -1804,18 +1848,16 @@ void WebRtcVideoChannel::SetFrameEncryptor( } void WebRtcVideoChannel::SetVideoCodecSwitchingEnabled(bool enabled) { - invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [this, enabled] { - RTC_DCHECK_RUN_ON(&thread_checker_); - allow_codec_switching_ = enabled; - if (allow_codec_switching_) { - RTC_LOG(LS_INFO) << "Encoder switching enabled."; - if (requested_encoder_switch_) { - 
RTC_LOG(LS_INFO) << "Executing cached video encoder switch request."; - RequestEncoderSwitch(*requested_encoder_switch_); - requested_encoder_switch_.reset(); - } + RTC_DCHECK_RUN_ON(&thread_checker_); + allow_codec_switching_ = enabled; + if (allow_codec_switching_) { + RTC_LOG(LS_INFO) << "Encoder switching enabled."; + if (requested_encoder_switch_) { + RTC_LOG(LS_INFO) << "Executing cached video encoder switch request."; + RequestEncoderSwitch(*requested_encoder_switch_); + requested_encoder_switch_.reset(); } - }); + } } bool WebRtcVideoChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, @@ -1946,12 +1988,11 @@ WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream( enable_cpu_overuse_detection_(enable_cpu_overuse_detection), source_(nullptr), stream_(nullptr), - encoder_sink_(nullptr), parameters_(std::move(config), options, max_bitrate_bps, codec_settings), rtp_parameters_(CreateRtpParametersWithEncodings(sp)), sending_(false), - disable_automatic_resize_(webrtc::field_trial::IsEnabled( - "WebRTC-Video-DisableAutomaticResize")) { + disable_automatic_resize_( + IsEnabled(call->trials(), "WebRTC-Video-DisableAutomaticResize")) { // Maximum packet size may come in RtpConfig from external transport, for // example from QuicTransportInterface implementation, so do not exceed // given max_packet_size. @@ -1972,7 +2013,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream( // FlexFEC SSRCs. // TODO(brandtr): This code needs to be generalized when we add support for // multistream protection. - if (IsFlexfecFieldTrialEnabled()) { + if (IsEnabled(call_->trials(), "WebRTC-FlexFEC-03")) { uint32_t flexfec_ssrc; bool flexfec_enabled = false; for (uint32_t primary_ssrc : parameters_.config.rtp.ssrcs) { @@ -2047,7 +2088,7 @@ bool WebRtcVideoChannel::WebRtcVideoSendStream::SetVideoSend( // Switch to the new source. 
source_ = source; if (source && stream_) { - stream_->SetSource(this, GetDegradationPreference()); + stream_->SetSource(source_, GetDegradationPreference()); } return true; } @@ -2077,8 +2118,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetDegradationPreference() const { webrtc::VideoTrackInterface::ContentHint::kText) { degradation_preference = webrtc::DegradationPreference::MAINTAIN_RESOLUTION; - } else if (webrtc::field_trial::IsEnabled( - "WebRTC-Video-BalancedDegradation")) { + } else if (IsEnabled(call_->trials(), "WebRTC-Video-BalancedDegradation")) { // Standard wants balanced by default, but it needs to be tuned first. degradation_preference = webrtc::DegradationPreference::BALANCED; } else { @@ -2221,9 +2261,11 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( // TODO(bugs.webrtc.org/8807): The bitrate priority really doesn't require an // entire encoder reconfiguration, it just needs to update the bitrate // allocator. - bool reconfigure_encoder = - new_param || (new_parameters.encodings[0].bitrate_priority != - rtp_parameters_.encodings[0].bitrate_priority); + bool reconfigure_encoder = new_param || + (new_parameters.encodings[0].bitrate_priority != + rtp_parameters_.encodings[0].bitrate_priority) || + new_parameters.encodings[0].scalability_mode != + rtp_parameters_.encodings[0].scalability_mode; // TODO(bugs.webrtc.org/8807): The active field as well should not require // a full encoder reconfiguration, but it needs to update both the bitrate @@ -2247,7 +2289,7 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( } if (new_degradation_preference) { if (source_ && stream_) { - stream_->SetSource(this, GetDegradationPreference()); + stream_->SetSource(source_, GetDegradationPreference()); } } return webrtc::RTCError::OK(); @@ -2329,7 +2371,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( // or a screencast (and not in simulcast screenshare experiment), only // 
configure a single stream. encoder_config.number_of_streams = parameters_.config.rtp.ssrcs.size(); - if (IsCodecDisabledForSimulcast(codec.name)) { + if (IsCodecDisabledForSimulcast(codec.name, call_->trials())) { encoder_config.number_of_streams = 1; } @@ -2382,6 +2424,8 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i) { encoder_config.simulcast_layers[i].active = rtp_parameters_.encodings[i].active; + encoder_config.simulcast_layers[i].scalability_mode = + rtp_parameters_.encodings[i].scalability_mode; if (rtp_parameters_.encodings[i].min_bitrate_bps) { encoder_config.simulcast_layers[i].min_bitrate_bps = *rtp_parameters_.encodings[i].min_bitrate_bps; @@ -2446,38 +2490,6 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetSend(bool send) { UpdateSendState(); } -void WebRtcVideoChannel::WebRtcVideoSendStream::RemoveSink( - rtc::VideoSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(encoder_sink_ == sink); - encoder_sink_ = nullptr; - source_->RemoveSink(sink); -} - -void WebRtcVideoChannel::WebRtcVideoSendStream::AddOrUpdateSink( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { - if (worker_thread_ == rtc::Thread::Current()) { - // AddOrUpdateSink is called on |worker_thread_| if this is the first - // registration of |sink|. - RTC_DCHECK_RUN_ON(&thread_checker_); - encoder_sink_ = sink; - source_->AddOrUpdateSink(encoder_sink_, wants); - } else { - // Subsequent calls to AddOrUpdateSink will happen on the encoder task - // queue. - invoker_.AsyncInvoke( - RTC_FROM_HERE, worker_thread_, [this, sink, wants] { - RTC_DCHECK_RUN_ON(&thread_checker_); - // |sink| may be invalidated after this task was posted since - // RemoveSink is called on the worker thread. 
- bool encoder_sink_valid = (sink == encoder_sink_); - if (source_ && encoder_sink_valid) { - source_->AddOrUpdateSink(encoder_sink_, wants); - } - }); - } -} std::vector WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( bool log_stats) { @@ -2698,7 +2710,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::RecreateWebRtcStream() { parameters_.encoder_config.encoder_specific_settings = NULL; if (source_) { - stream_->SetSource(this, GetDegradationPreference()); + stream_->SetSource(source_, GetDegradationPreference()); } // Call stream_->Start() if necessary conditions are met. @@ -2924,8 +2936,7 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() { MaybeAssociateFlexfecWithVideo(); stream_->Start(); - if (webrtc::field_trial::IsEnabled( - "WebRTC-Video-BufferPacketsWithUnknownSsrc")) { + if (IsEnabled(call_->trials(), "WebRTC-Video-BufferPacketsWithUnknownSsrc")) { channel_->BackfillBufferedPackets(stream_params_.ssrcs); } } @@ -3376,15 +3387,18 @@ void WebRtcVideoChannel::SetDepacketizerToDecoderFrameTransformer( // TODO(bugs.webrtc.org/8785): Consider removing max_qp as member of // EncoderStreamFactory and instead set this value individually for each stream // in the VideoEncoderConfig.simulcast_layers. -EncoderStreamFactory::EncoderStreamFactory(std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode) +EncoderStreamFactory::EncoderStreamFactory( + std::string codec_name, + int max_qp, + bool is_screenshare, + bool conference_mode, + const webrtc::WebRtcKeyValueConfig* trials) : codec_name_(codec_name), max_qp_(max_qp), is_screenshare_(is_screenshare), - conference_mode_(conference_mode) {} + conference_mode_(conference_mode), + trials_(trials ? 
*trials : fallback_trials_) {} std::vector EncoderStreamFactory::CreateEncoderStreams( int width, @@ -3399,8 +3413,7 @@ std::vector EncoderStreamFactory::CreateEncoderStreams( if (encoder_config.number_of_streams > 1 || ((absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) || - absl::EqualsIgnoreCase(codec_name_, kH264CodecName) || - absl::EqualsIgnoreCase(codec_name_, kH265CodecName)) && + absl::EqualsIgnoreCase(codec_name_, kH264CodecName)) && is_screenshare_ && conference_mode_)) { return CreateSimulcastOrConferenceModeScreenshareStreams( width, height, encoder_config, experimental_min_bitrate); @@ -3507,7 +3520,7 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams( encoder_config.number_of_streams, width, height, encoder_config.bitrate_priority, max_qp_, is_screenshare_ && conference_mode_, - temporal_layers_supported); + temporal_layers_supported, trials_); // Allow an experiment to override the minimum bitrate for the lowest // spatial layer. The experiment's configuration has the lowest priority. if (experimental_min_bitrate) { @@ -3520,10 +3533,22 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams( encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) { return layer.scale_resolution_down_by != -1.; }); + + bool default_scale_factors_used = true; + if (has_scale_resolution_down_by) { + default_scale_factors_used = IsScaleFactorsPowerOfTwo(encoder_config); + } + const bool norm_size_configured = + webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent().has_value(); const int normalized_width = - NormalizeSimulcastSize(width, encoder_config.number_of_streams); + (default_scale_factors_used || norm_size_configured) + ? NormalizeSimulcastSize(width, encoder_config.number_of_streams) + : width; const int normalized_height = - NormalizeSimulcastSize(height, encoder_config.number_of_streams); + (default_scale_factors_used || norm_size_configured) + ? 
NormalizeSimulcastSize(height, encoder_config.number_of_streams) + : height; + for (size_t i = 0; i < layers.size(); ++i) { layers[i].active = encoder_config.simulcast_layers[i].active; // Update with configured num temporal layers if supported by codec. @@ -3575,10 +3600,18 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams( std::max(layers[i].max_bitrate_bps, layers[i].min_bitrate_bps); } else if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { // Only max bitrate is configured, make sure min/target are below max. + // Keep target bitrate if it is set explicitly in encoding config. + // Otherwise set target bitrate to 3/4 of the max bitrate + // or the one calculated from GetSimulcastConfig() which is larger. layers[i].min_bitrate_bps = std::min(layers[i].min_bitrate_bps, layers[i].max_bitrate_bps); - layers[i].target_bitrate_bps = - std::min(layers[i].target_bitrate_bps, layers[i].max_bitrate_bps); + if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0) { + layers[i].target_bitrate_bps = std::max( + layers[i].target_bitrate_bps, layers[i].max_bitrate_bps * 3 / 4); + } + layers[i].target_bitrate_bps = std::max( + std::min(layers[i].target_bitrate_bps, layers[i].max_bitrate_bps), + layers[i].min_bitrate_bps); } if (i == layers.size() - 1) { is_highest_layer_max_bitrate_configured = diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h index 3705ebf01..321a5a8c2 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "api/call/transport.h" +#include "api/transport/field_trial_based_config.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" @@ -31,7 +32,6 @@ #include "media/base/media_engine.h" 
#include "media/engine/constants.h" #include "media/engine/unhandled_packets_buffer.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/network_route.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -98,7 +98,8 @@ class WebRtcVideoEngine : public VideoEngineInterface { // and external hardware codecs. WebRtcVideoEngine( std::unique_ptr video_encoder_factory, - std::unique_ptr video_decoder_factory); + std::unique_ptr video_decoder_factory, + const webrtc::WebRtcKeyValueConfig& trials); ~WebRtcVideoEngine() override; @@ -120,6 +121,7 @@ class WebRtcVideoEngine : public VideoEngineInterface { const std::unique_ptr encoder_factory_; const std::unique_ptr bitrate_allocator_factory_; + const webrtc::WebRtcKeyValueConfig& trials_; }; class WebRtcVideoChannel : public VideoMediaChannel, @@ -319,8 +321,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, const std::vector& codecs); // Wrapper for the sender part. - class WebRtcVideoSendStream - : public rtc::VideoSourceInterface { + class WebRtcVideoSendStream { public: WebRtcVideoSendStream( webrtc::Call* call, @@ -332,7 +333,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, const absl::optional& codec_settings, const absl::optional>& rtp_extensions, const VideoSendParameters& send_params); - virtual ~WebRtcVideoSendStream(); + ~WebRtcVideoSendStream(); void SetSendParameters(const ChangedSendParameters& send_params); webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters); @@ -341,14 +342,6 @@ class WebRtcVideoChannel : public VideoMediaChannel, void SetFrameEncryptor( rtc::scoped_refptr frame_encryptor); - // Implements rtc::VideoSourceInterface. - // WebRtcVideoSendStream acts as a source to the webrtc::VideoSendStream - // in |stream_|. This is done to proxy VideoSinkWants from the encoder to - // the worker thread. 
- void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; - bool SetVideoSend(const VideoOptions* options, rtc::VideoSourceInterface* source); @@ -414,8 +407,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, RTC_GUARDED_BY(&thread_checker_); webrtc::VideoSendStream* stream_ RTC_GUARDED_BY(&thread_checker_); - rtc::VideoSinkInterface* encoder_sink_ - RTC_GUARDED_BY(&thread_checker_); + // Contains settings that are the same for all streams in the MediaChannel, // such as codecs, header extensions, and the global bitrate limit for the // entire channel. @@ -429,11 +421,6 @@ class WebRtcVideoChannel : public VideoMediaChannel, bool sending_ RTC_GUARDED_BY(&thread_checker_); - // In order for the |invoker_| to protect other members from being - // destructed as they are used in asynchronous tasks it has to be destructed - // first. - rtc::AsyncInvoker invoker_; - // TODO(asapersson): investigate why setting // DegrationPreferences::MAINTAIN_RESOLUTION isn't sufficient to disable // downscaling everywhere in the pipeline. @@ -566,7 +553,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void FillSendAndReceiveCodecStats(VideoMediaInfo* video_media_info) RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); - rtc::Thread* worker_thread_; + rtc::Thread* const worker_thread_; rtc::ThreadChecker thread_checker_; uint32_t rtcp_receiver_report_ssrc_ RTC_GUARDED_BY(thread_checker_); @@ -639,10 +626,6 @@ class WebRtcVideoChannel : public VideoMediaChannel, bool allow_codec_switching_ = false; absl::optional requested_encoder_switch_; - - // In order for the |invoker_| to protect other members from being destructed - // as they are used in asynchronous tasks it has to be destructed first. 
- rtc::AsyncInvoker invoker_; }; class EncoderStreamFactory @@ -651,7 +634,18 @@ class EncoderStreamFactory EncoderStreamFactory(std::string codec_name, int max_qp, bool is_screenshare, - bool conference_mode); + bool conference_mode) + : EncoderStreamFactory(codec_name, + max_qp, + is_screenshare, + conference_mode, + nullptr) {} + + EncoderStreamFactory(std::string codec_name, + int max_qp, + bool is_screenshare, + bool conference_mode, + const webrtc::WebRtcKeyValueConfig* trials); private: std::vector CreateEncoderStreams( @@ -678,6 +672,8 @@ class EncoderStreamFactory // Allows a screenshare specific configuration, which enables temporal // layering and various settings. const bool conference_mode_; + const webrtc::FieldTrialBasedConfig fallback_trials_; + const webrtc::WebRtcKeyValueConfig& trials_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc index 749f2506a..fc526f12a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc @@ -12,27 +12,30 @@ #include #include +#include #include #include #include #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "api/audio/audio_frame_processor.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/call/audio_sink.h" +#include "api/transport/webrtc_key_value_config.h" #include "media/base/audio_source.h" #include "media/base/media_constants.h" #include "media/base/stream_params.h" #include "media/engine/adm_helpers.h" #include "media/engine/payload_type_mapper.h" #include "media/engine/webrtc_media_engine.h" +#include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_device/audio_device_impl.h" #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include 
"modules/audio_processing/include/audio_processing.h" #include "rtc_base/arraysize.h" #include "rtc_base/byte_order.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" #include "rtc_base/experiments/struct_parameters_parser.h" @@ -45,7 +48,6 @@ #include "rtc_base/strings/string_format.h" #include "rtc_base/third_party/base64/base64.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #if WEBRTC_ENABLE_PROTOBUF @@ -111,12 +113,6 @@ std::string ToString(const AudioCodec& codec) { return ss.Release(); } -// If this field trial is enabled, we will negotiate and use RFC 2198 -// redundancy for opus audio. -bool IsAudioRedForOpusFieldTrialEnabled() { - return webrtc::field_trial::IsEnabled("WebRTC-Audio-Red-For-Opus"); -} - bool IsCodec(const AudioCodec& codec, const char* ref_name) { return absl::EqualsIgnoreCase(codec.name, ref_name); } @@ -203,6 +199,11 @@ absl::optional ComputeSendBitrate(int max_send_bitrate_bps, } } +bool IsEnabled(const webrtc::WebRtcKeyValueConfig& config, + absl::string_view trial) { + return absl::StartsWith(config.Lookup(trial), "Enabled"); +} + struct AdaptivePtimeConfig { bool enabled = false; webrtc::DataRate min_payload_bitrate = webrtc::DataRate::KilobitsPerSec(16); @@ -219,9 +220,8 @@ struct AdaptivePtimeConfig { "use_slow_adaptation", &use_slow_adaptation); } - AdaptivePtimeConfig() { - Parser()->Parse( - webrtc::field_trial::FindFullName("WebRTC-Audio-AdaptivePtime")); + explicit AdaptivePtimeConfig(const webrtc::WebRtcKeyValueConfig& trials) { + Parser()->Parse(trials.Lookup("WebRTC-Audio-AdaptivePtime")); #if WEBRTC_ENABLE_PROTOBUF webrtc::audio_network_adaptor::config::ControllerManager config; auto* frame_length_controller = @@ -243,13 +243,22 @@ WebRtcVoiceEngine::WebRtcVoiceEngine( const rtc::scoped_refptr& encoder_factory, const rtc::scoped_refptr& 
decoder_factory, rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing) + rtc::scoped_refptr audio_processing, + std::function onUnknownAudioSsrc, + webrtc::AudioFrameProcessor* audio_frame_processor, + const webrtc::WebRtcKeyValueConfig& trials) : task_queue_factory_(task_queue_factory), adm_(adm), encoder_factory_(encoder_factory), decoder_factory_(decoder_factory), audio_mixer_(audio_mixer), - apm_(audio_processing) { + apm_(audio_processing), + onUnknownAudioSsrc_(onUnknownAudioSsrc), + audio_frame_processor_(audio_frame_processor), + audio_red_for_opus_trial_enabled_( + IsEnabled(trials, "WebRTC-Audio-Red-For-Opus")), + minimized_remsampling_on_mobile_trial_enabled_( + IsEnabled(trials, "WebRTC-Audio-MinimizeResamplingOnMobile")) { // This may be called from any thread, so detach thread checkers. worker_thread_checker_.Detach(); signal_thread_checker_.Detach(); @@ -315,6 +324,10 @@ void WebRtcVoiceEngine::Init() { } config.audio_processing = apm_; config.audio_device_module = adm_; + if (audio_frame_processor_) + config.async_audio_processing_factory = + new rtc::RefCountedObject( + *audio_frame_processor_, *task_queue_factory_); audio_state_ = webrtc::AudioState::Create(config); } @@ -415,8 +428,7 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { // performed inside the audio processing module on mobile platforms by // whenever possible turning off the fixed AGC mode and the high-pass filter. // (https://bugs.chromium.org/p/webrtc/issues/detail?id=6181). 
- if (webrtc::field_trial::IsEnabled( - "WebRTC-Audio-MinimizeResamplingOnMobile")) { + if (minimized_remsampling_on_mobile_trial_enabled_) { options.auto_gain_control = false; RTC_LOG(LS_INFO) << "Disable AGC according to field trial."; if (!(options.noise_suppression.value_or(false) || @@ -581,7 +593,6 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { apm_config.voice_detection.enabled = *options.typing_detection; } - ap->SetExtraOptions(config); ap->ApplyConfig(apm_config); return true; } @@ -723,8 +734,7 @@ std::vector WebRtcVoiceEngine::CollectCodecs( out.push_back(codec); - if (codec.name == kOpusCodecName && - IsAudioRedForOpusFieldTrialEnabled()) { + if (codec.name == kOpusCodecName && audio_red_for_opus_trial_enabled_) { map_format({kRedCodecName, 48000, 2}, &out); } } @@ -768,7 +778,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream const absl::optional codec_pair_id, rtc::scoped_refptr frame_encryptor, const webrtc::CryptoOptions& crypto_options) - : call_(call), + : adaptive_ptime_config_(call->trials()), + call_(call), config_(send_transport), max_send_bitrate_bps_(max_send_bitrate_bps), rtp_parameters_(CreateRtpParametersWithOneEncoding()) { @@ -801,6 +812,10 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream stream_ = call_->CreateAudioSendStream(config_); } + WebRtcAudioSendStream() = delete; + WebRtcAudioSendStream(const WebRtcAudioSendStream&) = delete; + WebRtcAudioSendStream& operator=(const WebRtcAudioSendStream&) = delete; + ~WebRtcAudioSendStream() override { RTC_DCHECK(worker_thread_checker_.IsCurrent()); ClearSource(); @@ -1142,8 +1157,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // TODO(webrtc:11717): Remove this once audio_network_adaptor in AudioOptions // has been removed. 
absl::optional audio_network_adaptor_config_from_options_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioSendStream); }; class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { @@ -1192,6 +1205,10 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { RecreateAudioReceiveStream(); } + WebRtcAudioReceiveStream() = delete; + WebRtcAudioReceiveStream(const WebRtcAudioReceiveStream&) = delete; + WebRtcAudioReceiveStream& operator=(const WebRtcAudioReceiveStream&) = delete; + ~WebRtcAudioReceiveStream() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); call_->DestroyAudioReceiveStream(stream_); @@ -1250,10 +1267,11 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { } } - webrtc::AudioReceiveStream::Stats GetStats() const { + webrtc::AudioReceiveStream::Stats GetStats( + bool get_and_clear_legacy_stats) const { RTC_DCHECK(worker_thread_checker_.IsCurrent()); RTC_DCHECK(stream_); - return stream_->GetStats(); + return stream_->GetStats(get_and_clear_legacy_stats); } void SetRawAudioSink(std::unique_ptr sink) { @@ -1354,8 +1372,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { bool playout_ = false; float output_volume_ = 1.0; std::unique_ptr raw_audio_sink_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioReceiveStream); }; WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel( @@ -1368,7 +1384,9 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel( engine_(engine), call_(call), audio_config_(config.audio), - crypto_options_(crypto_options) { + crypto_options_(crypto_options), + audio_red_for_opus_trial_enabled_( + IsEnabled(call->trials(), "WebRTC-Audio-Red-For-Opus")) { RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel"; RTC_DCHECK(call); engine->RegisterChannel(this); @@ -1415,7 +1433,8 @@ bool WebRtcVoiceMediaChannel::SetSendParameters( } std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForAudio, true); + params.extensions, 
webrtc::RtpExtension::IsSupportedForAudio, true, + call_->trials()); if (send_rtp_extensions_ != filtered_extensions) { send_rtp_extensions_.swap(filtered_extensions); for (auto& it : send_streams_) { @@ -1452,7 +1471,8 @@ bool WebRtcVoiceMediaChannel::SetRecvParameters( return false; } std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForAudio, false); + params.extensions, webrtc::RtpExtension::IsSupportedForAudio, false, + call_->trials()); if (recv_rtp_extensions_ != filtered_extensions) { recv_rtp_extensions_.swap(filtered_extensions); for (auto& it : recv_streams_) { @@ -1628,7 +1648,7 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs( } auto format = AudioCodecToSdpAudioFormat(codec); if (!IsCodec(codec, kCnCodecName) && !IsCodec(codec, kDtmfCodecName) && - (!IsAudioRedForOpusFieldTrialEnabled() || + (!audio_red_for_opus_trial_enabled_ || !IsCodec(codec, kRedCodecName)) && !engine()->decoder_factory_->IsSupportedDecoder(format)) { RTC_LOG(LS_ERROR) << "Unsupported codec: " << rtc::ToString(format); @@ -1782,7 +1802,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( } } - if (IsAudioRedForOpusFieldTrialEnabled()) { + if (audio_red_for_opus_trial_enabled_) { // Loop through the codecs to find the RED codec that matches opus // with respect to clockrate and number of channels. size_t red_codec_position = 0; @@ -2029,6 +2049,11 @@ void WebRtcVoiceMediaChannel::ResetUnsignaledRecvStream() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); RTC_LOG(LS_INFO) << "ResetUnsignaledRecvStream."; unsignaled_stream_params_ = StreamParams(); + // Create a copy since RemoveRecvStream will modify |unsignaled_recv_ssrcs_|. 
+ std::vector to_remove = unsignaled_recv_ssrcs_; + for (uint32_t ssrc : to_remove) { + RemoveRecvStream(ssrc); + } } bool WebRtcVoiceMediaChannel::SetLocalSource(uint32_t ssrc, @@ -2199,6 +2224,10 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, } RTC_DCHECK(!absl::c_linear_search(unsignaled_recv_ssrcs_, ssrc)); + if (engine()->onUnknownAudioSsrc_) { + engine()->onUnknownAudioSsrc_(ssrc); + } + // Add new stream. StreamParams sp = unsignaled_stream_params_; sp.ssrcs.push_back(ssrc); @@ -2296,7 +2325,8 @@ void WebRtcVoiceMediaChannel::OnReadyToSend(bool ready) { ready ? webrtc::kNetworkUp : webrtc::kNetworkDown); } -bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) { +bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, + bool get_and_clear_legacy_stats) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::GetStats"); RTC_DCHECK(worker_thread_checker_.IsCurrent()); RTC_DCHECK(info); @@ -2349,7 +2379,8 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) { continue; } } - webrtc::AudioReceiveStream::Stats stats = stream.second->GetStats(); + webrtc::AudioReceiveStream::Stats stats = + stream.second->GetStats(get_and_clear_legacy_stats); VoiceReceiverInfo rinfo; rinfo.add_ssrc(stats.remote_ssrc); rinfo.payload_bytes_rcvd = stats.payload_bytes_rcvd; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h index 86a7a495f..b212b3a3f 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h @@ -20,16 +20,21 @@ #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/rtp/rtp_source.h" +#include "api/transport/webrtc_key_value_config.h" #include "call/audio_state.h" #include "call/call.h" #include "media/base/media_engine.h" #include "media/base/rtp_utils.h" +#include 
"modules/async_audio_processing/async_audio_processing.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/network_route.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" +namespace webrtc { +class AudioFrameProcessor; +} + namespace cricket { class AudioDeviceModule; @@ -49,7 +54,15 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { const rtc::scoped_refptr& encoder_factory, const rtc::scoped_refptr& decoder_factory, rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing); + rtc::scoped_refptr audio_processing, + std::function onUnknownAudioSsrc, + webrtc::AudioFrameProcessor* audio_frame_processor, + const webrtc::WebRtcKeyValueConfig& trials); + + WebRtcVoiceEngine() = delete; + WebRtcVoiceEngine(const WebRtcVoiceEngine&) = delete; + WebRtcVoiceEngine& operator=(const WebRtcVoiceEngine&) = delete; + ~WebRtcVoiceEngine() override; // Does initialization that needs to occur on the worker thread. @@ -110,6 +123,8 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { rtc::scoped_refptr audio_mixer_; // The audio processing module. rtc::scoped_refptr apm_; + // Asynchronous audio processing. + webrtc::AudioFrameProcessor* const audio_frame_processor_; // The primary instance of WebRtc VoiceEngine. rtc::scoped_refptr audio_state_; std::vector send_codecs_; @@ -119,8 +134,7 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { bool initialized_ = false; // Cache experimental_ns and apply in case they are missing in the audio - // options. We need to do this because SetExtraOptions() will revert to - // defaults for options which are not provided. + // options. absl::optional experimental_ns_; // Jitter buffer settings for new streams. 
size_t audio_jitter_buffer_max_packets_ = 200; @@ -128,7 +142,12 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { int audio_jitter_buffer_min_delay_ms_ = 0; bool audio_jitter_buffer_enable_rtx_handling_ = false; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceEngine); + std::function onUnknownAudioSsrc_ = nullptr; + + // If this field trial is enabled, we will negotiate and use RFC 2198 + // redundancy for opus audio. + const bool audio_red_for_opus_trial_enabled_; + const bool minimized_remsampling_on_mobile_trial_enabled_; }; // WebRtcVoiceMediaChannel is an implementation of VoiceMediaChannel that uses @@ -141,6 +160,11 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, const AudioOptions& options, const webrtc::CryptoOptions& crypto_options, webrtc::Call* call); + + WebRtcVoiceMediaChannel() = delete; + WebRtcVoiceMediaChannel(const WebRtcVoiceMediaChannel&) = delete; + WebRtcVoiceMediaChannel& operator=(const WebRtcVoiceMediaChannel&) = delete; + ~WebRtcVoiceMediaChannel() override; const AudioOptions& options() const { return options_; } @@ -196,7 +220,7 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, void OnNetworkRouteChanged(const std::string& transport_name, const rtc::NetworkRoute& network_route) override; void OnReadyToSend(bool ready) override; - bool GetStats(VoiceMediaInfo* info) override; + bool GetStats(VoiceMediaInfo* info, bool get_and_clear_legacy_stats) override; // Set the audio sink for an existing stream. 
void SetRawAudioSink( @@ -332,7 +356,7 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, rtc::scoped_refptr unsignaled_frame_decryptor_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceMediaChannel); + const bool audio_red_for_opus_trial_enabled_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.cc index 2d51623c8..527934d02 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.cc @@ -58,6 +58,7 @@ static constexpr size_t kSctpMtu = 1200; // Set the initial value of the static SCTP Data Engines reference count. ABSL_CONST_INIT int g_usrsctp_usage_count = 0; +ABSL_CONST_INIT bool g_usrsctp_initialized_ = false; ABSL_CONST_INIT webrtc::GlobalMutex g_usrsctp_lock_(absl::kConstInit); // DataMessageType is used for the SCTP "Payload Protocol Identifier", as @@ -262,9 +263,19 @@ class SctpTransport::UsrSctpWrapper { public: static void InitializeUsrSctp() { RTC_LOG(LS_INFO) << __FUNCTION__; - // First argument is udp_encapsulation_port, which is not releveant for our - // AF_CONN use of sctp. - usrsctp_init(0, &UsrSctpWrapper::OnSctpOutboundPacket, &DebugSctpPrintf); + // UninitializeUsrSctp tries to call usrsctp_finish in a loop for three + // seconds; if that failed and we were left in a still-initialized state, we + // don't want to call usrsctp_init again as that will result in undefined + // behavior. + if (g_usrsctp_initialized_) { + RTC_LOG(LS_WARNING) << "Not reinitializing usrsctp since last attempt at " + "usrsctp_finish failed."; + } else { + // First argument is udp_encapsulation_port, which is not releveant for + // our AF_CONN use of sctp. + usrsctp_init(0, &UsrSctpWrapper::OnSctpOutboundPacket, &DebugSctpPrintf); + g_usrsctp_initialized_ = true; + } // To turn on/off detailed SCTP debugging. 
You will also need to have the // SCTP_DEBUG cpp defines flag, which can be turned on in media/BUILD.gn. @@ -318,6 +329,7 @@ class SctpTransport::UsrSctpWrapper { // closed. Wait and try again until it succeeds for up to 3 seconds. for (size_t i = 0; i < 300; ++i) { if (usrsctp_finish() == 0) { + g_usrsctp_initialized_ = false; delete g_transport_map_; g_transport_map_ = nullptr; return; @@ -394,7 +406,17 @@ class SctpTransport::UsrSctpWrapper { struct sctp_rcvinfo rcv, int flags, void* ulp_info) { - SctpTransport* transport = static_cast(ulp_info); + SctpTransport* transport = GetTransportFromSocket(sock); + if (!transport) { + RTC_LOG(LS_ERROR) + << "OnSctpInboundPacket: Failed to get transport for socket " << sock + << "; possibly was already destroyed."; + free(data); + return 0; + } + // Sanity check that both methods of getting the SctpTransport pointer + // yield the same result. + RTC_CHECK_EQ(transport, static_cast(ulp_info)); int result = transport->OnDataOrNotificationFromSctp(data, length, rcv, flags); free(data); @@ -427,6 +449,8 @@ class SctpTransport::UsrSctpWrapper { return transport; } + // TODO(crbug.com/webrtc/11899): This is a legacy callback signature, remove + // when usrsctp is updated. static int SendThresholdCallback(struct socket* sock, uint32_t sb_free) { // Fired on our I/O thread. SctpTransport::OnPacketReceived() gets // a packet containing acknowledgments, which goes into usrsctp_conninput, @@ -435,12 +459,32 @@ class SctpTransport::UsrSctpWrapper { if (!transport) { RTC_LOG(LS_ERROR) << "SendThresholdCallback: Failed to get transport for socket " - << sock; + << sock << "; possibly was already destroyed."; return 0; } transport->OnSendThresholdCallback(); return 0; } + + static int SendThresholdCallback(struct socket* sock, + uint32_t sb_free, + void* ulp_info) { + // Fired on our I/O thread. SctpTransport::OnPacketReceived() gets + // a packet containing acknowledgments, which goes into usrsctp_conninput, + // and then back here. 
+ SctpTransport* transport = GetTransportFromSocket(sock); + if (!transport) { + RTC_LOG(LS_ERROR) + << "SendThresholdCallback: Failed to get transport for socket " + << sock << "; possibly was already destroyed."; + return 0; + } + // Sanity check that both methods of getting the SctpTransport pointer + // yield the same result. + RTC_CHECK_EQ(transport, static_cast(ulp_info)); + transport->OnSendThresholdCallback(); + return 0; + } }; SctpTransport::SctpTransport(rtc::Thread* network_thread, @@ -870,9 +914,11 @@ bool SctpTransport::ConfigureSctpSocket() { } // Subscribe to SCTP event notifications. + // TODO(crbug.com/1137936): Subscribe to SCTP_SEND_FAILED_EVENT once deadlock + // is fixed upstream, or we switch to the upcall API: + // https://github.com/sctplab/usrsctp/issues/537 int event_types[] = {SCTP_ASSOC_CHANGE, SCTP_PEER_ADDR_CHANGE, - SCTP_SEND_FAILED_EVENT, SCTP_SENDER_DRY_EVENT, - SCTP_STREAM_RESET_EVENT}; + SCTP_SENDER_DRY_EVENT, SCTP_STREAM_RESET_EVENT}; struct sctp_event event = {0}; event.se_assoc_id = SCTP_ALL_ASSOC; event.se_on = 1; @@ -1102,8 +1148,8 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, // If data is NULL, the SCTP association has been closed. if (!data) { RTC_LOG(LS_INFO) << debug_name_ - << "->OnSctpInboundPacket(...): " - "No data, closing."; + << "->OnDataOrNotificationFromSctp(...): " + "No data; association closed."; return kSctpSuccessReturn; } @@ -1112,9 +1158,10 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, // be handled early and entirely separate from the reassembly // process. 
if (flags & MSG_NOTIFICATION) { - RTC_LOG(LS_VERBOSE) << debug_name_ - << "->OnSctpInboundPacket(...): SCTP notification" - << " length=" << length; + RTC_LOG(LS_VERBOSE) + << debug_name_ + << "->OnDataOrNotificationFromSctp(...): SCTP notification" + << " length=" << length; // Copy and dispatch asynchronously rtc::CopyOnWriteBuffer notification(reinterpret_cast(data), @@ -1128,7 +1175,7 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, // Log data chunk const uint32_t ppid = rtc::NetworkToHost32(rcv.rcv_ppid); RTC_LOG(LS_VERBOSE) << debug_name_ - << "->OnSctpInboundPacket(...): SCTP data chunk" + << "->OnDataOrNotificationFromSctp(...): SCTP data chunk" << " length=" << length << ", sid=" << rcv.rcv_sid << ", ppid=" << ppid << ", ssn=" << rcv.rcv_ssn << ", cum-tsn=" << rcv.rcv_cumtsn @@ -1219,14 +1266,31 @@ void SctpTransport::OnDataFromSctpToTransport( void SctpTransport::OnNotificationFromSctp( const rtc::CopyOnWriteBuffer& buffer) { RTC_DCHECK_RUN_ON(network_thread_); + if (buffer.size() < sizeof(sctp_notification::sn_header)) { + RTC_LOG(LS_ERROR) << "SCTP notification is shorter than header size: " + << buffer.size(); + return; + } + const sctp_notification& notification = reinterpret_cast(*buffer.data()); - RTC_DCHECK(notification.sn_header.sn_length == buffer.size()); + if (buffer.size() != notification.sn_header.sn_length) { + RTC_LOG(LS_ERROR) << "SCTP notification length (" << buffer.size() + << ") does not match sn_length field (" + << notification.sn_header.sn_length << ")."; + return; + } // TODO(ldixon): handle notifications appropriately. 
switch (notification.sn_header.sn_type) { case SCTP_ASSOC_CHANGE: RTC_LOG(LS_VERBOSE) << "SCTP_ASSOC_CHANGE"; + if (buffer.size() < sizeof(notification.sn_assoc_change)) { + RTC_LOG(LS_ERROR) + << "SCTP_ASSOC_CHANGE notification has less than required length: " + << buffer.size(); + return; + } OnNotificationAssocChange(notification.sn_assoc_change); break; case SCTP_REMOTE_ERROR: @@ -1253,6 +1317,12 @@ void SctpTransport::OnNotificationFromSctp( RTC_LOG(LS_INFO) << "SCTP_NOTIFICATIONS_STOPPED_EVENT"; break; case SCTP_SEND_FAILED_EVENT: { + if (buffer.size() < sizeof(notification.sn_send_failed_event)) { + RTC_LOG(LS_ERROR) << "SCTP_SEND_FAILED_EVENT notification has less " + "than required length: " + << buffer.size(); + return; + } const struct sctp_send_failed_event& ssfe = notification.sn_send_failed_event; RTC_LOG(LS_WARNING) << "SCTP_SEND_FAILED_EVENT: message with" @@ -1265,6 +1335,12 @@ void SctpTransport::OnNotificationFromSctp( break; } case SCTP_STREAM_RESET_EVENT: + if (buffer.size() < sizeof(notification.sn_strreset_event)) { + RTC_LOG(LS_ERROR) << "SCTP_STREAM_RESET_EVENT notification has less " + "than required length: " + << buffer.size(); + return; + } OnStreamResetEvent(¬ification.sn_strreset_event); break; case SCTP_ASSOC_RESET_EVENT: diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.h b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.h index 38029ffeb..54542af6b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.h @@ -21,6 +21,7 @@ #include #include "absl/types/optional.h" +#include "api/transport/sctp_transport_factory_interface.h" #include "rtc_base/async_invoker.h" #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" @@ -283,7 +284,7 @@ class SctpTransport : public SctpTransportInternal, RTC_DISALLOW_COPY_AND_ASSIGN(SctpTransport); }; -class SctpTransportFactory : public SctpTransportInternalFactory { +class 
SctpTransportFactory : public webrtc::SctpTransportFactoryInterface { public: explicit SctpTransportFactory(rtc::Thread* network_thread) : network_thread_(network_thread) {} diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h index b0e0e0f7e..dc8ac4558 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h @@ -142,18 +142,6 @@ class SctpTransportInternal { virtual void set_debug_name_for_testing(const char* debug_name) = 0; }; -// Factory class which can be used to allow fake SctpTransports to be injected -// for testing. Or, theoretically, SctpTransportInternal implementations that -// use something other than usrsctp. -class SctpTransportInternalFactory { - public: - virtual ~SctpTransportInternalFactory() {} - - // Create an SCTP transport using |channel| for the underlying transport. - virtual std::unique_ptr CreateSctpTransport( - rtc::PacketTransportInternal* channel) = 0; -}; - } // namespace cricket #endif // MEDIA_SCTP_SCTP_TRANSPORT_INTERNAL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/async_audio_processing/async_audio_processing.cc b/TMessagesProj/jni/voip/webrtc/modules/async_audio_processing/async_audio_processing.cc new file mode 100644 index 000000000..9452f3bcf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/async_audio_processing/async_audio_processing.cc @@ -0,0 +1,61 @@ + +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/async_audio_processing/async_audio_processing.h" + +#include + +#include "api/audio/audio_frame.h" +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +AsyncAudioProcessing::Factory::~Factory() = default; +AsyncAudioProcessing::Factory::Factory(AudioFrameProcessor& frame_processor, + TaskQueueFactory& task_queue_factory) + : frame_processor_(frame_processor), + task_queue_factory_(task_queue_factory) {} + +std::unique_ptr +AsyncAudioProcessing::Factory::CreateAsyncAudioProcessing( + AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback) { + return std::make_unique( + frame_processor_, task_queue_factory_, + std::move(on_frame_processed_callback)); +} + +AsyncAudioProcessing::~AsyncAudioProcessing() { + frame_processor_.SetSink(nullptr); +} + +AsyncAudioProcessing::AsyncAudioProcessing( + AudioFrameProcessor& frame_processor, + TaskQueueFactory& task_queue_factory, + AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback) + : on_frame_processed_callback_(std::move(on_frame_processed_callback)), + frame_processor_(frame_processor), + task_queue_(task_queue_factory.CreateTaskQueue( + "AsyncAudioProcessing", + TaskQueueFactory::Priority::NORMAL)) { + frame_processor_.SetSink([this](std::unique_ptr frame) { + task_queue_.PostTask([this, frame = std::move(frame)]() mutable { + on_frame_processed_callback_(std::move(frame)); + }); + }); +} + +void AsyncAudioProcessing::Process(std::unique_ptr frame) { + task_queue_.PostTask([this, frame = std::move(frame)]() mutable { + frame_processor_.Process(std::move(frame)); + }); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/async_audio_processing/async_audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/async_audio_processing/async_audio_processing.h new file mode 100644 index 000000000..7e09d69f1 --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/modules/async_audio_processing/async_audio_processing.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_ASYNC_AUDIO_PROCESSING_ASYNC_AUDIO_PROCESSING_H_ +#define MODULES_ASYNC_AUDIO_PROCESSING_ASYNC_AUDIO_PROCESSING_H_ + +#include + +#include "api/audio/audio_frame_processor.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/task_queue.h" + +namespace webrtc { + +class AudioFrame; +class TaskQueueFactory; + +// Helper class taking care of interactions with AudioFrameProcessor +// in asynchronous manner. Offloads AudioFrameProcessor::Process calls +// to a dedicated task queue. Makes sure that it's always safe for +// AudioFrameProcessor to pass processed frames back to its sink. +class AsyncAudioProcessing final { + public: + // Helper class passing AudioFrameProcessor and TaskQueueFactory into + // AsyncAudioProcessing constructor. 
+ class Factory : public rtc::RefCountInterface { + public: + Factory(const Factory&) = delete; + Factory& operator=(const Factory&) = delete; + + ~Factory(); + Factory(AudioFrameProcessor& frame_processor, + TaskQueueFactory& task_queue_factory); + + std::unique_ptr CreateAsyncAudioProcessing( + AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback); + + private: + AudioFrameProcessor& frame_processor_; + TaskQueueFactory& task_queue_factory_; + }; + + AsyncAudioProcessing(const AsyncAudioProcessing&) = delete; + AsyncAudioProcessing& operator=(const AsyncAudioProcessing&) = delete; + + ~AsyncAudioProcessing(); + + // Creates AsyncAudioProcessing which will pass audio frames to + // |frame_processor| on |task_queue_| and reply with processed frames passed + // into |on_frame_processed_callback|, which is posted back onto + // |task_queue_|. |task_queue_| is created using the provided + // |task_queue_factory|. + AsyncAudioProcessing( + AudioFrameProcessor& frame_processor, + TaskQueueFactory& task_queue_factory, + AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback); + + // Accepts |frame| for asynchronous processing. Thread-safe. 
+ void Process(std::unique_ptr frame); + + private: + AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback_; + AudioFrameProcessor& frame_processor_; + rtc::TaskQueue task_queue_; +}; + +} // namespace webrtc + +#endif // MODULES_ASYNC_AUDIO_PROCESSING_ASYNC_AUDIO_PROCESSING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc index 33142c783..0e615cae8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc @@ -244,26 +244,37 @@ absl::optional> AcmReceiver::LastDecoder() return std::make_pair(last_decoder_->payload_type, last_decoder_->sdp_format); } -void AcmReceiver::GetNetworkStatistics(NetworkStatistics* acm_stat) const { +void AcmReceiver::GetNetworkStatistics( + NetworkStatistics* acm_stat, + bool get_and_clear_legacy_stats /* = true */) const { NetEqNetworkStatistics neteq_stat; - // NetEq function always returns zero, so we don't check the return value. - neteq_->NetworkStatistics(&neteq_stat); + if (get_and_clear_legacy_stats) { + // NetEq function always returns zero, so we don't check the return value. 
+ neteq_->NetworkStatistics(&neteq_stat); + acm_stat->currentExpandRate = neteq_stat.expand_rate; + acm_stat->currentSpeechExpandRate = neteq_stat.speech_expand_rate; + acm_stat->currentPreemptiveRate = neteq_stat.preemptive_rate; + acm_stat->currentAccelerateRate = neteq_stat.accelerate_rate; + acm_stat->currentSecondaryDecodedRate = neteq_stat.secondary_decoded_rate; + acm_stat->currentSecondaryDiscardedRate = + neteq_stat.secondary_discarded_rate; + acm_stat->meanWaitingTimeMs = neteq_stat.mean_waiting_time_ms; + acm_stat->maxWaitingTimeMs = neteq_stat.max_waiting_time_ms; + } else { + neteq_stat = neteq_->CurrentNetworkStatistics(); + acm_stat->currentExpandRate = 0; + acm_stat->currentSpeechExpandRate = 0; + acm_stat->currentPreemptiveRate = 0; + acm_stat->currentAccelerateRate = 0; + acm_stat->currentSecondaryDecodedRate = 0; + acm_stat->currentSecondaryDiscardedRate = 0; + acm_stat->meanWaitingTimeMs = -1; + acm_stat->maxWaitingTimeMs = 1; + } acm_stat->currentBufferSize = neteq_stat.current_buffer_size_ms; acm_stat->preferredBufferSize = neteq_stat.preferred_buffer_size_ms; acm_stat->jitterPeaksFound = neteq_stat.jitter_peaks_found ? 
true : false; - acm_stat->currentPacketLossRate = neteq_stat.packet_loss_rate; - acm_stat->currentExpandRate = neteq_stat.expand_rate; - acm_stat->currentSpeechExpandRate = neteq_stat.speech_expand_rate; - acm_stat->currentPreemptiveRate = neteq_stat.preemptive_rate; - acm_stat->currentAccelerateRate = neteq_stat.accelerate_rate; - acm_stat->currentSecondaryDecodedRate = neteq_stat.secondary_decoded_rate; - acm_stat->currentSecondaryDiscardedRate = neteq_stat.secondary_discarded_rate; - acm_stat->addedSamples = neteq_stat.added_zero_samples; - acm_stat->meanWaitingTimeMs = neteq_stat.mean_waiting_time_ms; - acm_stat->medianWaitingTimeMs = neteq_stat.median_waiting_time_ms; - acm_stat->minWaitingTimeMs = neteq_stat.min_waiting_time_ms; - acm_stat->maxWaitingTimeMs = neteq_stat.max_waiting_time_ms; NetEqLifetimeStatistics neteq_lifetime_stat = neteq_->GetLifetimeStatistics(); acm_stat->totalSamplesReceived = neteq_lifetime_stat.total_samples_received; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.h index d451a94ef..19dc57758 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.h @@ -138,7 +138,8 @@ class AcmReceiver { // Output: // - statistics : The current network statistics. // - void GetNetworkStatistics(NetworkStatistics* statistics) const; + void GetNetworkStatistics(NetworkStatistics* statistics, + bool get_and_clear_legacy_stats = true) const; // // Flushes the NetEq packet and speech buffers. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.cc index eee6f403a..40c865906 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.cc @@ -53,11 +53,6 @@ void BitrateController::MakeDecision(AudioEncoderRuntimeConfig* config) { // Decision on |bitrate_bps| should not have been made. RTC_DCHECK(!config->bitrate_bps); if (target_audio_bitrate_bps_ && overhead_bytes_per_packet_) { - // Current implementation of BitrateController can only work when - // |metrics.target_audio_bitrate_bps| includes overhead is enabled. This is - // currently governed by the following field trial. - RTC_DCHECK( - webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")); if (config->frame_length_ms) frame_length_ms_ = *config->frame_length_ms; int offset = config->last_fl_change_increase diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc index 032de2024..9fbf42cee 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc @@ -34,7 +34,7 @@ int GetIlbcBitrate(int ptime) { // 50 bytes per frame of 30 ms => (approx) 13333 bits/s. 
return 13333; default: - FATAL(); + RTC_CHECK_NOTREACHED(); } } @@ -144,7 +144,7 @@ size_t AudioEncoderIlbcImpl::RequiredOutputSizeBytes() const { case 6: return 2 * 50; default: - FATAL(); + RTC_CHECK_NOTREACHED(); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c index 326c766a9..8ebe51308 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c @@ -260,9 +260,10 @@ size_t WebRtcIlbcfix_DecodePlc(IlbcDecoderInstance* iLBCdec_inst, for (i=0;iblockl], &dummy, - (IlbcDecoder*)iLBCdec_inst, 0)); + (IlbcDecoder*)iLBCdec_inst, 0); + RTC_CHECK_EQ(result, 0); } return (noOfLostFrames*((IlbcDecoder*)iLBCdec_inst)->blockl); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h index d99e9c893..8bde0e34a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h @@ -93,7 +93,7 @@ class AudioEncoderIsacT final : public AudioEncoder { // Cache the value of the "WebRTC-SendSideBwe-WithOverhead" field trial. const bool send_side_bwe_with_overhead_ = - field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead"); + !field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead"); // When we send a packet, expect this many bytes of headers to be added to it. 
// Start out with a reasonable default that we can use until we receive a real diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c index 36fbdd6bb..067d8f358 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c @@ -26,7 +26,6 @@ #include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" #include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" #include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "system_wrappers/include/cpu_features_wrapper.h" // Declare function pointers. FilterMaLoopFix WebRtcIsacfix_FilterMaLoopFix; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc index 2b1692071..203cb5aeb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc @@ -356,7 +356,7 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl( std::unique_ptr bitrate_smoother) : payload_type_(payload_type), send_side_bwe_with_overhead_( - webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")), + !webrtc::field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead")), use_stable_target_for_adaptation_(!webrtc::field_trial::IsDisabled( "WebRTC-Audio-StableTargetAdaptation")), adjust_bandwidth_( @@ -367,7 +367,8 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl( inst_(nullptr), packet_loss_fraction_smoother_(new PacketLossFractionSmoother()), audio_network_adaptor_creator_(audio_network_adaptor_creator), - bitrate_smoother_(std::move(bitrate_smoother)) { + bitrate_smoother_(std::move(bitrate_smoother)), + 
consecutive_dtx_frames_(0) { RTC_DCHECK(0 <= payload_type && payload_type <= 127); // Sanity check of the redundant payload type field that we want to get rid @@ -589,7 +590,6 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl( Num10msFramesPerPacket() * SamplesPer10msFrame()); const size_t max_encoded_bytes = SufficientOutputBufferSize(); - const size_t start_offset_bytes = encoded->size(); EncodedInfo info; info.encoded_bytes = encoded->AppendData( max_encoded_bytes, [&](rtc::ArrayView encoded) { @@ -604,6 +604,8 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl( }); input_buffer_.clear(); + bool dtx_frame = (info.encoded_bytes <= 2); + // Will use new packet size for next encoding. config_.frame_size_ms = next_frame_length_ms_; @@ -618,18 +620,14 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl( info.encoded_timestamp = first_timestamp_in_buffer_; info.payload_type = payload_type_; info.send_even_if_empty = true; // Allows Opus to send empty packets. + // After 20 DTX frames (MAX_CONSECUTIVE_DTX) Opus will send a frame + // coding the background noise. Avoid flagging this frame as speech + // (even though there is a probability of the frame being speech). + info.speech = !dtx_frame && (consecutive_dtx_frames_ != 20); info.encoder_type = CodecType::kOpus; - // Extract the VAD result from the encoded packet. - int has_voice = WebRtcOpus_PacketHasVoiceActivity( - &encoded->data()[start_offset_bytes], info.encoded_bytes); - if (has_voice == -1) { - // CELT mode packet or there was an error. This had set the speech flag to - // true historically. - info.speech = true; - } else { - info.speech = has_voice; - } + // Increase or reset DTX counter. + consecutive_dtx_frames_ = (dtx_frame) ? 
(consecutive_dtx_frames_ + 1) : (0); return info; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h index dc955cec2..ab954feba 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h @@ -172,6 +172,7 @@ class AudioEncoderOpusImpl final : public AudioEncoder { absl::optional overhead_bytes_per_packet_; const std::unique_ptr bitrate_smoother_; absl::optional bitrate_smoother_last_update_time_; + int consecutive_dtx_frames_; friend struct AudioEncoderOpus; RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderOpusImpl); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc new file mode 100644 index 000000000..1923647fb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc @@ -0,0 +1,250 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include "modules/audio_coding/codecs/opus/opus_interface.h" +#include "rtc_base/format_macros.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +using std::get; +using std::string; +using std::tuple; +using ::testing::TestWithParam; + +namespace webrtc { + +// Define coding parameter as . 
+typedef tuple coding_param; +typedef struct mode mode; + +struct mode { + bool fec; + uint8_t target_packet_loss_rate; +}; + +const int kOpusBlockDurationMs = 20; +const int kOpusSamplingKhz = 48; + +class OpusFecTest : public TestWithParam { + protected: + OpusFecTest(); + + void SetUp() override; + void TearDown() override; + + virtual void EncodeABlock(); + + virtual void DecodeABlock(bool lost_previous, bool lost_current); + + int block_duration_ms_; + int sampling_khz_; + size_t block_length_sample_; + + size_t channels_; + int bit_rate_; + + size_t data_pointer_; + size_t loop_length_samples_; + size_t max_bytes_; + size_t encoded_bytes_; + + WebRtcOpusEncInst* opus_encoder_; + WebRtcOpusDecInst* opus_decoder_; + + string in_filename_; + + std::unique_ptr in_data_; + std::unique_ptr out_data_; + std::unique_ptr bit_stream_; +}; + +void OpusFecTest::SetUp() { + channels_ = get<0>(GetParam()); + bit_rate_ = get<1>(GetParam()); + printf("Coding %" RTC_PRIuS " channel signal at %d bps.\n", channels_, + bit_rate_); + + in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam())); + + FILE* fp = fopen(in_filename_.c_str(), "rb"); + ASSERT_FALSE(fp == NULL); + + // Obtain file size. + fseek(fp, 0, SEEK_END); + loop_length_samples_ = ftell(fp) / sizeof(int16_t); + rewind(fp); + + // Allocate memory to contain the whole file. + in_data_.reset( + new int16_t[loop_length_samples_ + block_length_sample_ * channels_]); + + // Copy the file into the buffer. + ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp), + loop_length_samples_); + fclose(fp); + + // The audio will be used in a looped manner. To ease the acquisition of an + // audio frame that crosses the end of the excerpt, we add an extra block + // length of samples to the end of the array, starting over again from the + // beginning of the array. Audio frames cross the end of the excerpt always + // appear as a continuum of memory. 
+ memcpy(&in_data_[loop_length_samples_], &in_data_[0], + block_length_sample_ * channels_ * sizeof(int16_t)); + + // Maximum number of bytes in output bitstream. + max_bytes_ = block_length_sample_ * channels_ * sizeof(int16_t); + + out_data_.reset(new int16_t[2 * block_length_sample_ * channels_]); + bit_stream_.reset(new uint8_t[max_bytes_]); + + // If channels_ == 1, use Opus VOIP mode, otherwise, audio mode. + int app = channels_ == 1 ? 0 : 1; + + // Create encoder memory. + EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_, app, 48000)); + EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_, 48000)); + // Set bitrate. + EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_)); +} + +void OpusFecTest::TearDown() { + // Free memory. + EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_)); + EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_)); +} + +OpusFecTest::OpusFecTest() + : block_duration_ms_(kOpusBlockDurationMs), + sampling_khz_(kOpusSamplingKhz), + block_length_sample_( + static_cast(block_duration_ms_ * sampling_khz_)), + data_pointer_(0), + max_bytes_(0), + encoded_bytes_(0), + opus_encoder_(NULL), + opus_decoder_(NULL) {} + +void OpusFecTest::EncodeABlock() { + int value = + WebRtcOpus_Encode(opus_encoder_, &in_data_[data_pointer_], + block_length_sample_, max_bytes_, &bit_stream_[0]); + EXPECT_GT(value, 0); + + encoded_bytes_ = static_cast(value); +} + +void OpusFecTest::DecodeABlock(bool lost_previous, bool lost_current) { + int16_t audio_type; + int value_1 = 0, value_2 = 0; + + if (lost_previous) { + // Decode previous frame. + if (!lost_current && + WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_) == 1) { + value_1 = + WebRtcOpus_DecodeFec(opus_decoder_, &bit_stream_[0], encoded_bytes_, + &out_data_[0], &audio_type); + } else { + // Call decoder PLC. 
+ while (value_1 < static_cast(block_length_sample_)) { + int ret = WebRtcOpus_Decode(opus_decoder_, NULL, 0, &out_data_[value_1], + &audio_type); + EXPECT_EQ(ret, sampling_khz_ * 10); // Should return 10 ms of samples. + value_1 += ret; + } + } + EXPECT_EQ(static_cast(block_length_sample_), value_1); + } + + if (!lost_current) { + // Decode current frame. + value_2 = WebRtcOpus_Decode(opus_decoder_, &bit_stream_[0], encoded_bytes_, + &out_data_[value_1 * channels_], &audio_type); + EXPECT_EQ(static_cast(block_length_sample_), value_2); + } +} + +TEST_P(OpusFecTest, RandomPacketLossTest) { + const int kDurationMs = 200000; + int time_now_ms, fec_frames; + int actual_packet_loss_rate; + bool lost_current, lost_previous; + mode mode_set[3] = {{true, 0}, {false, 0}, {true, 50}}; + + lost_current = false; + for (int i = 0; i < 3; i++) { + if (mode_set[i].fec) { + EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_encoder_)); + EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate( + opus_encoder_, mode_set[i].target_packet_loss_rate)); + printf("FEC is ON, target at packet loss rate %d percent.\n", + mode_set[i].target_packet_loss_rate); + } else { + EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_encoder_)); + printf("FEC is OFF.\n"); + } + // In this test, we let the target packet loss rate match the actual rate. + actual_packet_loss_rate = mode_set[i].target_packet_loss_rate; + // Run every mode a certain time. + time_now_ms = 0; + fec_frames = 0; + while (time_now_ms < kDurationMs) { + // Encode & decode. + EncodeABlock(); + + // Check if payload has FEC. + int fec = WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_); + + // If FEC is disabled or the target packet loss rate is set to 0, there + // should be no FEC in the bit stream. 
+ if (!mode_set[i].fec || mode_set[i].target_packet_loss_rate == 0) { + EXPECT_EQ(fec, 0); + } else if (fec == 1) { + fec_frames++; + } + + lost_previous = lost_current; + lost_current = rand() < actual_packet_loss_rate * (RAND_MAX / 100); + DecodeABlock(lost_previous, lost_current); + + time_now_ms += block_duration_ms_; + + // |data_pointer_| is incremented and wrapped across + // |loop_length_samples_|. + data_pointer_ = (data_pointer_ + block_length_sample_ * channels_) % + loop_length_samples_; + } + if (mode_set[i].fec) { + printf("%.2f percent frames has FEC.\n", + static_cast(fec_frames) * block_duration_ms_ / 2000); + } + } +} + +const coding_param param_set[] = { + std::make_tuple(1, + 64000, + string("audio_coding/testfile32kHz"), + string("pcm")), + std::make_tuple(1, + 32000, + string("audio_coding/testfile32kHz"), + string("pcm")), + std::make_tuple(2, + 64000, + string("audio_coding/teststereo32kHz"), + string("pcm"))}; + +// 64 kbps, stereo +INSTANTIATE_TEST_SUITE_P(AllTest, OpusFecTest, ::testing::ValuesIn(param_set)); + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc index 455f17546..ca39ed823 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc @@ -767,7 +767,7 @@ int WebRtcOpus_PacketHasVoiceActivity(const uint8_t* payload, int silk_frames = WebRtcOpus_NumSilkFrames(payload); if (silk_frames == 0) - return 0; + return -1; const int channels = opus_packet_get_nb_channels(payload); RTC_DCHECK(channels == 1 || channels == 2); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc new file mode 100644 index 000000000..4477e8a5f --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_coding/codecs/opus/opus_interface.h" +#include "modules/audio_coding/codecs/tools/audio_codec_speed_test.h" + +using ::std::string; + +namespace webrtc { + +static const int kOpusBlockDurationMs = 20; +static const int kOpusSamplingKhz = 48; + +class OpusSpeedTest : public AudioCodecSpeedTest { + protected: + OpusSpeedTest(); + void SetUp() override; + void TearDown() override; + float EncodeABlock(int16_t* in_data, + uint8_t* bit_stream, + size_t max_bytes, + size_t* encoded_bytes) override; + float DecodeABlock(const uint8_t* bit_stream, + size_t encoded_bytes, + int16_t* out_data) override; + WebRtcOpusEncInst* opus_encoder_; + WebRtcOpusDecInst* opus_decoder_; +}; + +OpusSpeedTest::OpusSpeedTest() + : AudioCodecSpeedTest(kOpusBlockDurationMs, + kOpusSamplingKhz, + kOpusSamplingKhz), + opus_encoder_(NULL), + opus_decoder_(NULL) {} + +void OpusSpeedTest::SetUp() { + AudioCodecSpeedTest::SetUp(); + // If channels_ == 1, use Opus VOIP mode, otherwise, audio mode. + int app = channels_ == 1 ? 0 : 1; + /* Create encoder memory. */ + EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_, app, 48000)); + EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_, 48000)); + /* Set bitrate. */ + EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_)); +} + +void OpusSpeedTest::TearDown() { + AudioCodecSpeedTest::TearDown(); + /* Free memory. 
*/ + EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_)); + EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_)); +} + +float OpusSpeedTest::EncodeABlock(int16_t* in_data, + uint8_t* bit_stream, + size_t max_bytes, + size_t* encoded_bytes) { + clock_t clocks = clock(); + int value = WebRtcOpus_Encode(opus_encoder_, in_data, input_length_sample_, + max_bytes, bit_stream); + clocks = clock() - clocks; + EXPECT_GT(value, 0); + *encoded_bytes = static_cast(value); + return 1000.0 * clocks / CLOCKS_PER_SEC; +} + +float OpusSpeedTest::DecodeABlock(const uint8_t* bit_stream, + size_t encoded_bytes, + int16_t* out_data) { + int value; + int16_t audio_type; + clock_t clocks = clock(); + value = WebRtcOpus_Decode(opus_decoder_, bit_stream, encoded_bytes, out_data, + &audio_type); + clocks = clock() - clocks; + EXPECT_EQ(output_length_sample_, static_cast(value)); + return 1000.0 * clocks / CLOCKS_PER_SEC; +} + +/* Test audio length in second. */ +constexpr size_t kDurationSec = 400; + +#define ADD_TEST(complexity) \ + TEST_P(OpusSpeedTest, OpusSetComplexityTest##complexity) { \ + /* Set complexity. */ \ + printf("Setting complexity to %d ...\n", complexity); \ + EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_encoder_, complexity)); \ + EncodeDecode(kDurationSec); \ + } + +ADD_TEST(10) +ADD_TEST(9) +ADD_TEST(8) +ADD_TEST(7) +ADD_TEST(6) +ADD_TEST(5) +ADD_TEST(4) +ADD_TEST(3) +ADD_TEST(2) +ADD_TEST(1) +ADD_TEST(0) + +#define ADD_BANDWIDTH_TEST(bandwidth) \ + TEST_P(OpusSpeedTest, OpusSetBandwidthTest##bandwidth) { \ + /* Set bandwidth. 
*/ \ + printf("Setting bandwidth to %d ...\n", bandwidth); \ + EXPECT_EQ(0, WebRtcOpus_SetBandwidth(opus_encoder_, bandwidth)); \ + EncodeDecode(kDurationSec); \ + } + +ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_NARROWBAND) +ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_MEDIUMBAND) +ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_WIDEBAND) +ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_SUPERWIDEBAND) +ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_FULLBAND) + +// List all test cases: (channel, bit rat, filename, extension). +const coding_param param_set[] = { + std::make_tuple(1, + 64000, + string("audio_coding/speech_mono_32_48kHz"), + string("pcm"), + true), + std::make_tuple(1, + 32000, + string("audio_coding/speech_mono_32_48kHz"), + string("pcm"), + true), + std::make_tuple(2, + 64000, + string("audio_coding/music_stereo_48kHz"), + string("pcm"), + true)}; + +INSTANTIATE_TEST_SUITE_P(AllTest, + OpusSpeedTest, + ::testing::ValuesIn(param_set)); + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/audio_ring_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/audio_ring_buffer.cc deleted file mode 100644 index 2a71b43d2..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/audio_ring_buffer.cc +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/opus/test/audio_ring_buffer.h" - -#include "common_audio/ring_buffer.h" -#include "rtc_base/checks.h" - -// This is a simple multi-channel wrapper over the ring_buffer.h C interface. 
- -namespace webrtc { - -AudioRingBuffer::AudioRingBuffer(size_t channels, size_t max_frames) { - buffers_.reserve(channels); - for (size_t i = 0; i < channels; ++i) - buffers_.push_back(WebRtc_CreateBuffer(max_frames, sizeof(float))); -} - -AudioRingBuffer::~AudioRingBuffer() { - for (auto* buf : buffers_) - WebRtc_FreeBuffer(buf); -} - -void AudioRingBuffer::Write(const float* const* data, - size_t channels, - size_t frames) { - RTC_DCHECK_EQ(buffers_.size(), channels); - for (size_t i = 0; i < channels; ++i) { - const size_t written = WebRtc_WriteBuffer(buffers_[i], data[i], frames); - RTC_CHECK_EQ(written, frames); - } -} - -void AudioRingBuffer::Read(float* const* data, size_t channels, size_t frames) { - RTC_DCHECK_EQ(buffers_.size(), channels); - for (size_t i = 0; i < channels; ++i) { - const size_t read = - WebRtc_ReadBuffer(buffers_[i], nullptr, data[i], frames); - RTC_CHECK_EQ(read, frames); - } -} - -size_t AudioRingBuffer::ReadFramesAvailable() const { - // All buffers have the same amount available. - return WebRtc_available_read(buffers_[0]); -} - -size_t AudioRingBuffer::WriteFramesAvailable() const { - // All buffers have the same amount available. 
- return WebRtc_available_write(buffers_[0]); -} - -void AudioRingBuffer::MoveReadPositionForward(size_t frames) { - for (auto* buf : buffers_) { - const size_t moved = - static_cast(WebRtc_MoveReadPtr(buf, static_cast(frames))); - RTC_CHECK_EQ(moved, frames); - } -} - -void AudioRingBuffer::MoveReadPositionBackward(size_t frames) { - for (auto* buf : buffers_) { - const size_t moved = static_cast( - -WebRtc_MoveReadPtr(buf, -static_cast(frames))); - RTC_CHECK_EQ(moved, frames); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/audio_ring_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/audio_ring_buffer.h deleted file mode 100644 index a89dfd8ca..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/audio_ring_buffer.h +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_AUDIO_CODING_CODECS_OPUS_TEST_AUDIO_RING_BUFFER_H_ -#define MODULES_AUDIO_CODING_CODECS_OPUS_TEST_AUDIO_RING_BUFFER_H_ - -#include - -#include -#include - -struct RingBuffer; - -namespace webrtc { - -// A ring buffer tailored for float deinterleaved audio. Any operation that -// cannot be performed as requested will cause a crash (e.g. insufficient data -// in the buffer to fulfill a read request.) -class AudioRingBuffer final { - public: - // Specify the number of channels and maximum number of frames the buffer will - // contain. - AudioRingBuffer(size_t channels, size_t max_frames); - ~AudioRingBuffer(); - - // Copies |data| to the buffer and advances the write pointer. 
|channels| must - // be the same as at creation time. - void Write(const float* const* data, size_t channels, size_t frames); - - // Copies from the buffer to |data| and advances the read pointer. |channels| - // must be the same as at creation time. - void Read(float* const* data, size_t channels, size_t frames); - - size_t ReadFramesAvailable() const; - size_t WriteFramesAvailable() const; - - // Moves the read position. The forward version advances the read pointer - // towards the write pointer and the backward verison withdraws the read - // pointer away from the write pointer (i.e. flushing and stuffing the buffer - // respectively.) - void MoveReadPositionForward(size_t frames); - void MoveReadPositionBackward(size_t frames); - - private: - // TODO(kwiberg): Use std::vector> instead. - std::vector buffers_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_OPUS_TEST_AUDIO_RING_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/blocker.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/blocker.cc deleted file mode 100644 index 7f102b54a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/blocker.cc +++ /dev/null @@ -1,215 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/opus/test/blocker.h" - -#include - -#include "rtc_base/checks.h" - -namespace { - -// Adds |a| and |b| frame by frame into |result| (basically matrix addition). 
-void AddFrames(const float* const* a, - size_t a_start_index, - const float* const* b, - int b_start_index, - size_t num_frames, - size_t num_channels, - float* const* result, - size_t result_start_index) { - for (size_t i = 0; i < num_channels; ++i) { - for (size_t j = 0; j < num_frames; ++j) { - result[i][j + result_start_index] = - a[i][j + a_start_index] + b[i][j + b_start_index]; - } - } -} - -// Copies |src| into |dst| channel by channel. -void CopyFrames(const float* const* src, - size_t src_start_index, - size_t num_frames, - size_t num_channels, - float* const* dst, - size_t dst_start_index) { - for (size_t i = 0; i < num_channels; ++i) { - memcpy(&dst[i][dst_start_index], &src[i][src_start_index], - num_frames * sizeof(dst[i][dst_start_index])); - } -} - -// Moves |src| into |dst| channel by channel. -void MoveFrames(const float* const* src, - size_t src_start_index, - size_t num_frames, - size_t num_channels, - float* const* dst, - size_t dst_start_index) { - for (size_t i = 0; i < num_channels; ++i) { - memmove(&dst[i][dst_start_index], &src[i][src_start_index], - num_frames * sizeof(dst[i][dst_start_index])); - } -} - -void ZeroOut(float* const* buffer, - size_t starting_idx, - size_t num_frames, - size_t num_channels) { - for (size_t i = 0; i < num_channels; ++i) { - memset(&buffer[i][starting_idx], 0, - num_frames * sizeof(buffer[i][starting_idx])); - } -} - -// Pointwise multiplies each channel of |frames| with |window|. Results are -// stored in |frames|. 
-void ApplyWindow(const float* window, - size_t num_frames, - size_t num_channels, - float* const* frames) { - for (size_t i = 0; i < num_channels; ++i) { - for (size_t j = 0; j < num_frames; ++j) { - frames[i][j] = frames[i][j] * window[j]; - } - } -} - -size_t gcd(size_t a, size_t b) { - size_t tmp; - while (b) { - tmp = a; - a = b; - b = tmp % b; - } - return a; -} - -} // namespace - -namespace webrtc { - -Blocker::Blocker(size_t chunk_size, - size_t block_size, - size_t num_input_channels, - size_t num_output_channels, - const float* window, - size_t shift_amount, - BlockerCallback* callback) - : chunk_size_(chunk_size), - block_size_(block_size), - num_input_channels_(num_input_channels), - num_output_channels_(num_output_channels), - initial_delay_(block_size_ - gcd(chunk_size, shift_amount)), - frame_offset_(0), - input_buffer_(num_input_channels_, chunk_size_ + initial_delay_), - output_buffer_(chunk_size_ + initial_delay_, num_output_channels_), - input_block_(block_size_, num_input_channels_), - output_block_(block_size_, num_output_channels_), - window_(new float[block_size_]), - shift_amount_(shift_amount), - callback_(callback) { - RTC_CHECK_LE(num_output_channels_, num_input_channels_); - RTC_CHECK_LE(shift_amount_, block_size_); - - memcpy(window_.get(), window, block_size_ * sizeof(*window_.get())); - input_buffer_.MoveReadPositionBackward(initial_delay_); -} - -Blocker::~Blocker() = default; - -// When block_size < chunk_size the input and output buffers look like this: -// -// delay* chunk_size chunk_size + delay* -// buffer: <-------------|---------------------|---------------|> -// _a_ _b_ _c_ -// -// On each call to ProcessChunk(): -// 1. New input gets read into sections _b_ and _c_ of the input buffer. -// 2. We block starting from frame_offset. -// 3. We block until we reach a block |bl| that doesn't contain any frames -// from sections _a_ or _b_ of the input buffer. -// 4. 
We window the current block, fire the callback for processing, window -// again, and overlap/add to the output buffer. -// 5. We copy sections _a_ and _b_ of the output buffer into output. -// 6. For both the input and the output buffers, we copy section _c_ into -// section _a_. -// 7. We set the new frame_offset to be the difference between the first frame -// of |bl| and the border between sections _b_ and _c_. -// -// When block_size > chunk_size the input and output buffers look like this: -// -// chunk_size delay* chunk_size + delay* -// buffer: <-------------|---------------------|---------------|> -// _a_ _b_ _c_ -// -// On each call to ProcessChunk(): -// The procedure is the same as above, except for: -// 1. New input gets read into section _c_ of the input buffer. -// 3. We block until we reach a block |bl| that doesn't contain any frames -// from section _a_ of the input buffer. -// 5. We copy section _a_ of the output buffer into output. -// 6. For both the input and the output buffers, we copy sections _b_ and _c_ -// into section _a_ and _b_. -// 7. We set the new frame_offset to be the difference between the first frame -// of |bl| and the border between sections _a_ and _b_. -// -// * delay here refers to inintial_delay_ -// -// TODO(claguna): Look at using ring buffers to eliminate some copies. -void Blocker::ProcessChunk(const float* const* input, - size_t chunk_size, - size_t num_input_channels, - size_t num_output_channels, - float* const* output) { - RTC_CHECK_EQ(chunk_size, chunk_size_); - RTC_CHECK_EQ(num_input_channels, num_input_channels_); - RTC_CHECK_EQ(num_output_channels, num_output_channels_); - - input_buffer_.Write(input, num_input_channels, chunk_size_); - size_t first_frame_in_block = frame_offset_; - - // Loop through blocks. 
- while (first_frame_in_block < chunk_size_) { - input_buffer_.Read(input_block_.channels(), num_input_channels, - block_size_); - input_buffer_.MoveReadPositionBackward(block_size_ - shift_amount_); - - ApplyWindow(window_.get(), block_size_, num_input_channels_, - input_block_.channels()); - callback_->ProcessBlock(input_block_.channels(), block_size_, - num_input_channels_, num_output_channels_, - output_block_.channels()); - ApplyWindow(window_.get(), block_size_, num_output_channels_, - output_block_.channels()); - - AddFrames(output_buffer_.channels(), first_frame_in_block, - output_block_.channels(), 0, block_size_, num_output_channels_, - output_buffer_.channels(), first_frame_in_block); - - first_frame_in_block += shift_amount_; - } - - // Copy output buffer to output - CopyFrames(output_buffer_.channels(), 0, chunk_size_, num_output_channels_, - output, 0); - - // Copy output buffer [chunk_size_, chunk_size_ + initial_delay] - // to output buffer [0, initial_delay], zero the rest. - MoveFrames(output_buffer_.channels(), chunk_size, initial_delay_, - num_output_channels_, output_buffer_.channels(), 0); - ZeroOut(output_buffer_.channels(), initial_delay_, chunk_size_, - num_output_channels_); - - // Calculate new starting frames. - frame_offset_ = first_frame_in_block - chunk_size_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/blocker.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/blocker.h deleted file mode 100644 index 26177bcad..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/blocker.h +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_OPUS_TEST_BLOCKER_H_ -#define MODULES_AUDIO_CODING_CODECS_OPUS_TEST_BLOCKER_H_ - -#include - -#include "common_audio/channel_buffer.h" -#include "modules/audio_coding/codecs/opus/test/audio_ring_buffer.h" - -namespace webrtc { - -// The callback function to process audio in the time domain. Input has already -// been windowed, and output will be windowed. The number of input channels -// must be >= the number of output channels. -class BlockerCallback { - public: - virtual ~BlockerCallback() {} - - virtual void ProcessBlock(const float* const* input, - size_t num_frames, - size_t num_input_channels, - size_t num_output_channels, - float* const* output) = 0; -}; - -// The main purpose of Blocker is to abstract away the fact that often we -// receive a different number of audio frames than our transform takes. For -// example, most FFTs work best when the fft-size is a power of 2, but suppose -// we receive 20ms of audio at a sample rate of 48000. That comes to 960 frames -// of audio, which is not a power of 2. Blocker allows us to specify the -// transform and all other necessary processing via the Process() callback -// function without any constraints on the transform-size -// (read: |block_size_|) or received-audio-size (read: |chunk_size_|). -// We handle this for the multichannel audio case, allowing for different -// numbers of input and output channels (for example, beamforming takes 2 or -// more input channels and returns 1 output channel). Audio signals are -// represented as deinterleaved floats in the range [-1, 1]. 
-// -// Blocker is responsible for: -// - blocking audio while handling potential discontinuities on the edges -// of chunks -// - windowing blocks before sending them to Process() -// - windowing processed blocks, and overlap-adding them together before -// sending back a processed chunk -// -// To use blocker: -// 1. Impelment a BlockerCallback object |bc|. -// 2. Instantiate a Blocker object |b|, passing in |bc|. -// 3. As you receive audio, call b.ProcessChunk() to get processed audio. -// -// A small amount of delay is added to the first received chunk to deal with -// the difference in chunk/block sizes. This delay is <= chunk_size. -// -// Ownership of window is retained by the caller. That is, Blocker makes a -// copy of window and does not attempt to delete it. -class Blocker { - public: - Blocker(size_t chunk_size, - size_t block_size, - size_t num_input_channels, - size_t num_output_channels, - const float* window, - size_t shift_amount, - BlockerCallback* callback); - ~Blocker(); - - void ProcessChunk(const float* const* input, - size_t chunk_size, - size_t num_input_channels, - size_t num_output_channels, - float* const* output); - - size_t initial_delay() const { return initial_delay_; } - - private: - const size_t chunk_size_; - const size_t block_size_; - const size_t num_input_channels_; - const size_t num_output_channels_; - - // The number of frames of delay to add at the beginning of the first chunk. - const size_t initial_delay_; - - // The frame index into the input buffer where the first block should be read - // from. This is necessary because shift_amount_ is not necessarily a - // multiple of chunk_size_, so blocks won't line up at the start of the - // buffer. - size_t frame_offset_; - - // Since blocks nearly always overlap, there are certain blocks that require - // frames from the end of one chunk and the beginning of the next chunk. 
The - // input and output buffers are responsible for saving those frames between - // calls to ProcessChunk(). - // - // Both contain |initial delay| + |chunk_size| frames. The input is a fairly - // standard FIFO, but due to the overlap-add it's harder to use an - // AudioRingBuffer for the output. - AudioRingBuffer input_buffer_; - ChannelBuffer output_buffer_; - - // Space for the input block (can't wrap because of windowing). - ChannelBuffer input_block_; - - // Space for the output block (can't wrap because of overlap/add). - ChannelBuffer output_block_; - - std::unique_ptr window_; - - // The amount of frames between the start of contiguous blocks. For example, - // |shift_amount_| = |block_size_| / 2 for a Hann window. - size_t shift_amount_; - - BlockerCallback* callback_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_OPUS_TEST_BLOCKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/lapped_transform.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/lapped_transform.cc deleted file mode 100644 index b1a6526bb..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/lapped_transform.cc +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/opus/test/lapped_transform.h" - -#include -#include -#include - -#include "common_audio/real_fourier.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -void LappedTransform::BlockThunk::ProcessBlock(const float* const* input, - size_t num_frames, - size_t num_input_channels, - size_t num_output_channels, - float* const* output) { - RTC_CHECK_EQ(num_input_channels, parent_->num_in_channels_); - RTC_CHECK_EQ(num_output_channels, parent_->num_out_channels_); - RTC_CHECK_EQ(parent_->block_length_, num_frames); - - for (size_t i = 0; i < num_input_channels; ++i) { - memcpy(parent_->real_buf_.Row(i), input[i], num_frames * sizeof(*input[0])); - parent_->fft_->Forward(parent_->real_buf_.Row(i), - parent_->cplx_pre_.Row(i)); - } - - size_t block_length = - RealFourier::ComplexLength(RealFourier::FftOrder(num_frames)); - RTC_CHECK_EQ(parent_->cplx_length_, block_length); - parent_->block_processor_->ProcessAudioBlock( - parent_->cplx_pre_.Array(), num_input_channels, parent_->cplx_length_, - num_output_channels, parent_->cplx_post_.Array()); - - for (size_t i = 0; i < num_output_channels; ++i) { - parent_->fft_->Inverse(parent_->cplx_post_.Row(i), - parent_->real_buf_.Row(i)); - memcpy(output[i], parent_->real_buf_.Row(i), - num_frames * sizeof(*input[0])); - } -} - -LappedTransform::LappedTransform(size_t num_in_channels, - size_t num_out_channels, - size_t chunk_length, - const float* window, - size_t block_length, - size_t shift_amount, - Callback* callback) - : blocker_callback_(this), - num_in_channels_(num_in_channels), - num_out_channels_(num_out_channels), - block_length_(block_length), - chunk_length_(chunk_length), - block_processor_(callback), - blocker_(chunk_length_, - block_length_, - num_in_channels_, - num_out_channels_, - window, - shift_amount, - &blocker_callback_), - fft_(RealFourier::Create(RealFourier::FftOrder(block_length_))), - cplx_length_(RealFourier::ComplexLength(fft_->order())), - 
real_buf_(num_in_channels, - block_length_, - RealFourier::kFftBufferAlignment), - cplx_pre_(num_in_channels, - cplx_length_, - RealFourier::kFftBufferAlignment), - cplx_post_(num_out_channels, - cplx_length_, - RealFourier::kFftBufferAlignment) { - RTC_CHECK(num_in_channels_ > 0); - RTC_CHECK_GT(block_length_, 0); - RTC_CHECK_GT(chunk_length_, 0); - RTC_CHECK(block_processor_); - - // block_length_ power of 2? - RTC_CHECK_EQ(0, block_length_ & (block_length_ - 1)); -} - -LappedTransform::~LappedTransform() = default; - -void LappedTransform::ProcessChunk(const float* const* in_chunk, - float* const* out_chunk) { - blocker_.ProcessChunk(in_chunk, chunk_length_, num_in_channels_, - num_out_channels_, out_chunk); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/lapped_transform.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/lapped_transform.h deleted file mode 100644 index 3620df383..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/test/lapped_transform.h +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_OPUS_TEST_LAPPED_TRANSFORM_H_ -#define MODULES_AUDIO_CODING_CODECS_OPUS_TEST_LAPPED_TRANSFORM_H_ - -#include -#include - -#include "common_audio/real_fourier.h" -#include "modules/audio_coding/codecs/opus/test/blocker.h" -#include "rtc_base/memory/aligned_malloc.h" - -namespace webrtc { - -// Wrapper class for aligned arrays. Every row (and the first dimension) are -// aligned to the given byte alignment. 
-template -class AlignedArray { - public: - AlignedArray(size_t rows, size_t cols, size_t alignment) - : rows_(rows), cols_(cols) { - RTC_CHECK_GT(alignment, 0); - head_row_ = - static_cast(AlignedMalloc(rows_ * sizeof(*head_row_), alignment)); - for (size_t i = 0; i < rows_; ++i) { - head_row_[i] = static_cast( - AlignedMalloc(cols_ * sizeof(**head_row_), alignment)); - } - } - - ~AlignedArray() { - for (size_t i = 0; i < rows_; ++i) { - AlignedFree(head_row_[i]); - } - AlignedFree(head_row_); - } - - T* const* Array() { return head_row_; } - - const T* const* Array() const { return head_row_; } - - T* Row(size_t row) { - RTC_CHECK_LE(row, rows_); - return head_row_[row]; - } - - const T* Row(size_t row) const { - RTC_CHECK_LE(row, rows_); - return head_row_[row]; - } - - private: - size_t rows_; - size_t cols_; - T** head_row_; -}; - -// Helper class for audio processing modules which operate on frequency domain -// input derived from the windowed time domain audio stream. -// -// The input audio chunk is sliced into possibly overlapping blocks, multiplied -// by a window and transformed with an FFT implementation. The transformed data -// is supplied to the given callback for processing. The processed output is -// then inverse transformed into the time domain and spliced back into a chunk -// which constitutes the final output of this processing module. -class LappedTransform { - public: - class Callback { - public: - virtual ~Callback() {} - - virtual void ProcessAudioBlock(const std::complex* const* in_block, - size_t num_in_channels, - size_t frames, - size_t num_out_channels, - std::complex* const* out_block) = 0; - }; - - // Construct a transform instance. |chunk_length| is the number of samples in - // each channel. |window| defines the window, owned by the caller (a copy is - // made internally); |window| should have length equal to |block_length|. - // |block_length| defines the length of a block, in samples. - // |shift_amount| is in samples. 
|callback| is the caller-owned audio - // processing function called for each block of the input chunk. - LappedTransform(size_t num_in_channels, - size_t num_out_channels, - size_t chunk_length, - const float* window, - size_t block_length, - size_t shift_amount, - Callback* callback); - ~LappedTransform(); - - // Main audio processing helper method. Internally slices |in_chunk| into - // blocks, transforms them to frequency domain, calls the callback for each - // block and returns a de-blocked time domain chunk of audio through - // |out_chunk|. Both buffers are caller-owned. - void ProcessChunk(const float* const* in_chunk, float* const* out_chunk); - - // Get the chunk length. - // - // The chunk length is the number of samples per channel that must be passed - // to ProcessChunk via the parameter in_chunk. - // - // Returns the same chunk_length passed to the LappedTransform constructor. - size_t chunk_length() const { return chunk_length_; } - - // Get the number of input channels. - // - // This is the number of arrays that must be passed to ProcessChunk via - // in_chunk. - // - // Returns the same num_in_channels passed to the LappedTransform constructor. - size_t num_in_channels() const { return num_in_channels_; } - - // Get the number of output channels. - // - // This is the number of arrays that must be passed to ProcessChunk via - // out_chunk. - // - // Returns the same num_out_channels passed to the LappedTransform - // constructor. - size_t num_out_channels() const { return num_out_channels_; } - - // Returns the initial delay. - // - // This is the delay introduced by the |blocker_| to be able to get and return - // chunks of |chunk_length|, but process blocks of |block_length|. - size_t initial_delay() const { return blocker_.initial_delay(); } - - private: - // Internal middleware callback, given to the blocker. Transforms each block - // and hands it over to the processing method given at construction time. 
- class BlockThunk : public BlockerCallback { - public: - explicit BlockThunk(LappedTransform* parent) : parent_(parent) {} - - void ProcessBlock(const float* const* input, - size_t num_frames, - size_t num_input_channels, - size_t num_output_channels, - float* const* output) override; - - private: - LappedTransform* const parent_; - } blocker_callback_; - - const size_t num_in_channels_; - const size_t num_out_channels_; - - const size_t block_length_; - const size_t chunk_length_; - - Callback* const block_processor_; - Blocker blocker_; - - // TODO(alessiob): Replace RealFourier with a different FFT library. - std::unique_ptr fft_; - const size_t cplx_length_; - AlignedArray real_buf_; - AlignedArray > cplx_pre_; - AlignedArray > cplx_post_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_OPUS_TEST_LAPPED_TRANSFORM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc new file mode 100644 index 000000000..3d5ba0b7c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc @@ -0,0 +1,126 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_coding/codecs/tools/audio_codec_speed_test.h" + +#include "rtc_base/format_macros.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +using ::std::get; + +namespace webrtc { + +AudioCodecSpeedTest::AudioCodecSpeedTest(int block_duration_ms, + int input_sampling_khz, + int output_sampling_khz) + : block_duration_ms_(block_duration_ms), + input_sampling_khz_(input_sampling_khz), + output_sampling_khz_(output_sampling_khz), + input_length_sample_( + static_cast(block_duration_ms_ * input_sampling_khz_)), + output_length_sample_( + static_cast(block_duration_ms_ * output_sampling_khz_)), + data_pointer_(0), + loop_length_samples_(0), + max_bytes_(0), + encoded_bytes_(0), + encoding_time_ms_(0.0), + decoding_time_ms_(0.0), + out_file_(NULL) {} + +void AudioCodecSpeedTest::SetUp() { + channels_ = get<0>(GetParam()); + bit_rate_ = get<1>(GetParam()); + in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam())); + save_out_data_ = get<4>(GetParam()); + + FILE* fp = fopen(in_filename_.c_str(), "rb"); + assert(fp != NULL); + + // Obtain file size. + fseek(fp, 0, SEEK_END); + loop_length_samples_ = ftell(fp) / sizeof(int16_t); + rewind(fp); + + // Allocate memory to contain the whole file. + in_data_.reset( + new int16_t[loop_length_samples_ + input_length_sample_ * channels_]); + + data_pointer_ = 0; + + // Copy the file into the buffer. + ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp), + loop_length_samples_); + fclose(fp); + + // Add an extra block length of samples to the end of the array, starting + // over again from the beginning of the array. This is done to simplify + // the reading process when reading over the end of the loop. 
+ memcpy(&in_data_[loop_length_samples_], &in_data_[0], + input_length_sample_ * channels_ * sizeof(int16_t)); + + max_bytes_ = input_length_sample_ * channels_ * sizeof(int16_t); + out_data_.reset(new int16_t[output_length_sample_ * channels_]); + bit_stream_.reset(new uint8_t[max_bytes_]); + + if (save_out_data_) { + std::string out_filename = + ::testing::UnitTest::GetInstance()->current_test_info()->name(); + + // Erase '/' + size_t found; + while ((found = out_filename.find('/')) != std::string::npos) + out_filename.replace(found, 1, "_"); + + out_filename = test::OutputPath() + out_filename + ".pcm"; + + out_file_ = fopen(out_filename.c_str(), "wb"); + assert(out_file_ != NULL); + + printf("Output to be saved in %s.\n", out_filename.c_str()); + } +} + +void AudioCodecSpeedTest::TearDown() { + if (save_out_data_) { + fclose(out_file_); + } +} + +void AudioCodecSpeedTest::EncodeDecode(size_t audio_duration_sec) { + size_t time_now_ms = 0; + float time_ms; + + printf("Coding %d kHz-sampled %" RTC_PRIuS "-channel audio at %d bps ...\n", + input_sampling_khz_, channels_, bit_rate_); + + while (time_now_ms < audio_duration_sec * 1000) { + // Encode & decode. 
+ time_ms = EncodeABlock(&in_data_[data_pointer_], &bit_stream_[0], + max_bytes_, &encoded_bytes_); + encoding_time_ms_ += time_ms; + time_ms = DecodeABlock(&bit_stream_[0], encoded_bytes_, &out_data_[0]); + decoding_time_ms_ += time_ms; + if (save_out_data_) { + fwrite(&out_data_[0], sizeof(int16_t), output_length_sample_ * channels_, + out_file_); + } + data_pointer_ = (data_pointer_ + input_length_sample_ * channels_) % + loop_length_samples_; + time_now_ms += block_duration_ms_; + } + + printf("Encoding: %.2f%% real time,\nDecoding: %.2f%% real time.\n", + (encoding_time_ms_ / audio_duration_sec) / 10.0, + (decoding_time_ms_ / audio_duration_sec) / 10.0); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h new file mode 100644 index 000000000..59c2f1605 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_ +#define MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_ + +#include +#include + +#include "test/gtest.h" + +namespace webrtc { + +// Define coding parameter as +// . 
+typedef std::tuple coding_param; + +class AudioCodecSpeedTest : public ::testing::TestWithParam { + protected: + AudioCodecSpeedTest(int block_duration_ms, + int input_sampling_khz, + int output_sampling_khz); + virtual void SetUp(); + virtual void TearDown(); + + // EncodeABlock(...) does the following: + // 1. encodes a block of audio, saved in |in_data|, + // 2. save the bit stream to |bit_stream| of |max_bytes| bytes in size, + // 3. assign |encoded_bytes| with the length of the bit stream (in bytes), + // 4. return the cost of time (in millisecond) spent on actual encoding. + virtual float EncodeABlock(int16_t* in_data, + uint8_t* bit_stream, + size_t max_bytes, + size_t* encoded_bytes) = 0; + + // DecodeABlock(...) does the following: + // 1. decodes the bit stream in |bit_stream| with a length of |encoded_bytes| + // (in bytes), + // 2. save the decoded audio in |out_data|, + // 3. return the cost of time (in millisecond) spent on actual decoding. + virtual float DecodeABlock(const uint8_t* bit_stream, + size_t encoded_bytes, + int16_t* out_data) = 0; + + // Encoding and decode an audio of |audio_duration| (in seconds) and + // record the runtime for encoding and decoding separately. + void EncodeDecode(size_t audio_duration); + + int block_duration_ms_; + int input_sampling_khz_; + int output_sampling_khz_; + + // Number of samples-per-channel in a frame. + size_t input_length_sample_; + + // Expected output number of samples-per-channel in a frame. + size_t output_length_sample_; + + std::unique_ptr in_data_; + std::unique_ptr out_data_; + size_t data_pointer_; + size_t loop_length_samples_; + std::unique_ptr bit_stream_; + + // Maximum number of bytes in output bitstream for a frame of audio. + size_t max_bytes_; + + size_t encoded_bytes_; + float encoding_time_ms_; + float decoding_time_ms_; + FILE* out_file_; + + size_t channels_; + + // Bit rate is in bit-per-second. 
+ int bit_rate_; + + std::string in_filename_; + + // Determines whether to save the output to file. + bool save_out_data_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h index a5d4b246c..07aa8c956 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h @@ -97,12 +97,6 @@ struct NetworkStatistics { uint64_t fecPacketsReceived; uint64_t fecPacketsDiscarded; // Stats below DO NOT correspond directly to anything in the WebRTC stats - // Loss rate (network + late); fraction between 0 and 1, scaled to Q14. - uint16_t currentPacketLossRate; - // Late loss rate; fraction between 0 and 1, scaled to Q14. - union { - RTC_DEPRECATED uint16_t currentDiscardRate; - }; // fraction (of original stream) of synthesized audio inserted through // expansion (in Q14) uint16_t currentExpandRate; @@ -123,14 +117,8 @@ struct NetworkStatistics { uint16_t currentSecondaryDiscardedRate; // average packet waiting time in the jitter buffer (ms) int meanWaitingTimeMs; - // median packet waiting time in the jitter buffer (ms) - int medianWaitingTimeMs; - // min packet waiting time in the jitter buffer (ms) - int minWaitingTimeMs; // max packet waiting time in the jitter buffer (ms) int maxWaitingTimeMs; - // added samples in off mode due to packet loss - size_t addedSamples; // count of the number of buffer flushes uint64_t packetBufferFlushes; // number of samples expanded due to delayed packets diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc index d238665ba..7ad006545 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc @@ -45,12 +45,12 @@ void BufferLevelFilter::Update(size_t buffer_size_samples, filtered_current_level - (int64_t{time_stretched_samples} * (1 << 8)))); } -void BufferLevelFilter::SetTargetBufferLevel(int target_buffer_level) { - if (target_buffer_level <= 1) { +void BufferLevelFilter::SetTargetBufferLevel(int target_buffer_level_ms) { + if (target_buffer_level_ms <= 20) { level_factor_ = 251; - } else if (target_buffer_level <= 3) { + } else if (target_buffer_level_ms <= 60) { level_factor_ = 252; - } else if (target_buffer_level <= 7) { + } else if (target_buffer_level_ms <= 140) { level_factor_ = 253; } else { level_factor_ = 254; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h index 6dd424991..bb3185667 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h @@ -23,15 +23,13 @@ class BufferLevelFilter { virtual ~BufferLevelFilter() {} virtual void Reset(); - // Updates the filter. Current buffer size is |buffer_size_packets| (Q0). + // Updates the filter. Current buffer size is |buffer_size_samples|. // |time_stretched_samples| is subtracted from the filtered value (thus // bypassing the filter operation). virtual void Update(size_t buffer_size_samples, int time_stretched_samples); - // Set the current target buffer level in number of packets (obtained from - // DelayManager::base_target_level()). Used to select the appropriate - // filter coefficient. - virtual void SetTargetBufferLevel(int target_buffer_level_packets); + // The target level is used to select the appropriate filter coefficient. 
+ virtual void SetTargetBufferLevel(int target_buffer_level_ms); // Returns filtered current level in number of samples. virtual int filtered_current_level() const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc index 8e1ffaf9f..9c0ee9682 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc @@ -27,16 +27,25 @@ namespace { constexpr int kPostponeDecodingLevel = 50; constexpr int kDefaultTargetLevelWindowMs = 100; +constexpr int kDecelerationTargetLevelOffsetMs = 85; } // namespace namespace webrtc { DecisionLogic::DecisionLogic(NetEqController::Config config) - : delay_manager_(DelayManager::Create(config.max_packets_in_buffer, - config.base_min_delay_ms, - config.enable_rtx_handling, - config.tick_timer)), + : DecisionLogic(config, + DelayManager::Create(config.max_packets_in_buffer, + config.base_min_delay_ms, + config.tick_timer), + std::make_unique()) {} + +DecisionLogic::DecisionLogic( + NetEqController::Config config, + std::unique_ptr delay_manager, + std::unique_ptr buffer_level_filter) + : delay_manager_(std::move(delay_manager)), + buffer_level_filter_(std::move(buffer_level_filter)), tick_timer_(config.tick_timer), disallow_time_stretching_(!config.allow_time_stretching), timescale_countdown_( @@ -67,6 +76,7 @@ void DecisionLogic::Reset() { packet_length_samples_ = 0; sample_memory_ = 0; prev_time_scale_ = false; + last_pack_cng_or_dtmf_ = true; timescale_countdown_.reset(); num_consecutive_expands_ = 0; time_stretched_cn_samples_ = 0; @@ -76,11 +86,12 @@ void DecisionLogic::SoftReset() { packet_length_samples_ = 0; sample_memory_ = 0; prev_time_scale_ = false; + last_pack_cng_or_dtmf_ = true; timescale_countdown_ = tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1); time_stretched_cn_samples_ = 0; 
delay_manager_->Reset(); - buffer_level_filter_.Reset(); + buffer_level_filter_->Reset(); } void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) { @@ -158,12 +169,13 @@ NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, const size_t current_span = estimate_dtx_delay_ ? status.packet_buffer_info.span_samples : status.packet_buffer_info.span_samples_no_dtx; + const int target_level_samples = + delay_manager_->TargetDelayMs() * sample_rate_ / 1000; if ((status.last_mode == NetEq::Mode::kExpand || status.last_mode == NetEq::Mode::kCodecPlc) && status.expand_mutefactor < 16384 / 2 && - current_span(delay_manager_->TargetLevel() * - packet_length_samples_ * - kPostponeDecodingLevel / 100)>> 8 && + current_span < static_cast(target_level_samples * + kPostponeDecodingLevel / 100) && !status.packet_buffer_info.dtx_or_cng) { return NetEq::Operation::kExpand; } @@ -195,41 +207,30 @@ void DecisionLogic::ExpandDecision(NetEq::Operation operation) { } } -absl::optional DecisionLogic::PacketArrived(bool last_cng_or_dtmf, - size_t packet_length_samples, - bool should_update_stats, - uint16_t main_sequence_number, - uint32_t main_timestamp, - int fs_hz) { - delay_manager_->LastDecodedWasCngOrDtmf(last_cng_or_dtmf); - absl::optional relative_delay; - if (delay_manager_->last_pack_cng_or_dtmf() == 0) { - // Calculate the total speech length carried in each packet. - if (packet_length_samples > 0 && - packet_length_samples != packet_length_samples_) { - packet_length_samples_ = packet_length_samples; - delay_manager_->SetPacketAudioLength( - rtc::dchecked_cast((1000 * packet_length_samples) / fs_hz)); - } - - // Update statistics. - if (should_update_stats) { - relative_delay = - delay_manager_->Update(main_sequence_number, main_timestamp, fs_hz); - } - } else if (delay_manager_->last_pack_cng_or_dtmf() == -1) { - // This is first "normal" packet after CNG or DTMF. 
- // Reset packet time counter and measure time until next packet, - // but don't update statistics. - delay_manager_->set_last_pack_cng_or_dtmf(0); - delay_manager_->ResetPacketIatCount(); +absl::optional DecisionLogic::PacketArrived( + int fs_hz, + bool should_update_stats, + const PacketArrivedInfo& info) { + if (info.is_cng_or_dtmf) { + last_pack_cng_or_dtmf_ = true; + return absl::nullopt; } + if (!should_update_stats) { + return absl::nullopt; + } + if (info.packet_length_samples > 0 && fs_hz > 0 && + info.packet_length_samples != packet_length_samples_) { + packet_length_samples_ = info.packet_length_samples; + delay_manager_->SetPacketAudioLength(packet_length_samples_ * 1000 / fs_hz); + } + auto relative_delay = delay_manager_->Update( + info.main_timestamp, fs_hz, /*reset=*/last_pack_cng_or_dtmf_); + last_pack_cng_or_dtmf_ = false; return relative_delay; } void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples) { - buffer_level_filter_.SetTargetBufferLevel( - delay_manager_->base_target_level()); + buffer_level_filter_->SetTargetBufferLevel(delay_manager_->TargetDelayMs()); int time_stretched_samples = time_stretched_cn_samples_; if (prev_time_scale_) { @@ -237,7 +238,7 @@ void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples) { timescale_countdown_ = tick_timer_->GetNewCountdown(kMinTimescaleInterval); } - buffer_level_filter_.Update(buffer_size_samples, time_stretched_samples); + buffer_level_filter_->Update(buffer_size_samples, time_stretched_samples); prev_time_scale_ = false; time_stretched_cn_samples_ = 0; } @@ -250,8 +251,8 @@ NetEq::Operation DecisionLogic::CngOperation(NetEq::Mode prev_mode, int32_t timestamp_diff = static_cast( static_cast(generated_noise_samples + target_timestamp) - available_timestamp); - int32_t optimal_level_samp = static_cast( - (delay_manager_->TargetLevel() * packet_length_samples_) >> 8); + int optimal_level_samp = + delay_manager_->TargetDelayMs() * sample_rate_ / 1000; const int64_t 
excess_waiting_time_samp = -static_cast(timestamp_diff) - optimal_level_samp; @@ -295,22 +296,26 @@ NetEq::Operation DecisionLogic::ExpectedPacketAvailable(NetEq::Mode prev_mode, bool play_dtmf) { if (!disallow_time_stretching_ && prev_mode != NetEq::Mode::kExpand && !play_dtmf) { - // Check criterion for time-stretching. The values are in number of packets - // in Q8. - int low_limit, high_limit; - delay_manager_->BufferLimits(&low_limit, &high_limit); - int buffer_level_packets = 0; - if (packet_length_samples_ > 0) { - buffer_level_packets = - ((1 << 8) * buffer_level_filter_.filtered_current_level()) / - packet_length_samples_; - } - if (buffer_level_packets >= high_limit << 2) + const int samples_per_ms = sample_rate_ / 1000; + const int target_level_samples = + delay_manager_->TargetDelayMs() * samples_per_ms; + const int low_limit = + std::max(target_level_samples * 3 / 4, + target_level_samples - + kDecelerationTargetLevelOffsetMs * samples_per_ms); + // |higher_limit| is equal to |target_level|, but should at + // least be 20 ms higher than |lower_limit|. + const int high_limit = + std::max(target_level_samples, low_limit + 20 * samples_per_ms); + + const int buffer_level_samples = + buffer_level_filter_->filtered_current_level(); + if (buffer_level_samples >= high_limit << 2) return NetEq::Operation::kFastAccelerate; if (TimescaleAllowed()) { - if (buffer_level_packets >= high_limit) + if (buffer_level_samples >= high_limit) return NetEq::Operation::kAccelerate; - if (buffer_level_packets < low_limit) + if (buffer_level_samples < low_limit) return NetEq::Operation::kPreemptiveExpand; } } @@ -352,11 +357,11 @@ NetEq::Operation DecisionLogic::FuturePacketAvailable( prev_mode == NetEq::Mode::kCodecInternalCng) { size_t cur_size_samples = estimate_dtx_delay_ - ? cur_size_samples = span_samples_in_packet_buffer + ? span_samples_in_packet_buffer : num_packets_in_packet_buffer * decoder_frame_length; // Target level is in number of packets in Q8. 
const size_t target_level_samples = - (delay_manager_->TargetLevel() * packet_length_samples_) >> 8; + delay_manager_->TargetDelayMs() * sample_rate_ / 1000; const bool generated_enough_noise = static_cast(generated_noise_samples + target_timestamp) >= available_timestamp; @@ -406,13 +411,8 @@ NetEq::Operation DecisionLogic::FuturePacketAvailable( } bool DecisionLogic::UnderTargetLevel() const { - int buffer_level_packets = 0; - if (packet_length_samples_ > 0) { - buffer_level_packets = - ((1 << 8) * buffer_level_filter_.filtered_current_level()) / - packet_length_samples_; - } - return buffer_level_packets <= delay_manager_->TargetLevel(); + return buffer_level_filter_->filtered_current_level() < + delay_manager_->TargetDelayMs() * sample_rate_ / 1000; } bool DecisionLogic::ReinitAfterExpands(uint32_t timestamp_leap) const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h index 5f1a41401..08feba64d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_ #define MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_ +#include + #include "api/neteq/neteq.h" #include "api/neteq/neteq_controller.h" #include "api/neteq/tick_timer.h" @@ -29,6 +31,9 @@ class DecisionLogic : public NetEqController { // Constructor. DecisionLogic(NetEqController::Config config); + DecisionLogic(NetEqController::Config config, + std::unique_ptr delay_manager, + std::unique_ptr buffer_level_filter); ~DecisionLogic() override; @@ -70,19 +75,15 @@ class DecisionLogic : public NetEqController { // Adds |value| to |sample_memory_|. 
void AddSampleMemory(int32_t value) override { sample_memory_ += value; } - int TargetLevelMs() override { - return ((delay_manager_->TargetLevel() * packet_length_samples_) >> 8) / - rtc::CheckedDivExact(sample_rate_, 1000); - } + int TargetLevelMs() const override { return delay_manager_->TargetDelayMs(); } - absl::optional PacketArrived(bool last_cng_or_dtmf, - size_t packet_length_samples, + absl::optional PacketArrived(int fs_hz, bool should_update_stats, - uint16_t main_sequence_number, - uint32_t main_timestamp, - int fs_hz) override; + const PacketArrivedInfo& info) override; - void RegisterEmptyPacket() override { delay_manager_->RegisterEmptyPacket(); } + void RegisterEmptyPacket() override {} + + void NotifyMutedState() override {} bool SetMaximumDelay(int delay_ms) override { return delay_manager_->SetMaximumDelay(delay_ms); @@ -99,7 +100,7 @@ class DecisionLogic : public NetEqController { bool PeakFound() const override { return false; } int GetFilteredBufferLevel() const override { - return buffer_level_filter_.filtered_current_level(); + return buffer_level_filter_->filtered_current_level(); } // Accessors and mutators. @@ -120,8 +121,8 @@ class DecisionLogic : public NetEqController { enum CngState { kCngOff, kCngRfc3389On, kCngInternalOn }; // Updates the |buffer_level_filter_| with the current buffer level - // |buffer_size_packets|. - void FilterBufferLevel(size_t buffer_size_packets); + // |buffer_size_samples|. + void FilterBufferLevel(size_t buffer_size_samples); // Returns the operation given that the next available packet is a comfort // noise payload (RFC 3389 only, not codec-internal). 
@@ -172,7 +173,7 @@ class DecisionLogic : public NetEqController { bool MaxWaitForPacket() const; std::unique_ptr delay_manager_; - BufferLevelFilter buffer_level_filter_; + std::unique_ptr buffer_level_filter_; const TickTimer* tick_timer_; int sample_rate_; size_t output_size_samples_; @@ -186,6 +187,7 @@ class DecisionLogic : public NetEqController { std::unique_ptr timescale_countdown_; int num_consecutive_expands_ = 0; int time_stretched_cn_samples_ = 0; + bool last_pack_cng_or_dtmf_ = true; FieldTrialParameter estimate_dtx_delay_; FieldTrialParameter time_stretch_cn_; FieldTrialConstrained target_level_window_ms_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc index 4ae6d108c..33eeb96f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc @@ -22,38 +22,45 @@ #include "modules/audio_coding/neteq/histogram.h" #include "modules/include/module_common_types_public.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" #include "system_wrappers/include/field_trial.h" +namespace webrtc { namespace { constexpr int kMinBaseMinimumDelayMs = 0; constexpr int kMaxBaseMinimumDelayMs = 10000; -constexpr int kMaxReorderedPackets = - 10; // Max number of consecutive reordered packets. -constexpr int kMaxHistoryMs = 2000; // Oldest packet to include in history to - // calculate relative packet arrival delay. constexpr int kDelayBuckets = 100; constexpr int kBucketSizeMs = 20; -constexpr int kDecelerationTargetLevelOffsetMs = 85 << 8; // In Q8. 
+constexpr int kStartDelayMs = 80; +constexpr int kMaxNumReorderedPackets = 5; -int PercentileToQuantile(double percentile) { - return static_cast((1 << 30) * percentile / 100.0 + 0.5); -} - -struct DelayHistogramConfig { - int quantile = 1041529569; // 0.97 in Q30. - int forget_factor = 32745; // 0.9993 in Q15. +struct DelayManagerConfig { + double quantile = 0.97; + double forget_factor = 0.9993; absl::optional start_forget_weight = 2; -}; + absl::optional resample_interval_ms; + int max_history_ms = 2000; -DelayHistogramConfig GetDelayHistogramConfig() { - constexpr char kDelayHistogramFieldTrial[] = - "WebRTC-Audio-NetEqDelayHistogram"; - DelayHistogramConfig config; - if (webrtc::field_trial::IsEnabled(kDelayHistogramFieldTrial)) { + std::unique_ptr Parser() { + return webrtc::StructParametersParser::Create( // + "quantile", &quantile, // + "forget_factor", &forget_factor, // + "start_forget_weight", &start_forget_weight, // + "resample_interval_ms", &resample_interval_ms, // + "max_history_ms", &max_history_ms); + } + + // TODO(jakobi): remove legacy field trial. + void MaybeUpdateFromLegacyFieldTrial() { + constexpr char kDelayHistogramFieldTrial[] = + "WebRTC-Audio-NetEqDelayHistogram"; + if (!webrtc::field_trial::IsEnabled(kDelayHistogramFieldTrial)) { + return; + } const auto field_trial_string = webrtc::field_trial::FindFullName(kDelayHistogramFieldTrial); double percentile = -1.0; @@ -63,30 +70,36 @@ DelayHistogramConfig GetDelayHistogramConfig() { &forget_factor, &start_forget_weight) >= 2 && percentile >= 0.0 && percentile <= 100.0 && forget_factor >= 0.0 && forget_factor <= 1.0) { - config.quantile = PercentileToQuantile(percentile); - config.forget_factor = (1 << 15) * forget_factor; - config.start_forget_weight = - start_forget_weight >= 1 ? absl::make_optional(start_forget_weight) - : absl::nullopt; + this->quantile = percentile / 100; + this->forget_factor = forget_factor; + this->start_forget_weight = start_forget_weight >= 1 + ? 
absl::make_optional(start_forget_weight) + : absl::nullopt; } } - RTC_LOG(LS_INFO) << "Delay histogram config:" - " quantile=" - << config.quantile - << " forget_factor=" << config.forget_factor - << " start_forget_weight=" - << config.start_forget_weight.value_or(0); - return config; -} + + explicit DelayManagerConfig() { + Parser()->Parse(webrtc::field_trial::FindFullName( + "WebRTC-Audio-NetEqDelayManagerConfig")); + MaybeUpdateFromLegacyFieldTrial(); + RTC_LOG(LS_INFO) << "Delay manager config:" + " quantile=" + << quantile << " forget_factor=" << forget_factor + << " start_forget_weight=" + << start_forget_weight.value_or(0) + << " resample_interval_ms=" + << resample_interval_ms.value_or(0) + << " max_history_ms=" << max_history_ms; + } +}; } // namespace -namespace webrtc { - -DelayManager::DelayManager(size_t max_packets_in_buffer, +DelayManager::DelayManager(int max_packets_in_buffer, int base_minimum_delay_ms, int histogram_quantile, - bool enable_rtx_handling, + absl::optional resample_interval_ms, + int max_history_ms, const TickTimer* tick_timer, std::unique_ptr histogram) : first_packet_received_(false), @@ -94,17 +107,14 @@ DelayManager::DelayManager(size_t max_packets_in_buffer, histogram_(std::move(histogram)), histogram_quantile_(histogram_quantile), tick_timer_(tick_timer), + resample_interval_ms_(resample_interval_ms), + max_history_ms_(max_history_ms), base_minimum_delay_ms_(base_minimum_delay_ms), effective_minimum_delay_ms_(base_minimum_delay_ms), - base_target_level_(4), // In Q0 domain. - target_level_(base_target_level_ << 8), // In Q8 domain. 
- packet_len_ms_(0), - last_seq_no_(0), - last_timestamp_(0), minimum_delay_ms_(0), maximum_delay_ms_(0), - last_pack_cng_or_dtmf_(1), - enable_rtx_handling_(enable_rtx_handling) { + target_level_ms_(kStartDelayMs), + last_timestamp_(0) { RTC_CHECK(histogram_); RTC_DCHECK_GE(base_minimum_delay_ms_, 0); @@ -112,102 +122,102 @@ DelayManager::DelayManager(size_t max_packets_in_buffer, } std::unique_ptr DelayManager::Create( - size_t max_packets_in_buffer, + int max_packets_in_buffer, int base_minimum_delay_ms, - bool enable_rtx_handling, const TickTimer* tick_timer) { - DelayHistogramConfig config = GetDelayHistogramConfig(); - const int quantile = config.quantile; + DelayManagerConfig config; + int forget_factor_q15 = (1 << 15) * config.forget_factor; + int quantile_q30 = (1 << 30) * config.quantile; std::unique_ptr histogram = std::make_unique( - kDelayBuckets, config.forget_factor, config.start_forget_weight); + kDelayBuckets, forget_factor_q15, config.start_forget_weight); return std::make_unique( - max_packets_in_buffer, base_minimum_delay_ms, quantile, - enable_rtx_handling, tick_timer, std::move(histogram)); + max_packets_in_buffer, base_minimum_delay_ms, quantile_q30, + config.resample_interval_ms, config.max_history_ms, tick_timer, + std::move(histogram)); } DelayManager::~DelayManager() {} -absl::optional DelayManager::Update(uint16_t sequence_number, - uint32_t timestamp, - int sample_rate_hz) { +absl::optional DelayManager::Update(uint32_t timestamp, + int sample_rate_hz, + bool reset) { if (sample_rate_hz <= 0) { return absl::nullopt; } - if (!first_packet_received_) { - // Prepare for next packet arrival. + if (!first_packet_received_ || reset) { + // Restart relative delay esimation from this packet. 
+ delay_history_.clear(); packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch(); - last_seq_no_ = sequence_number; last_timestamp_ = timestamp; first_packet_received_ = true; + num_reordered_packets_ = 0; + resample_stopwatch_ = tick_timer_->GetNewStopwatch(); + max_delay_in_interval_ms_ = 0; return absl::nullopt; } - // Try calculating packet length from current and previous timestamps. - int packet_len_ms; - if (!IsNewerTimestamp(timestamp, last_timestamp_) || - !IsNewerSequenceNumber(sequence_number, last_seq_no_)) { - // Wrong timestamp or sequence order; use stored value. - packet_len_ms = packet_len_ms_; + const int expected_iat_ms = + 1000 * static_cast(timestamp - last_timestamp_) / sample_rate_hz; + const int iat_ms = packet_iat_stopwatch_->ElapsedMs(); + const int iat_delay_ms = iat_ms - expected_iat_ms; + int relative_delay; + bool reordered = !IsNewerTimestamp(timestamp, last_timestamp_); + if (reordered) { + relative_delay = std::max(iat_delay_ms, 0); } else { - // Calculate timestamps per packet and derive packet length in ms. - int64_t packet_len_samp = - static_cast(timestamp - last_timestamp_) / - static_cast(sequence_number - last_seq_no_); - packet_len_ms = - rtc::saturated_cast(1000 * packet_len_samp / sample_rate_hz); + UpdateDelayHistory(iat_delay_ms, timestamp, sample_rate_hz); + relative_delay = CalculateRelativePacketArrivalDelay(); } - bool reordered = false; - absl::optional relative_delay; - if (packet_len_ms > 0) { - // Cannot update statistics unless |packet_len_ms| is valid. - - // Inter-arrival time (IAT) in integer "packet times" (rounding down). This - // is the value added to the inter-arrival time histogram. - int iat_ms = packet_iat_stopwatch_->ElapsedMs(); - // Check for discontinuous packet sequence and re-ordering. - if (IsNewerSequenceNumber(sequence_number, last_seq_no_ + 1)) { - // Compensate for gap in the sequence numbers. Reduce IAT with the - // expected extra time due to lost packets. 
- int packet_offset = - static_cast(sequence_number - last_seq_no_ - 1); - iat_ms -= packet_offset * packet_len_ms; - } else if (!IsNewerSequenceNumber(sequence_number, last_seq_no_)) { - int packet_offset = - static_cast(last_seq_no_ + 1 - sequence_number); - iat_ms += packet_offset * packet_len_ms; - reordered = true; + absl::optional histogram_update; + if (resample_interval_ms_) { + if (static_cast(resample_stopwatch_->ElapsedMs()) > + *resample_interval_ms_) { + histogram_update = max_delay_in_interval_ms_; + resample_stopwatch_ = tick_timer_->GetNewStopwatch(); + max_delay_in_interval_ms_ = 0; } - - int iat_delay = iat_ms - packet_len_ms; - if (reordered) { - relative_delay = std::max(iat_delay, 0); - } else { - UpdateDelayHistory(iat_delay, timestamp, sample_rate_hz); - relative_delay = CalculateRelativePacketArrivalDelay(); - } - - const int index = relative_delay.value() / kBucketSizeMs; + max_delay_in_interval_ms_ = + std::max(max_delay_in_interval_ms_, relative_delay); + } else { + histogram_update = relative_delay; + } + if (histogram_update) { + const int index = *histogram_update / kBucketSizeMs; if (index < histogram_->NumBuckets()) { // Maximum delay to register is 2000 ms. histogram_->Add(index); } - // Calculate new |target_level_| based on updated statistics. - target_level_ = CalculateTargetLevel(); + } - LimitTargetLevel(); - } // End if (packet_len_ms > 0). + // Calculate new |target_level_ms_| based on updated statistics. + int bucket_index = histogram_->Quantile(histogram_quantile_); + target_level_ms_ = (1 + bucket_index) * kBucketSizeMs; + target_level_ms_ = std::max(target_level_ms_, effective_minimum_delay_ms_); + if (maximum_delay_ms_ > 0) { + target_level_ms_ = std::min(target_level_ms_, maximum_delay_ms_); + } + if (packet_len_ms_ > 0) { + // Target level should be at least one packet. + target_level_ms_ = std::max(target_level_ms_, packet_len_ms_); + // Limit to 75% of maximum buffer size. 
+ target_level_ms_ = std::min( + target_level_ms_, 3 * max_packets_in_buffer_ * packet_len_ms_ / 4); + } - if (enable_rtx_handling_ && reordered && - num_reordered_packets_ < kMaxReorderedPackets) { - ++num_reordered_packets_; - return relative_delay; + // Prepare for next packet arrival. + if (reordered) { + // Allow a small number of reordered packets before resetting the delay + // estimation. + if (num_reordered_packets_ < kMaxNumReorderedPackets) { + ++num_reordered_packets_; + return relative_delay; + } + delay_history_.clear(); } num_reordered_packets_ = 0; - // Prepare for next packet arrival. packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch(); - last_seq_no_ = sequence_number; last_timestamp_ = timestamp; return relative_delay; } @@ -220,7 +230,7 @@ void DelayManager::UpdateDelayHistory(int iat_delay_ms, delay.timestamp = timestamp; delay_history_.push_back(delay); while (timestamp - delay_history_.front().timestamp > - static_cast(kMaxHistoryMs * sample_rate_hz / 1000)) { + static_cast(max_history_ms_ * sample_rate_hz / 1000)) { delay_history_.pop_front(); } } @@ -238,128 +248,29 @@ int DelayManager::CalculateRelativePacketArrivalDelay() const { return relative_delay; } -// Enforces upper and lower limits for |target_level_|. The upper limit is -// chosen to be minimum of i) 75% of |max_packets_in_buffer_|, to leave some -// headroom for natural fluctuations around the target, and ii) equivalent of -// |maximum_delay_ms_| in packets. Note that in practice, if no -// |maximum_delay_ms_| is specified, this does not have any impact, since the -// target level is far below the buffer capacity in all reasonable cases. -// The lower limit is equivalent of |effective_minimum_delay_ms_| in packets. -// We update |least_required_level_| while the above limits are applied. -// TODO(hlundin): Move this check to the buffer logistics class. 
-void DelayManager::LimitTargetLevel() { - if (packet_len_ms_ > 0 && effective_minimum_delay_ms_ > 0) { - int minimum_delay_packet_q8 = - (effective_minimum_delay_ms_ << 8) / packet_len_ms_; - target_level_ = std::max(target_level_, minimum_delay_packet_q8); - } - - if (maximum_delay_ms_ > 0 && packet_len_ms_ > 0) { - int maximum_delay_packet_q8 = (maximum_delay_ms_ << 8) / packet_len_ms_; - target_level_ = std::min(target_level_, maximum_delay_packet_q8); - } - - // Shift to Q8, then 75%.; - int max_buffer_packets_q8 = - static_cast((3 * (max_packets_in_buffer_ << 8)) / 4); - target_level_ = std::min(target_level_, max_buffer_packets_q8); - - // Sanity check, at least 1 packet (in Q8). - target_level_ = std::max(target_level_, 1 << 8); -} - -int DelayManager::CalculateTargetLevel() { - int limit_probability = histogram_quantile_; - - int bucket_index = histogram_->Quantile(limit_probability); - int target_level = 1; - if (packet_len_ms_ > 0) { - target_level += bucket_index * kBucketSizeMs / packet_len_ms_; - } - base_target_level_ = target_level; - - // Sanity check. |target_level| must be strictly positive. - target_level = std::max(target_level, 1); - // Scale to Q8 and assign to member variable. - target_level_ = target_level << 8; - return target_level_; -} - int DelayManager::SetPacketAudioLength(int length_ms) { if (length_ms <= 0) { RTC_LOG_F(LS_ERROR) << "length_ms = " << length_ms; return -1; } - packet_len_ms_ = length_ms; - packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch(); - last_pack_cng_or_dtmf_ = 1; // TODO(hlundin): Legacy. Remove? return 0; } void DelayManager::Reset() { - packet_len_ms_ = 0; // Packet size unknown. 
+ packet_len_ms_ = 0; histogram_->Reset(); delay_history_.clear(); - base_target_level_ = 4; - target_level_ = base_target_level_ << 8; + target_level_ms_ = kStartDelayMs; packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch(); - last_pack_cng_or_dtmf_ = 1; + first_packet_received_ = false; + num_reordered_packets_ = 0; + resample_stopwatch_ = tick_timer_->GetNewStopwatch(); + max_delay_in_interval_ms_ = 0; } -void DelayManager::ResetPacketIatCount() { - packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch(); -} - -void DelayManager::BufferLimits(int* lower_limit, int* higher_limit) const { - BufferLimits(target_level_, lower_limit, higher_limit); -} - -// Note that |low_limit| and |higher_limit| are not assigned to -// |minimum_delay_ms_| and |maximum_delay_ms_| defined by the client of this -// class. They are computed from |target_level| in Q8 and used for decision -// making. -void DelayManager::BufferLimits(int target_level, - int* lower_limit, - int* higher_limit) const { - if (!lower_limit || !higher_limit) { - RTC_LOG_F(LS_ERROR) << "NULL pointers supplied as input"; - assert(false); - return; - } - - // |target_level| is in Q8 already. - *lower_limit = (target_level * 3) / 4; - - if (packet_len_ms_ > 0) { - *lower_limit = - std::max(*lower_limit, target_level - kDecelerationTargetLevelOffsetMs / - packet_len_ms_); - } - - int window_20ms = 0x7FFF; // Default large value for legacy bit-exactness. - if (packet_len_ms_ > 0) { - window_20ms = (20 << 8) / packet_len_ms_; - } - // |higher_limit| is equal to |target_level|, but should at - // least be 20 ms higher than |lower_limit|. 
- *higher_limit = std::max(target_level, *lower_limit + window_20ms); -} - -int DelayManager::TargetLevel() const { - return target_level_; -} - -void DelayManager::LastDecodedWasCngOrDtmf(bool it_was) { - if (it_was) { - last_pack_cng_or_dtmf_ = 1; - } else if (last_pack_cng_or_dtmf_ != 0) { - last_pack_cng_or_dtmf_ = -1; - } -} - -void DelayManager::RegisterEmptyPacket() { - ++last_seq_no_; +int DelayManager::TargetDelayMs() const { + return target_level_ms_; } bool DelayManager::IsValidMinimumDelay(int delay_ms) const { @@ -409,17 +320,6 @@ int DelayManager::GetBaseMinimumDelay() const { return base_minimum_delay_ms_; } -int DelayManager::base_target_level() const { - return base_target_level_; -} -int DelayManager::last_pack_cng_or_dtmf() const { - return last_pack_cng_or_dtmf_; -} - -void DelayManager::set_last_pack_cng_or_dtmf(int value) { - last_pack_cng_or_dtmf_ = value; -} - void DelayManager::UpdateEffectiveMinimumDelay() { // Clamp |base_minimum_delay_ms_| into the range which can be effectively // used. @@ -432,16 +332,11 @@ void DelayManager::UpdateEffectiveMinimumDelay() { int DelayManager::MinimumDelayUpperBound() const { // Choose the lowest possible bound discarding 0 cases which mean the value // is not set and unconstrained. - int q75 = MaxBufferTimeQ75(); + int q75 = max_packets_in_buffer_ * packet_len_ms_ * 3 / 4; q75 = q75 > 0 ? q75 : kMaxBaseMinimumDelayMs; const int maximum_delay_ms = maximum_delay_ms_ > 0 ? 
maximum_delay_ms_ : kMaxBaseMinimumDelayMs; return std::min(maximum_delay_ms, q75); } -int DelayManager::MaxBufferTimeQ75() const { - const int max_buffer_time = max_packets_in_buffer_ * packet_len_ms_; - return rtc::dchecked_cast(3 * max_buffer_time / 4); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h index ab9ba3416..9832ceda2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h @@ -25,10 +25,11 @@ namespace webrtc { class DelayManager { public: - DelayManager(size_t max_packets_in_buffer, + DelayManager(int max_packets_in_buffer, int base_minimum_delay_ms, int histogram_quantile, - bool enable_rtx_handling, + absl::optional resample_interval_ms, + int max_history_ms, const TickTimer* tick_timer, std::unique_ptr histogram); @@ -37,58 +38,29 @@ class DelayManager { // is the number of packet slots in the buffer) and that the target delay // should be greater than or equal to |base_minimum_delay_ms|. Supply a // PeakDetector object to the DelayManager. - static std::unique_ptr Create(size_t max_packets_in_buffer, + static std::unique_ptr Create(int max_packets_in_buffer, int base_minimum_delay_ms, - bool enable_rtx_handling, const TickTimer* tick_timer); virtual ~DelayManager(); - // Updates the delay manager with a new incoming packet, with - // |sequence_number| and |timestamp| from the RTP header. This updates the - // inter-arrival time histogram and other statistics, as well as the - // associated DelayPeakDetector. A new target buffer level is calculated. - // Returns the relative delay if it can be calculated. 
- virtual absl::optional Update(uint16_t sequence_number, - uint32_t timestamp, - int sample_rate_hz); + // Updates the delay manager with a new incoming packet, with |timestamp| from + // the RTP header. This updates the statistics and a new target buffer level + // is calculated. Returns the relative delay if it can be calculated. If + // |reset| is true, restarts the relative arrival delay calculation from this + // packet. + virtual absl::optional Update(uint32_t timestamp, + int sample_rate_hz, + bool reset = false); - // Calculates a new target buffer level. Called from the Update() method. - // Sets target_level_ (in Q8) and returns the same value. Also calculates - // and updates base_target_level_, which is the target buffer level before - // taking delay peaks into account. - virtual int CalculateTargetLevel(); - - // Notifies the DelayManager of how much audio data is carried in each packet. - // The method updates the DelayPeakDetector too, and resets the inter-arrival - // time counter. Returns 0 on success, -1 on failure. - virtual int SetPacketAudioLength(int length_ms); - - // Resets the DelayManager and the associated DelayPeakDetector. + // Resets all state. virtual void Reset(); - // Reset the inter-arrival time counter to 0. - virtual void ResetPacketIatCount(); + // Gets the target buffer level in milliseconds. + virtual int TargetDelayMs() const; - // Writes the lower and higher limits which the buffer level should stay - // within to the corresponding pointers. The values are in (fractions of) - // packets in Q8. - virtual void BufferLimits(int* lower_limit, int* higher_limit) const; - virtual void BufferLimits(int target_level, - int* lower_limit, - int* higher_limit) const; - - // Gets the target buffer level, in (fractions of) packets in Q8. - virtual int TargetLevel() const; - - // Informs the delay manager whether or not the last decoded packet contained - // speech. 
- virtual void LastDecodedWasCngOrDtmf(bool it_was); - - // Notify the delay manager that empty packets have been received. These are - // packets that are part of the sequence number series, so that an empty - // packet will shift the sequence numbers for the following packets. - virtual void RegisterEmptyPacket(); + // Notifies the DelayManager of how much audio data is carried in each packet. + virtual int SetPacketAudioLength(int length_ms); // Accessors and mutators. // Assuming |delay| is in valid range. @@ -96,16 +68,11 @@ class DelayManager { virtual bool SetMaximumDelay(int delay_ms); virtual bool SetBaseMinimumDelay(int delay_ms); virtual int GetBaseMinimumDelay() const; - virtual int base_target_level() const; - virtual int last_pack_cng_or_dtmf() const; - virtual void set_last_pack_cng_or_dtmf(int value); - // This accessor is only intended for testing purposes. + // These accessors are only intended for testing purposes. int effective_minimum_delay_ms_for_test() const { return effective_minimum_delay_ms_; } - - // These accessors are only intended for testing purposes. int histogram_quantile() const { return histogram_quantile_; } Histogram* histogram() const { return histogram_.get(); } @@ -114,9 +81,6 @@ class DelayManager { // size and given |maximum_delay_ms_|. Lower bound is a constant 0. int MinimumDelayUpperBound() const; - // Provides 75% of currently possible maximum buffer size in milliseconds. - int MaxBufferTimeQ75() const; - // Updates |delay_history_|. void UpdateDelayHistory(int iat_delay_ms, uint32_t timestamp, @@ -130,10 +94,6 @@ class DelayManager { // and buffer size. void UpdateEffectiveMinimumDelay(); - // Makes sure that |target_level_| is not too large, taking - // |max_packets_in_buffer_| into account. This method is called by Update(). - void LimitTargetLevel(); - // Makes sure that |delay_ms| is less than maximum delay, if any maximum // is set. 
Also, if possible check |delay_ms| to be less than 75% of // |max_packets_in_buffer_|. @@ -142,31 +102,27 @@ class DelayManager { bool IsValidBaseMinimumDelay(int delay_ms) const; bool first_packet_received_; - const size_t max_packets_in_buffer_; // Capacity of the packet buffer. + // TODO(jakobi): set maximum buffer delay instead of number of packets. + const int max_packets_in_buffer_; std::unique_ptr histogram_; const int histogram_quantile_; const TickTimer* tick_timer_; - int base_minimum_delay_ms_; - // Provides delay which is used by LimitTargetLevel as lower bound on target - // delay. - int effective_minimum_delay_ms_; + const absl::optional resample_interval_ms_; + const int max_history_ms_; - // Time elapsed since last packet. - std::unique_ptr packet_iat_stopwatch_; - int base_target_level_; // Currently preferred buffer level before peak - // detection and streaming mode (Q0). - // TODO(turajs) change the comment according to the implementation of - // minimum-delay. - int target_level_; // Currently preferred buffer level in (fractions) - // of packets (Q8), before adding any extra delay. - int packet_len_ms_; // Length of audio in each incoming packet [ms]. - uint16_t last_seq_no_; // Sequence number for last received packet. - uint32_t last_timestamp_; // Timestamp for the last received packet. - int minimum_delay_ms_; // Externally set minimum delay. - int maximum_delay_ms_; // Externally set maximum allowed delay. - int last_pack_cng_or_dtmf_; - const bool enable_rtx_handling_; - int num_reordered_packets_ = 0; // Number of consecutive reordered packets. + int base_minimum_delay_ms_; + int effective_minimum_delay_ms_; // Used as lower bound for target delay. + int minimum_delay_ms_; // Externally set minimum delay. + int maximum_delay_ms_; // Externally set maximum allowed delay. + + int packet_len_ms_ = 0; + std::unique_ptr + packet_iat_stopwatch_; // Time elapsed since last packet. + int target_level_ms_; // Currently preferred buffer level. 
+ uint32_t last_timestamp_; // Timestamp for the last received packet. + int num_reordered_packets_ = 0; + int max_delay_in_interval_ms_ = 0; + std::unique_ptr resample_stopwatch_; struct PacketDelay { int iat_delay_ms; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h new file mode 100644 index 000000000..503f6ac6b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_ +#define MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_ + +#include "modules/audio_coding/neteq/buffer_level_filter.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockBufferLevelFilter : public BufferLevelFilter { + public: + MOCK_METHOD(void, + Update, + (size_t buffer_size_samples, int time_stretched_samples)); + MOCK_METHOD(int, filtered_current_level, (), (const)); +}; + +} // namespace webrtc +#endif // MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h new file mode 100644 index 000000000..5b5133ece --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_ +#define MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_ + +#include +#include + +#include "api/neteq/tick_timer.h" +#include "modules/audio_coding/neteq/delay_manager.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockDelayManager : public DelayManager { + public: + MockDelayManager(size_t max_packets_in_buffer, + int base_minimum_delay_ms, + int histogram_quantile, + absl::optional resample_interval_ms, + int max_history_ms, + const TickTimer* tick_timer, + std::unique_ptr histogram) + : DelayManager(max_packets_in_buffer, + base_minimum_delay_ms, + histogram_quantile, + resample_interval_ms, + max_history_ms, + tick_timer, + std::move(histogram)) {} + MOCK_METHOD(int, TargetDelayMs, (), (const)); +}; + +} // namespace webrtc +#endif // MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h index b7df85fb2..fdfdbb4d1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h @@ -41,15 +41,12 @@ class MockNetEqController : public NetEqController { MOCK_METHOD(void, SetCngOff, (), (override)); MOCK_METHOD(void, ExpandDecision, (NetEq::Operation operation), (override)); MOCK_METHOD(void, AddSampleMemory, (int32_t value), (override)); - MOCK_METHOD(int, TargetLevelMs, (), (override)); + MOCK_METHOD(int, TargetLevelMs, (), (const, override)); 
MOCK_METHOD(absl::optional, PacketArrived, - (bool last_cng_or_dtmf, - size_t packet_length_samples, + (int fs_hz, bool should_update_stats, - uint16_t main_sequence_number, - uint32_t main_timestamp, - int fs_hz), + const PacketArrivedInfo& info), (override)); MOCK_METHOD(bool, PeakFound, (), (const, override)); MOCK_METHOD(int, GetFilteredBufferLevel, (), (const, override)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc index 643fb1e2d..f8d5d9dc1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc @@ -387,17 +387,9 @@ int NetEqImpl::FilteredCurrentDelayMs() const { int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) { MutexLock lock(&mutex_); assert(decoder_database_.get()); - const size_t total_samples_in_buffers = - packet_buffer_->NumSamplesInBuffer(decoder_frame_length_) + - sync_buffer_->FutureLength(); - assert(controller_.get()); - stats->preferred_buffer_size_ms = controller_->TargetLevelMs(); - stats->jitter_peaks_found = controller_->PeakFound(); - stats_->GetNetworkStatistics(fs_hz_, total_samples_in_buffers, - decoder_frame_length_, stats); + *stats = CurrentNetworkStatisticsInternal(); + stats_->GetNetworkStatistics(decoder_frame_length_, stats); // Compensate for output delay chain. 
- stats->current_buffer_size_ms += output_delay_chain_ms_; - stats->preferred_buffer_size_ms += output_delay_chain_ms_; stats->mean_waiting_time_ms += output_delay_chain_ms_; stats->median_waiting_time_ms += output_delay_chain_ms_; stats->min_waiting_time_ms += output_delay_chain_ms_; @@ -405,6 +397,31 @@ int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) { return 0; } +NetEqNetworkStatistics NetEqImpl::CurrentNetworkStatistics() const { + MutexLock lock(&mutex_); + return CurrentNetworkStatisticsInternal(); +} + +NetEqNetworkStatistics NetEqImpl::CurrentNetworkStatisticsInternal() const { + assert(decoder_database_.get()); + NetEqNetworkStatistics stats; + const size_t total_samples_in_buffers = + packet_buffer_->NumSamplesInBuffer(decoder_frame_length_) + + sync_buffer_->FutureLength(); + + assert(controller_.get()); + stats.preferred_buffer_size_ms = controller_->TargetLevelMs(); + stats.jitter_peaks_found = controller_->PeakFound(); + RTC_DCHECK_GT(fs_hz_, 0); + stats.current_buffer_size_ms = + static_cast(total_samples_in_buffers * 1000 / fs_hz_); + + // Compensate for output delay chain. + stats.current_buffer_size_ms += output_delay_chain_ms_; + stats.preferred_buffer_size_ms += output_delay_chain_ms_; + return stats; +} + NetEqLifetimeStatistics NetEqImpl::GetLifetimeStatistics() const { MutexLock lock(&mutex_); return stats_->GetLifetimeStatistics(); @@ -663,6 +680,7 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, } PacketList parsed_packet_list; + bool is_dtx = false; while (!packet_list.empty()) { Packet& packet = packet_list.front(); const DecoderDatabase::DecoderInfo* info = @@ -703,6 +721,7 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, for (auto& result : results) { RTC_DCHECK(result.frame); RTC_DCHECK_GE(result.priority, 0); + is_dtx = is_dtx || result.frame->IsDtxPacket(); if (first) { // Re-use the node and move it to parsed_packet_list. 
packet_list.front() = packet_from_result(result); @@ -784,10 +803,13 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, decoder_database_->GetDecoderInfo(main_payload_type); assert(dec_info); // Already checked that the payload type is known. - const bool last_cng_or_dtmf = - dec_info->IsComfortNoise() || dec_info->IsDtmf(); - const size_t packet_length_samples = + NetEqController::PacketArrivedInfo info; + info.is_cng_or_dtmf = dec_info->IsComfortNoise() || dec_info->IsDtmf(); + info.packet_length_samples = number_of_primary_packets * decoder_frame_length_; + info.main_timestamp = main_timestamp; + info.main_sequence_number = main_sequence_number; + info.is_dtx = is_dtx; // Only update statistics if incoming packet is not older than last played // out packet or RTX handling is enabled, and if new codec flag is not // set. @@ -796,9 +818,8 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, static_cast(main_timestamp - timestamp_) >= 0) && !new_codec_; - auto relative_delay = controller_->PacketArrived( - last_cng_or_dtmf, packet_length_samples, should_update_stats, - main_sequence_number, main_timestamp, fs_hz_); + auto relative_delay = + controller_->PacketArrived(fs_hz_, should_update_stats, info); if (relative_delay) { stats_->RelativePacketArrivalDelay(relative_delay.value()); } @@ -840,6 +861,7 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, static_cast(audio_frame->samples_per_channel_); audio_frame->num_channels_ = sync_buffer_->Channels(); stats_->ExpandedNoiseSamples(output_size_samples_, false); + controller_->NotifyMutedState(); *muted = true; return 0; } @@ -1309,13 +1331,6 @@ int NetEqImpl::GetDecision(Operation* operation, int extracted_samples = 0; if (packet) { sync_buffer_->IncreaseEndTimestamp(packet->timestamp - end_timestamp); - if (controller_->CngOff()) { - // Adjustment of timestamp only corresponds to an actual packet loss - // if comfort noise is not played. 
If comfort noise was just played, - // this adjustment of timestamp is only done to get back in sync with the - // stream timestamp; no loss to report. - stats_->LostSamples(packet->timestamp - end_timestamp); - } if (*operation != Operation::kRfc3389Cng) { // We are about to decode and use a non-CNG packet. @@ -1707,7 +1722,7 @@ int NetEqImpl::DoAccelerate(int16_t* decoded_buffer, decoded_length = required_samples * num_channels; } - size_t samples_removed; + size_t samples_removed = 0; Accelerate::ReturnCodes return_code = accelerate_->Process(decoded_buffer, decoded_length, fast_accelerate, algorithm_buffer_.get(), &samples_removed); @@ -1785,7 +1800,7 @@ int NetEqImpl::DoPreemptiveExpand(int16_t* decoded_buffer, decoded_length = required_samples * num_channels; } - size_t samples_added; + size_t samples_added = 0; PreemptiveExpand::ReturnCodes return_code = preemptive_expand_->Process( decoded_buffer, decoded_length, old_borrowed_samples_per_channel, algorithm_buffer_.get(), &samples_added); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h index 0ade6b538..e130422a3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h @@ -162,6 +162,8 @@ class NetEqImpl : public webrtc::NetEq { // after the call. 
int NetworkStatistics(NetEqNetworkStatistics* stats) override; + NetEqNetworkStatistics CurrentNetworkStatistics() const override; + NetEqLifetimeStatistics GetLifetimeStatistics() const override; NetEqOperationsAndState GetOperationsAndState() const override; @@ -330,6 +332,9 @@ class NetEqImpl : public webrtc::NetEq { virtual void UpdatePlcComponents(int fs_hz, size_t channels) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + NetEqNetworkStatistics CurrentNetworkStatisticsInternal() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + Clock* const clock_; mutable Mutex mutex_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc index 134369099..5681464f4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc @@ -29,10 +29,10 @@ namespace webrtc { // The method loops through a list of packets {A, B, C, ...}. Each packet is // split into its corresponding RED payloads, {A1, A2, ...}, which is // temporarily held in the list |new_packets|. -// When the first packet in |packet_list| has been processed, the orignal packet -// is replaced by the new ones in |new_packets|, so that |packet_list| becomes: -// {A1, A2, ..., B, C, ...}. The method then continues with B, and C, until all -// the original packets have been replaced by their split payloads. +// When the first packet in |packet_list| has been processed, the original +// packet is replaced by the new ones in |new_packets|, so that |packet_list| +// becomes: {A1, A2, ..., B, C, ...}. The method then continues with B, and C, +// until all the original packets have been replaced by their split payloads. bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { // Too many RED blocks indicates that something is wrong. Clamp it at some // reasonable value. 
@@ -43,6 +43,7 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { const Packet& red_packet = *it; assert(!red_packet.payload.empty()); const uint8_t* payload_ptr = red_packet.payload.data(); + size_t payload_length = red_packet.payload.size(); // Read RED headers (according to RFC 2198): // @@ -67,6 +68,10 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { bool last_block = false; size_t sum_length = 0; while (!last_block) { + if (payload_length == 0) { + RTC_LOG(LS_WARNING) << "SplitRed header too short"; + return false; + } RedHeader new_header; // Check the F bit. If F == 0, this was the last block. last_block = ((*payload_ptr & 0x80) == 0); @@ -74,11 +79,16 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { new_header.payload_type = payload_ptr[0] & 0x7F; if (last_block) { // No more header data to read. - ++sum_length; // Account for RED header size of 1 byte. + sum_length += kRedLastHeaderLength; // Account for RED header size. new_header.timestamp = red_packet.timestamp; new_header.payload_length = red_packet.payload.size() - sum_length; - payload_ptr += 1; // Advance to first payload byte. + payload_ptr += kRedLastHeaderLength; // Advance to first payload byte. + payload_length -= kRedLastHeaderLength; } else { + if (payload_length < kRedHeaderLength) { + RTC_LOG(LS_WARNING) << "SplitRed header too short"; + return false; + } // Bits 8 through 21 are timestamp offset. int timestamp_offset = (payload_ptr[1] << 6) + ((payload_ptr[2] & 0xFC) >> 2); @@ -86,12 +96,17 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { // Bits 22 through 31 are payload length. new_header.payload_length = ((payload_ptr[2] & 0x03) << 8) + payload_ptr[3]; - payload_ptr += 4; // Advance to next RED header. + + sum_length += new_header.payload_length; + sum_length += kRedHeaderLength; // Account for RED header size. + + payload_ptr += kRedHeaderLength; // Advance to next RED header. 
+ payload_length -= kRedHeaderLength; } - sum_length += new_header.payload_length; - sum_length += 4; // Account for RED header size of 4 bytes. // Store in new list of packets. - new_headers.push_back(new_header); + if (new_header.payload_length > 0) { + new_headers.push_back(new_header); + } } if (new_headers.size() <= kMaxRedBlocks) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h index c2e0a445d..c54ffc0da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h @@ -18,6 +18,9 @@ namespace webrtc { class DecoderDatabase; +static const size_t kRedHeaderLength = 4; // 4 bytes RED header. +static const size_t kRedLastHeaderLength = + 1; // reduced size for last RED header. // This class handles splitting of RED payloads into smaller parts. // Codec-specific packet splitting can be performed by // AudioDecoder::ParsePayload. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc index fa2925ce4..708780a8a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc @@ -115,11 +115,8 @@ void StatisticsCalculator::PeriodicUmaAverage::Reset() { StatisticsCalculator::StatisticsCalculator() : preemptive_samples_(0), accelerate_samples_(0), - added_zero_samples_(0), expanded_speech_samples_(0), expanded_noise_samples_(0), - discarded_packets_(0), - lost_timestamps_(0), timestamps_since_last_report_(0), secondary_decoded_samples_(0), discarded_secondary_packets_(0), @@ -139,7 +136,6 @@ StatisticsCalculator::~StatisticsCalculator() = default; void StatisticsCalculator::Reset() { preemptive_samples_ = 0; accelerate_samples_ = 0; - added_zero_samples_ = 0; expanded_speech_samples_ = 0; expanded_noise_samples_ = 0; secondary_decoded_samples_ = 0; @@ -148,8 +144,6 @@ void StatisticsCalculator::Reset() { } void StatisticsCalculator::ResetMcu() { - discarded_packets_ = 0; - lost_timestamps_ = 0; timestamps_since_last_report_ = 0; } @@ -237,10 +231,6 @@ void StatisticsCalculator::AcceleratedSamples(size_t num_samples) { lifetime_stats_.removed_samples_for_acceleration += num_samples; } -void StatisticsCalculator::AddZeros(size_t num_samples) { - added_zero_samples_ += num_samples; -} - void StatisticsCalculator::PacketsDiscarded(size_t num_packets) { operations_and_state_.discarded_primary_packets += num_packets; } @@ -254,10 +244,6 @@ void StatisticsCalculator::SecondaryPacketsReceived(size_t num_packets) { lifetime_stats_.fec_packets_received += num_packets; } -void StatisticsCalculator::LostSamples(size_t num_samples) { - lost_timestamps_ += num_samples; -} - void StatisticsCalculator::IncreaseCounter(size_t num_samples, int fs_hz) { const int time_step_ms 
= rtc::CheckedDivExact(static_cast(1000 * num_samples), fs_hz); @@ -267,9 +253,7 @@ void StatisticsCalculator::IncreaseCounter(size_t num_samples, int fs_hz) { timestamps_since_last_report_ += static_cast(num_samples); if (timestamps_since_last_report_ > static_cast(fs_hz * kMaxReportPeriod)) { - lost_timestamps_ = 0; timestamps_since_last_report_ = 0; - discarded_packets_ = 0; } lifetime_stats_.total_samples_received += num_samples; } @@ -321,20 +305,10 @@ void StatisticsCalculator::StoreWaitingTime(int waiting_time_ms) { operations_and_state_.last_waiting_time_ms = waiting_time_ms; } -void StatisticsCalculator::GetNetworkStatistics(int fs_hz, - size_t num_samples_in_buffers, - size_t samples_per_packet, +void StatisticsCalculator::GetNetworkStatistics(size_t samples_per_packet, NetEqNetworkStatistics* stats) { - RTC_DCHECK_GT(fs_hz, 0); RTC_DCHECK(stats); - stats->added_zero_samples = added_zero_samples_; - stats->current_buffer_size_ms = - static_cast(num_samples_in_buffers * 1000 / fs_hz); - - stats->packet_loss_rate = - CalculateQ14Ratio(lost_timestamps_, timestamps_since_last_report_); - stats->accelerate_rate = CalculateQ14Ratio(accelerate_samples_, timestamps_since_last_report_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h index 333f4a76a..f0c273442 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h @@ -62,9 +62,6 @@ class StatisticsCalculator { // Reports that |num_samples| samples were removed through accelerate. void AcceleratedSamples(size_t num_samples); - // Reports that |num_samples| zeros were inserted into the output. - void AddZeros(size_t num_samples); - // Reports that |num_packets| packets were discarded. 
virtual void PacketsDiscarded(size_t num_packets); @@ -74,9 +71,6 @@ class StatisticsCalculator { // Reports that |num_packets| secondary (FEC) packets were received. virtual void SecondaryPacketsReceived(size_t num_packets); - // Reports that |num_samples| were lost. - void LostSamples(size_t num_samples); - // Increases the report interval counter with |num_samples| at a sample rate // of |fs_hz|. This is how the StatisticsCalculator gets notified that current // time is increasing. @@ -107,15 +101,11 @@ class StatisticsCalculator { // period caused not by an actual packet loss, but by a delayed packet. virtual void LogDelayedPacketOutageEvent(int num_samples, int fs_hz); - // Returns the current network statistics in |stats|. The current sample rate - // is |fs_hz|, the total number of samples in packet buffer and sync buffer - // yet to play out is |num_samples_in_buffers|, and the number of samples per - // packet is |samples_per_packet|. The method does not populate + // Returns the current network statistics in |stats|. The number of samples + // per packet is |samples_per_packet|. The method does not populate // |preferred_buffer_size_ms|, |jitter_peaks_found| or |clockdrift_ppm|; use // the PopulateDelayManagerStats method for those. - void GetNetworkStatistics(int fs_hz, - size_t num_samples_in_buffers, - size_t samples_per_packet, + void GetNetworkStatistics(size_t samples_per_packet, NetEqNetworkStatistics* stats); // Returns a copy of this class's lifetime statistics. 
These statistics are @@ -196,12 +186,9 @@ class StatisticsCalculator { size_t silent_concealed_samples_correction_ = 0; size_t preemptive_samples_; size_t accelerate_samples_; - size_t added_zero_samples_; size_t expanded_speech_samples_; size_t expanded_noise_samples_; size_t concealed_samples_at_event_end_ = 0; - size_t discarded_packets_; - size_t lost_timestamps_; uint32_t timestamps_since_last_report_; std::deque waiting_times_; uint32_t secondary_decoded_samples_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc index 3a29bb80f..95f1a1a3c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc @@ -18,7 +18,6 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/sleep.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h index a2185047e..fb5bf6fa5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h @@ -103,15 +103,13 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { int32_t PlayoutDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t RecordingDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetPlayoutDevice(uint16_t index) override { @@ -123,8 +121,7 @@ class AudioDeviceTemplate : public 
AudioDeviceGeneric { int32_t SetPlayoutDevice( AudioDeviceModule::WindowsDeviceType device) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetRecordingDevice(uint16_t index) override { @@ -136,8 +133,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { int32_t SetRecordingDevice( AudioDeviceModule::WindowsDeviceType device) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t PlayoutIsAvailable(bool& available) override { @@ -266,53 +262,38 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } int32_t SetMicrophoneVolume(uint32_t volume) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MicrophoneVolume(uint32_t& volume) const override { - FATAL() << "Should never be called"; + RTC_CHECK_NOTREACHED(); return -1; } int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MinMicrophoneVolume(uint32_t& minVolume) const override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SpeakerMuteIsAvailable(bool& available) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } - int32_t SetSpeakerMute(bool enable) override { - FATAL() << "Should never be called"; - return -1; - } + int32_t SetSpeakerMute(bool enable) override { RTC_CHECK_NOTREACHED(); } - int32_t SpeakerMute(bool& enabled) const override { - FATAL() << "Should never be called"; - return -1; - } + int32_t SpeakerMute(bool& enabled) const override { RTC_CHECK_NOTREACHED(); } int32_t MicrophoneMuteIsAvailable(bool& available) override { - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } - int32_t SetMicrophoneMute(bool enable) override { - FATAL() << "Not implemented"; - return -1; - } + int32_t SetMicrophoneMute(bool enable) override { RTC_CHECK_NOTREACHED(); 
} int32_t MicrophoneMute(bool& enabled) const override { - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } // Returns true if the audio manager has been configured to support stereo diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc index 12ac45876..a3aa85565 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc @@ -219,8 +219,7 @@ int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) { int32_t AudioRecordJni::EnableBuiltInAGC(bool enable) { // TODO(henrika): possibly remove when no longer used by any client. - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t AudioRecordJni::EnableBuiltInNS(bool enable) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.cc index 909506b9f..d5b381029 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.cc @@ -38,7 +38,7 @@ CreateWindowsCoreAudioAudioDeviceModuleForTest( bool automatic_restart) { RTC_DLOG(INFO) << __FUNCTION__; // Returns NULL if Core Audio is not supported or if COM has not been - // initialized correctly using webrtc_win::ScopedCOMInitializer. + // initialized correctly using ScopedCOMInitializer. 
if (!webrtc_win::core_audio_utility::IsSupported()) { RTC_LOG(LS_ERROR) << "Unable to create ADM since Core Audio is not supported"; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.h index fb1ac41fa..9c19d6196 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_factory.h @@ -30,8 +30,8 @@ namespace webrtc { // rtc::scoped_refptr CreateAudioDevice() { // task_queue_factory_ = CreateDefaultTaskQueueFactory(); // // Tell COM that this thread shall live in the MTA. -// com_initializer_ = std::make_unique( -// webrtc_win::ScopedCOMInitializer::kMTA); +// com_initializer_ = std::make_unique( +// ScopedCOMInitializer::kMTA); // if (!com_initializer_->Succeeded()) { // return nullptr; // } @@ -42,7 +42,7 @@ namespace webrtc { // } // // private: -// std::unique_ptr com_initializer_; +// std::unique_ptr com_initializer_; // std::unique_ptr task_queue_factory_; // rtc::scoped_refptr CreateWindowsCoreAudioAudioDeviceModule( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc index 5fac1bcac..84d05e0f6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc @@ -217,7 +217,10 @@ bool AudioDeviceLinuxALSA::Initialized() const { int32_t AudioDeviceLinuxALSA::InitSpeaker() { MutexLock lock(&mutex_); + return InitSpeakerLocked(); +} +int32_t AudioDeviceLinuxALSA::InitSpeakerLocked() { if (_playing) { return -1; } @@ -229,7 +232,10 @@ int32_t AudioDeviceLinuxALSA::InitSpeaker() { int32_t AudioDeviceLinuxALSA::InitMicrophone() { MutexLock lock(&mutex_); + return 
InitMicrophoneLocked(); +} +int32_t AudioDeviceLinuxALSA::InitMicrophoneLocked() { if (_recording) { return -1; } @@ -421,22 +427,22 @@ int32_t AudioDeviceLinuxALSA::StereoRecordingIsAvailable(bool& available) { // Stop/uninitialize recording if initialized (and possibly started) if (_recIsInitialized) { - StopRecording(); + StopRecordingLocked(); } // Try init in stereo; _recChannels = 2; - if (InitRecording() == 0) { + if (InitRecordingLocked() == 0) { available = true; } // Stop/uninitialize recording - StopRecording(); + StopRecordingLocked(); // Recover previous states _recChannels = recChannels; if (recIsInitialized) { - InitRecording(); + InitRecordingLocked(); } if (recording) { StartRecording(); @@ -481,22 +487,22 @@ int32_t AudioDeviceLinuxALSA::StereoPlayoutIsAvailable(bool& available) { // Stop/uninitialize recording if initialized (and possibly started) if (_playIsInitialized) { - StopPlayout(); + StopPlayoutLocked(); } // Try init in stereo; _playChannels = 2; - if (InitPlayout() == 0) { + if (InitPlayoutLocked() == 0) { available = true; } // Stop/uninitialize recording - StopPlayout(); + StopPlayoutLocked(); // Recover previous states _playChannels = playChannels; if (playIsInitialized) { - InitPlayout(); + InitPlayoutLocked(); } if (playing) { StartPlayout(); @@ -745,9 +751,13 @@ int32_t AudioDeviceLinuxALSA::RecordingIsAvailable(bool& available) { } int32_t AudioDeviceLinuxALSA::InitPlayout() { + MutexLock lock(&mutex_); + return InitPlayoutLocked(); +} + +int32_t AudioDeviceLinuxALSA::InitPlayoutLocked() { int errVal = 0; - MutexLock lock(&mutex_); if (_playing) { return -1; } @@ -760,7 +770,7 @@ int32_t AudioDeviceLinuxALSA::InitPlayout() { return 0; } // Initialize the speaker (devices might have been added or removed) - if (InitSpeaker() == -1) { + if (InitSpeakerLocked() == -1) { RTC_LOG(LS_WARNING) << "InitSpeaker() failed"; } @@ -864,9 +874,12 @@ int32_t AudioDeviceLinuxALSA::InitPlayout() { } int32_t AudioDeviceLinuxALSA::InitRecording() { 
- int errVal = 0; - MutexLock lock(&mutex_); + return InitRecordingLocked(); +} + +int32_t AudioDeviceLinuxALSA::InitRecordingLocked() { + int errVal = 0; if (_recording) { return -1; @@ -881,7 +894,7 @@ int32_t AudioDeviceLinuxALSA::InitRecording() { } // Initialize the microphone (devices might have been added or removed) - if (InitMicrophone() == -1) { + if (InitMicrophoneLocked() == -1) { RTC_LOG(LS_WARNING) << "InitMicrophone() failed"; } @@ -1058,28 +1071,28 @@ int32_t AudioDeviceLinuxALSA::StartRecording() { } int32_t AudioDeviceLinuxALSA::StopRecording() { - { MutexLock lock(&mutex_); + return StopRecordingLocked(); +} - if (!_recIsInitialized) { - return 0; - } - - if (_handleRecord == NULL) { - return -1; - } - - // Make sure we don't start recording (it's asynchronous). - _recIsInitialized = false; - _recording = false; +int32_t AudioDeviceLinuxALSA::StopRecordingLocked() { + if (!_recIsInitialized) { + return 0; } + if (_handleRecord == NULL) { + return -1; + } + + // Make sure we don't start recording (it's asynchronous). 
+ _recIsInitialized = false; + _recording = false; + if (_ptrThreadRec) { _ptrThreadRec->Stop(); _ptrThreadRec.reset(); } - MutexLock lock(&mutex_); _recordingFramesLeft = 0; if (_recordingBuffer) { delete[] _recordingBuffer; @@ -1162,28 +1175,27 @@ int32_t AudioDeviceLinuxALSA::StartPlayout() { } int32_t AudioDeviceLinuxALSA::StopPlayout() { - { MutexLock lock(&mutex_); + return StopPlayoutLocked(); +} - if (!_playIsInitialized) { - return 0; - } - - if (_handlePlayout == NULL) { - return -1; - } - - _playing = false; +int32_t AudioDeviceLinuxALSA::StopPlayoutLocked() { + if (!_playIsInitialized) { + return 0; } + if (_handlePlayout == NULL) { + return -1; + } + + _playing = false; + // stop playout thread first if (_ptrThreadPlay) { _ptrThreadPlay->Stop(); _ptrThreadPlay.reset(); } - MutexLock lock(&mutex_); - _playoutFramesLeft = 0; delete[] _playoutBuffer; _playoutBuffer = NULL; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h index 0e0b7919b..410afcf42 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h @@ -40,8 +40,8 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric { AudioDeviceModule::AudioLayer& audioLayer) const override; // Main initializaton and termination - InitStatus Init() override; - int32_t Terminate() override; + InitStatus Init() RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t Terminate() RTC_LOCKS_EXCLUDED(mutex_) override; bool Initialized() const override; // Device enumeration @@ -64,24 +64,24 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric { // Audio transport initialization int32_t PlayoutIsAvailable(bool& available) override; - int32_t InitPlayout() override; + int32_t InitPlayout() RTC_LOCKS_EXCLUDED(mutex_) override; bool PlayoutIsInitialized() const override; 
int32_t RecordingIsAvailable(bool& available) override; - int32_t InitRecording() override; + int32_t InitRecording() RTC_LOCKS_EXCLUDED(mutex_) override; bool RecordingIsInitialized() const override; // Audio transport control int32_t StartPlayout() override; - int32_t StopPlayout() override; + int32_t StopPlayout() RTC_LOCKS_EXCLUDED(mutex_) override; bool Playing() const override; int32_t StartRecording() override; - int32_t StopRecording() override; + int32_t StopRecording() RTC_LOCKS_EXCLUDED(mutex_) override; bool Recording() const override; // Audio mixer initialization - int32_t InitSpeaker() override; + int32_t InitSpeaker() RTC_LOCKS_EXCLUDED(mutex_) override; bool SpeakerIsInitialized() const override; - int32_t InitMicrophone() override; + int32_t InitMicrophone() RTC_LOCKS_EXCLUDED(mutex_) override; bool MicrophoneIsInitialized() const override; // Speaker volume controls @@ -109,19 +109,28 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric { int32_t MicrophoneMute(bool& enabled) const override; // Stereo support - int32_t StereoPlayoutIsAvailable(bool& available) override; + int32_t StereoPlayoutIsAvailable(bool& available) + RTC_LOCKS_EXCLUDED(mutex_) override; int32_t SetStereoPlayout(bool enable) override; int32_t StereoPlayout(bool& enabled) const override; - int32_t StereoRecordingIsAvailable(bool& available) override; + int32_t StereoRecordingIsAvailable(bool& available) + RTC_LOCKS_EXCLUDED(mutex_) override; int32_t SetStereoRecording(bool enable) override; int32_t StereoRecording(bool& enabled) const override; // Delay information and control int32_t PlayoutDelay(uint16_t& delayMS) const override; - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) + RTC_LOCKS_EXCLUDED(mutex_) override; private: + int32_t InitRecordingLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int32_t StopRecordingLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int32_t StopPlayoutLocked() 
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int32_t InitPlayoutLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t GetDevicesInfo(const int32_t function, const bool playback, const int32_t enumDeviceNo = 0, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h index f05ba1ebf..03aa16bb8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h @@ -116,7 +116,7 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric { // Main initializaton and termination InitStatus Init() override; - int32_t Terminate() override; + int32_t Terminate() RTC_LOCKS_EXCLUDED(mutex_) override; bool Initialized() const override; // Device enumeration @@ -139,18 +139,18 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric { // Audio transport initialization int32_t PlayoutIsAvailable(bool& available) override; - int32_t InitPlayout() override; + int32_t InitPlayout() RTC_LOCKS_EXCLUDED(mutex_) override; bool PlayoutIsInitialized() const override; int32_t RecordingIsAvailable(bool& available) override; int32_t InitRecording() override; bool RecordingIsInitialized() const override; // Audio transport control - int32_t StartPlayout() override; - int32_t StopPlayout() override; + int32_t StartPlayout() RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t StopPlayout() RTC_LOCKS_EXCLUDED(mutex_) override; bool Playing() const override; - int32_t StartRecording() override; - int32_t StopRecording() override; + int32_t StartRecording() RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t StopRecording() RTC_LOCKS_EXCLUDED(mutex_) override; bool Recording() const override; // Audio mixer initialization @@ -192,7 
+192,8 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric { int32_t StereoRecording(bool& enabled) const override; // Delay information and control - int32_t PlayoutDelay(uint16_t& delayMS) const override; + int32_t PlayoutDelay(uint16_t& delayMS) const + RTC_LOCKS_EXCLUDED(mutex_) override; void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; @@ -256,8 +257,8 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric { static void RecThreadFunc(void*); static void PlayThreadFunc(void*); - bool RecThreadProcess(); - bool PlayThreadProcess(); + bool RecThreadProcess() RTC_LOCKS_EXCLUDED(mutex_); + bool PlayThreadProcess() RTC_LOCKS_EXCLUDED(mutex_); AudioDeviceBuffer* _ptrAudioBuffer; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc index 028be5db6..fb9d874ef 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc @@ -47,16 +47,19 @@ int32_t AudioMixerManagerLinuxALSA::Close() { MutexLock lock(&mutex_); - CloseSpeaker(); - CloseMicrophone(); + CloseSpeakerLocked(); + CloseMicrophoneLocked(); return 0; } int32_t AudioMixerManagerLinuxALSA::CloseSpeaker() { - RTC_LOG(LS_VERBOSE) << __FUNCTION__; - MutexLock lock(&mutex_); + return CloseSpeakerLocked(); +} + +int32_t AudioMixerManagerLinuxALSA::CloseSpeakerLocked() { + RTC_LOG(LS_VERBOSE) << __FUNCTION__; int errVal = 0; @@ -86,9 +89,12 @@ int32_t AudioMixerManagerLinuxALSA::CloseSpeaker() { } int32_t AudioMixerManagerLinuxALSA::CloseMicrophone() { - RTC_LOG(LS_VERBOSE) << __FUNCTION__; - MutexLock lock(&mutex_); + return CloseMicrophoneLocked(); +} + +int32_t AudioMixerManagerLinuxALSA::CloseMicrophoneLocked() { + RTC_LOG(LS_VERBOSE) << __FUNCTION__; int errVal = 0; diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h index 61490b4a7..d98287822 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h @@ -21,27 +21,27 @@ namespace webrtc { class AudioMixerManagerLinuxALSA { public: - int32_t OpenSpeaker(char* deviceName); - int32_t OpenMicrophone(char* deviceName); - int32_t SetSpeakerVolume(uint32_t volume); + int32_t OpenSpeaker(char* deviceName) RTC_LOCKS_EXCLUDED(mutex_); + int32_t OpenMicrophone(char* deviceName) RTC_LOCKS_EXCLUDED(mutex_); + int32_t SetSpeakerVolume(uint32_t volume) RTC_LOCKS_EXCLUDED(mutex_); int32_t SpeakerVolume(uint32_t& volume) const; int32_t MaxSpeakerVolume(uint32_t& maxVolume) const; int32_t MinSpeakerVolume(uint32_t& minVolume) const; int32_t SpeakerVolumeIsAvailable(bool& available); int32_t SpeakerMuteIsAvailable(bool& available); - int32_t SetSpeakerMute(bool enable); + int32_t SetSpeakerMute(bool enable) RTC_LOCKS_EXCLUDED(mutex_); int32_t SpeakerMute(bool& enabled) const; int32_t MicrophoneMuteIsAvailable(bool& available); - int32_t SetMicrophoneMute(bool enable); + int32_t SetMicrophoneMute(bool enable) RTC_LOCKS_EXCLUDED(mutex_); int32_t MicrophoneMute(bool& enabled) const; int32_t MicrophoneVolumeIsAvailable(bool& available); - int32_t SetMicrophoneVolume(uint32_t volume); + int32_t SetMicrophoneVolume(uint32_t volume) RTC_LOCKS_EXCLUDED(mutex_); int32_t MicrophoneVolume(uint32_t& volume) const; int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const; int32_t MinMicrophoneVolume(uint32_t& minVolume) const; - int32_t Close(); - int32_t CloseSpeaker(); - int32_t CloseMicrophone(); + int32_t Close() RTC_LOCKS_EXCLUDED(mutex_); + int32_t CloseSpeaker() RTC_LOCKS_EXCLUDED(mutex_); + int32_t CloseMicrophone() 
RTC_LOCKS_EXCLUDED(mutex_); bool SpeakerIsInitialized() const; bool MicrophoneIsInitialized() const; @@ -50,6 +50,8 @@ class AudioMixerManagerLinuxALSA { ~AudioMixerManagerLinuxALSA(); private: + int32_t CloseSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int32_t CloseMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t LoadMicMixerElement() const; int32_t LoadSpeakerMixerElement() const; void GetControlName(char* controlName, char* deviceName) const; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc index 6552953de..04a8bcf72 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc @@ -24,9 +24,23 @@ #include "rtc_base/ref_counted_object.h" namespace webrtc { + +struct AudioMixerImpl::SourceStatus { + SourceStatus(Source* audio_source, bool is_mixed, float gain) + : audio_source(audio_source), is_mixed(is_mixed), gain(gain) {} + Source* audio_source = nullptr; + bool is_mixed = false; + float gain = 0.0f; + + // A frame that will be passed to audio_source->GetAudioFrameWithInfo. + AudioFrame audio_frame; +}; + namespace { struct SourceFrame { + SourceFrame() = default; + SourceFrame(AudioMixerImpl::SourceStatus* source_status, AudioFrame* audio_frame, bool muted) @@ -57,6 +71,7 @@ struct SourceFrame { }; // ShouldMixBefore(a, b) is used to select mixer sources. +// Returns true if `a` is preferred over `b` as a source to be mixed. 
bool ShouldMixBefore(const SourceFrame& a, const SourceFrame& b) { if (a.muted != b.muted) { return b.muted; @@ -73,7 +88,7 @@ bool ShouldMixBefore(const SourceFrame& a, const SourceFrame& b) { } void RampAndUpdateGain( - const std::vector& mixed_sources_and_frames) { + rtc::ArrayView mixed_sources_and_frames) { for (const auto& source_frame : mixed_sources_and_frames) { float target_gain = source_frame.source_status->is_mixed ? 1.0f : 0.0f; Ramp(source_frame.source_status->gain, target_gain, @@ -82,9 +97,11 @@ void RampAndUpdateGain( } } -AudioMixerImpl::SourceStatusList::const_iterator FindSourceInList( +std::vector>::const_iterator +FindSourceInList( AudioMixerImpl::Source const* audio_source, - AudioMixerImpl::SourceStatusList const* audio_source_list) { + std::vector> const* + audio_source_list) { return std::find_if( audio_source_list->begin(), audio_source_list->end(), [audio_source](const std::unique_ptr& p) { @@ -93,14 +110,31 @@ AudioMixerImpl::SourceStatusList::const_iterator FindSourceInList( } } // namespace +struct AudioMixerImpl::HelperContainers { + void resize(size_t size) { + audio_to_mix.resize(size); + audio_source_mixing_data_list.resize(size); + ramp_list.resize(size); + preferred_rates.resize(size); + } + + std::vector audio_to_mix; + std::vector audio_source_mixing_data_list; + std::vector ramp_list; + std::vector preferred_rates; +}; + AudioMixerImpl::AudioMixerImpl( std::unique_ptr output_rate_calculator, bool use_limiter) : output_rate_calculator_(std::move(output_rate_calculator)), - output_frequency_(0), - sample_size_(0), audio_source_list_(), - frame_combiner_(use_limiter) {} + helper_containers_(std::make_unique()), + frame_combiner_(use_limiter) { + const int kTypicalMaxNumberOfMixedStreams = 3; + audio_source_list_.reserve(kTypicalMaxNumberOfMixedStreams); + helper_containers_->resize(kTypicalMaxNumberOfMixedStreams); +} AudioMixerImpl::~AudioMixerImpl() {} @@ -121,40 +155,23 @@ rtc::scoped_refptr AudioMixerImpl::Create( void 
AudioMixerImpl::Mix(size_t number_of_channels, AudioFrame* audio_frame_for_mixing) { RTC_DCHECK(number_of_channels >= 1); - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - - CalculateOutputFrequency(); - - { - MutexLock lock(&mutex_); - const size_t number_of_streams = audio_source_list_.size(); - frame_combiner_.Combine(GetAudioFromSources(), number_of_channels, - OutputFrequency(), number_of_streams, - audio_frame_for_mixing); - } - - return; -} - -void AudioMixerImpl::CalculateOutputFrequency() { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); MutexLock lock(&mutex_); - std::vector preferred_rates; + size_t number_of_streams = audio_source_list_.size(); + std::transform(audio_source_list_.begin(), audio_source_list_.end(), - std::back_inserter(preferred_rates), + helper_containers_->preferred_rates.begin(), [&](std::unique_ptr& a) { return a->audio_source->PreferredSampleRate(); }); - output_frequency_ = - output_rate_calculator_->CalculateOutputRate(preferred_rates); - sample_size_ = (output_frequency_ * kFrameDurationInMs) / 1000; -} + int output_frequency = output_rate_calculator_->CalculateOutputRateFromRange( + rtc::ArrayView(helper_containers_->preferred_rates.data(), + number_of_streams)); -int AudioMixerImpl::OutputFrequency() const { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - return output_frequency_; + frame_combiner_.Combine(GetAudioFromSources(output_frequency), + number_of_channels, output_frequency, + number_of_streams, audio_frame_for_mixing); } bool AudioMixerImpl::AddSource(Source* audio_source) { @@ -164,6 +181,7 @@ bool AudioMixerImpl::AddSource(Source* audio_source) { audio_source_list_.end()) << "Source already added to mixer"; audio_source_list_.emplace_back(new SourceStatus(audio_source, false, 0)); + helper_containers_->resize(audio_source_list_.size()); return true; } @@ -175,35 +193,37 @@ void AudioMixerImpl::RemoveSource(Source* audio_source) { audio_source_list_.erase(iter); } -AudioFrameList AudioMixerImpl::GetAudioFromSources() { - 
RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - AudioFrameList result; - std::vector audio_source_mixing_data_list; - std::vector ramp_list; - +rtc::ArrayView AudioMixerImpl::GetAudioFromSources( + int output_frequency) { // Get audio from the audio sources and put it in the SourceFrame vector. + int audio_source_mixing_data_count = 0; for (auto& source_and_status : audio_source_list_) { const auto audio_frame_info = source_and_status->audio_source->GetAudioFrameWithInfo( - OutputFrequency(), &source_and_status->audio_frame); + output_frequency, &source_and_status->audio_frame); if (audio_frame_info == Source::AudioFrameInfo::kError) { RTC_LOG_F(LS_WARNING) << "failed to GetAudioFrameWithInfo() from source"; continue; } - audio_source_mixing_data_list.emplace_back( - source_and_status.get(), &source_and_status->audio_frame, - audio_frame_info == Source::AudioFrameInfo::kMuted); + helper_containers_ + ->audio_source_mixing_data_list[audio_source_mixing_data_count++] = + SourceFrame(source_and_status.get(), &source_and_status->audio_frame, + audio_frame_info == Source::AudioFrameInfo::kMuted); } + rtc::ArrayView audio_source_mixing_data_view( + helper_containers_->audio_source_mixing_data_list.data(), + audio_source_mixing_data_count); // Sort frames by sorting function. - std::sort(audio_source_mixing_data_list.begin(), - audio_source_mixing_data_list.end(), ShouldMixBefore); + std::sort(audio_source_mixing_data_view.begin(), + audio_source_mixing_data_view.end(), ShouldMixBefore); int max_audio_frame_counter = kMaximumAmountOfMixedAudioSources; - + int ramp_list_lengh = 0; + int audio_to_mix_count = 0; // Go through list in order and put unmuted frames in result list. - for (const auto& p : audio_source_mixing_data_list) { + for (const auto& p : audio_source_mixing_data_view) { // Filter muted. 
if (p.muted) { p.source_status->is_mixed = false; @@ -214,19 +234,21 @@ AudioFrameList AudioMixerImpl::GetAudioFromSources() { bool is_mixed = false; if (max_audio_frame_counter > 0) { --max_audio_frame_counter; - result.push_back(p.audio_frame); - ramp_list.emplace_back(p.source_status, p.audio_frame, false, -1); + helper_containers_->audio_to_mix[audio_to_mix_count++] = p.audio_frame; + helper_containers_->ramp_list[ramp_list_lengh++] = + SourceFrame(p.source_status, p.audio_frame, false, -1); is_mixed = true; } p.source_status->is_mixed = is_mixed; } - RampAndUpdateGain(ramp_list); - return result; + RampAndUpdateGain(rtc::ArrayView( + helper_containers_->ramp_list.data(), ramp_list_lengh)); + return rtc::ArrayView( + helper_containers_->audio_to_mix.data(), audio_to_mix_count); } bool AudioMixerImpl::GetAudioSourceMixabilityStatusForTest( AudioMixerImpl::Source* audio_source) const { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); MutexLock lock(&mutex_); const auto iter = FindSourceInList(audio_source, &audio_source_list_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h index 57b1f5e4a..0a1308272 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h @@ -16,6 +16,7 @@ #include #include +#include "api/array_view.h" #include "api/audio/audio_frame.h" #include "api/audio/audio_mixer.h" #include "api/scoped_refptr.h" @@ -28,22 +29,9 @@ namespace webrtc { -typedef std::vector AudioFrameList; - class AudioMixerImpl : public AudioMixer { public: - struct SourceStatus { - SourceStatus(Source* audio_source, bool is_mixed, float gain) - : audio_source(audio_source), is_mixed(is_mixed), gain(gain) {} - Source* audio_source = nullptr; - bool is_mixed = false; - float gain = 0.0f; - - // A frame that will be passed to audio_source->GetAudioFrameWithInfo. 
- AudioFrame audio_frame; - }; - - using SourceStatusList = std::vector>; + struct SourceStatus; // AudioProcessing only accepts 10 ms frames. static const int kFrameDurationInMs = 10; @@ -75,32 +63,29 @@ class AudioMixerImpl : public AudioMixer { bool use_limiter); private: - // Set mixing frequency through OutputFrequencyCalculator. - void CalculateOutputFrequency(); - // Get mixing frequency. - int OutputFrequency() const; + struct HelperContainers; // Compute what audio sources to mix from audio_source_list_. Ramp // in and out. Update mixed status. Mixes up to // kMaximumAmountOfMixedAudioSources audio sources. - AudioFrameList GetAudioFromSources() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + rtc::ArrayView GetAudioFromSources(int output_frequency) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // The critical section lock guards audio source insertion and // removal, which can be done from any thread. The race checker // checks that mixing is done sequentially. mutable Mutex mutex_; - rtc::RaceChecker race_checker_; std::unique_ptr output_rate_calculator_; - // The current sample frequency and sample size when mixing. - int output_frequency_ RTC_GUARDED_BY(race_checker_); - size_t sample_size_ RTC_GUARDED_BY(race_checker_); - // List of all audio sources. Note all lists are disjunct - SourceStatusList audio_source_list_ RTC_GUARDED_BY(mutex_); // May be mixed. + // List of all audio sources. + std::vector> audio_source_list_ + RTC_GUARDED_BY(mutex_); + const std::unique_ptr helper_containers_ + RTC_GUARDED_BY(mutex_); // Component that handles actual adding of audio frames. 
- FrameCombiner frame_combiner_ RTC_GUARDED_BY(race_checker_); + FrameCombiner frame_combiner_; RTC_DISALLOW_COPY_AND_ASSIGN(AudioMixerImpl); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.cc index 57d88b638..5f24b653a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.cc @@ -18,14 +18,14 @@ namespace webrtc { -int DefaultOutputRateCalculator::CalculateOutputRate( - const std::vector& preferred_sample_rates) { +int DefaultOutputRateCalculator::CalculateOutputRateFromRange( + rtc::ArrayView preferred_sample_rates) { if (preferred_sample_rates.empty()) { return DefaultOutputRateCalculator::kDefaultFrequency; } using NativeRate = AudioProcessing::NativeRate; const int maximal_frequency = *std::max_element( - preferred_sample_rates.begin(), preferred_sample_rates.end()); + preferred_sample_rates.cbegin(), preferred_sample_rates.cend()); RTC_DCHECK_LE(NativeRate::kSampleRate8kHz, maximal_frequency); RTC_DCHECK_GE(NativeRate::kSampleRate48kHz, maximal_frequency); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.h index 2d34f3489..a7aaf681a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/default_output_rate_calculator.h @@ -13,6 +13,7 @@ #include +#include "api/array_view.h" #include "modules/audio_mixer/output_rate_calculator.h" namespace webrtc { @@ -25,8 +26,8 @@ class DefaultOutputRateCalculator : public OutputRateCalculator { // sample rates. A native rate is one in // AudioProcessing::NativeRate. If |preferred_sample_rates| is // empty, returns |kDefaultFrequency|. 
- int CalculateOutputRate( - const std::vector& preferred_sample_rates) override; + int CalculateOutputRateFromRange( + rtc::ArrayView preferred_sample_rates) override; ~DefaultOutputRateCalculator() override {} }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc index f7c6a0c32..e184506b4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc @@ -35,7 +35,7 @@ using MixingBuffer = std::array, FrameCombiner::kMaximumNumberOfChannels>; -void SetAudioFrameFields(const std::vector& mix_list, +void SetAudioFrameFields(rtc::ArrayView mix_list, size_t number_of_channels, int sample_rate, size_t number_of_streams, @@ -61,7 +61,7 @@ void SetAudioFrameFields(const std::vector& mix_list, } } -void MixFewFramesWithNoLimiter(const std::vector& mix_list, +void MixFewFramesWithNoLimiter(rtc::ArrayView mix_list, AudioFrame* audio_frame_for_mixing) { if (mix_list.empty()) { audio_frame_for_mixing->Mute(); @@ -74,7 +74,7 @@ void MixFewFramesWithNoLimiter(const std::vector& mix_list, audio_frame_for_mixing->mutable_data()); } -void MixToFloatFrame(const std::vector& mix_list, +void MixToFloatFrame(rtc::ArrayView mix_list, size_t samples_per_channel, size_t number_of_channels, MixingBuffer* mixing_buffer) { @@ -140,7 +140,7 @@ FrameCombiner::FrameCombiner(bool use_limiter) FrameCombiner::~FrameCombiner() = default; -void FrameCombiner::Combine(const std::vector& mix_list, +void FrameCombiner::Combine(rtc::ArrayView mix_list, size_t number_of_channels, int sample_rate, size_t number_of_streams, @@ -195,9 +195,10 @@ void FrameCombiner::Combine(const std::vector& mix_list, InterleaveToAudioFrame(mixing_buffer_view, audio_frame_for_mixing); } -void FrameCombiner::LogMixingStats(const std::vector& mix_list, - int sample_rate, - size_t number_of_streams) const { +void 
FrameCombiner::LogMixingStats( + rtc::ArrayView mix_list, + int sample_rate, + size_t number_of_streams) const { // Log every second. uma_logging_counter_++; if (uma_logging_counter_ > 1000 / AudioMixerImpl::kFrameDurationInMs) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.h b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.h index d989d02c3..9ddf81e41 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.h @@ -14,6 +14,7 @@ #include #include +#include "api/array_view.h" #include "api/audio/audio_frame.h" #include "modules/audio_processing/agc2/limiter.h" @@ -32,7 +33,7 @@ class FrameCombiner { // because 'mix_list' can be empty. The parameter // 'number_of_streams' is used for determining whether to pass the // data through a limiter. - void Combine(const std::vector& mix_list, + void Combine(rtc::ArrayView mix_list, size_t number_of_channels, int sample_rate, size_t number_of_streams, @@ -46,7 +47,7 @@ class FrameCombiner { kMaximumNumberOfChannels>; private: - void LogMixingStats(const std::vector& mix_list, + void LogMixingStats(rtc::ArrayView mix_list, int sample_rate, size_t number_of_streams) const; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/output_rate_calculator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/output_rate_calculator.h index cb3ca96b2..46b65a8b5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/output_rate_calculator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/output_rate_calculator.h @@ -13,14 +13,17 @@ #include +#include "api/array_view.h" + namespace webrtc { // Decides the sample rate of a mixing iteration given the preferred // sample rates of the sources. 
class OutputRateCalculator { public: - virtual int CalculateOutputRate( - const std::vector& preferred_sample_rates) = 0; + virtual int CalculateOutputRateFromRange( + rtc::ArrayView preferred_sample_rates) = 0; + virtual ~OutputRateCalculator() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h index 2f6485340..759770946 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h @@ -42,6 +42,11 @@ void ComputeFrequencyResponse_Sse2( size_t num_partitions, const std::vector>& H, std::vector>* H2); + +void ComputeFrequencyResponse_Avx2( + size_t num_partitions, + const std::vector>& H, + std::vector>* H2); #endif // Adapts the filter partitions. @@ -60,6 +65,11 @@ void AdaptPartitions_Sse2(const RenderBuffer& render_buffer, const FftData& G, size_t num_partitions, std::vector>* H); + +void AdaptPartitions_Avx2(const RenderBuffer& render_buffer, + const FftData& G, + size_t num_partitions, + std::vector>* H); #endif // Produces the filter output. @@ -78,6 +88,11 @@ void ApplyFilter_Sse2(const RenderBuffer& render_buffer, size_t num_partitions, const std::vector>& H, FftData* S); + +void ApplyFilter_Avx2(const RenderBuffer& render_buffer, + size_t num_partitions, + const std::vector>& H, + FftData* S); #endif } // namespace aec3 diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc new file mode 100644 index 000000000..245b45ac3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc @@ -0,0 +1,187 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/aec3/adaptive_fir_filter.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace aec3 { + +// Computes and stores the frequency response of the filter. +void ComputeFrequencyResponse_Avx2( + size_t num_partitions, + const std::vector>& H, + std::vector>* H2) { + for (auto& H2_ch : *H2) { + H2_ch.fill(0.f); + } + + const size_t num_render_channels = H[0].size(); + RTC_DCHECK_EQ(H.size(), H2->capacity()); + for (size_t p = 0; p < num_partitions; ++p) { + RTC_DCHECK_EQ(kFftLengthBy2Plus1, (*H2)[p].size()); + for (size_t ch = 0; ch < num_render_channels; ++ch) { + for (size_t j = 0; j < kFftLengthBy2; j += 8) { + __m256 re = _mm256_loadu_ps(&H[p][ch].re[j]); + __m256 re2 = _mm256_mul_ps(re, re); + __m256 im = _mm256_loadu_ps(&H[p][ch].im[j]); + re2 = _mm256_fmadd_ps(im, im, re2); + __m256 H2_k_j = _mm256_loadu_ps(&(*H2)[p][j]); + H2_k_j = _mm256_max_ps(H2_k_j, re2); + _mm256_storeu_ps(&(*H2)[p][j], H2_k_j); + } + float H2_new = H[p][ch].re[kFftLengthBy2] * H[p][ch].re[kFftLengthBy2] + + H[p][ch].im[kFftLengthBy2] * H[p][ch].im[kFftLengthBy2]; + (*H2)[p][kFftLengthBy2] = std::max((*H2)[p][kFftLengthBy2], H2_new); + } + } +} + +// Adapts the filter partitions. 
+void AdaptPartitions_Avx2(const RenderBuffer& render_buffer, + const FftData& G, + size_t num_partitions, + std::vector>* H) { + rtc::ArrayView> render_buffer_data = + render_buffer.GetFftBuffer(); + const size_t num_render_channels = render_buffer_data[0].size(); + const size_t lim1 = std::min( + render_buffer_data.size() - render_buffer.Position(), num_partitions); + const size_t lim2 = num_partitions; + constexpr size_t kNumEightBinBands = kFftLengthBy2 / 8; + + size_t X_partition = render_buffer.Position(); + size_t limit = lim1; + size_t p = 0; + do { + for (; p < limit; ++p, ++X_partition) { + for (size_t ch = 0; ch < num_render_channels; ++ch) { + FftData& H_p_ch = (*H)[p][ch]; + const FftData& X = render_buffer_data[X_partition][ch]; + + for (size_t k = 0, n = 0; n < kNumEightBinBands; ++n, k += 8) { + const __m256 G_re = _mm256_loadu_ps(&G.re[k]); + const __m256 G_im = _mm256_loadu_ps(&G.im[k]); + const __m256 X_re = _mm256_loadu_ps(&X.re[k]); + const __m256 X_im = _mm256_loadu_ps(&X.im[k]); + const __m256 H_re = _mm256_loadu_ps(&H_p_ch.re[k]); + const __m256 H_im = _mm256_loadu_ps(&H_p_ch.im[k]); + const __m256 a = _mm256_mul_ps(X_re, G_re); + const __m256 b = _mm256_mul_ps(X_im, G_im); + const __m256 c = _mm256_mul_ps(X_re, G_im); + const __m256 d = _mm256_mul_ps(X_im, G_re); + const __m256 e = _mm256_add_ps(a, b); + const __m256 f = _mm256_sub_ps(c, d); + const __m256 g = _mm256_add_ps(H_re, e); + const __m256 h = _mm256_add_ps(H_im, f); + _mm256_storeu_ps(&H_p_ch.re[k], g); + _mm256_storeu_ps(&H_p_ch.im[k], h); + } + } + } + X_partition = 0; + limit = lim2; + } while (p < lim2); + + X_partition = render_buffer.Position(); + limit = lim1; + p = 0; + do { + for (; p < limit; ++p, ++X_partition) { + for (size_t ch = 0; ch < num_render_channels; ++ch) { + FftData& H_p_ch = (*H)[p][ch]; + const FftData& X = render_buffer_data[X_partition][ch]; + + H_p_ch.re[kFftLengthBy2] += X.re[kFftLengthBy2] * G.re[kFftLengthBy2] + + X.im[kFftLengthBy2] * 
G.im[kFftLengthBy2]; + H_p_ch.im[kFftLengthBy2] += X.re[kFftLengthBy2] * G.im[kFftLengthBy2] - + X.im[kFftLengthBy2] * G.re[kFftLengthBy2]; + } + } + + X_partition = 0; + limit = lim2; + } while (p < lim2); +} + +// Produces the filter output (AVX2 variant). +void ApplyFilter_Avx2(const RenderBuffer& render_buffer, + size_t num_partitions, + const std::vector>& H, + FftData* S) { + RTC_DCHECK_GE(H.size(), H.size() - 1); + S->re.fill(0.f); + S->im.fill(0.f); + + rtc::ArrayView> render_buffer_data = + render_buffer.GetFftBuffer(); + const size_t num_render_channels = render_buffer_data[0].size(); + const size_t lim1 = std::min( + render_buffer_data.size() - render_buffer.Position(), num_partitions); + const size_t lim2 = num_partitions; + constexpr size_t kNumEightBinBands = kFftLengthBy2 / 8; + + size_t X_partition = render_buffer.Position(); + size_t p = 0; + size_t limit = lim1; + do { + for (; p < limit; ++p, ++X_partition) { + for (size_t ch = 0; ch < num_render_channels; ++ch) { + const FftData& H_p_ch = H[p][ch]; + const FftData& X = render_buffer_data[X_partition][ch]; + for (size_t k = 0, n = 0; n < kNumEightBinBands; ++n, k += 8) { + const __m256 X_re = _mm256_loadu_ps(&X.re[k]); + const __m256 X_im = _mm256_loadu_ps(&X.im[k]); + const __m256 H_re = _mm256_loadu_ps(&H_p_ch.re[k]); + const __m256 H_im = _mm256_loadu_ps(&H_p_ch.im[k]); + const __m256 S_re = _mm256_loadu_ps(&S->re[k]); + const __m256 S_im = _mm256_loadu_ps(&S->im[k]); + const __m256 a = _mm256_mul_ps(X_re, H_re); + const __m256 b = _mm256_mul_ps(X_im, H_im); + const __m256 c = _mm256_mul_ps(X_re, H_im); + const __m256 d = _mm256_mul_ps(X_im, H_re); + const __m256 e = _mm256_sub_ps(a, b); + const __m256 f = _mm256_add_ps(c, d); + const __m256 g = _mm256_add_ps(S_re, e); + const __m256 h = _mm256_add_ps(S_im, f); + _mm256_storeu_ps(&S->re[k], g); + _mm256_storeu_ps(&S->im[k], h); + } + } + } + limit = lim2; + X_partition = 0; + } while (p < lim2); + + X_partition = render_buffer.Position(); + p 
= 0; + limit = lim1; + do { + for (; p < limit; ++p, ++X_partition) { + for (size_t ch = 0; ch < num_render_channels; ++ch) { + const FftData& H_p_ch = H[p][ch]; + const FftData& X = render_buffer_data[X_partition][ch]; + S->re[kFftLengthBy2] += X.re[kFftLengthBy2] * H_p_ch.re[kFftLengthBy2] - + X.im[kFftLengthBy2] * H_p_ch.im[kFftLengthBy2]; + S->im[kFftLengthBy2] += X.re[kFftLengthBy2] * H_p_ch.im[kFftLengthBy2] + + X.im[kFftLengthBy2] * H_p_ch.re[kFftLengthBy2]; + } + } + limit = lim2; + X_partition = 0; + } while (p < lim2); +} + +} // namespace aec3 +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc index 80378eb3c..dfe41091a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc @@ -88,7 +88,6 @@ void ComputeErl(const Aec3Optimization& optimization, #endif #if defined(WEBRTC_HAS_NEON) case Aec3Optimization::kNeon: - aec3::ErlComputer_NEON(H2, erl); break; #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.h index 108d9f8e4..4ac13b1bc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl.h @@ -36,6 +36,10 @@ void ErlComputer_NEON( void ErlComputer_SSE2( const std::vector>& H2, rtc::ArrayView erl); + +void ErlComputer_AVX2( + const std::vector>& H2, + rtc::ArrayView erl); #endif } // namespace aec3 diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_avx2.cc new 
file mode 100644 index 000000000..5fe7514db --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_avx2.cc @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/aec3/adaptive_fir_filter_erl.h" + +#include + +namespace webrtc { + +namespace aec3 { + +// Computes and stores the echo return loss estimate of the filter, which is the +// sum of the partition frequency responses. +void ErlComputer_AVX2( + const std::vector>& H2, + rtc::ArrayView erl) { + std::fill(erl.begin(), erl.end(), 0.f); + for (auto& H2_j : H2) { + for (size_t k = 0; k < kFftLengthBy2; k += 8) { + const __m256 H2_j_k = _mm256_loadu_ps(&H2_j[k]); + __m256 erl_k = _mm256_loadu_ps(&erl[k]); + erl_k = _mm256_add_ps(erl_k, H2_j_k); + _mm256_storeu_ps(&erl[k], erl_k); + } + erl[kFftLengthBy2] += H2_j[kFftLengthBy2]; + } +} + +} // namespace aec3 +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc index aeb848a57..7bd8d6267 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc @@ -20,7 +20,9 @@ namespace webrtc { Aec3Optimization DetectOptimization() { #if defined(WEBRTC_ARCH_X86_FAMILY) - if (WebRtc_GetCPUInfo(kSSE2) != 0) { + if (GetCPUInfo(kAVX2) != 0) { + return Aec3Optimization::kAvx2; + } else if (GetCPUInfo(kSSE2) != 0) { return Aec3Optimization::kSse2; } #endif diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h index cdeefc704..3bfff967a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h @@ -23,12 +23,12 @@ namespace webrtc { #define ALIGN16_END __attribute__((aligned(16))) #endif -enum class Aec3Optimization { kNone, kSse2, kNeon }; +enum class Aec3Optimization { kNone, kSse2, kAvx2, kNeon }; constexpr int kNumBlocksPerSecond = 250; constexpr int kMetricsReportingIntervalBlocks = 10 * kNumBlocksPerSecond; -constexpr int kMetricsComputationBlocks = 7; +constexpr int kMetricsComputationBlocks = 3; constexpr int kMetricsCollectionBlocks = kMetricsReportingIntervalBlocks - kMetricsComputationBlocks; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.cc index d1d4f7da0..8dfa18336 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.cc @@ -73,7 +73,7 @@ const float kSqrtHanning128[kFftLength] = { bool IsSse2Available() { #if defined(WEBRTC_ARCH_X86_FAMILY) - return WebRtc_GetCPUInfo(kSSE2) != 0; + return GetCPUInfo(kSSE2) != 0; #else return false; #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc index e87f607a3..c7361093f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc @@ -27,13 +27,6 @@ namespace webrtc { namespace { -constexpr size_t kBlocksSinceConvergencedFilterInit = 10000; -constexpr size_t kBlocksSinceConsistentEstimateInit = 10000; - -bool DeactivateTransparentMode() 
{ - return field_trial::IsEnabled("WebRTC-Aec3TransparentModeKillSwitch"); -} - bool DeactivateInitialStateResetAtEchoPathChange() { return field_trial::IsEnabled( "WebRTC-Aec3DeactivateInitialStateResetKillSwitch"); @@ -134,7 +127,6 @@ AecState::AecState(const EchoCanceller3Config& config, new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), config_(config), num_capture_channels_(num_capture_channels), - transparent_mode_activated_(!DeactivateTransparentMode()), deactivate_initial_state_reset_at_echo_path_change_( DeactivateInitialStateResetAtEchoPathChange()), full_reset_at_echo_path_change_(FullResetAtEchoPathChange()), @@ -142,7 +134,7 @@ AecState::AecState(const EchoCanceller3Config& config, SubtractorAnalyzerResetAtEchoPathChange()), initial_state_(config_), delay_state_(config_, num_capture_channels_), - transparent_state_(config_), + transparent_state_(TransparentMode::Create(config_)), filter_quality_state_(config_, num_capture_channels_), erl_estimator_(2 * kNumBlocksPerSecond), erle_estimator_(2 * kNumBlocksPerSecond, config_, num_capture_channels_), @@ -164,7 +156,9 @@ void AecState::HandleEchoPathChange( if (!deactivate_initial_state_reset_at_echo_path_change_) { initial_state_.Reset(); } - transparent_state_.Reset(); + if (transparent_state_) { + transparent_state_->Reset(); + } erle_estimator_.Reset(true); erl_estimator_.Reset(); filter_quality_state_.Reset(); @@ -203,8 +197,10 @@ void AecState::Update( // Analyze the filter outputs and filters. bool any_filter_converged; + bool any_coarse_filter_converged; bool all_filters_diverged; subtractor_output_analyzer_.Update(subtractor_output, &any_filter_converged, + &any_coarse_filter_converged, &all_filters_diverged); bool any_filter_consistent; @@ -277,13 +273,15 @@ void AecState::Update( initial_state_.Update(active_render, SaturatedCapture()); // Detect whether the transparent mode should be activated. 
- transparent_state_.Update(delay_state_.MinDirectPathFilterDelay(), - any_filter_consistent, any_filter_converged, - all_filters_diverged, active_render, - SaturatedCapture()); + if (transparent_state_) { + transparent_state_->Update( + delay_state_.MinDirectPathFilterDelay(), any_filter_consistent, + any_filter_converged, any_coarse_filter_converged, all_filters_diverged, + active_render, SaturatedCapture()); + } // Analyze the quality of the filter. - filter_quality_state_.Update(active_render, TransparentMode(), + filter_quality_state_.Update(active_render, TransparentModeActive(), SaturatedCapture(), external_delay, any_filter_converged); @@ -301,11 +299,12 @@ void AecState::Update( erle_estimator_.Dump(data_dumper_); reverb_model_estimator_.Dump(data_dumper_.get()); + data_dumper_->DumpRaw("aec3_active_render", active_render); data_dumper_->DumpRaw("aec3_erl", Erl()); data_dumper_->DumpRaw("aec3_erl_time_domain", ErlTimeDomain()); data_dumper_->DumpRaw("aec3_erle", Erle()[0]); data_dumper_->DumpRaw("aec3_usable_linear_estimate", UsableLinearEstimate()); - data_dumper_->DumpRaw("aec3_transparent_mode", TransparentMode()); + data_dumper_->DumpRaw("aec3_transparent_mode", TransparentModeActive()); data_dumper_->DumpRaw("aec3_filter_delay", filter_analyzer_.MinFilterDelayBlocks()); @@ -315,6 +314,8 @@ void AecState::Update( data_dumper_->DumpRaw("aec3_capture_saturation", SaturatedCapture()); data_dumper_->DumpRaw("aec3_echo_saturation", SaturatedEcho()); data_dumper_->DumpRaw("aec3_any_filter_converged", any_filter_converged); + data_dumper_->DumpRaw("aec3_any_coarse_filter_converged", + any_coarse_filter_converged); data_dumper_->DumpRaw("aec3_all_filters_diverged", all_filters_diverged); data_dumper_->DumpRaw("aec3_external_delay_avaliable", @@ -353,8 +354,9 @@ void AecState::InitialState::InitialState::Update(bool active_render, AecState::FilterDelay::FilterDelay(const EchoCanceller3Config& config, size_t num_capture_channels) - : 
delay_headroom_samples_(config.delay.delay_headroom_samples), - filter_delays_blocks_(num_capture_channels, 0) {} + : delay_headroom_blocks_(config.delay.delay_headroom_samples / kBlockSize), + filter_delays_blocks_(num_capture_channels, delay_headroom_blocks_), + min_filter_delay_(delay_headroom_blocks_) {} void AecState::FilterDelay::Update( rtc::ArrayView analyzer_filter_delay_estimates_blocks, @@ -372,7 +374,7 @@ void AecState::FilterDelay::Update( const bool delay_estimator_may_not_have_converged = blocks_with_proper_filter_adaptation < 2 * kNumBlocksPerSecond; if (delay_estimator_may_not_have_converged && external_delay_) { - int delay_guess = delay_headroom_samples_ / kBlockSize; + const int delay_guess = delay_headroom_blocks_; std::fill(filter_delays_blocks_.begin(), filter_delays_blocks_.end(), delay_guess); } else { @@ -387,92 +389,6 @@ void AecState::FilterDelay::Update( filter_delays_blocks_.end()); } -AecState::TransparentMode::TransparentMode(const EchoCanceller3Config& config) - : bounded_erl_(config.ep_strength.bounded_erl), - linear_and_stable_echo_path_( - config.echo_removal_control.linear_and_stable_echo_path), - active_blocks_since_sane_filter_(kBlocksSinceConsistentEstimateInit), - non_converged_sequence_size_(kBlocksSinceConvergencedFilterInit) {} - -void AecState::TransparentMode::Reset() { - non_converged_sequence_size_ = kBlocksSinceConvergencedFilterInit; - diverged_sequence_size_ = 0; - strong_not_saturated_render_blocks_ = 0; - if (linear_and_stable_echo_path_) { - recent_convergence_during_activity_ = false; - } -} - -void AecState::TransparentMode::Update(int filter_delay_blocks, - bool any_filter_consistent, - bool any_filter_converged, - bool all_filters_diverged, - bool active_render, - bool saturated_capture) { - ++capture_block_counter_; - strong_not_saturated_render_blocks_ += - active_render && !saturated_capture ? 
1 : 0; - - if (any_filter_consistent && filter_delay_blocks < 5) { - sane_filter_observed_ = true; - active_blocks_since_sane_filter_ = 0; - } else if (active_render) { - ++active_blocks_since_sane_filter_; - } - - bool sane_filter_recently_seen; - if (!sane_filter_observed_) { - sane_filter_recently_seen = - capture_block_counter_ <= 5 * kNumBlocksPerSecond; - } else { - sane_filter_recently_seen = - active_blocks_since_sane_filter_ <= 30 * kNumBlocksPerSecond; - } - - if (any_filter_converged) { - recent_convergence_during_activity_ = true; - active_non_converged_sequence_size_ = 0; - non_converged_sequence_size_ = 0; - ++num_converged_blocks_; - } else { - if (++non_converged_sequence_size_ > 20 * kNumBlocksPerSecond) { - num_converged_blocks_ = 0; - } - - if (active_render && - ++active_non_converged_sequence_size_ > 60 * kNumBlocksPerSecond) { - recent_convergence_during_activity_ = false; - } - } - - if (!all_filters_diverged) { - diverged_sequence_size_ = 0; - } else if (++diverged_sequence_size_ >= 60) { - // TODO(peah): Change these lines to ensure proper triggering of usable - // filter. 
- non_converged_sequence_size_ = kBlocksSinceConvergencedFilterInit; - } - - if (active_non_converged_sequence_size_ > 60 * kNumBlocksPerSecond) { - finite_erl_recently_detected_ = false; - } - if (num_converged_blocks_ > 50) { - finite_erl_recently_detected_ = true; - } - - if (bounded_erl_) { - transparency_activated_ = false; - } else if (finite_erl_recently_detected_) { - transparency_activated_ = false; - } else if (sane_filter_recently_seen && recent_convergence_during_activity_) { - transparency_activated_ = false; - } else { - const bool filter_should_have_converged = - strong_not_saturated_render_blocks_ > 6 * kNumBlocksPerSecond; - transparency_activated_ = filter_should_have_converged; - } -} - AecState::FilteringQualityAnalyzer::FilteringQualityAnalyzer( const EchoCanceller3Config& config, size_t num_capture_channels) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h index e79e64bec..5b40e9513 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h @@ -31,6 +31,7 @@ #include "modules/audio_processing/aec3/reverb_model_estimator.h" #include "modules/audio_processing/aec3/subtractor_output.h" #include "modules/audio_processing/aec3/subtractor_output_analyzer.h" +#include "modules/audio_processing/aec3/transparent_mode.h" namespace webrtc { @@ -107,8 +108,8 @@ class AecState { } // Returns whether the transparent mode is active - bool TransparentMode() const { - return transparent_mode_activated_ && transparent_state_.Active(); + bool TransparentModeActive() const { + return transparent_state_ && transparent_state_->Active(); } // Takes appropriate action at an echo path change. 
@@ -152,7 +153,6 @@ class AecState { std::unique_ptr data_dumper_; const EchoCanceller3Config config_; const size_t num_capture_channels_; - const bool transparent_mode_activated_; const bool deactivate_initial_state_reset_at_echo_path_change_; const bool full_reset_at_echo_path_change_; const bool subtractor_analyzer_reset_at_echo_path_change_; @@ -211,48 +211,15 @@ class AecState { size_t blocks_with_proper_filter_adaptation); private: - const int delay_headroom_samples_; + const int delay_headroom_blocks_; bool external_delay_reported_ = false; std::vector filter_delays_blocks_; - int min_filter_delay_ = 0; + int min_filter_delay_; absl::optional external_delay_; } delay_state_; - // Class for detecting and toggling the transparent mode which causes the - // suppressor to apply no suppression. - class TransparentMode { - public: - explicit TransparentMode(const EchoCanceller3Config& config); - - // Returns whether the transparent mode should be active. - bool Active() const { return transparency_activated_; } - - // Resets the state of the detector. - void Reset(); - - // Updates the detection deciscion based on new data. - void Update(int filter_delay_blocks, - bool any_filter_consistent, - bool any_filter_converged, - bool all_filters_diverged, - bool active_render, - bool saturated_capture); - - private: - const bool bounded_erl_; - const bool linear_and_stable_echo_path_; - size_t capture_block_counter_ = 0; - bool transparency_activated_ = false; - size_t active_blocks_since_sane_filter_; - bool sane_filter_observed_ = false; - bool finite_erl_recently_detected_ = false; - size_t non_converged_sequence_size_; - size_t diverged_sequence_size_ = 0; - size_t active_non_converged_sequence_size_ = 0; - size_t num_converged_blocks_ = 0; - bool recent_convergence_during_activity_ = false; - size_t strong_not_saturated_render_blocks_ = 0; - } transparent_state_; + // Classifier for toggling transparent mode when there is no echo. 
+ std::unique_ptr transparent_state_; // Class for analyzing how well the linear filter is, and can be expected to, // perform on the current signals. The purpose of this is for using to @@ -316,7 +283,6 @@ class AecState { size_t blocks_with_active_render_ = 0; bool capture_signal_saturation_ = false; FilterAnalyzer filter_analyzer_; - absl::optional external_delay_; EchoAudibility echo_audibility_; ReverbModelEstimator reverb_model_estimator_; ReverbModel avg_render_reverb_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc index a0e1fc22a..98da232bb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc @@ -213,6 +213,12 @@ void CopyBufferIntoFrame(const AudioBuffer& buffer, EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { EchoCanceller3Config adjusted_cfg = config; + if (field_trial::IsEnabled("WebRTC-Aec3AntiHowlingMinimizationKillSwitch")) { + adjusted_cfg.suppressor.high_bands_suppression + .anti_howling_activation_threshold = 25.f; + adjusted_cfg.suppressor.high_bands_suppression.anti_howling_gain = 0.01f; + } + if (field_trial::IsEnabled("WebRTC-Aec3UseShortConfigChangeDuration")) { adjusted_cfg.filter.config_change_duration_blocks = 10; } @@ -362,6 +368,10 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { adjusted_cfg.suppressor.nearend_tuning.max_dec_factor_lf = .2f; } + if (field_trial::IsEnabled("WebRTC-Aec3EnforceConservativeHfSuppression")) { + adjusted_cfg.suppressor.conservative_hf_suppression = true; + } + if (field_trial::IsEnabled("WebRTC-Aec3EnforceStationarityProperties")) { adjusted_cfg.echo_audibility.use_stationarity_properties = true; } @@ -378,6 +388,10 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { 
adjusted_cfg.render_levels.active_render_limit = 30.f; } + if (field_trial::IsEnabled("WebRTC-Aec3NonlinearModeReverbKillSwitch")) { + adjusted_cfg.echo_model.model_reverb_in_nonlinear_mode = false; + } + // Field-trial based override for the whole suppressor tuning. const std::string suppressor_tuning_override_trial_name = field_trial::FindFullName("WebRTC-Aec3SuppressorTuningOverride"); @@ -564,6 +578,11 @@ class EchoCanceller3::RenderWriter { Aec3RenderQueueItemVerifier>* render_transfer_queue, size_t num_bands, size_t num_channels); + + RenderWriter() = delete; + RenderWriter(const RenderWriter&) = delete; + RenderWriter& operator=(const RenderWriter&) = delete; + ~RenderWriter(); void Insert(const AudioBuffer& input); @@ -575,7 +594,6 @@ class EchoCanceller3::RenderWriter { std::vector>> render_queue_input_frame_; SwapQueue>>, Aec3RenderQueueItemVerifier>* render_transfer_queue_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RenderWriter); }; EchoCanceller3::RenderWriter::RenderWriter( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_variability.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_variability.h index adf0d7a4a..78e4f64b2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_variability.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_variability.h @@ -16,9 +16,7 @@ namespace webrtc { struct EchoPathVariability { enum class DelayAdjustment { kNone, - kBufferReadjustment, kBufferFlush, - kDelayReset, kNewDetectedDelay }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc index a3cd22f21..df539bfad 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc @@ -414,12 +414,16 @@ void EchoRemoverImpl::ProcessCapture( const 
auto& echo_spectrum = aec_state_.UsableLinearEstimate() ? S2_linear : R2; + // Determine if the suppressor should assume clock drift. + const bool clock_drift = config_.echo_removal_control.has_clock_drift || + echo_path_variability.clock_drift; + // Compute preferred gains. float high_bands_gain; std::array G; suppression_gain_.GetGain(nearend_spectrum, echo_spectrum, R2, cng_.NoiseSpectrum(), render_signal_analyzer_, - aec_state_, x, &high_bands_gain, &G); + aec_state_, x, clock_drift, &high_bands_gain, &G); suppression_filter_.ApplyGain(comfort_noise, high_band_comfort_noise, G, high_bands_gain, Y_fft, y); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.cc index 4502f31cc..1ceb329d3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.cc @@ -23,12 +23,6 @@ namespace webrtc { -namespace { - -constexpr float kOneByMetricsCollectionBlocks = 1.f / kMetricsCollectionBlocks; - -} // namespace - EchoRemoverMetrics::DbMetric::DbMetric() : DbMetric(0.f, 0.f, 0.f) {} EchoRemoverMetrics::DbMetric::DbMetric(float sum_value, float floor_value, @@ -52,11 +46,8 @@ EchoRemoverMetrics::EchoRemoverMetrics() { } void EchoRemoverMetrics::ResetMetrics() { - erl_.fill(DbMetric(0.f, 10000.f, 0.000f)); erl_time_domain_ = DbMetric(0.f, 10000.f, 0.000f); - erle_.fill(DbMetric(0.f, 0.f, 1000.f)); erle_time_domain_ = DbMetric(0.f, 0.f, 1000.f); - active_render_count_ = 0; saturated_capture_ = false; } @@ -66,104 +57,24 @@ void EchoRemoverMetrics::Update( const std::array& suppressor_gain) { metrics_reported_ = false; if (++block_counter_ <= kMetricsCollectionBlocks) { - aec3::UpdateDbMetric(aec_state.Erl(), &erl_); erl_time_domain_.UpdateInstant(aec_state.ErlTimeDomain()); - aec3::UpdateDbMetric(aec_state.Erle()[0], &erle_); 
erle_time_domain_.UpdateInstant(aec_state.FullBandErleLog2()); - active_render_count_ += (aec_state.ActiveRender() ? 1 : 0); saturated_capture_ = saturated_capture_ || aec_state.SaturatedCapture(); } else { // Report the metrics over several frames in order to lower the impact of // the logarithms involved on the computational complexity. - constexpr int kMetricsCollectionBlocksBy2 = kMetricsCollectionBlocks / 2; switch (block_counter_) { case kMetricsCollectionBlocks + 1: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErleBand0.Average", - aec3::TransformDbMetricForReporting(true, 0.f, 19.f, 0.f, - kOneByMetricsCollectionBlocks, - erle_[0].sum_value), - 0, 19, 20); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErleBand0.Max", - aec3::TransformDbMetricForReporting(true, 0.f, 19.f, 0.f, 1.f, - erle_[0].ceil_value), - 0, 19, 20); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErleBand0.Min", - aec3::TransformDbMetricForReporting(true, 0.f, 19.f, 0.f, 1.f, - erle_[0].floor_value), - 0, 19, 20); - break; - case kMetricsCollectionBlocks + 2: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErleBand1.Average", - aec3::TransformDbMetricForReporting(true, 0.f, 19.f, 0.f, - kOneByMetricsCollectionBlocks, - erle_[1].sum_value), - 0, 19, 20); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErleBand1.Max", - aec3::TransformDbMetricForReporting(true, 0.f, 19.f, 0.f, 1.f, - erle_[1].ceil_value), - 0, 19, 20); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErleBand1.Min", - aec3::TransformDbMetricForReporting(true, 0.f, 19.f, 0.f, 1.f, - erle_[1].floor_value), - 0, 19, 20); - break; - case kMetricsCollectionBlocks + 3: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErlBand0.Average", - aec3::TransformDbMetricForReporting(true, 0.f, 59.f, 30.f, - kOneByMetricsCollectionBlocks, - erl_[0].sum_value), - 0, 59, 30); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErlBand0.Max", 
- aec3::TransformDbMetricForReporting(true, 0.f, 59.f, 30.f, 1.f, - erl_[0].ceil_value), - 0, 59, 30); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErlBand0.Min", - aec3::TransformDbMetricForReporting(true, 0.f, 59.f, 30.f, 1.f, - erl_[0].floor_value), - 0, 59, 30); - break; - case kMetricsCollectionBlocks + 4: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErlBand1.Average", - aec3::TransformDbMetricForReporting(true, 0.f, 59.f, 30.f, - kOneByMetricsCollectionBlocks, - erl_[1].sum_value), - 0, 59, 30); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErlBand1.Max", - aec3::TransformDbMetricForReporting(true, 0.f, 59.f, 30.f, 1.f, - erl_[1].ceil_value), - 0, 59, 30); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.EchoCanceller.ErlBand1.Min", - aec3::TransformDbMetricForReporting(true, 0.f, 59.f, 30.f, 1.f, - erl_[1].floor_value), - 0, 59, 30); - break; - case kMetricsCollectionBlocks + 5: RTC_HISTOGRAM_BOOLEAN( "WebRTC.Audio.EchoCanceller.UsableLinearEstimate", static_cast(aec_state.UsableLinearEstimate() ? 1 : 0)); - RTC_HISTOGRAM_BOOLEAN( - "WebRTC.Audio.EchoCanceller.ActiveRender", - static_cast( - active_render_count_ > kMetricsCollectionBlocksBy2 ? 1 : 0)); RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.EchoCanceller.FilterDelay", aec_state.MinDirectPathFilterDelay(), 0, 30, 31); RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.EchoCanceller.CaptureSaturation", static_cast(saturated_capture_ ? 
1 : 0)); break; - case kMetricsCollectionBlocks + 6: + case kMetricsCollectionBlocks + 2: RTC_HISTOGRAM_COUNTS_LINEAR( "WebRTC.Audio.EchoCanceller.Erl.Value", aec3::TransformDbMetricForReporting(true, 0.f, 59.f, 30.f, 1.f, @@ -180,7 +91,7 @@ void EchoRemoverMetrics::Update( erl_time_domain_.floor_value), 0, 59, 30); break; - case kMetricsCollectionBlocks + 7: + case kMetricsCollectionBlocks + 3: RTC_HISTOGRAM_COUNTS_LINEAR( "WebRTC.Audio.EchoCanceller.Erle.Value", aec3::TransformDbMetricForReporting(false, 0.f, 19.f, 0.f, 1.f, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h index 77fd8cd7d..c3d8e20da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h @@ -48,11 +48,8 @@ class EchoRemoverMetrics { void ResetMetrics(); int block_counter_ = 0; - std::array erl_; DbMetric erl_time_domain_; - std::array erle_; DbMetric erle_time_domain_; - int active_render_count_ = 0; bool saturated_capture_ = false; bool metrics_reported_ = false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fft_data.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fft_data.h index 5e5adb62d..abef1fbd1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fft_data.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fft_data.h @@ -40,6 +40,9 @@ struct FftData { im.fill(0.f); } + // Computes the power spectrum of the data. + void SpectrumAVX2(rtc::ArrayView power_spectrum) const; + // Computes the power spectrum of the data. 
void Spectrum(Aec3Optimization optimization, rtc::ArrayView power_spectrum) const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fft_data_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fft_data_avx2.cc new file mode 100644 index 000000000..1fe4bd69c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fft_data_avx2.cc @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/aec3/fft_data.h" + +#include + +#include "api/array_view.h" + +namespace webrtc { + +// Computes the power spectrum of the data. +void FftData::SpectrumAVX2(rtc::ArrayView power_spectrum) const { + RTC_DCHECK_EQ(kFftLengthBy2Plus1, power_spectrum.size()); + for (size_t k = 0; k < kFftLengthBy2; k += 8) { + __m256 r = _mm256_loadu_ps(&re[k]); + __m256 i = _mm256_loadu_ps(&im[k]); + __m256 ii = _mm256_mul_ps(i, i); + ii = _mm256_fmadd_ps(r, r, ii); + _mm256_storeu_ps(&power_spectrum[k], ii); + } + power_spectrum[kFftLengthBy2] = re[kFftLengthBy2] * re[kFftLengthBy2] + + im[kFftLengthBy2] * im[kFftLengthBy2]; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc index 696a57c18..be954d3a1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc @@ -69,9 +69,7 @@ void FilterAnalyzer::Reset() { blocks_since_reset_ = 0; ResetRegion(); for (auto& state : 
filter_analysis_states_) { - state.peak_index = 0; - state.gain = default_gain_; - state.consistent_filter_detector.Reset(); + state.Reset(default_gain_); } std::fill(filter_delays_blocks_.begin(), filter_delays_blocks_.end(), 0); } @@ -204,7 +202,9 @@ FilterAnalyzer::ConsistentFilterDetector::ConsistentFilterDetector( const EchoCanceller3Config& config) : active_render_threshold_(config.render_levels.active_render_limit * config.render_levels.active_render_limit * - kFftLengthBy2) {} + kFftLengthBy2) { + Reset(); +} void FilterAnalyzer::ConsistentFilterDetector::Reset() { significant_peak_ = false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h index 0be2a7bc3..b0b707011 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h @@ -112,7 +112,16 @@ class FilterAnalyzer { struct FilterAnalysisState { explicit FilterAnalysisState(const EchoCanceller3Config& config) : filter_length_blocks(config.filter.refined_initial.length_blocks), - consistent_filter_detector(config) {} + consistent_filter_detector(config) { + Reset(config.ep_strength.default_gain); + } + + void Reset(float default_gain) { + peak_index = 0; + gain = default_gain; + consistent_filter_detector.Reset(); + } + float gain; size_t peak_index; int filter_length_blocks; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h index df9245322..fa44eb27f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h @@ -17,7 +17,6 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" -#include 
"rtc_base/constructor_magic.h" #include "rtc_base/system/arch.h" namespace webrtc { @@ -53,6 +52,16 @@ void MatchedFilterCore_SSE2(size_t x_start_index, bool* filters_updated, float* error_sum); +// Filter core for the matched filter that is optimized for AVX2. +void MatchedFilterCore_AVX2(size_t x_start_index, + float x2_sum_threshold, + float smoothing, + rtc::ArrayView x, + rtc::ArrayView y, + rtc::ArrayView h, + bool* filters_updated, + float* error_sum); + #endif // Filter core for the matched filter. @@ -94,6 +103,10 @@ class MatchedFilter { float smoothing, float matching_filter_threshold); + MatchedFilter() = delete; + MatchedFilter(const MatchedFilter&) = delete; + MatchedFilter& operator=(const MatchedFilter&) = delete; + ~MatchedFilter(); // Updates the correlation with the values in the capture buffer. @@ -129,8 +142,6 @@ class MatchedFilter { const float excitation_limit_; const float smoothing_; const float matching_filter_threshold_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MatchedFilter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc new file mode 100644 index 000000000..ed32102aa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc @@ -0,0 +1,132 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/aec3/matched_filter.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace aec3 { + +void MatchedFilterCore_AVX2(size_t x_start_index, + float x2_sum_threshold, + float smoothing, + rtc::ArrayView x, + rtc::ArrayView y, + rtc::ArrayView h, + bool* filters_updated, + float* error_sum) { + const int h_size = static_cast(h.size()); + const int x_size = static_cast(x.size()); + RTC_DCHECK_EQ(0, h_size % 8); + + // Process for all samples in the sub-block. + for (size_t i = 0; i < y.size(); ++i) { + // Apply the matched filter as filter * x, and compute x * x. + + RTC_DCHECK_GT(x_size, x_start_index); + const float* x_p = &x[x_start_index]; + const float* h_p = &h[0]; + + // Initialize values for the accumulation. + __m256 s_256 = _mm256_set1_ps(0); + __m256 x2_sum_256 = _mm256_set1_ps(0); + float x2_sum = 0.f; + float s = 0; + + // Compute loop chunk sizes until, and after, the wraparound of the circular + // buffer for x. + const int chunk1 = + std::min(h_size, static_cast(x_size - x_start_index)); + + // Perform the loop in two chunks. + const int chunk2 = h_size - chunk1; + for (int limit : {chunk1, chunk2}) { + // Perform 256 bit vector operations. + const int limit_by_8 = limit >> 3; + for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { + // Load the data into 256 bit vectors. + __m256 x_k = _mm256_loadu_ps(x_p); + __m256 h_k = _mm256_loadu_ps(h_p); + // Compute and accumulate x * x and h * x. + x2_sum_256 = _mm256_fmadd_ps(x_k, x_k, x2_sum_256); + s_256 = _mm256_fmadd_ps(h_k, x_k, s_256); + } + + // Perform non-vector operations for any remaining items. + for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p, ++x_p) { + const float x_k = *x_p; + x2_sum += x_k * x_k; + s += *h_p * x_k; + } + + x_p = &x[0]; + } + + // Sum components together. 
+ __m128 x2_sum_128 = _mm_add_ps(_mm256_extractf128_ps(x2_sum_256, 0), + _mm256_extractf128_ps(x2_sum_256, 1)); + __m128 s_128 = _mm_add_ps(_mm256_extractf128_ps(s_256, 0), + _mm256_extractf128_ps(s_256, 1)); + // Combine the accumulated vector and scalar values. + float* v = reinterpret_cast(&x2_sum_128); + x2_sum += v[0] + v[1] + v[2] + v[3]; + v = reinterpret_cast(&s_128); + s += v[0] + v[1] + v[2] + v[3]; + + // Compute the matched filter error. + float e = y[i] - s; + const bool saturation = y[i] >= 32000.f || y[i] <= -32000.f; + (*error_sum) += e * e; + + // Update the matched filter estimate in an NLMS manner. + if (x2_sum > x2_sum_threshold && !saturation) { + RTC_DCHECK_LT(0.f, x2_sum); + const float alpha = smoothing * e / x2_sum; + const __m256 alpha_256 = _mm256_set1_ps(alpha); + + // filter = filter + smoothing * (y - filter * x) * x / x * x. + float* h_p = &h[0]; + x_p = &x[x_start_index]; + + // Perform the loop in two chunks. + for (int limit : {chunk1, chunk2}) { + // Perform 256 bit vector operations. + const int limit_by_8 = limit >> 3; + for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { + // Load the data into 256 bit vectors. + __m256 h_k = _mm256_loadu_ps(h_p); + __m256 x_k = _mm256_loadu_ps(x_p); + // Compute h = h + alpha * x. + h_k = _mm256_fmadd_ps(x_k, alpha_256, h_k); + + // Store the result. + _mm256_storeu_ps(h_p, h_k); + } + + // Perform non-vector operations for any remaining items. + for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p, ++x_p) { + *h_p += alpha * *x_p; + } + + x_p = &x[0]; + } + + *filters_updated = true; + } + + x_start_index = x_start_index > 0 ? 
x_start_index - 1 : x_size - 1; + } +} + +} // namespace aec3 +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h index 0cc7789d1..d48011e47 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h @@ -17,7 +17,6 @@ #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/matched_filter.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -31,6 +30,12 @@ class MatchedFilterLagAggregator { ApmDataDumper* data_dumper, size_t max_filter_lag, const EchoCanceller3Config::Delay::DelaySelectionThresholds& thresholds); + + MatchedFilterLagAggregator() = delete; + MatchedFilterLagAggregator(const MatchedFilterLagAggregator&) = delete; + MatchedFilterLagAggregator& operator=(const MatchedFilterLagAggregator&) = + delete; + ~MatchedFilterLagAggregator(); // Resets the aggregator. 
@@ -47,8 +52,6 @@ class MatchedFilterLagAggregator { int histogram_data_index_ = 0; bool significant_candidate_found_ = false; const EchoCanceller3Config::Delay::DelaySelectionThresholds thresholds_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MatchedFilterLagAggregator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h index 3422df35e..b8be6f517 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h @@ -23,7 +23,6 @@ #include "modules/audio_processing/aec3/fft_data.h" #include "modules/audio_processing/aec3/spectrum_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,6 +32,11 @@ class RenderBuffer { RenderBuffer(BlockBuffer* block_buffer, SpectrumBuffer* spectrum_buffer, FftBuffer* fft_buffer); + + RenderBuffer() = delete; + RenderBuffer(const RenderBuffer&) = delete; + RenderBuffer& operator=(const RenderBuffer&) = delete; + ~RenderBuffer(); // Get a block. 
@@ -105,7 +109,6 @@ class RenderBuffer { const SpectrumBuffer* const spectrum_buffer_; const FftBuffer* const fft_buffer_; bool render_activity_ = false; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RenderBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc index f5030e17b..7bebc6fd4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc @@ -385,9 +385,11 @@ void RenderDelayBufferImpl::ApplyTotalDelay(int delay) { void RenderDelayBufferImpl::AlignFromExternalDelay() { RTC_DCHECK(config_.delay.use_external_delay_estimator); if (external_audio_buffer_delay_) { - int64_t delay = render_call_counter_ - capture_call_counter_ + - *external_audio_buffer_delay_; - ApplyTotalDelay(delay); + const int64_t delay = render_call_counter_ - capture_call_counter_ + + *external_audio_buffer_delay_; + const int64_t delay_with_headroom = + delay - config_.delay.delay_headroom_samples / kBlockSize; + ApplyTotalDelay(delay_with_headroom); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc index c42d22bdc..3677085d8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc @@ -25,7 +25,6 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,6 +35,12 @@ class RenderDelayControllerImpl final : public RenderDelayController { RenderDelayControllerImpl(const EchoCanceller3Config& config, int 
sample_rate_hz, size_t num_capture_channels); + + RenderDelayControllerImpl() = delete; + RenderDelayControllerImpl(const RenderDelayControllerImpl&) = delete; + RenderDelayControllerImpl& operator=(const RenderDelayControllerImpl&) = + delete; + ~RenderDelayControllerImpl() override; void Reset(bool reset_delay_confidence) override; void LogRenderCall() override; @@ -57,7 +62,6 @@ class RenderDelayControllerImpl final : public RenderDelayController { size_t capture_call_counter_ = 0; int delay_change_counter_ = 0; DelayEstimate::Quality last_delay_estimate_quality_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RenderDelayControllerImpl); }; DelayEstimate ComputeBufferDelay( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc index 5d31c6626..e352cf555 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc @@ -23,57 +23,23 @@ namespace webrtc { namespace { -bool UseLowEarlyReflectionsTransparentModeGain() { - return field_trial::IsEnabled( - "WebRTC-Aec3UseLowEarlyReflectionsTransparentModeGain"); -} - -bool UseLowLateReflectionsTransparentModeGain() { - return field_trial::IsEnabled( - "WebRTC-Aec3UseLowLateReflectionsTransparentModeGain"); -} - -bool UseLowEarlyReflectionsDefaultGain() { - return field_trial::IsEnabled("WebRTC-Aec3UseLowEarlyReflectionsDefaultGain"); -} - -bool UseLowLateReflectionsDefaultGain() { - return field_trial::IsEnabled("WebRTC-Aec3UseLowLateReflectionsDefaultGain"); -} - -bool ModelReverbInNonlinearMode() { - return !field_trial::IsEnabled("WebRTC-Aec3rNonlinearModeReverbKillSwitch"); -} - constexpr float kDefaultTransparentModeGain = 0.01f; -float GetEarlyReflectionsTransparentModeGain() { - if (UseLowEarlyReflectionsTransparentModeGain()) { - return 0.001f; - } - return 
kDefaultTransparentModeGain; -} - -float GetLateReflectionsTransparentModeGain() { - if (UseLowLateReflectionsTransparentModeGain()) { - return 0.001f; - } - +float GetTransparentModeGain() { return kDefaultTransparentModeGain; } float GetEarlyReflectionsDefaultModeGain( const EchoCanceller3Config::EpStrength& config) { - if (UseLowEarlyReflectionsDefaultGain()) { + if (field_trial::IsEnabled("WebRTC-Aec3UseLowEarlyReflectionsDefaultGain")) { return 0.1f; } - return config.default_gain; } float GetLateReflectionsDefaultModeGain( const EchoCanceller3Config::EpStrength& config) { - if (UseLowLateReflectionsDefaultGain()) { + if (field_trial::IsEnabled("WebRTC-Aec3UseLowLateReflectionsDefaultGain")) { return 0.1f; } return config.default_gain; @@ -201,15 +167,12 @@ ResidualEchoEstimator::ResidualEchoEstimator(const EchoCanceller3Config& config, size_t num_render_channels) : config_(config), num_render_channels_(num_render_channels), - early_reflections_transparent_mode_gain_( - GetEarlyReflectionsTransparentModeGain()), - late_reflections_transparent_mode_gain_( - GetLateReflectionsTransparentModeGain()), + early_reflections_transparent_mode_gain_(GetTransparentModeGain()), + late_reflections_transparent_mode_gain_(GetTransparentModeGain()), early_reflections_general_gain_( GetEarlyReflectionsDefaultModeGain(config_.ep_strength)), late_reflections_general_gain_( - GetLateReflectionsDefaultModeGain(config_.ep_strength)), - model_reverb_in_nonlinear_mode_(ModelReverbInNonlinearMode()) { + GetLateReflectionsDefaultModeGain(config_.ep_strength)) { Reset(); } @@ -277,7 +240,8 @@ void ResidualEchoEstimator::Estimate( NonLinearEstimate(echo_path_gain, X2, R2); } - if (model_reverb_in_nonlinear_mode_ && !aec_state.TransparentMode()) { + if (config_.echo_model.model_reverb_in_nonlinear_mode && + !aec_state.TransparentModeActive()) { AddReverb(ReverbType::kNonLinear, aec_state, render_buffer, R2); } } @@ -395,7 +359,7 @@ float ResidualEchoEstimator::GetEchoPathGain( const 
AecState& aec_state, bool gain_for_early_reflections) const { float gain_amplitude; - if (aec_state.TransparentMode()) { + if (aec_state.TransparentModeActive()) { gain_amplitude = gain_for_early_reflections ? early_reflections_transparent_mode_gain_ : late_reflections_transparent_mode_gain_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h index 081cc063f..8fe7a84f0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h @@ -68,7 +68,6 @@ class ResidualEchoEstimator { const float late_reflections_transparent_mode_gain_; const float early_reflections_general_gain_; const float late_reflections_general_gain_; - const bool model_reverb_in_nonlinear_mode_; std::array X2_noise_floor_; std::array X2_noise_floor_counter_; ReverbModel echo_reverb_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.cc index 8b2218530..baf060016 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.cc @@ -22,12 +22,14 @@ SubtractorOutputAnalyzer::SubtractorOutputAnalyzer(size_t num_capture_channels) void SubtractorOutputAnalyzer::Update( rtc::ArrayView subtractor_output, bool* any_filter_converged, + bool* any_coarse_filter_converged, bool* all_filters_diverged) { RTC_DCHECK(any_filter_converged); RTC_DCHECK(all_filters_diverged); RTC_DCHECK_EQ(subtractor_output.size(), filters_converged_.size()); *any_filter_converged = false; + *any_coarse_filter_converged = false; *all_filters_diverged = true; for (size_t ch = 0; ch < 
subtractor_output.size(); ++ch) { @@ -36,16 +38,21 @@ void SubtractorOutputAnalyzer::Update( const float e2_coarse = subtractor_output[ch].e2_coarse; constexpr float kConvergenceThreshold = 50 * 50 * kBlockSize; + constexpr float kConvergenceThresholdLowLevel = 20 * 20 * kBlockSize; bool refined_filter_converged = e2_refined < 0.5f * y2 && y2 > kConvergenceThreshold; - bool coarse_filter_converged = + bool coarse_filter_converged_strict = e2_coarse < 0.05f * y2 && y2 > kConvergenceThreshold; + bool coarse_filter_converged_relaxed = + e2_coarse < 0.2f * y2 && y2 > kConvergenceThresholdLowLevel; float min_e2 = std::min(e2_refined, e2_coarse); bool filter_diverged = min_e2 > 1.5f * y2 && y2 > 30.f * 30.f * kBlockSize; filters_converged_[ch] = - refined_filter_converged || coarse_filter_converged; + refined_filter_converged || coarse_filter_converged_strict; *any_filter_converged = *any_filter_converged || filters_converged_[ch]; + *any_coarse_filter_converged = + *any_coarse_filter_converged || coarse_filter_converged_relaxed; *all_filters_diverged = *all_filters_diverged && filter_diverged; } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.h index 5328ae7f1..32707dbb1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor_output_analyzer.h @@ -26,6 +26,7 @@ class SubtractorOutputAnalyzer { // Analyses the subtractor output. 
void Update(rtc::ArrayView subtractor_output, bool* any_filter_converged, + bool* any_coarse_filter_converged, bool* all_filters_diverged); const std::vector& ConvergedFilters() const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc index c1f12b774..5b01c5290 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc @@ -27,39 +27,40 @@ namespace webrtc { namespace { -void PostprocessGains(std::array* gain) { - // TODO(gustaf): Investigate if this can be relaxed to achieve higher - // transparency above 2 kHz. - +void LimitLowFrequencyGains(std::array* gain) { // Limit the low frequency gains to avoid the impact of the high-pass filter // on the lower-frequency gain influencing the overall achieved gain. (*gain)[0] = (*gain)[1] = std::min((*gain)[1], (*gain)[2]); +} - // Limit the high frequency gains to avoid the impact of the anti-aliasing - // filter on the upper-frequency gains influencing the overall achieved - // gain. TODO(peah): Update this when new anti-aliasing filters are - // implemented. - constexpr size_t kAntiAliasingImpactLimit = (64 * 2000) / 8000; - const float min_upper_gain = (*gain)[kAntiAliasingImpactLimit]; +void LimitHighFrequencyGains(bool conservative_hf_suppression, + std::array* gain) { + // Limit the high frequency gains to avoid echo leakage due to an imperfect + // filter. 
+ constexpr size_t kFirstBandToLimit = (64 * 2000) / 8000; + const float min_upper_gain = (*gain)[kFirstBandToLimit]; std::for_each( - gain->begin() + kAntiAliasingImpactLimit, gain->end() - 1, + gain->begin() + kFirstBandToLimit + 1, gain->end(), [min_upper_gain](float& a) { a = std::min(a, min_upper_gain); }); (*gain)[kFftLengthBy2] = (*gain)[kFftLengthBy2Minus1]; - // Limits the gain in the frequencies for which the adaptive filter has not - // converged. - // TODO(peah): Make adaptive to take the actual filter error into account. - constexpr size_t kUpperAccurateBandPlus1 = 29; + if (conservative_hf_suppression) { + // Limits the gain in the frequencies for which the adaptive filter has not + // converged. + // TODO(peah): Make adaptive to take the actual filter error into account. + constexpr size_t kUpperAccurateBandPlus1 = 29; - constexpr float oneByBandsInSum = - 1 / static_cast(kUpperAccurateBandPlus1 - 20); - const float hf_gain_bound = - std::accumulate(gain->begin() + 20, - gain->begin() + kUpperAccurateBandPlus1, 0.f) * - oneByBandsInSum; + constexpr float oneByBandsInSum = + 1 / static_cast(kUpperAccurateBandPlus1 - 20); + const float hf_gain_bound = + std::accumulate(gain->begin() + 20, + gain->begin() + kUpperAccurateBandPlus1, 0.f) * + oneByBandsInSum; - std::for_each(gain->begin() + kUpperAccurateBandPlus1, gain->end(), - [hf_gain_bound](float& a) { a = std::min(a, hf_gain_bound); }); + std::for_each( + gain->begin() + kUpperAccurateBandPlus1, gain->end(), + [hf_gain_bound](float& a) { a = std::min(a, hf_gain_bound); }); + } } // Scales the echo according to assessed audibility at the other end. 
@@ -265,6 +266,7 @@ void SuppressionGain::LowerBandGain( suppressor_input, rtc::ArrayView> residual_echo, rtc::ArrayView> comfort_noise, + bool clock_drift, std::array* gain) { gain->fill(1.f); const bool saturated_echo = aec_state.SaturatedEcho(); @@ -298,8 +300,14 @@ void SuppressionGain::LowerBandGain( last_echo_[ch].begin()); } - // Limit high-frequency gains. - PostprocessGains(gain); + LimitLowFrequencyGains(gain); + // Use conservative high-frequency gains during clock-drift or when not in + // dominant nearend. + if (!dominant_nearend_detector_->IsNearendState() || clock_drift || + config_.suppressor.conservative_hf_suppression) { + LimitHighFrequencyGains(config_.suppressor.conservative_hf_suppression, + gain); + } // Store computed gains. std::copy(gain->begin(), gain->end(), last_gain_.begin()); @@ -352,6 +360,7 @@ void SuppressionGain::GetGain( const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, const std::vector>>& render, + bool clock_drift, float* high_bands_gain, std::array* low_band_gain) { RTC_DCHECK(high_bands_gain); @@ -364,7 +373,8 @@ void SuppressionGain::GetGain( // Compute gain for the lower band. bool low_noise_render = low_render_detector_.Detect(render); LowerBandGain(low_noise_render, aec_state, nearend_spectrum, - residual_echo_spectrum, comfort_noise_spectrum, low_band_gain); + residual_echo_spectrum, comfort_noise_spectrum, clock_drift, + low_band_gain); // Compute the gain for the upper bands. 
const absl::optional narrow_peak_band = diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h index f46db0b7b..e7175c36d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h @@ -47,6 +47,7 @@ class SuppressionGain { const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, const std::vector>>& render, + bool clock_drift, float* high_bands_gain, std::array* low_band_gain); @@ -76,6 +77,7 @@ class SuppressionGain { suppressor_input, rtc::ArrayView> residual_echo, rtc::ArrayView> comfort_noise, + bool clock_drift, std::array* gain); void GetMinGain(rtc::ArrayView weighted_residual_echo, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc new file mode 100644 index 000000000..3ed0980bf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc @@ -0,0 +1,239 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/aec3/transparent_mode.h" + +#include "rtc_base/checks.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +namespace { + +constexpr size_t kBlocksSinceConvergencedFilterInit = 10000; +constexpr size_t kBlocksSinceConsistentEstimateInit = 10000; + +bool DeactivateTransparentMode() { + return field_trial::IsEnabled("WebRTC-Aec3TransparentModeKillSwitch"); +} + +bool DeactivateTransparentModeHmm() { + return field_trial::IsEnabled("WebRTC-Aec3TransparentModeHmmKillSwitch"); +} + +} // namespace + +// Classifier that toggles transparent mode which reduces echo suppression when +// headsets are used. +class TransparentModeImpl : public TransparentMode { + public: + bool Active() const override { return transparency_activated_; } + + void Reset() override { + // Determines if transparent mode is used. + transparency_activated_ = false; + + // The estimated probability of being transparent mode. + prob_transparent_state_ = 0.f; + } + + void Update(int filter_delay_blocks, + bool any_filter_consistent, + bool any_filter_converged, + bool any_coarse_filter_converged, + bool all_filters_diverged, + bool active_render, + bool saturated_capture) override { + // The classifier is implemented as a Hidden Markov Model (HMM) with two + // hidden states: "normal" and "transparent". The estimated probabilities of + // the two states are updated by observing filter convergence during active + // render. The filters are less likely to be reported as converged when + // there is no echo present in the microphone signal. + + // The constants have been obtained by observing active_render and + // any_coarse_filter_converged under varying call scenarios. They + // have further been hand tuned to prefer normal state during uncertain + // regions (to avoid echo leaks). + + // The model is only updated during active render. + if (!active_render) + return; + + // Probability of switching from one state to the other. 
+ constexpr float kSwitch = 0.000001f; + + // Probability of observing converged filters in states "normal" and + // "transparent" during active render. + constexpr float kConvergedNormal = 0.01f; + constexpr float kConvergedTransparent = 0.001f; + + // Probability of transitioning to transparent state from normal state and + // transparent state respectively. + constexpr float kA[2] = {kSwitch, 1.f - kSwitch}; + + // Probability of the two observations (converged filter or not converged + // filter) in normal state and transparent state respectively. + constexpr float kB[2][2] = { + {1.f - kConvergedNormal, kConvergedNormal}, + {1.f - kConvergedTransparent, kConvergedTransparent}}; + + // Probability of the two states before the update. + const float prob_transparent = prob_transparent_state_; + const float prob_normal = 1.f - prob_transparent; + + // Probability of transitioning to transparent state. + const float prob_transition_transparent = + prob_normal * kA[0] + prob_transparent * kA[1]; + const float prob_transition_normal = 1.f - prob_transition_transparent; + + // Observed output. + const int out = static_cast(any_coarse_filter_converged); + + // Joint probabilites of the observed output and respective states. + const float prob_joint_normal = prob_transition_normal * kB[0][out]; + const float prob_joint_transparent = + prob_transition_transparent * kB[1][out]; + + // Conditional probability of transparent state and the observed output. + RTC_DCHECK_GT(prob_joint_normal + prob_joint_transparent, 0.f); + prob_transparent_state_ = + prob_joint_transparent / (prob_joint_normal + prob_joint_transparent); + + // Transparent mode is only activated when its state probability is high. + // Dead zone between activation/deactivation thresholds to avoid switching + // back and forth. 
+ if (prob_transparent_state_ > 0.95f) { + transparency_activated_ = true; + } else if (prob_transparent_state_ < 0.5f) { + transparency_activated_ = false; + } + } + + private: + bool transparency_activated_ = false; + float prob_transparent_state_ = 0.f; +}; + +// Legacy classifier for toggling transparent mode. +class LegacyTransparentModeImpl : public TransparentMode { + public: + explicit LegacyTransparentModeImpl(const EchoCanceller3Config& config) + : linear_and_stable_echo_path_( + config.echo_removal_control.linear_and_stable_echo_path), + active_blocks_since_sane_filter_(kBlocksSinceConsistentEstimateInit), + non_converged_sequence_size_(kBlocksSinceConvergencedFilterInit) {} + + bool Active() const override { return transparency_activated_; } + + void Reset() override { + non_converged_sequence_size_ = kBlocksSinceConvergencedFilterInit; + diverged_sequence_size_ = 0; + strong_not_saturated_render_blocks_ = 0; + if (linear_and_stable_echo_path_) { + recent_convergence_during_activity_ = false; + } + } + + void Update(int filter_delay_blocks, + bool any_filter_consistent, + bool any_filter_converged, + bool any_coarse_filter_converged, + bool all_filters_diverged, + bool active_render, + bool saturated_capture) override { + ++capture_block_counter_; + strong_not_saturated_render_blocks_ += + active_render && !saturated_capture ? 
1 : 0; + + if (any_filter_consistent && filter_delay_blocks < 5) { + sane_filter_observed_ = true; + active_blocks_since_sane_filter_ = 0; + } else if (active_render) { + ++active_blocks_since_sane_filter_; + } + + bool sane_filter_recently_seen; + if (!sane_filter_observed_) { + sane_filter_recently_seen = + capture_block_counter_ <= 5 * kNumBlocksPerSecond; + } else { + sane_filter_recently_seen = + active_blocks_since_sane_filter_ <= 30 * kNumBlocksPerSecond; + } + + if (any_filter_converged) { + recent_convergence_during_activity_ = true; + active_non_converged_sequence_size_ = 0; + non_converged_sequence_size_ = 0; + ++num_converged_blocks_; + } else { + if (++non_converged_sequence_size_ > 20 * kNumBlocksPerSecond) { + num_converged_blocks_ = 0; + } + + if (active_render && + ++active_non_converged_sequence_size_ > 60 * kNumBlocksPerSecond) { + recent_convergence_during_activity_ = false; + } + } + + if (!all_filters_diverged) { + diverged_sequence_size_ = 0; + } else if (++diverged_sequence_size_ >= 60) { + // TODO(peah): Change these lines to ensure proper triggering of usable + // filter. 
+ non_converged_sequence_size_ = kBlocksSinceConvergencedFilterInit; + } + + if (active_non_converged_sequence_size_ > 60 * kNumBlocksPerSecond) { + finite_erl_recently_detected_ = false; + } + if (num_converged_blocks_ > 50) { + finite_erl_recently_detected_ = true; + } + + if (finite_erl_recently_detected_) { + transparency_activated_ = false; + } else if (sane_filter_recently_seen && + recent_convergence_during_activity_) { + transparency_activated_ = false; + } else { + const bool filter_should_have_converged = + strong_not_saturated_render_blocks_ > 6 * kNumBlocksPerSecond; + transparency_activated_ = filter_should_have_converged; + } + } + + private: + const bool linear_and_stable_echo_path_; + size_t capture_block_counter_ = 0; + bool transparency_activated_ = false; + size_t active_blocks_since_sane_filter_; + bool sane_filter_observed_ = false; + bool finite_erl_recently_detected_ = false; + size_t non_converged_sequence_size_; + size_t diverged_sequence_size_ = 0; + size_t active_non_converged_sequence_size_ = 0; + size_t num_converged_blocks_ = 0; + bool recent_convergence_during_activity_ = false; + size_t strong_not_saturated_render_blocks_ = 0; +}; + +std::unique_ptr TransparentMode::Create( + const EchoCanceller3Config& config) { + if (config.ep_strength.bounded_erl || DeactivateTransparentMode()) { + return nullptr; + } + if (DeactivateTransparentModeHmm()) { + return std::make_unique(config); + } + return std::make_unique(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.h new file mode 100644 index 000000000..bc5dd0391 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AEC3_TRANSPARENT_MODE_H_ +#define MODULES_AUDIO_PROCESSING_AEC3_TRANSPARENT_MODE_H_ + +#include + +#include "api/audio/echo_canceller3_config.h" +#include "modules/audio_processing/aec3/aec3_common.h" + +namespace webrtc { + +// Class for detecting and toggling the transparent mode which causes the +// suppressor to apply less suppression. +class TransparentMode { + public: + static std::unique_ptr Create( + const EchoCanceller3Config& config); + + virtual ~TransparentMode() {} + + // Returns whether the transparent mode should be active. + virtual bool Active() const = 0; + + // Resets the state of the detector. + virtual void Reset() = 0; + + // Updates the detection decision based on new data. + virtual void Update(int filter_delay_blocks, + bool any_filter_consistent, + bool any_filter_converged, + bool any_coarse_filter_converged, + bool all_filters_diverged, + bool active_render, + bool saturated_capture) = 0; +}; + +} // namespace webrtc +#endif // MODULES_AUDIO_PROCESSING_AEC3_TRANSPARENT_MODE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h index 883cd95fd..8ef813341 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h @@ -40,6 +40,7 @@ class VectorMath { : optimization_(optimization) {} // Elementwise square root. 
+ void SqrtAVX2(rtc::ArrayView x); void Sqrt(rtc::ArrayView x) { switch (optimization_) { #if defined(WEBRTC_ARCH_X86_FAMILY) @@ -110,6 +111,9 @@ class VectorMath { } // Elementwise vector multiplication z = x * y. + void MultiplyAVX2(rtc::ArrayView x, + rtc::ArrayView y, + rtc::ArrayView z); void Multiply(rtc::ArrayView x, rtc::ArrayView y, rtc::ArrayView z) { @@ -133,6 +137,9 @@ class VectorMath { z[j] = x[j] * y[j]; } } break; + case Aec3Optimization::kAvx2: + MultiplyAVX2(x, y, z); + break; #endif #if defined(WEBRTC_HAS_NEON) case Aec3Optimization::kNeon: { @@ -159,6 +166,7 @@ class VectorMath { } // Elementwise vector accumulation z += x. + void AccumulateAVX2(rtc::ArrayView x, rtc::ArrayView z); void Accumulate(rtc::ArrayView x, rtc::ArrayView z) { RTC_DCHECK_EQ(z.size(), x.size()); switch (optimization_) { @@ -179,6 +187,9 @@ class VectorMath { z[j] += x[j]; } } break; + case Aec3Optimization::kAvx2: + AccumulateAVX2(x, z); + break; #endif #if defined(WEBRTC_HAS_NEON) case Aec3Optimization::kNeon: { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math_avx2.cc new file mode 100644 index 000000000..0b5f3c142 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math_avx2.cc @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/aec3/vector_math.h" + +#include +#include + +#include "api/array_view.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace aec3 { + +// Elementwise square root. 
+void VectorMath::SqrtAVX2(rtc::ArrayView x) { + const int x_size = static_cast(x.size()); + const int vector_limit = x_size >> 3; + + int j = 0; + for (; j < vector_limit * 8; j += 8) { + __m256 g = _mm256_loadu_ps(&x[j]); + g = _mm256_sqrt_ps(g); + _mm256_storeu_ps(&x[j], g); + } + + for (; j < x_size; ++j) { + x[j] = sqrtf(x[j]); + } +} + +// Elementwise vector multiplication z = x * y. +void VectorMath::MultiplyAVX2(rtc::ArrayView x, + rtc::ArrayView y, + rtc::ArrayView z) { + RTC_DCHECK_EQ(z.size(), x.size()); + RTC_DCHECK_EQ(z.size(), y.size()); + const int x_size = static_cast(x.size()); + const int vector_limit = x_size >> 3; + + int j = 0; + for (; j < vector_limit * 8; j += 8) { + const __m256 x_j = _mm256_loadu_ps(&x[j]); + const __m256 y_j = _mm256_loadu_ps(&y[j]); + const __m256 z_j = _mm256_mul_ps(x_j, y_j); + _mm256_storeu_ps(&z[j], z_j); + } + + for (; j < x_size; ++j) { + z[j] = x[j] * y[j]; + } +} + +// Elementwise vector accumulation z += x. +void VectorMath::AccumulateAVX2(rtc::ArrayView x, + rtc::ArrayView z) { + RTC_DCHECK_EQ(z.size(), x.size()); + const int x_size = static_cast(x.size()); + const int vector_limit = x_size >> 3; + + int j = 0; + for (; j < vector_limit * 8; j += 8) { + const __m256 x_j = _mm256_loadu_ps(&x[j]); + __m256 z_j = _mm256_loadu_ps(&z[j]); + z_j = _mm256_add_ps(x_j, z_j); + _mm256_storeu_ps(&z[j], z_j); + } + + for (; j < x_size; ++j) { + z[j] += x[j]; + } +} + +} // namespace aec3 +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc index 12997388f..18f85721b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc @@ -203,6 +203,12 @@ void AecDumpImpl::WriteRuntimeSetting( setting->set_capture_fixed_post_gain(x); break; } + case 
AudioProcessing::RuntimeSetting::Type::kCaptureOutputUsed: { + bool x; + runtime_setting.GetBool(&x); + setting->set_capture_output_used(x); + break; + } case AudioProcessing::RuntimeSetting::Type::kPlayoutVolumeChange: { int x; runtime_setting.GetInt(&x); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc index a5d36089c..0372ccf38 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc @@ -14,12 +14,32 @@ #include "modules/audio_processing/agc2/vad_with_level.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" namespace webrtc { +namespace { + +void DumpDebugData(const AdaptiveDigitalGainApplier::FrameInfo& info, + ApmDataDumper& dumper) { + dumper.DumpRaw("agc2_vad_probability", info.vad_result.speech_probability); + dumper.DumpRaw("agc2_vad_rms_dbfs", info.vad_result.rms_dbfs); + dumper.DumpRaw("agc2_vad_peak_dbfs", info.vad_result.peak_dbfs); + dumper.DumpRaw("agc2_noise_estimate_dbfs", info.input_noise_level_dbfs); + dumper.DumpRaw("agc2_last_limiter_audio_level", info.limiter_envelope_dbfs); +} + +constexpr int kGainApplierAdjacentSpeechFramesThreshold = 1; +constexpr float kMaxGainChangePerSecondDb = 3.f; +constexpr float kMaxOutputNoiseLevelDbfs = -50.f; + +} // namespace AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper) : speech_level_estimator_(apm_data_dumper), - gain_applier_(apm_data_dumper), + gain_applier_(apm_data_dumper, + kGainApplierAdjacentSpeechFramesThreshold, + kMaxGainChangePerSecondDb, + kMaxOutputNoiseLevelDbfs), apm_data_dumper_(apm_data_dumper), noise_level_estimator_(apm_data_dumper) { RTC_DCHECK(apm_data_dumper); @@ -30,48 +50,37 @@ AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper, : speech_level_estimator_( 
apm_data_dumper, config.adaptive_digital.level_estimator, - config.adaptive_digital.use_saturation_protector, + config.adaptive_digital + .level_estimator_adjacent_speech_frames_threshold, + config.adaptive_digital.initial_saturation_margin_db, config.adaptive_digital.extra_saturation_margin_db), - gain_applier_(apm_data_dumper), + vad_(config.adaptive_digital.vad_probability_attack), + gain_applier_( + apm_data_dumper, + config.adaptive_digital.gain_applier_adjacent_speech_frames_threshold, + config.adaptive_digital.max_gain_change_db_per_second, + config.adaptive_digital.max_output_noise_level_dbfs), apm_data_dumper_(apm_data_dumper), noise_level_estimator_(apm_data_dumper) { RTC_DCHECK(apm_data_dumper); + if (!config.adaptive_digital.use_saturation_protector) { + RTC_LOG(LS_WARNING) << "The saturation protector cannot be disabled."; + } } AdaptiveAgc::~AdaptiveAgc() = default; -void AdaptiveAgc::Process(AudioFrameView float_frame, - float last_audio_level) { - auto signal_with_levels = SignalWithLevels(float_frame); - signal_with_levels.vad_result = vad_.AnalyzeFrame(float_frame); - apm_data_dumper_->DumpRaw("agc2_vad_probability", - signal_with_levels.vad_result.speech_probability); - apm_data_dumper_->DumpRaw("agc2_vad_rms_dbfs", - signal_with_levels.vad_result.speech_rms_dbfs); - apm_data_dumper_->DumpRaw("agc2_vad_peak_dbfs", - signal_with_levels.vad_result.speech_peak_dbfs); - - speech_level_estimator_.UpdateEstimation(signal_with_levels.vad_result); - - signal_with_levels.input_level_dbfs = - speech_level_estimator_.LatestLevelEstimate(); - - signal_with_levels.input_noise_level_dbfs = - noise_level_estimator_.Analyze(float_frame); - - apm_data_dumper_->DumpRaw("agc2_noise_estimate_dbfs", - signal_with_levels.input_noise_level_dbfs); - - signal_with_levels.limiter_audio_level_dbfs = - last_audio_level > 0 ? 
FloatS16ToDbfs(last_audio_level) : -90.f; - apm_data_dumper_->DumpRaw("agc2_last_limiter_audio_level", - signal_with_levels.limiter_audio_level_dbfs); - - signal_with_levels.estimate_is_confident = - speech_level_estimator_.LevelEstimationIsConfident(); - - // The gain applier applies the gain. - gain_applier_.Process(signal_with_levels); +void AdaptiveAgc::Process(AudioFrameView frame, float limiter_envelope) { + AdaptiveDigitalGainApplier::FrameInfo info; + info.vad_result = vad_.AnalyzeFrame(frame); + speech_level_estimator_.Update(info.vad_result); + info.input_level_dbfs = speech_level_estimator_.level_dbfs(); + info.input_noise_level_dbfs = noise_level_estimator_.Analyze(frame); + info.limiter_envelope_dbfs = + limiter_envelope > 0 ? FloatS16ToDbfs(limiter_envelope) : -90.f; + info.estimate_is_confident = speech_level_estimator_.IsConfident(); + DumpDebugData(info, *apm_data_dumper_); + gain_applier_.Process(info, frame); } void AdaptiveAgc::Reset() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h index 16c0082ed..f3c7854e1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h @@ -21,19 +21,25 @@ namespace webrtc { class ApmDataDumper; +// Adaptive digital gain controller. +// TODO(crbug.com/webrtc/7494): Unify with `AdaptiveDigitalGainApplier`. class AdaptiveAgc { public: explicit AdaptiveAgc(ApmDataDumper* apm_data_dumper); + // TODO(crbug.com/webrtc/7494): Remove ctor above. AdaptiveAgc(ApmDataDumper* apm_data_dumper, const AudioProcessing::Config::GainController2& config); ~AdaptiveAgc(); - void Process(AudioFrameView float_frame, float last_audio_level); + // Analyzes `frame` and applies a digital adaptive gain to it. Takes into + // account the envelope measured by the limiter. 
+ // TODO(crbug.com/webrtc/7494): Make the class depend on the limiter. + void Process(AudioFrameView frame, float limiter_envelope); void Reset(); private: AdaptiveModeLevelEstimator speech_level_estimator_; - VadWithLevel vad_; + VadLevelAnalyzer vad_; AdaptiveDigitalGainApplier gain_applier_; ApmDataDumper* const apm_data_dumper_; NoiseLevelEstimator noise_level_estimator_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc index 6ece83b23..36ef9be56 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc @@ -16,6 +16,7 @@ #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "system_wrappers/include/metrics.h" @@ -44,12 +45,16 @@ float ComputeGainDb(float input_level_dbfs) { return 0.f; } -// We require 'gain + noise_level <= kMaxNoiseLevelDbfs'. +// Returns `target_gain` if the output noise level is below +// `max_output_noise_level_dbfs`; otherwise returns a capped gain so that the +// output noise level equals `max_output_noise_level_dbfs`. 
float LimitGainByNoise(float target_gain, float input_noise_level_dbfs, - ApmDataDumper* apm_data_dumper) { - const float noise_headroom_db = kMaxNoiseLevelDbfs - input_noise_level_dbfs; - apm_data_dumper->DumpRaw("agc2_noise_headroom_db", noise_headroom_db); + float max_output_noise_level_dbfs, + ApmDataDumper& apm_data_dumper) { + const float noise_headroom_db = + max_output_noise_level_dbfs - input_noise_level_dbfs; + apm_data_dumper.DumpRaw("agc2_noise_headroom_db", noise_headroom_db); return std::min(target_gain, std::max(noise_headroom_db, 0.f)); } @@ -74,57 +79,68 @@ float LimitGainByLowConfidence(float target_gain, // Return the gain difference in db to 'last_gain_db'. float ComputeGainChangeThisFrameDb(float target_gain_db, float last_gain_db, - bool gain_increase_allowed) { + bool gain_increase_allowed, + float max_gain_change_db) { float target_gain_difference_db = target_gain_db - last_gain_db; if (!gain_increase_allowed) { target_gain_difference_db = std::min(target_gain_difference_db, 0.f); } - - return rtc::SafeClamp(target_gain_difference_db, -kMaxGainChangePerFrameDb, - kMaxGainChangePerFrameDb); + return rtc::SafeClamp(target_gain_difference_db, -max_gain_change_db, + max_gain_change_db); } + } // namespace -SignalWithLevels::SignalWithLevels(AudioFrameView float_frame) - : float_frame(float_frame) {} -SignalWithLevels::SignalWithLevels(const SignalWithLevels&) = default; - AdaptiveDigitalGainApplier::AdaptiveDigitalGainApplier( - ApmDataDumper* apm_data_dumper) - : gain_applier_(false, DbToRatio(last_gain_db_)), - apm_data_dumper_(apm_data_dumper) {} + ApmDataDumper* apm_data_dumper, + int adjacent_speech_frames_threshold, + float max_gain_change_db_per_second, + float max_output_noise_level_dbfs) + : apm_data_dumper_(apm_data_dumper), + gain_applier_( + /*hard_clip_samples=*/false, + /*initial_gain_factor=*/DbToRatio(kInitialAdaptiveDigitalGainDb)), + adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold), + 
max_gain_change_db_per_10ms_(max_gain_change_db_per_second * + kFrameDurationMs / 1000.f), + max_output_noise_level_dbfs_(max_output_noise_level_dbfs), + calls_since_last_gain_log_(0), + frames_to_gain_increase_allowed_(adjacent_speech_frames_threshold_), + last_gain_db_(kInitialAdaptiveDigitalGainDb) { + RTC_DCHECK_GT(max_gain_change_db_per_second, 0.f); + RTC_DCHECK_GE(frames_to_gain_increase_allowed_, 1); + RTC_DCHECK_GE(max_output_noise_level_dbfs_, -90.f); + RTC_DCHECK_LE(max_output_noise_level_dbfs_, 0.f); +} -void AdaptiveDigitalGainApplier::Process(SignalWithLevels signal_with_levels) { - calls_since_last_gain_log_++; - if (calls_since_last_gain_log_ == 100) { - calls_since_last_gain_log_ = 0; - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.DigitalGainApplied", - last_gain_db_, 0, kMaxGainDb, kMaxGainDb + 1); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", - -signal_with_levels.input_noise_level_dbfs, 0, - 100, 101); - } - - signal_with_levels.input_level_dbfs = - std::min(signal_with_levels.input_level_dbfs, 0.f); - - RTC_DCHECK_GE(signal_with_levels.input_level_dbfs, -150.f); - RTC_DCHECK_GE(signal_with_levels.float_frame.num_channels(), 1); - RTC_DCHECK_GE(signal_with_levels.float_frame.samples_per_channel(), 1); +void AdaptiveDigitalGainApplier::Process(const FrameInfo& info, + AudioFrameView frame) { + RTC_DCHECK_GE(info.input_level_dbfs, -150.f); + RTC_DCHECK_GE(frame.num_channels(), 1); + RTC_DCHECK( + frame.samples_per_channel() == 80 || frame.samples_per_channel() == 160 || + frame.samples_per_channel() == 320 || frame.samples_per_channel() == 480) + << "`frame` does not look like a 10 ms frame for an APM supported sample " + "rate"; const float target_gain_db = LimitGainByLowConfidence( - LimitGainByNoise(ComputeGainDb(signal_with_levels.input_level_dbfs), - signal_with_levels.input_noise_level_dbfs, - apm_data_dumper_), - last_gain_db_, signal_with_levels.limiter_audio_level_dbfs, - 
signal_with_levels.estimate_is_confident); + LimitGainByNoise(ComputeGainDb(std::min(info.input_level_dbfs, 0.f)), + info.input_noise_level_dbfs, + max_output_noise_level_dbfs_, *apm_data_dumper_), + last_gain_db_, info.limiter_envelope_dbfs, info.estimate_is_confident); - // Forbid increasing the gain when there is no speech. - gain_increase_allowed_ = signal_with_levels.vad_result.speech_probability > - kVadConfidenceThreshold; + // Forbid increasing the gain until enough adjacent speech frames are + // observed. + if (info.vad_result.speech_probability < kVadConfidenceThreshold) { + frames_to_gain_increase_allowed_ = adjacent_speech_frames_threshold_; + } else if (frames_to_gain_increase_allowed_ > 0) { + frames_to_gain_increase_allowed_--; + } const float gain_change_this_frame_db = ComputeGainChangeThisFrameDb( - target_gain_db, last_gain_db_, gain_increase_allowed_); + target_gain_db, last_gain_db_, + /*gain_increase_allowed=*/frames_to_gain_increase_allowed_ == 0, + max_gain_change_db_per_10ms_); apm_data_dumper_->DumpRaw("agc2_want_to_change_by_db", target_gain_db - last_gain_db_); @@ -137,10 +153,27 @@ void AdaptiveDigitalGainApplier::Process(SignalWithLevels signal_with_levels) { gain_applier_.SetGainFactor( DbToRatio(last_gain_db_ + gain_change_this_frame_db)); } - gain_applier_.ApplyGain(signal_with_levels.float_frame); + gain_applier_.ApplyGain(frame); // Remember that the gain has changed for the next iteration. last_gain_db_ = last_gain_db_ + gain_change_this_frame_db; apm_data_dumper_->DumpRaw("agc2_applied_gain_db", last_gain_db_); + + // Log every 10 seconds. 
+ calls_since_last_gain_log_++; + if (calls_since_last_gain_log_ == 1000) { + calls_since_last_gain_log_ = 0; + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.DigitalGainApplied", + last_gain_db_, 0, kMaxGainDb, kMaxGainDb + 1); + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.Agc2.EstimatedSpeechPlusNoiseLevel", + -info.input_level_dbfs, 0, 100, 101); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", + -info.input_noise_level_dbfs, 0, 100, 101); + RTC_LOG(LS_INFO) << "AGC2 adaptive digital" + << " | speech_plus_noise_dbfs: " << info.input_level_dbfs + << " | noise_dbfs: " << info.input_noise_level_dbfs + << " | gain_db: " << last_gain_db_; + } } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h index e7f07fcf0..a65379f5b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h @@ -11,7 +11,6 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ #define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ -#include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/gain_applier.h" #include "modules/audio_processing/agc2/vad_with_level.h" #include "modules/audio_processing/include/audio_frame_view.h" @@ -20,36 +19,51 @@ namespace webrtc { class ApmDataDumper; -struct SignalWithLevels { - SignalWithLevels(AudioFrameView float_frame); - SignalWithLevels(const SignalWithLevels&); - - float input_level_dbfs = -1.f; - float input_noise_level_dbfs = -1.f; - VadWithLevel::LevelAndProbability vad_result; - float limiter_audio_level_dbfs = -1.f; - bool estimate_is_confident = false; - AudioFrameView float_frame; -}; - +// Part of the adaptive digital controller that applies a 
digital adaptive gain. +// The gain is updated towards a target. The logic decides when gain updates are +// allowed, it controls the adaptation speed and caps the target based on the +// estimated noise level and the speech level estimate confidence. class AdaptiveDigitalGainApplier { public: - explicit AdaptiveDigitalGainApplier(ApmDataDumper* apm_data_dumper); - // Decide what gain to apply. - void Process(SignalWithLevels signal_with_levels); + // Information about a frame to process. + struct FrameInfo { + float input_level_dbfs; // Estimated speech plus noise level. + float input_noise_level_dbfs; // Estimated noise level. + VadLevelAnalyzer::Result vad_result; + float limiter_envelope_dbfs; // Envelope level from the limiter. + bool estimate_is_confident; + }; + + // Ctor. + // `adjacent_speech_frames_threshold` indicates how many speech frames are + // required before a gain increase is allowed. `max_gain_change_db_per_second` + // limits the adaptation speed (uniformly operated across frames). + // `max_output_noise_level_dbfs` limits the output noise level. + AdaptiveDigitalGainApplier(ApmDataDumper* apm_data_dumper, + int adjacent_speech_frames_threshold, + float max_gain_change_db_per_second, + float max_output_noise_level_dbfs); + AdaptiveDigitalGainApplier(const AdaptiveDigitalGainApplier&) = delete; + AdaptiveDigitalGainApplier& operator=(const AdaptiveDigitalGainApplier&) = + delete; + + // Analyzes `info`, updates the digital gain and applies it to a 10 ms + // `frame`. Supports any sample rate supported by APM. + void Process(const FrameInfo& info, AudioFrameView frame); private: - float last_gain_db_ = kInitialAdaptiveDigitalGainDb; + ApmDataDumper* const apm_data_dumper_; GainApplier gain_applier_; - int calls_since_last_gain_log_ = 0; - // For some combinations of noise and speech probability, increasing - // the level is not allowed. 
Since we may get VAD results in bursts, - // we keep track of this variable until the next VAD results come - // in. - bool gain_increase_allowed_ = true; - ApmDataDumper* apm_data_dumper_ = nullptr; + const int adjacent_speech_frames_threshold_; + const float max_gain_change_db_per_10ms_; + const float max_output_noise_level_dbfs_; + + int calls_since_last_gain_log_; + int frames_to_gain_increase_allowed_; + float last_gain_db_; }; + } // namespace webrtc #endif // MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc index dd27688ab..739997f5e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc @@ -13,99 +13,187 @@ #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" namespace webrtc { +namespace { + +using LevelEstimatorType = + AudioProcessing::Config::GainController2::LevelEstimator; + +// Combines a level estimation with the saturation protector margins. +float ComputeLevelEstimateDbfs(float level_estimate_dbfs, + float saturation_margin_db, + float extra_saturation_margin_db) { + return rtc::SafeClamp( + level_estimate_dbfs + saturation_margin_db + extra_saturation_margin_db, + -90.f, 30.f); +} + +// Returns the level of given type from `vad_level`. 
+float GetLevel(const VadLevelAnalyzer::Result& vad_level, + LevelEstimatorType type) { + switch (type) { + case LevelEstimatorType::kRms: + return vad_level.rms_dbfs; + break; + case LevelEstimatorType::kPeak: + return vad_level.peak_dbfs; + break; + } + RTC_CHECK_NOTREACHED(); +} + +} // namespace + +bool AdaptiveModeLevelEstimator::LevelEstimatorState::operator==( + const AdaptiveModeLevelEstimator::LevelEstimatorState& b) const { + return time_to_full_buffer_ms == b.time_to_full_buffer_ms && + level_dbfs.numerator == b.level_dbfs.numerator && + level_dbfs.denominator == b.level_dbfs.denominator && + saturation_protector == b.saturation_protector; +} + +float AdaptiveModeLevelEstimator::LevelEstimatorState::Ratio::GetRatio() const { + RTC_DCHECK_NE(denominator, 0.f); + return numerator / denominator; +} AdaptiveModeLevelEstimator::AdaptiveModeLevelEstimator( ApmDataDumper* apm_data_dumper) - : level_estimator_( - AudioProcessing::Config::GainController2::LevelEstimator::kRms), - use_saturation_protector_(true), - saturation_protector_(apm_data_dumper), - apm_data_dumper_(apm_data_dumper) {} + : AdaptiveModeLevelEstimator( + apm_data_dumper, + AudioProcessing::Config::GainController2::LevelEstimator::kRms, + kDefaultLevelEstimatorAdjacentSpeechFramesThreshold, + kDefaultInitialSaturationMarginDb, + kDefaultExtraSaturationMarginDb) {} AdaptiveModeLevelEstimator::AdaptiveModeLevelEstimator( ApmDataDumper* apm_data_dumper, AudioProcessing::Config::GainController2::LevelEstimator level_estimator, - bool use_saturation_protector, + int adjacent_speech_frames_threshold, + float initial_saturation_margin_db, float extra_saturation_margin_db) - : level_estimator_(level_estimator), - use_saturation_protector_(use_saturation_protector), - saturation_protector_(apm_data_dumper, extra_saturation_margin_db), - apm_data_dumper_(apm_data_dumper) {} + : apm_data_dumper_(apm_data_dumper), + level_estimator_type_(level_estimator), + 
adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold), + initial_saturation_margin_db_(initial_saturation_margin_db), + extra_saturation_margin_db_(extra_saturation_margin_db), + level_dbfs_(ComputeLevelEstimateDbfs(kInitialSpeechLevelEstimateDbfs, + initial_saturation_margin_db_, + extra_saturation_margin_db_)) { + RTC_DCHECK(apm_data_dumper_); + RTC_DCHECK_GE(adjacent_speech_frames_threshold_, 1); + Reset(); +} -void AdaptiveModeLevelEstimator::UpdateEstimation( - const VadWithLevel::LevelAndProbability& vad_data) { - RTC_DCHECK_GT(vad_data.speech_rms_dbfs, -150.f); - RTC_DCHECK_LT(vad_data.speech_rms_dbfs, 50.f); - RTC_DCHECK_GT(vad_data.speech_peak_dbfs, -150.f); - RTC_DCHECK_LT(vad_data.speech_peak_dbfs, 50.f); - RTC_DCHECK_GE(vad_data.speech_probability, 0.f); - RTC_DCHECK_LE(vad_data.speech_probability, 1.f); +void AdaptiveModeLevelEstimator::Update( + const VadLevelAnalyzer::Result& vad_level) { + RTC_DCHECK_GT(vad_level.rms_dbfs, -150.f); + RTC_DCHECK_LT(vad_level.rms_dbfs, 50.f); + RTC_DCHECK_GT(vad_level.peak_dbfs, -150.f); + RTC_DCHECK_LT(vad_level.peak_dbfs, 50.f); + RTC_DCHECK_GE(vad_level.speech_probability, 0.f); + RTC_DCHECK_LE(vad_level.speech_probability, 1.f); + DumpDebugData(); - if (vad_data.speech_probability < kVadConfidenceThreshold) { - DebugDumpEstimate(); + if (vad_level.speech_probability < kVadConfidenceThreshold) { + // Not a speech frame. + if (adjacent_speech_frames_threshold_ > 1) { + // When two or more adjacent speech frames are required in order to update + // the state, we need to decide whether to discard or confirm the updates + // based on the speech sequence length. + if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { + // First non-speech frame after a long enough sequence of speech frames. + // Update the reliable state. + reliable_state_ = preliminary_state_; + } else if (num_adjacent_speech_frames_ > 0) { + // First non-speech frame after a too short sequence of speech frames. 
+ // Reset to the last reliable state. + preliminary_state_ = reliable_state_; + } + } + num_adjacent_speech_frames_ = 0; return; } - const bool buffer_is_full = buffer_size_ms_ >= kFullBufferSizeMs; + // Speech frame observed. + num_adjacent_speech_frames_++; + + // Update preliminary level estimate. + RTC_DCHECK_GE(preliminary_state_.time_to_full_buffer_ms, 0); + const bool buffer_is_full = preliminary_state_.time_to_full_buffer_ms == 0; if (!buffer_is_full) { - buffer_size_ms_ += kFrameDurationMs; + preliminary_state_.time_to_full_buffer_ms -= kFrameDurationMs; } - + // Weighted average of levels with speech probability as weight. + RTC_DCHECK_GT(vad_level.speech_probability, 0.f); const float leak_factor = buffer_is_full ? kFullBufferLeakFactor : 1.f; + preliminary_state_.level_dbfs.numerator = + preliminary_state_.level_dbfs.numerator * leak_factor + + GetLevel(vad_level, level_estimator_type_) * vad_level.speech_probability; + preliminary_state_.level_dbfs.denominator = + preliminary_state_.level_dbfs.denominator * leak_factor + + vad_level.speech_probability; - // Read speech level estimation. - float speech_level_dbfs = 0.f; - using LevelEstimatorType = - AudioProcessing::Config::GainController2::LevelEstimator; - switch (level_estimator_) { - case LevelEstimatorType::kRms: - speech_level_dbfs = vad_data.speech_rms_dbfs; - break; - case LevelEstimatorType::kPeak: - speech_level_dbfs = vad_data.speech_peak_dbfs; - break; - } + const float level_dbfs = preliminary_state_.level_dbfs.GetRatio(); - // Update speech level estimation. 
- estimate_numerator_ = estimate_numerator_ * leak_factor + - speech_level_dbfs * vad_data.speech_probability; - estimate_denominator_ = - estimate_denominator_ * leak_factor + vad_data.speech_probability; - last_estimate_with_offset_dbfs_ = estimate_numerator_ / estimate_denominator_; + UpdateSaturationProtectorState(vad_level.peak_dbfs, level_dbfs, + preliminary_state_.saturation_protector); - if (use_saturation_protector_) { - saturation_protector_.UpdateMargin(vad_data, - last_estimate_with_offset_dbfs_); - DebugDumpEstimate(); + if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { + // `preliminary_state_` is now reliable. Update the last level estimation. + level_dbfs_ = ComputeLevelEstimateDbfs( + level_dbfs, preliminary_state_.saturation_protector.margin_db, + extra_saturation_margin_db_); } } -float AdaptiveModeLevelEstimator::LatestLevelEstimate() const { - return rtc::SafeClamp( - last_estimate_with_offset_dbfs_ + - (use_saturation_protector_ ? saturation_protector_.LastMargin() - : 0.f), - -90.f, 30.f); +bool AdaptiveModeLevelEstimator::IsConfident() const { + if (adjacent_speech_frames_threshold_ == 1) { + // Ignore `reliable_state_` when a single frame is enough to update the + // level estimate (because it is not used). + return preliminary_state_.time_to_full_buffer_ms == 0; + } + // Once confident, it remains confident. + RTC_DCHECK(reliable_state_.time_to_full_buffer_ms != 0 || + preliminary_state_.time_to_full_buffer_ms == 0); + // During the first long enough speech sequence, `reliable_state_` must be + // ignored since `preliminary_state_` is used. 
+ return reliable_state_.time_to_full_buffer_ms == 0 || + (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && + preliminary_state_.time_to_full_buffer_ms == 0); } void AdaptiveModeLevelEstimator::Reset() { - buffer_size_ms_ = 0; - last_estimate_with_offset_dbfs_ = kInitialSpeechLevelEstimateDbfs; - estimate_numerator_ = 0.f; - estimate_denominator_ = 0.f; - saturation_protector_.Reset(); + ResetLevelEstimatorState(preliminary_state_); + ResetLevelEstimatorState(reliable_state_); + level_dbfs_ = ComputeLevelEstimateDbfs(kInitialSpeechLevelEstimateDbfs, + initial_saturation_margin_db_, + extra_saturation_margin_db_); + num_adjacent_speech_frames_ = 0; } -void AdaptiveModeLevelEstimator::DebugDumpEstimate() { - if (apm_data_dumper_) { - apm_data_dumper_->DumpRaw("agc2_adaptive_level_estimate_with_offset_dbfs", - last_estimate_with_offset_dbfs_); - apm_data_dumper_->DumpRaw("agc2_adaptive_level_estimate_dbfs", - LatestLevelEstimate()); - } - saturation_protector_.DebugDumpEstimate(); +void AdaptiveModeLevelEstimator::ResetLevelEstimatorState( + LevelEstimatorState& state) const { + state.time_to_full_buffer_ms = kFullBufferSizeMs; + state.level_dbfs.numerator = 0.f; + state.level_dbfs.denominator = 0.f; + ResetSaturationProtectorState(initial_saturation_margin_db_, + state.saturation_protector); } + +void AdaptiveModeLevelEstimator::DumpDebugData() const { + apm_data_dumper_->DumpRaw("agc2_adaptive_level_estimate_dbfs", level_dbfs_); + apm_data_dumper_->DumpRaw("agc2_adaptive_num_adjacent_speech_frames_", + num_adjacent_speech_frames_); + apm_data_dumper_->DumpRaw("agc2_adaptive_preliminary_level_estimate_num", + preliminary_state_.level_dbfs.numerator); + apm_data_dumper_->DumpRaw("agc2_adaptive_preliminary_level_estimate_den", + preliminary_state_.level_dbfs.denominator); + apm_data_dumper_->DumpRaw("agc2_adaptive_preliminary_saturation_margin_db", + preliminary_state_.saturation_protector.margin_db); +} + } // namespace webrtc diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h index 63b9de2ae..213fc0f0c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h @@ -12,8 +12,9 @@ #define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_H_ #include +#include -#include "modules/audio_processing/agc2/agc2_common.h" // kFullBufferSizeMs... +#include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/saturation_protector.h" #include "modules/audio_processing/agc2/vad_with_level.h" #include "modules/audio_processing/include/audio_processing.h" @@ -21,33 +22,63 @@ namespace webrtc { class ApmDataDumper; +// Level estimator for the digital adaptive gain controller. class AdaptiveModeLevelEstimator { public: explicit AdaptiveModeLevelEstimator(ApmDataDumper* apm_data_dumper); + AdaptiveModeLevelEstimator(const AdaptiveModeLevelEstimator&) = delete; + AdaptiveModeLevelEstimator& operator=(const AdaptiveModeLevelEstimator&) = + delete; AdaptiveModeLevelEstimator( ApmDataDumper* apm_data_dumper, AudioProcessing::Config::GainController2::LevelEstimator level_estimator, - bool use_saturation_protector, + int adjacent_speech_frames_threshold, + float initial_saturation_margin_db, float extra_saturation_margin_db); - void UpdateEstimation(const VadWithLevel::LevelAndProbability& vad_data); - float LatestLevelEstimate() const; + + // Updates the level estimation. + void Update(const VadLevelAnalyzer::Result& vad_data); + // Returns the estimated speech plus noise level. + float level_dbfs() const { return level_dbfs_; } + // Returns true if the estimator is confident on its current estimate. 
+ bool IsConfident() const; + void Reset(); - bool LevelEstimationIsConfident() const { - return buffer_size_ms_ >= kFullBufferSizeMs; - } private: - void DebugDumpEstimate(); + // Part of the level estimator state used for check-pointing and restore ops. + struct LevelEstimatorState { + bool operator==(const LevelEstimatorState& s) const; + inline bool operator!=(const LevelEstimatorState& s) const { + return !(*this == s); + } + struct Ratio { + float numerator; + float denominator; + float GetRatio() const; + }; + // TODO(crbug.com/webrtc/7494): Remove time_to_full_buffer_ms if redundant. + int time_to_full_buffer_ms; + Ratio level_dbfs; + SaturationProtectorState saturation_protector; + }; + static_assert(std::is_trivially_copyable::value, ""); + + void ResetLevelEstimatorState(LevelEstimatorState& state) const; + + void DumpDebugData() const; + + ApmDataDumper* const apm_data_dumper_; const AudioProcessing::Config::GainController2::LevelEstimator - level_estimator_; - const bool use_saturation_protector_; - size_t buffer_size_ms_ = 0; - float last_estimate_with_offset_dbfs_ = kInitialSpeechLevelEstimateDbfs; - float estimate_numerator_ = 0.f; - float estimate_denominator_ = 0.f; - SaturationProtector saturation_protector_; - ApmDataDumper* const apm_data_dumper_; + level_estimator_type_; + const int adjacent_speech_frames_threshold_; + const float initial_saturation_margin_db_; + const float extra_saturation_margin_db_; + LevelEstimatorState preliminary_state_; + LevelEstimatorState reliable_state_; + float level_dbfs_; + int num_adjacent_speech_frames_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc index b7c64373f..5ceeb7df7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc @@ -38,7 +38,7 @@ void AdaptiveModeLevelEstimatorAgc::Process(const int16_t* audio, if (latest_voice_probability_ > kVadConfidenceThreshold) { time_in_ms_since_last_estimate_ += kFrameDurationMs; } - level_estimator_.UpdateEstimation(vad_prob); + level_estimator_.Update(vad_prob); } // Retrieves the difference between the target RMS level and the current @@ -48,8 +48,8 @@ bool AdaptiveModeLevelEstimatorAgc::GetRmsErrorDb(int* error) { if (time_in_ms_since_last_estimate_ <= kTimeUntilConfidentMs) { return false; } - *error = std::floor(target_level_dbfs() - - level_estimator_.LatestLevelEstimate() + 0.5f); + *error = + std::floor(target_level_dbfs() - level_estimator_.level_dbfs() + 0.5f); time_in_ms_since_last_estimate_ = 0; return true; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h index 6d1233988..bc6fa843b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h @@ -43,7 +43,7 @@ class AdaptiveModeLevelEstimatorAgc : public Agc { static constexpr int kDefaultAgc2LevelHeadroomDbfs = -1; int32_t time_in_ms_since_last_estimate_ = 0; AdaptiveModeLevelEstimator level_estimator_; - VadWithLevel agc2_vad_; + VadLevelAnalyzer agc2_vad_; float latest_voice_probability_ = 0.f; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.cc deleted file mode 100644 index 3f697d12a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.cc +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC 
project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc2/agc2_common.h" - -#include - -#include - -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { - -float GetInitialSaturationMarginDb() { - constexpr char kForceInitialSaturationMarginFieldTrial[] = - "WebRTC-Audio-Agc2ForceInitialSaturationMargin"; - - const bool use_forced_initial_saturation_margin = - webrtc::field_trial::IsEnabled(kForceInitialSaturationMarginFieldTrial); - if (use_forced_initial_saturation_margin) { - const std::string field_trial_string = webrtc::field_trial::FindFullName( - kForceInitialSaturationMarginFieldTrial); - float margin_db = -1; - if (sscanf(field_trial_string.c_str(), "Enabled-%f", &margin_db) == 1 && - margin_db >= 12.f && margin_db <= 25.f) { - return margin_db; - } - } - constexpr float kDefaultInitialSaturationMarginDb = 20.f; - return kDefaultInitialSaturationMarginDb; -} - -float GetExtraSaturationMarginOffsetDb() { - constexpr char kForceExtraSaturationMarginFieldTrial[] = - "WebRTC-Audio-Agc2ForceExtraSaturationMargin"; - - const bool use_forced_extra_saturation_margin = - webrtc::field_trial::IsEnabled(kForceExtraSaturationMarginFieldTrial); - if (use_forced_extra_saturation_margin) { - const std::string field_trial_string = webrtc::field_trial::FindFullName( - kForceExtraSaturationMarginFieldTrial); - float margin_db = -1; - if (sscanf(field_trial_string.c_str(), "Enabled-%f", &margin_db) == 1 && - margin_db >= 0.f && margin_db <= 10.f) { - return margin_db; - } - } - constexpr float kDefaultExtraSaturationMarginDb = 2.f; - return kDefaultExtraSaturationMarginDb; -} -} // namespace webrtc 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h index a6389f4c2..5d01100eb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h @@ -26,18 +26,12 @@ constexpr size_t kMaximalNumberOfSamplesPerChannel = 480; constexpr float kAttackFilterConstant = 0.f; // Adaptive digital gain applier settings below. -constexpr float kMaxGainChangePerSecondDb = 3.f; -constexpr float kMaxGainChangePerFrameDb = - kMaxGainChangePerSecondDb * kFrameDurationMs / 1000.f; constexpr float kHeadroomDbfs = 1.f; constexpr float kMaxGainDb = 30.f; constexpr float kInitialAdaptiveDigitalGainDb = 8.f; // At what limiter levels should we start decreasing the adaptive digital gain. constexpr float kLimiterThresholdForAgcGainDbfs = -kHeadroomDbfs; -// This parameter must be tuned together with the noise estimator. -constexpr float kMaxNoiseLevelDbfs = -50.f; - // This is the threshold for speech. Speech frames are used for updating the // speech level, measuring the amount of speech, and decide when to allow target // gain reduction. @@ -49,9 +43,13 @@ constexpr float kFullBufferLeakFactor = 1.f - 1.f / kFullBufferSizeMs; constexpr float kInitialSpeechLevelEstimateDbfs = -30.f; +// Robust VAD probability and speech decisions. +constexpr float kDefaultSmoothedVadProbabilityAttack = 1.f; +constexpr int kDefaultLevelEstimatorAdjacentSpeechFramesThreshold = 1; + // Saturation Protector settings. 
-float GetInitialSaturationMarginDb(); -float GetExtraSaturationMarginOffsetDb(); +constexpr float kDefaultInitialSaturationMarginDb = 20.f; +constexpr float kDefaultExtraSaturationMarginDb = 2.f; constexpr size_t kPeakEnveloperSuperFrameLengthMs = 400; static_assert(kFullBufferSizeMs % kPeakEnveloperSuperFrameLengthMs == 0, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h index 61d1b005e..be7cbb3da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h @@ -13,7 +13,6 @@ #include "api/array_view.h" #include "modules/audio_processing/agc2/biquad_filter.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -22,6 +21,11 @@ class ApmDataDumper; class DownSampler { public: explicit DownSampler(ApmDataDumper* data_dumper); + + DownSampler() = delete; + DownSampler(const DownSampler&) = delete; + DownSampler& operator=(const DownSampler&) = delete; + void Initialize(int sample_rate_hz); void DownSample(rtc::ArrayView in, rtc::ArrayView out); @@ -31,8 +35,6 @@ class DownSampler { int sample_rate_hz_; int down_sampling_factor_; BiQuadFilter low_pass_filter_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DownSampler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.h index b22f9bb39..e9895f0b1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.h @@ -12,7 +12,6 @@ #define MODULES_AUDIO_PROCESSING_AGC2_NOISE_SPECTRUM_ESTIMATOR_H_ #include "api/array_view.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -21,6 +20,11 @@ class 
ApmDataDumper; class NoiseSpectrumEstimator { public: explicit NoiseSpectrumEstimator(ApmDataDumper* data_dumper); + + NoiseSpectrumEstimator() = delete; + NoiseSpectrumEstimator(const NoiseSpectrumEstimator&) = delete; + NoiseSpectrumEstimator& operator=(const NoiseSpectrumEstimator&) = delete; + void Initialize(); void Update(rtc::ArrayView spectrum, bool first_update); @@ -31,8 +35,6 @@ class NoiseSpectrumEstimator { private: ApmDataDumper* data_dumper_; float noise_spectrum_[65]; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(NoiseSpectrumEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc index d932c7806..431c01fab 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc @@ -20,7 +20,7 @@ namespace { constexpr int kAutoCorrelationFftOrder = 9; // Length-512 FFT. static_assert(1 << kAutoCorrelationFftOrder > - kNumInvertedLags12kHz + kBufSize12kHz - kMaxPitch12kHz, + kNumLags12kHz + kBufSize12kHz - kMaxPitch12kHz, ""); } // namespace @@ -45,15 +45,15 @@ AutoCorrelationCalculator::~AutoCorrelationCalculator() = default; // pitch period. 
void AutoCorrelationCalculator::ComputeOnPitchBuffer( rtc::ArrayView pitch_buf, - rtc::ArrayView auto_corr) { + rtc::ArrayView auto_corr) { RTC_DCHECK_LT(auto_corr.size(), kMaxPitch12kHz); RTC_DCHECK_GT(pitch_buf.size(), kMaxPitch12kHz); - constexpr size_t kFftFrameSize = 1 << kAutoCorrelationFftOrder; - constexpr size_t kConvolutionLength = kBufSize12kHz - kMaxPitch12kHz; + constexpr int kFftFrameSize = 1 << kAutoCorrelationFftOrder; + constexpr int kConvolutionLength = kBufSize12kHz - kMaxPitch12kHz; static_assert(kConvolutionLength == kFrameSize20ms12kHz, "Mismatch between pitch buffer size, frame size and maximum " "pitch period."); - static_assert(kFftFrameSize > kNumInvertedLags12kHz + kConvolutionLength, + static_assert(kFftFrameSize > kNumLags12kHz + kConvolutionLength, "The FFT length is not sufficiently big to avoid cyclic " "convolution errors."); auto tmp = tmp_->GetView(); @@ -67,13 +67,12 @@ void AutoCorrelationCalculator::ComputeOnPitchBuffer( // Compute the FFT for the sliding frames chunk. The sliding frames are // defined as pitch_buf[i:i+kConvolutionLength] where i in - // [0, kNumInvertedLags12kHz). The chunk includes all of them, hence it is - // defined as pitch_buf[:kNumInvertedLags12kHz+kConvolutionLength]. + // [0, kNumLags12kHz). The chunk includes all of them, hence it is + // defined as pitch_buf[:kNumLags12kHz+kConvolutionLength]. std::copy(pitch_buf.begin(), - pitch_buf.begin() + kConvolutionLength + kNumInvertedLags12kHz, + pitch_buf.begin() + kConvolutionLength + kNumLags12kHz, tmp.begin()); - std::fill(tmp.begin() + kNumInvertedLags12kHz + kConvolutionLength, tmp.end(), - 0.f); + std::fill(tmp.begin() + kNumLags12kHz + kConvolutionLength, tmp.end(), 0.f); fft_.ForwardTransform(*tmp_, X_.get(), /*ordered=*/false); // Convolve in the frequency domain. @@ -84,7 +83,7 @@ void AutoCorrelationCalculator::ComputeOnPitchBuffer( // Extract the auto-correlation coefficients. 
std::copy(tmp.begin() + kConvolutionLength - 1, - tmp.begin() + kConvolutionLength + kNumInvertedLags12kHz - 1, + tmp.begin() + kConvolutionLength + kNumLags12kHz - 1, auto_corr.begin()); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.h index de7f453bc..d58558ca2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.h @@ -34,7 +34,7 @@ class AutoCorrelationCalculator { // |auto_corr| indexes are inverted lags. void ComputeOnPitchBuffer( rtc::ArrayView pitch_buf, - rtc::ArrayView auto_corr); + rtc::ArrayView auto_corr); private: Pffft fft_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc index 744c87fea..5d76b52e5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc @@ -18,7 +18,7 @@ namespace rnn_vad { Optimization DetectOptimization() { #if defined(WEBRTC_ARCH_X86_FAMILY) - if (WebRtc_GetCPUInfo(kSSE2) != 0) { + if (GetCPUInfo(kSSE2) != 0) { return Optimization::kSse2; } #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h index c2e8df690..36b366ad1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h @@ -18,52 +18,58 @@ namespace rnn_vad { constexpr double kPi = 3.14159265358979323846; -constexpr size_t kSampleRate24kHz = 24000; -constexpr size_t kFrameSize10ms24kHz = kSampleRate24kHz / 100; -constexpr size_t 
kFrameSize20ms24kHz = kFrameSize10ms24kHz * 2; +constexpr int kSampleRate24kHz = 24000; +constexpr int kFrameSize10ms24kHz = kSampleRate24kHz / 100; +constexpr int kFrameSize20ms24kHz = kFrameSize10ms24kHz * 2; // Pitch buffer. -constexpr size_t kMinPitch24kHz = kSampleRate24kHz / 800; // 0.00125 s. -constexpr size_t kMaxPitch24kHz = kSampleRate24kHz / 62.5; // 0.016 s. -constexpr size_t kBufSize24kHz = kMaxPitch24kHz + kFrameSize20ms24kHz; +constexpr int kMinPitch24kHz = kSampleRate24kHz / 800; // 0.00125 s. +constexpr int kMaxPitch24kHz = kSampleRate24kHz / 62.5; // 0.016 s. +constexpr int kBufSize24kHz = kMaxPitch24kHz + kFrameSize20ms24kHz; static_assert((kBufSize24kHz & 1) == 0, "The buffer size must be even."); // 24 kHz analysis. // Define a higher minimum pitch period for the initial search. This is used to // avoid searching for very short periods, for which a refinement step is // responsible. -constexpr size_t kInitialMinPitch24kHz = 3 * kMinPitch24kHz; +constexpr int kInitialMinPitch24kHz = 3 * kMinPitch24kHz; static_assert(kMinPitch24kHz < kInitialMinPitch24kHz, ""); static_assert(kInitialMinPitch24kHz < kMaxPitch24kHz, ""); static_assert(kMaxPitch24kHz > kInitialMinPitch24kHz, ""); -constexpr size_t kNumInvertedLags24kHz = kMaxPitch24kHz - kInitialMinPitch24kHz; +// Number of (inverted) lags during the initial pitch search phase at 24 kHz. +constexpr int kInitialNumLags24kHz = kMaxPitch24kHz - kInitialMinPitch24kHz; +// Number of (inverted) lags during the pitch search refinement phase at 24 kHz. +constexpr int kRefineNumLags24kHz = kMaxPitch24kHz + 1; +static_assert( + kRefineNumLags24kHz > kInitialNumLags24kHz, + "The refinement step must search the pitch in an extended pitch range."); // 12 kHz analysis. 
-constexpr size_t kSampleRate12kHz = 12000; -constexpr size_t kFrameSize10ms12kHz = kSampleRate12kHz / 100; -constexpr size_t kFrameSize20ms12kHz = kFrameSize10ms12kHz * 2; -constexpr size_t kBufSize12kHz = kBufSize24kHz / 2; -constexpr size_t kInitialMinPitch12kHz = kInitialMinPitch24kHz / 2; -constexpr size_t kMaxPitch12kHz = kMaxPitch24kHz / 2; +constexpr int kSampleRate12kHz = 12000; +constexpr int kFrameSize10ms12kHz = kSampleRate12kHz / 100; +constexpr int kFrameSize20ms12kHz = kFrameSize10ms12kHz * 2; +constexpr int kBufSize12kHz = kBufSize24kHz / 2; +constexpr int kInitialMinPitch12kHz = kInitialMinPitch24kHz / 2; +constexpr int kMaxPitch12kHz = kMaxPitch24kHz / 2; static_assert(kMaxPitch12kHz > kInitialMinPitch12kHz, ""); // The inverted lags for the pitch interval [|kInitialMinPitch12kHz|, -// |kMaxPitch12kHz|] are in the range [0, |kNumInvertedLags12kHz|]. -constexpr size_t kNumInvertedLags12kHz = kMaxPitch12kHz - kInitialMinPitch12kHz; +// |kMaxPitch12kHz|] are in the range [0, |kNumLags12kHz|]. +constexpr int kNumLags12kHz = kMaxPitch12kHz - kInitialMinPitch12kHz; // 48 kHz constants. -constexpr size_t kMinPitch48kHz = kMinPitch24kHz * 2; -constexpr size_t kMaxPitch48kHz = kMaxPitch24kHz * 2; +constexpr int kMinPitch48kHz = kMinPitch24kHz * 2; +constexpr int kMaxPitch48kHz = kMaxPitch24kHz * 2; // Spectral features. 
-constexpr size_t kNumBands = 22; -constexpr size_t kNumLowerBands = 6; +constexpr int kNumBands = 22; +constexpr int kNumLowerBands = 6; static_assert((0 < kNumLowerBands) && (kNumLowerBands < kNumBands), ""); -constexpr size_t kCepstralCoeffsHistorySize = 8; +constexpr int kCepstralCoeffsHistorySize = 8; static_assert(kCepstralCoeffsHistorySize > 2, "The history size must at least be 3 to compute first and second " "derivatives."); -constexpr size_t kFeatureVectorSize = 42; +constexpr int kFeatureVectorSize = 42; enum class Optimization { kNone, kSse2, kNeon }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc index e9351797f..cdbbbc311 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc @@ -67,13 +67,12 @@ bool FeaturesExtractor::CheckSilenceComputeFeatures( ComputeLpResidual(lpc_coeffs, pitch_buf_24kHz_view_, lp_residual_view_); // Estimate pitch on the LP-residual and write the normalized pitch period // into the output vector (normalization based on training data stats). - pitch_info_48kHz_ = pitch_estimator_.Estimate(lp_residual_view_); - feature_vector[kFeatureVectorSize - 2] = - 0.01f * (static_cast(pitch_info_48kHz_.period) - 300); + pitch_period_48kHz_ = pitch_estimator_.Estimate(lp_residual_view_); + feature_vector[kFeatureVectorSize - 2] = 0.01f * (pitch_period_48kHz_ - 300); // Extract lagged frames (according to the estimated pitch period). 
- RTC_DCHECK_LE(pitch_info_48kHz_.period / 2, kMaxPitch24kHz); + RTC_DCHECK_LE(pitch_period_48kHz_ / 2, kMaxPitch24kHz); auto lagged_frame = pitch_buf_24kHz_view_.subview( - kMaxPitch24kHz - pitch_info_48kHz_.period / 2, kFrameSize20ms24kHz); + kMaxPitch24kHz - pitch_period_48kHz_ / 2, kFrameSize20ms24kHz); // Analyze reference and lagged frames checking if silence has been detected // and write the feature vector. return spectral_features_extractor_.CheckSilenceComputeFeatures( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h index ce5cce185..e2c77d2cf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h @@ -16,7 +16,6 @@ #include "api/array_view.h" #include "modules/audio_processing/agc2/biquad_filter.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" -#include "modules/audio_processing/agc2/rnn_vad/pitch_info.h" #include "modules/audio_processing/agc2/rnn_vad/pitch_search.h" #include "modules/audio_processing/agc2/rnn_vad/sequence_buffer.h" #include "modules/audio_processing/agc2/rnn_vad/spectral_features.h" @@ -53,7 +52,7 @@ class FeaturesExtractor { PitchEstimator pitch_estimator_; rtc::ArrayView reference_frame_view_; SpectralFeaturesExtractor spectral_features_extractor_; - PitchInfo pitch_info_48kHz_; + int pitch_period_48kHz_; }; } // namespace rnn_vad diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.cc index 1a124a349..c553aa2ad 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.cc @@ -16,27 +16,23 @@ 
#include #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" namespace webrtc { namespace rnn_vad { namespace { -// Computes cross-correlation coefficients between |x| and |y| and writes them -// in |x_corr|. The lag values are in {0, ..., max_lag - 1}, where max_lag -// equals the size of |x_corr|. -// The |x| and |y| sub-arrays used to compute a cross-correlation coefficients -// for a lag l have both size "size of |x| - l" - i.e., the longest sub-array is -// used. |x| and |y| must have the same size. -void ComputeCrossCorrelation( +// Computes auto-correlation coefficients for |x| and writes them in +// |auto_corr|. The lag values are in {0, ..., max_lag - 1}, where max_lag +// equals the size of |auto_corr|. +void ComputeAutoCorrelation( rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView x_corr) { - constexpr size_t max_lag = x_corr.size(); - RTC_DCHECK_EQ(x.size(), y.size()); + rtc::ArrayView auto_corr) { + constexpr int max_lag = auto_corr.size(); RTC_DCHECK_LT(max_lag, x.size()); - for (size_t lag = 0; lag < max_lag; ++lag) { - x_corr[lag] = - std::inner_product(x.begin(), x.end() - lag, y.begin() + lag, 0.f); + for (int lag = 0; lag < max_lag; ++lag) { + auto_corr[lag] = + std::inner_product(x.begin(), x.end() - lag, x.begin() + lag, 0.f); } } @@ -45,9 +41,13 @@ void DenoiseAutoCorrelation( rtc::ArrayView auto_corr) { // Assume -40 dB white noise floor. auto_corr[0] *= 1.0001f; - for (size_t i = 1; i < kNumLpcCoefficients; ++i) { - auto_corr[i] -= auto_corr[i] * (0.008f * i) * (0.008f * i); - } + // Hard-coded values obtained as + // [np.float32((0.008*0.008*i*i)) for i in range(1,5)]. 
+ auto_corr[1] -= auto_corr[1] * 0.000064f; + auto_corr[2] -= auto_corr[2] * 0.000256f; + auto_corr[3] -= auto_corr[3] * 0.000576f; + auto_corr[4] -= auto_corr[4] * 0.001024f; + static_assert(kNumLpcCoefficients == 5, "Update `auto_corr`."); } // Computes the initial inverse filter coefficients given the auto-correlation @@ -56,9 +56,9 @@ void ComputeInitialInverseFilterCoefficients( rtc::ArrayView auto_corr, rtc::ArrayView lpc_coeffs) { float error = auto_corr[0]; - for (size_t i = 0; i < kNumLpcCoefficients - 1; ++i) { + for (int i = 0; i < kNumLpcCoefficients - 1; ++i) { float reflection_coeff = 0.f; - for (size_t j = 0; j < i; ++j) { + for (int j = 0; j < i; ++j) { reflection_coeff += lpc_coeffs[j] * auto_corr[i - j]; } reflection_coeff += auto_corr[i + 1]; @@ -72,7 +72,7 @@ void ComputeInitialInverseFilterCoefficients( reflection_coeff /= -error; // Update LPC coefficients and total error. lpc_coeffs[i] = reflection_coeff; - for (size_t j = 0; j<(i + 1)>> 1; ++j) { + for (int j = 0; j < ((i + 1) >> 1); ++j) { const float tmp1 = lpc_coeffs[j]; const float tmp2 = lpc_coeffs[i - 1 - j]; lpc_coeffs[j] = tmp1 + reflection_coeff * tmp2; @@ -91,46 +91,49 @@ void ComputeAndPostProcessLpcCoefficients( rtc::ArrayView x, rtc::ArrayView lpc_coeffs) { std::array auto_corr; - ComputeCrossCorrelation(x, x, {auto_corr.data(), auto_corr.size()}); + ComputeAutoCorrelation(x, auto_corr); if (auto_corr[0] == 0.f) { // Empty frame. std::fill(lpc_coeffs.begin(), lpc_coeffs.end(), 0); return; } - DenoiseAutoCorrelation({auto_corr.data(), auto_corr.size()}); + DenoiseAutoCorrelation(auto_corr); std::array lpc_coeffs_pre{}; ComputeInitialInverseFilterCoefficients(auto_corr, lpc_coeffs_pre); // LPC coefficients post-processing. // TODO(bugs.webrtc.org/9076): Consider removing these steps. 
- float c1 = 1.f; - for (size_t i = 0; i < kNumLpcCoefficients - 1; ++i) { - c1 *= 0.9f; - lpc_coeffs_pre[i] *= c1; - } - const float c2 = 0.8f; - lpc_coeffs[0] = lpc_coeffs_pre[0] + c2; - lpc_coeffs[1] = lpc_coeffs_pre[1] + c2 * lpc_coeffs_pre[0]; - lpc_coeffs[2] = lpc_coeffs_pre[2] + c2 * lpc_coeffs_pre[1]; - lpc_coeffs[3] = lpc_coeffs_pre[3] + c2 * lpc_coeffs_pre[2]; - lpc_coeffs[4] = c2 * lpc_coeffs_pre[3]; + lpc_coeffs_pre[0] *= 0.9f; + lpc_coeffs_pre[1] *= 0.9f * 0.9f; + lpc_coeffs_pre[2] *= 0.9f * 0.9f * 0.9f; + lpc_coeffs_pre[3] *= 0.9f * 0.9f * 0.9f * 0.9f; + constexpr float kC = 0.8f; + lpc_coeffs[0] = lpc_coeffs_pre[0] + kC; + lpc_coeffs[1] = lpc_coeffs_pre[1] + kC * lpc_coeffs_pre[0]; + lpc_coeffs[2] = lpc_coeffs_pre[2] + kC * lpc_coeffs_pre[1]; + lpc_coeffs[3] = lpc_coeffs_pre[3] + kC * lpc_coeffs_pre[2]; + lpc_coeffs[4] = kC * lpc_coeffs_pre[3]; + static_assert(kNumLpcCoefficients == 5, "Update `lpc_coeffs(_pre)`."); } void ComputeLpResidual( rtc::ArrayView lpc_coeffs, rtc::ArrayView x, rtc::ArrayView y) { - RTC_DCHECK_LT(kNumLpcCoefficients, x.size()); + RTC_DCHECK_GT(x.size(), kNumLpcCoefficients); RTC_DCHECK_EQ(x.size(), y.size()); - std::array input_chunk; - input_chunk.fill(0.f); - for (size_t i = 0; i < y.size(); ++i) { - const float sum = std::inner_product(input_chunk.begin(), input_chunk.end(), - lpc_coeffs.begin(), x[i]); - // Circular shift and add a new sample. - for (size_t j = kNumLpcCoefficients - 1; j > 0; --j) - input_chunk[j] = input_chunk[j - 1]; - input_chunk[0] = x[i]; - // Copy result. - y[i] = sum; + // The code below implements the following operation: + // y[i] = x[i] + dot_product({x[i], ..., x[i - kNumLpcCoefficients + 1]}, + // lpc_coeffs) + // Edge case: i < kNumLpcCoefficients. + y[0] = x[0]; + for (int i = 1; i < kNumLpcCoefficients; ++i) { + y[i] = + std::inner_product(x.crend() - i, x.crend(), lpc_coeffs.cbegin(), x[i]); + } + // Regular case. 
+ auto last = x.crend(); + for (int i = kNumLpcCoefficients; rtc::SafeLt(i, y.size()); ++i, --last) { + y[i] = std::inner_product(last - kNumLpcCoefficients, last, + lpc_coeffs.cbegin(), x[i]); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h index cddedca5d..2e54dd93d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h @@ -19,7 +19,7 @@ namespace webrtc { namespace rnn_vad { // LPC inverse filter length. -constexpr size_t kNumLpcCoefficients = 5; +constexpr int kNumLpcCoefficients = 5; // Given a frame |x|, computes a post-processed version of LPC coefficients // tailored for pitch estimation. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_info.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_info.h deleted file mode 100644 index c9fdd182b..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_info.h +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_PITCH_INFO_H_ -#define MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_PITCH_INFO_H_ - -namespace webrtc { -namespace rnn_vad { - -// Stores pitch period and gain information. The pitch gain measures the -// strength of the pitch (the higher, the stronger). 
-struct PitchInfo { - PitchInfo() : period(0), gain(0.f) {} - PitchInfo(int p, float g) : period(p), gain(g) {} - int period; - float gain; -}; - -} // namespace rnn_vad -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_PITCH_INFO_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc index 1b3b459c5..c6c3e1b2b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc @@ -19,37 +19,48 @@ namespace webrtc { namespace rnn_vad { PitchEstimator::PitchEstimator() - : pitch_buf_decimated_(kBufSize12kHz), - pitch_buf_decimated_view_(pitch_buf_decimated_.data(), kBufSize12kHz), - auto_corr_(kNumInvertedLags12kHz), - auto_corr_view_(auto_corr_.data(), kNumInvertedLags12kHz) { - RTC_DCHECK_EQ(kBufSize12kHz, pitch_buf_decimated_.size()); - RTC_DCHECK_EQ(kNumInvertedLags12kHz, auto_corr_view_.size()); -} + : y_energy_24kHz_(kRefineNumLags24kHz, 0.f), + pitch_buffer_12kHz_(kBufSize12kHz), + auto_correlation_12kHz_(kNumLags12kHz) {} PitchEstimator::~PitchEstimator() = default; -PitchInfo PitchEstimator::Estimate( - rtc::ArrayView pitch_buf) { +int PitchEstimator::Estimate( + rtc::ArrayView pitch_buffer) { + rtc::ArrayView pitch_buffer_12kHz_view( + pitch_buffer_12kHz_.data(), kBufSize12kHz); + RTC_DCHECK_EQ(pitch_buffer_12kHz_.size(), pitch_buffer_12kHz_view.size()); + rtc::ArrayView auto_correlation_12kHz_view( + auto_correlation_12kHz_.data(), kNumLags12kHz); + RTC_DCHECK_EQ(auto_correlation_12kHz_.size(), + auto_correlation_12kHz_view.size()); + // Perform the initial pitch search at 12 kHz. 
- Decimate2x(pitch_buf, pitch_buf_decimated_view_); - auto_corr_calculator_.ComputeOnPitchBuffer(pitch_buf_decimated_view_, - auto_corr_view_); - std::array pitch_candidates_inv_lags = FindBestPitchPeriods( - auto_corr_view_, pitch_buf_decimated_view_, kMaxPitch12kHz); - // Refine the pitch period estimation. + Decimate2x(pitch_buffer, pitch_buffer_12kHz_view); + auto_corr_calculator_.ComputeOnPitchBuffer(pitch_buffer_12kHz_view, + auto_correlation_12kHz_view); + CandidatePitchPeriods pitch_periods = ComputePitchPeriod12kHz( + pitch_buffer_12kHz_view, auto_correlation_12kHz_view); // The refinement is done using the pitch buffer that contains 24 kHz samples. // Therefore, adapt the inverted lags in |pitch_candidates_inv_lags| from 12 // to 24 kHz. - pitch_candidates_inv_lags[0] *= 2; - pitch_candidates_inv_lags[1] *= 2; - size_t pitch_inv_lag_48kHz = - RefinePitchPeriod48kHz(pitch_buf, pitch_candidates_inv_lags); - // Look for stronger harmonics to find the final pitch period and its gain. - RTC_DCHECK_LT(pitch_inv_lag_48kHz, kMaxPitch48kHz); - last_pitch_48kHz_ = CheckLowerPitchPeriodsAndComputePitchGain( - pitch_buf, kMaxPitch48kHz - pitch_inv_lag_48kHz, last_pitch_48kHz_); - return last_pitch_48kHz_; + pitch_periods.best *= 2; + pitch_periods.second_best *= 2; + + // Refine the initial pitch period estimation from 12 kHz to 48 kHz. + // Pre-compute frame energies at 24 kHz. + rtc::ArrayView y_energy_24kHz_view( + y_energy_24kHz_.data(), kRefineNumLags24kHz); + RTC_DCHECK_EQ(y_energy_24kHz_.size(), y_energy_24kHz_view.size()); + ComputeSlidingFrameSquareEnergies24kHz(pitch_buffer, y_energy_24kHz_view); + // Estimation at 48 kHz. 
+ const int pitch_lag_48kHz = + ComputePitchPeriod48kHz(pitch_buffer, y_energy_24kHz_view, pitch_periods); + last_pitch_48kHz_ = ComputeExtendedPitchPeriod48kHz( + pitch_buffer, y_energy_24kHz_view, + /*initial_pitch_period_48kHz=*/kMaxPitch48kHz - pitch_lag_48kHz, + last_pitch_48kHz_); + return last_pitch_48kHz_.period; } } // namespace rnn_vad diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h index 74133d073..e96a2dcaf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h @@ -17,8 +17,8 @@ #include "api/array_view.h" #include "modules/audio_processing/agc2/rnn_vad/auto_correlation.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" -#include "modules/audio_processing/agc2/rnn_vad/pitch_info.h" #include "modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h" +#include "rtc_base/gtest_prod_util.h" namespace webrtc { namespace rnn_vad { @@ -30,17 +30,20 @@ class PitchEstimator { PitchEstimator(const PitchEstimator&) = delete; PitchEstimator& operator=(const PitchEstimator&) = delete; ~PitchEstimator(); - // Estimates the pitch period and gain. Returns the pitch estimation data for - // 48 kHz. - PitchInfo Estimate(rtc::ArrayView pitch_buf); + // Returns the estimated pitch period at 48 kHz. 
+ int Estimate(rtc::ArrayView pitch_buffer); private: - PitchInfo last_pitch_48kHz_; + FRIEND_TEST_ALL_PREFIXES(RnnVadTest, PitchSearchWithinTolerance); + float GetLastPitchStrengthForTesting() const { + return last_pitch_48kHz_.strength; + } + + PitchInfo last_pitch_48kHz_{}; AutoCorrelationCalculator auto_corr_calculator_; - std::vector pitch_buf_decimated_; - rtc::ArrayView pitch_buf_decimated_view_; - std::vector auto_corr_; - rtc::ArrayView auto_corr_view_; + std::vector y_energy_24kHz_; + std::vector pitch_buffer_12kHz_; + std::vector auto_correlation_12kHz_; }; } // namespace rnn_vad diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc index f24a76f7b..262c38645 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc @@ -19,102 +19,77 @@ #include "modules/audio_processing/agc2/rnn_vad/common.h" #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" +#include "rtc_base/numerics/safe_conversions.h" namespace webrtc { namespace rnn_vad { namespace { -// Converts a lag to an inverted lag (only for 24kHz). 
-size_t GetInvertedLag(size_t lag) { - RTC_DCHECK_LE(lag, kMaxPitch24kHz); - return kMaxPitch24kHz - lag; -} - -float ComputeAutoCorrelationCoeff(rtc::ArrayView pitch_buf, - size_t inv_lag, - size_t max_pitch_period) { - RTC_DCHECK_LT(inv_lag, pitch_buf.size()); - RTC_DCHECK_LT(max_pitch_period, pitch_buf.size()); - RTC_DCHECK_LE(inv_lag, max_pitch_period); +float ComputeAutoCorrelation( + int inverted_lag, + rtc::ArrayView pitch_buffer) { + RTC_DCHECK_LT(inverted_lag, kBufSize24kHz); + RTC_DCHECK_LT(inverted_lag, kRefineNumLags24kHz); + static_assert(kMaxPitch24kHz < kBufSize24kHz, ""); // TODO(bugs.webrtc.org/9076): Maybe optimize using vectorization. - return std::inner_product(pitch_buf.begin() + max_pitch_period, - pitch_buf.end(), pitch_buf.begin() + inv_lag, 0.f); + return std::inner_product(pitch_buffer.begin() + kMaxPitch24kHz, + pitch_buffer.end(), + pitch_buffer.begin() + inverted_lag, 0.f); } -// Computes a pseudo-interpolation offset for an estimated pitch period |lag| by -// looking at the auto-correlation coefficients in the neighborhood of |lag|. -// (namely, |prev_auto_corr|, |lag_auto_corr| and |next_auto_corr|). The output -// is a lag in {-1, 0, +1}. -// TODO(bugs.webrtc.org/9076): Consider removing pseudo-i since it -// is relevant only if the spectral analysis works at a sample rate that is -// twice as that of the pitch buffer (not so important instead for the estimated -// pitch period feature fed into the RNN). -int GetPitchPseudoInterpolationOffset(size_t lag, - float prev_auto_corr, - float lag_auto_corr, - float next_auto_corr) { - const float& a = prev_auto_corr; - const float& b = lag_auto_corr; - const float& c = next_auto_corr; - - int offset = 0; - if ((c - a) > 0.7f * (b - a)) { - offset = 1; // |c| is the largest auto-correlation coefficient. - } else if ((a - c) > 0.7f * (b - c)) { - offset = -1; // |a| is the largest auto-correlation coefficient. 
+// Given an auto-correlation coefficient `curr_auto_correlation` and its +// neighboring values `prev_auto_correlation` and `next_auto_correlation` +// computes a pseudo-interpolation offset to be applied to the pitch period +// associated to `curr`. The output is a lag in {-1, 0, +1}. +// TODO(bugs.webrtc.org/9076): Consider removing this method. +// `GetPitchPseudoInterpolationOffset()` it is relevant only if the spectral +// analysis works at a sample rate that is twice as that of the pitch buffer; +// In particular, it is not relevant for the estimated pitch period feature fed +// into the RNN. +int GetPitchPseudoInterpolationOffset(float prev_auto_correlation, + float curr_auto_correlation, + float next_auto_correlation) { + if ((next_auto_correlation - prev_auto_correlation) > + 0.7f * (curr_auto_correlation - prev_auto_correlation)) { + return 1; // |next_auto_correlation| is the largest auto-correlation + // coefficient. + } else if ((prev_auto_correlation - next_auto_correlation) > + 0.7f * (curr_auto_correlation - next_auto_correlation)) { + return -1; // |prev_auto_correlation| is the largest auto-correlation + // coefficient. } - return offset; + return 0; } // Refines a pitch period |lag| encoded as lag with pseudo-interpolation. The // output sample rate is twice as that of |lag|. -size_t PitchPseudoInterpolationLagPitchBuf( - size_t lag, - rtc::ArrayView pitch_buf) { +int PitchPseudoInterpolationLagPitchBuf( + int lag, + rtc::ArrayView pitch_buffer) { int offset = 0; // Cannot apply pseudo-interpolation at the boundaries. 
if (lag > 0 && lag < kMaxPitch24kHz) { + const int inverted_lag = kMaxPitch24kHz - lag; offset = GetPitchPseudoInterpolationOffset( - lag, - ComputeAutoCorrelationCoeff(pitch_buf, GetInvertedLag(lag - 1), - kMaxPitch24kHz), - ComputeAutoCorrelationCoeff(pitch_buf, GetInvertedLag(lag), - kMaxPitch24kHz), - ComputeAutoCorrelationCoeff(pitch_buf, GetInvertedLag(lag + 1), - kMaxPitch24kHz)); + ComputeAutoCorrelation(inverted_lag + 1, pitch_buffer), + ComputeAutoCorrelation(inverted_lag, pitch_buffer), + ComputeAutoCorrelation(inverted_lag - 1, pitch_buffer)); } return 2 * lag + offset; } -// Refines a pitch period |inv_lag| encoded as inverted lag with -// pseudo-interpolation. The output sample rate is twice as that of -// |inv_lag|. -size_t PitchPseudoInterpolationInvLagAutoCorr( - size_t inv_lag, - rtc::ArrayView auto_corr) { - int offset = 0; - // Cannot apply pseudo-interpolation at the boundaries. - if (inv_lag > 0 && inv_lag < auto_corr.size() - 1) { - offset = GetPitchPseudoInterpolationOffset(inv_lag, auto_corr[inv_lag + 1], - auto_corr[inv_lag], - auto_corr[inv_lag - 1]); - } - // TODO(bugs.webrtc.org/9076): When retraining, check if |offset| below should - // be subtracted since |inv_lag| is an inverted lag but offset is a lag. - return 2 * inv_lag + offset; -} - -// Integer multipliers used in CheckLowerPitchPeriodsAndComputePitchGain() when +// Integer multipliers used in ComputeExtendedPitchPeriod48kHz() when // looking for sub-harmonics. // The values have been chosen to serve the following algorithm. Given the // initial pitch period T, we examine whether one of its harmonics is the true // fundamental frequency. We consider T/k with k in {2, ..., 15}. For each of -// these harmonics, in addition to the pitch gain of itself, we choose one +// these harmonics, in addition to the pitch strength of itself, we choose one // multiple of its pitch period, n*T/k, to validate it (by averaging their pitch -// gains). 
The multiplier n is chosen so that n*T/k is used only one time over -// all k. When for example k = 4, we should also expect a peak at 3*T/4. When -// k = 8 instead we don't want to look at 2*T/8, since we have already checked -// T/4 before. Instead, we look at T*3/8. +// strengths). The multiplier n is chosen so that n*T/k is used only one time +// over all k. When for example k = 4, we should also expect a peak at 3*T/4. +// When k = 8 instead we don't want to look at 2*T/8, since we have already +// checked T/4 before. Instead, we look at T*3/8. // The array can be generate in Python as follows: // from fractions import Fraction // # Smallest positive integer not in X. @@ -131,96 +106,215 @@ size_t PitchPseudoInterpolationInvLagAutoCorr( constexpr std::array kSubHarmonicMultipliers = { {3, 2, 3, 2, 5, 2, 3, 2, 3, 2, 5, 2, 3, 2}}; -// Initial pitch period candidate thresholds for ComputePitchGainThreshold() for -// a sample rate of 24 kHz. Computed as [5*k*k for k in range(16)]. -constexpr std::array kInitialPitchPeriodThresholds = { - {20, 45, 80, 125, 180, 245, 320, 405, 500, 605, 720, 845, 980, 1125}}; +struct Range { + int min; + int max; +}; + +// Number of analyzed pitches to the left(right) of a pitch candidate. +constexpr int kPitchNeighborhoodRadius = 2; + +// Creates a pitch period interval centered in `inverted_lag` with hard-coded +// radius. Clipping is applied so that the interval is always valid for a 24 kHz +// pitch buffer. +Range CreateInvertedLagRange(int inverted_lag) { + return {std::max(inverted_lag - kPitchNeighborhoodRadius, 0), + std::min(inverted_lag + kPitchNeighborhoodRadius, + kInitialNumLags24kHz - 1)}; +} + +constexpr int kNumPitchCandidates = 2; // Best and second best. +// Maximum number of analyzed pitch periods. +constexpr int kMaxPitchPeriods24kHz = + kNumPitchCandidates * (2 * kPitchNeighborhoodRadius + 1); + +// Collection of inverted lags. 
+class InvertedLagsIndex { + public: + InvertedLagsIndex() : num_entries_(0) {} + // Adds an inverted lag to the index. Cannot add more than + // `kMaxPitchPeriods24kHz` values. + void Append(int inverted_lag) { + RTC_DCHECK_LT(num_entries_, kMaxPitchPeriods24kHz); + inverted_lags_[num_entries_++] = inverted_lag; + } + const int* data() const { return inverted_lags_.data(); } + int size() const { return num_entries_; } + + private: + std::array inverted_lags_; + int num_entries_; +}; + +// Computes the auto correlation coefficients for the inverted lags in the +// closed interval `inverted_lags`. Updates `inverted_lags_index` by appending +// the inverted lags for the computed auto correlation values. +void ComputeAutoCorrelation( + Range inverted_lags, + rtc::ArrayView pitch_buffer, + rtc::ArrayView auto_correlation, + InvertedLagsIndex& inverted_lags_index) { + // Check valid range. + RTC_DCHECK_LE(inverted_lags.min, inverted_lags.max); + // Trick to avoid zero initialization of `auto_correlation`. + // Needed by the pseudo-interpolation. + if (inverted_lags.min > 0) { + auto_correlation[inverted_lags.min - 1] = 0.f; + } + if (inverted_lags.max < kInitialNumLags24kHz - 1) { + auto_correlation[inverted_lags.max + 1] = 0.f; + } + // Check valid `inverted_lag` indexes. + RTC_DCHECK_GE(inverted_lags.min, 0); + RTC_DCHECK_LT(inverted_lags.max, kInitialNumLags24kHz); + for (int inverted_lag = inverted_lags.min; inverted_lag <= inverted_lags.max; + ++inverted_lag) { + auto_correlation[inverted_lag] = + ComputeAutoCorrelation(inverted_lag, pitch_buffer); + inverted_lags_index.Append(inverted_lag); + } +} + +// Searches the strongest pitch period at 24 kHz and returns its inverted lag at +// 48 kHz. 
+int ComputePitchPeriod48kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView inverted_lags, + rtc::ArrayView auto_correlation, + rtc::ArrayView y_energy) { + static_assert(kMaxPitch24kHz > kInitialNumLags24kHz, ""); + static_assert(kMaxPitch24kHz < kBufSize24kHz, ""); + int best_inverted_lag = 0; // Pitch period. + float best_numerator = -1.f; // Pitch strength numerator. + float best_denominator = 0.f; // Pitch strength denominator. + for (int inverted_lag : inverted_lags) { + // A pitch candidate must have positive correlation. + if (auto_correlation[inverted_lag] > 0.f) { + // Auto-correlation energy normalized by frame energy. + const float numerator = + auto_correlation[inverted_lag] * auto_correlation[inverted_lag]; + const float denominator = y_energy[inverted_lag]; + // Compare numerator/denominator ratios without using divisions. + if (numerator * best_denominator > best_numerator * denominator) { + best_inverted_lag = inverted_lag; + best_numerator = numerator; + best_denominator = denominator; + } + } + } + // Pseudo-interpolation to transform `best_inverted_lag` (24 kHz pitch) to a + // 48 kHz pitch period. + if (best_inverted_lag == 0 || best_inverted_lag >= kInitialNumLags24kHz - 1) { + // Cannot apply pseudo-interpolation at the boundaries. + return best_inverted_lag * 2; + } + int offset = GetPitchPseudoInterpolationOffset( + auto_correlation[best_inverted_lag + 1], + auto_correlation[best_inverted_lag], + auto_correlation[best_inverted_lag - 1]); + // TODO(bugs.webrtc.org/9076): When retraining, check if |offset| below should + // be subtracted since |inverted_lag| is an inverted lag but offset is a lag. + return 2 * best_inverted_lag + offset; +} + +// Returns an alternative pitch period for `pitch_period` given a `multiplier` +// and a `divisor` of the period. +constexpr int GetAlternativePitchPeriod(int pitch_period, + int multiplier, + int divisor) { + RTC_DCHECK_GT(divisor, 0); + // Same as `round(multiplier * pitch_period / divisor)`. 
+ return (2 * multiplier * pitch_period + divisor) / (2 * divisor); +} + +// Returns true if the alternative pitch period is stronger than the initial one +// given the last estimated pitch and the value of `period_divisor` used to +// compute the alternative pitch period via `GetAlternativePitchPeriod()`. +bool IsAlternativePitchStrongerThanInitial(PitchInfo last, + PitchInfo initial, + PitchInfo alternative, + int period_divisor) { + // Initial pitch period candidate thresholds for a sample rate of 24 kHz. + // Computed as [5*k*k for k in range(16)]. + constexpr std::array kInitialPitchPeriodThresholds = { + {20, 45, 80, 125, 180, 245, 320, 405, 500, 605, 720, 845, 980, 1125}}; + static_assert( + kInitialPitchPeriodThresholds.size() == kSubHarmonicMultipliers.size(), + ""); + RTC_DCHECK_GE(last.period, 0); + RTC_DCHECK_GE(initial.period, 0); + RTC_DCHECK_GE(alternative.period, 0); + RTC_DCHECK_GE(period_divisor, 2); + // Compute a term that lowers the threshold when |alternative.period| is close + // to the last estimated period |last.period| - i.e., pitch tracking. + float lower_threshold_term = 0.f; + if (std::abs(alternative.period - last.period) <= 1) { + // The candidate pitch period is within 1 sample from the last one. + // Make the candidate at |alternative.period| very easy to be accepted. + lower_threshold_term = last.strength; + } else if (std::abs(alternative.period - last.period) == 2 && + initial.period > + kInitialPitchPeriodThresholds[period_divisor - 2]) { + // The candidate pitch period is 2 samples far from the last one and the + // period |initial.period| (from which |alternative.period| has been + // derived) is greater than a threshold. Make |alternative.period| easy to + // be accepted. + lower_threshold_term = 0.5f * last.strength; + } + // Set the threshold based on the strength of the initial estimate + // |initial.period|. 
Also reduce the chance of false positives caused by a + // bias towards high frequencies (originating from short-term correlations). + float threshold = + std::max(0.3f, 0.7f * initial.strength - lower_threshold_term); + if (alternative.period < 3 * kMinPitch24kHz) { + // High frequency. + threshold = std::max(0.4f, 0.85f * initial.strength - lower_threshold_term); + } else if (alternative.period < 2 * kMinPitch24kHz) { + // Even higher frequency. + threshold = std::max(0.5f, 0.9f * initial.strength - lower_threshold_term); + } + return alternative.strength > threshold; +} } // namespace void Decimate2x(rtc::ArrayView src, rtc::ArrayView dst) { // TODO(bugs.webrtc.org/9076): Consider adding anti-aliasing filter. - static_assert(2 * dst.size() == src.size(), ""); - for (size_t i = 0; i < dst.size(); ++i) { + static_assert(2 * kBufSize12kHz == kBufSize24kHz, ""); + for (int i = 0; i < kBufSize12kHz; ++i) { dst[i] = src[2 * i]; } } -float ComputePitchGainThreshold(int candidate_pitch_period, - int pitch_period_ratio, - int initial_pitch_period, - float initial_pitch_gain, - int prev_pitch_period, - float prev_pitch_gain) { - // Map arguments to more compact aliases. - const int& t1 = candidate_pitch_period; - const int& k = pitch_period_ratio; - const int& t0 = initial_pitch_period; - const float& g0 = initial_pitch_gain; - const int& t_prev = prev_pitch_period; - const float& g_prev = prev_pitch_gain; - - // Validate input. - RTC_DCHECK_GE(t1, 0); - RTC_DCHECK_GE(k, 2); - RTC_DCHECK_GE(t0, 0); - RTC_DCHECK_GE(t_prev, 0); - - // Compute a term that lowers the threshold when |t1| is close to the last - // estimated period |t_prev| - i.e., pitch tracking. - float lower_threshold_term = 0; - if (abs(t1 - t_prev) <= 1) { - // The candidate pitch period is within 1 sample from the previous one. - // Make the candidate at |t1| very easy to be accepted. 
- lower_threshold_term = g_prev; - } else if (abs(t1 - t_prev) == 2 && - t0 > kInitialPitchPeriodThresholds[k - 2]) { - // The candidate pitch period is 2 samples far from the previous one and the - // period |t0| (from which |t1| has been derived) is greater than a - // threshold. Make |t1| easy to be accepted. - lower_threshold_term = 0.5f * g_prev; - } - // Set the threshold based on the gain of the initial estimate |t0|. Also - // reduce the chance of false positives caused by a bias towards high - // frequencies (originating from short-term correlations). - float threshold = std::max(0.3f, 0.7f * g0 - lower_threshold_term); - if (static_cast(t1) < 3 * kMinPitch24kHz) { - // High frequency. - threshold = std::max(0.4f, 0.85f * g0 - lower_threshold_term); - } else if (static_cast(t1) < 2 * kMinPitch24kHz) { - // Even higher frequency. - threshold = std::max(0.5f, 0.9f * g0 - lower_threshold_term); - } - return threshold; -} - -void ComputeSlidingFrameSquareEnergies( - rtc::ArrayView pitch_buf, - rtc::ArrayView yy_values) { - float yy = - ComputeAutoCorrelationCoeff(pitch_buf, kMaxPitch24kHz, kMaxPitch24kHz); - yy_values[0] = yy; - for (size_t i = 1; i < yy_values.size(); ++i) { - RTC_DCHECK_LE(i, kMaxPitch24kHz + kFrameSize20ms24kHz); - RTC_DCHECK_LE(i, kMaxPitch24kHz); - const float old_coeff = pitch_buf[kMaxPitch24kHz + kFrameSize20ms24kHz - i]; - const float new_coeff = pitch_buf[kMaxPitch24kHz - i]; - yy -= old_coeff * old_coeff; - yy += new_coeff * new_coeff; - yy = std::max(0.f, yy); - yy_values[i] = yy; +void ComputeSlidingFrameSquareEnergies24kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView y_energy) { + float yy = std::inner_product(pitch_buffer.begin(), + pitch_buffer.begin() + kFrameSize20ms24kHz, + pitch_buffer.begin(), 0.f); + y_energy[0] = yy; + static_assert(kMaxPitch24kHz - 1 + kFrameSize20ms24kHz < kBufSize24kHz, ""); + static_assert(kMaxPitch24kHz < kRefineNumLags24kHz, ""); + for (int inverted_lag = 0; inverted_lag < kMaxPitch24kHz; 
++inverted_lag) { + yy -= pitch_buffer[inverted_lag] * pitch_buffer[inverted_lag]; + yy += pitch_buffer[inverted_lag + kFrameSize20ms24kHz] * + pitch_buffer[inverted_lag + kFrameSize20ms24kHz]; + yy = std::max(1.f, yy); + y_energy[inverted_lag + 1] = yy; } } -std::array FindBestPitchPeriods( - rtc::ArrayView auto_corr, - rtc::ArrayView pitch_buf, - size_t max_pitch_period) { +CandidatePitchPeriods ComputePitchPeriod12kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView auto_correlation) { + static_assert(kMaxPitch12kHz > kNumLags12kHz, ""); + static_assert(kMaxPitch12kHz < kBufSize12kHz, ""); + // Stores a pitch candidate period and strength information. struct PitchCandidate { // Pitch period encoded as inverted lag. - size_t period_inverted_lag = 0; + int period_inverted_lag = 0; // Pitch strength encoded as a ratio. float strength_numerator = -1.f; float strength_denominator = 0.f; @@ -232,25 +326,22 @@ std::array FindBestPitchPeriods( } }; - RTC_DCHECK_GT(max_pitch_period, auto_corr.size()); - RTC_DCHECK_LT(max_pitch_period, pitch_buf.size()); - const size_t frame_size = pitch_buf.size() - max_pitch_period; // TODO(bugs.webrtc.org/9076): Maybe optimize using vectorization. - float yy = - std::inner_product(pitch_buf.begin(), pitch_buf.begin() + frame_size + 1, - pitch_buf.begin(), 1.f); + float denominator = std::inner_product( + pitch_buffer.begin(), pitch_buffer.begin() + kFrameSize20ms12kHz + 1, + pitch_buffer.begin(), 1.f); // Search best and second best pitches by looking at the scaled // auto-correlation. - PitchCandidate candidate; PitchCandidate best; PitchCandidate second_best; second_best.period_inverted_lag = 1; - for (size_t inv_lag = 0; inv_lag < auto_corr.size(); ++inv_lag) { + for (int inverted_lag = 0; inverted_lag < kNumLags12kHz; ++inverted_lag) { // A pitch candidate must have positive correlation. 
- if (auto_corr[inv_lag] > 0) { - candidate.period_inverted_lag = inv_lag; - candidate.strength_numerator = auto_corr[inv_lag] * auto_corr[inv_lag]; - candidate.strength_denominator = yy; + if (auto_correlation[inverted_lag] > 0.f) { + PitchCandidate candidate{ + inverted_lag, + auto_correlation[inverted_lag] * auto_correlation[inverted_lag], + denominator}; if (candidate.HasStrongerPitchThan(second_best)) { if (candidate.HasStrongerPitchThan(best)) { second_best = best; @@ -261,142 +352,147 @@ std::array FindBestPitchPeriods( } } // Update |squared_energy_y| for the next inverted lag. - const float old_coeff = pitch_buf[inv_lag]; - const float new_coeff = pitch_buf[inv_lag + frame_size]; - yy -= old_coeff * old_coeff; - yy += new_coeff * new_coeff; - yy = std::max(0.f, yy); + const float y_old = pitch_buffer[inverted_lag]; + const float y_new = pitch_buffer[inverted_lag + kFrameSize20ms12kHz]; + denominator -= y_old * y_old; + denominator += y_new * y_new; + denominator = std::max(0.f, denominator); } - return {{best.period_inverted_lag, second_best.period_inverted_lag}}; + return {best.period_inverted_lag, second_best.period_inverted_lag}; } -size_t RefinePitchPeriod48kHz( - rtc::ArrayView pitch_buf, - rtc::ArrayView inv_lags) { - // Compute the auto-correlation terms only for neighbors of the given pitch - // candidates (similar to what is done in ComputePitchAutoCorrelation(), but - // for a few lag values). - std::array auto_corr; - auto_corr.fill(0.f); // Zeros become ignored lags in FindBestPitchPeriods(). - auto is_neighbor = [](size_t i, size_t j) { - return ((i > j) ? 
(i - j) : (j - i)) <= 2; - }; - for (size_t inv_lag = 0; inv_lag < auto_corr.size(); ++inv_lag) { - if (is_neighbor(inv_lag, inv_lags[0]) || is_neighbor(inv_lag, inv_lags[1])) - auto_corr[inv_lag] = - ComputeAutoCorrelationCoeff(pitch_buf, inv_lag, kMaxPitch24kHz); +int ComputePitchPeriod48kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView y_energy, + CandidatePitchPeriods pitch_candidates) { + // Compute the auto-correlation terms only for neighbors of the two pitch + // candidates (best and second best). + std::array auto_correlation; + InvertedLagsIndex inverted_lags_index; + // Create two inverted lag ranges so that `r1` precedes `r2`. + const bool swap_candidates = + pitch_candidates.best > pitch_candidates.second_best; + const Range r1 = CreateInvertedLagRange( + swap_candidates ? pitch_candidates.second_best : pitch_candidates.best); + const Range r2 = CreateInvertedLagRange( + swap_candidates ? pitch_candidates.best : pitch_candidates.second_best); + // Check valid ranges. + RTC_DCHECK_LE(r1.min, r1.max); + RTC_DCHECK_LE(r2.min, r2.max); + // Check `r1` precedes `r2`. + RTC_DCHECK_LE(r1.min, r2.min); + RTC_DCHECK_LE(r1.max, r2.max); + if (r1.max + 1 >= r2.min) { + // Overlapping or adjacent ranges. + ComputeAutoCorrelation({r1.min, r2.max}, pitch_buffer, auto_correlation, + inverted_lags_index); + } else { + // Disjoint ranges. + ComputeAutoCorrelation(r1, pitch_buffer, auto_correlation, + inverted_lags_index); + ComputeAutoCorrelation(r2, pitch_buffer, auto_correlation, + inverted_lags_index); } - // Find best pitch at 24 kHz. - const auto pitch_candidates_inv_lags = FindBestPitchPeriods( - {auto_corr.data(), auto_corr.size()}, - {pitch_buf.data(), pitch_buf.size()}, kMaxPitch24kHz); - const auto inv_lag = pitch_candidates_inv_lags[0]; // Refine the best. - // Pseudo-interpolation. 
- return PitchPseudoInterpolationInvLagAutoCorr(inv_lag, auto_corr); + return ComputePitchPeriod48kHz(pitch_buffer, inverted_lags_index, + auto_correlation, y_energy); } -PitchInfo CheckLowerPitchPeriodsAndComputePitchGain( - rtc::ArrayView pitch_buf, +PitchInfo ComputeExtendedPitchPeriod48kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView y_energy, int initial_pitch_period_48kHz, - PitchInfo prev_pitch_48kHz) { + PitchInfo last_pitch_48kHz) { RTC_DCHECK_LE(kMinPitch48kHz, initial_pitch_period_48kHz); RTC_DCHECK_LE(initial_pitch_period_48kHz, kMaxPitch48kHz); + // Stores information for a refined pitch candidate. struct RefinedPitchCandidate { - RefinedPitchCandidate() {} - RefinedPitchCandidate(int period_24kHz, float gain, float xy, float yy) - : period_24kHz(period_24kHz), gain(gain), xy(xy), yy(yy) {} - int period_24kHz; - // Pitch strength information. - float gain; - // Additional pitch strength information used for the final estimation of - // pitch gain. - float xy; // Cross-correlation. - float yy; // Auto-correlation. + int period; + float strength; + // Additional strength data used for the final pitch estimation. + float xy; // Auto-correlation. + float y_energy; // Energy of the sliding frame `y`. }; - // Initialize. - std::array yy_values; - ComputeSlidingFrameSquareEnergies(pitch_buf, - {yy_values.data(), yy_values.size()}); - const float xx = yy_values[0]; - // Helper lambdas. - const auto pitch_gain = [](float xy, float yy, float xx) { - RTC_DCHECK_LE(0.f, xx * yy); - return xy / std::sqrt(1.f + xx * yy); + const float x_energy = y_energy[kMaxPitch24kHz]; + const auto pitch_strength = [x_energy](float xy, float y_energy) { + RTC_DCHECK_GE(x_energy * y_energy, 0.f); + return xy / std::sqrt(1.f + x_energy * y_energy); }; - // Initial pitch candidate gain. + + // Initialize the best pitch candidate with `initial_pitch_period_48kHz`. 
RefinedPitchCandidate best_pitch; - best_pitch.period_24kHz = std::min(initial_pitch_period_48kHz / 2, - static_cast(kMaxPitch24kHz - 1)); - best_pitch.xy = ComputeAutoCorrelationCoeff( - pitch_buf, GetInvertedLag(best_pitch.period_24kHz), kMaxPitch24kHz); - best_pitch.yy = yy_values[best_pitch.period_24kHz]; - best_pitch.gain = pitch_gain(best_pitch.xy, best_pitch.yy, xx); + best_pitch.period = + std::min(initial_pitch_period_48kHz / 2, kMaxPitch24kHz - 1); + best_pitch.xy = + ComputeAutoCorrelation(kMaxPitch24kHz - best_pitch.period, pitch_buffer); + best_pitch.y_energy = y_energy[kMaxPitch24kHz - best_pitch.period]; + best_pitch.strength = pitch_strength(best_pitch.xy, best_pitch.y_energy); + // Keep a copy of the initial pitch candidate. + const PitchInfo initial_pitch{best_pitch.period, best_pitch.strength}; + // 24 kHz version of the last estimated pitch. + const PitchInfo last_pitch{last_pitch_48kHz.period / 2, + last_pitch_48kHz.strength}; - // Store the initial pitch period information. - const size_t initial_pitch_period = best_pitch.period_24kHz; - const float initial_pitch_gain = best_pitch.gain; - - // Given the initial pitch estimation, check lower periods (i.e., harmonics). - const auto alternative_period = [](int period, int k, int n) -> int { - RTC_DCHECK_GT(k, 0); - return (2 * n * period + k) / (2 * k); // Same as round(n*period/k). - }; - for (int k = 2; k < static_cast(kSubHarmonicMultipliers.size() + 2); - ++k) { - int candidate_pitch_period = alternative_period(initial_pitch_period, k, 1); - if (static_cast(candidate_pitch_period) < kMinPitch24kHz) { - break; - } - // When looking at |candidate_pitch_period|, we also look at one of its + // Find `max_period_divisor` such that the result of + // `GetAlternativePitchPeriod(initial_pitch_period, 1, max_period_divisor)` + // equals `kMinPitch24kHz`. 
+ const int max_period_divisor = + (2 * initial_pitch.period) / (2 * kMinPitch24kHz - 1); + for (int period_divisor = 2; period_divisor <= max_period_divisor; + ++period_divisor) { + PitchInfo alternative_pitch; + alternative_pitch.period = GetAlternativePitchPeriod( + initial_pitch.period, /*multiplier=*/1, period_divisor); + RTC_DCHECK_GE(alternative_pitch.period, kMinPitch24kHz); + // When looking at |alternative_pitch.period|, we also look at one of its // sub-harmonics. |kSubHarmonicMultipliers| is used to know where to look. - // |k| == 2 is a special case since |candidate_pitch_secondary_period| might - // be greater than the maximum pitch period. - int candidate_pitch_secondary_period = alternative_period( - initial_pitch_period, k, kSubHarmonicMultipliers[k - 2]); - RTC_DCHECK_GT(candidate_pitch_secondary_period, 0); - if (k == 2 && - candidate_pitch_secondary_period > static_cast(kMaxPitch24kHz)) { - candidate_pitch_secondary_period = initial_pitch_period; + // |period_divisor| == 2 is a special case since |dual_alternative_period| + // might be greater than the maximum pitch period. + int dual_alternative_period = GetAlternativePitchPeriod( + initial_pitch.period, kSubHarmonicMultipliers[period_divisor - 2], + period_divisor); + RTC_DCHECK_GT(dual_alternative_period, 0); + if (period_divisor == 2 && dual_alternative_period > kMaxPitch24kHz) { + dual_alternative_period = initial_pitch.period; } - RTC_DCHECK_NE(candidate_pitch_period, candidate_pitch_secondary_period) + RTC_DCHECK_NE(alternative_pitch.period, dual_alternative_period) << "The lower pitch period and the additional sub-harmonic must not " "coincide."; // Compute an auto-correlation score for the primary pitch candidate - // |candidate_pitch_period| by also looking at its possible sub-harmonic - // |candidate_pitch_secondary_period|. 
- float xy_primary_period = ComputeAutoCorrelationCoeff( - pitch_buf, GetInvertedLag(candidate_pitch_period), kMaxPitch24kHz); - float xy_secondary_period = ComputeAutoCorrelationCoeff( - pitch_buf, GetInvertedLag(candidate_pitch_secondary_period), - kMaxPitch24kHz); - float xy = 0.5f * (xy_primary_period + xy_secondary_period); - float yy = 0.5f * (yy_values[candidate_pitch_period] + - yy_values[candidate_pitch_secondary_period]); - float candidate_pitch_gain = pitch_gain(xy, yy, xx); + // |alternative_pitch.period| by also looking at its possible sub-harmonic + // |dual_alternative_period|. + const float xy_primary_period = ComputeAutoCorrelation( + kMaxPitch24kHz - alternative_pitch.period, pitch_buffer); + const float xy_secondary_period = ComputeAutoCorrelation( + kMaxPitch24kHz - dual_alternative_period, pitch_buffer); + const float xy = 0.5f * (xy_primary_period + xy_secondary_period); + const float yy = + 0.5f * (y_energy[kMaxPitch24kHz - alternative_pitch.period] + + y_energy[kMaxPitch24kHz - dual_alternative_period]); + alternative_pitch.strength = pitch_strength(xy, yy); // Maybe update best period. - float threshold = ComputePitchGainThreshold( - candidate_pitch_period, k, initial_pitch_period, initial_pitch_gain, - prev_pitch_48kHz.period / 2, prev_pitch_48kHz.gain); - if (candidate_pitch_gain > threshold) { - best_pitch = {candidate_pitch_period, candidate_pitch_gain, xy, yy}; + if (IsAlternativePitchStrongerThanInitial( + last_pitch, initial_pitch, alternative_pitch, period_divisor)) { + best_pitch = {alternative_pitch.period, alternative_pitch.strength, xy, + yy}; } } - // Final pitch gain and period. + // Final pitch strength and period. best_pitch.xy = std::max(0.f, best_pitch.xy); - RTC_DCHECK_LE(0.f, best_pitch.yy); - float final_pitch_gain = (best_pitch.yy <= best_pitch.xy) - ? 
1.f - : best_pitch.xy / (best_pitch.yy + 1.f); - final_pitch_gain = std::min(best_pitch.gain, final_pitch_gain); + RTC_DCHECK_LE(0.f, best_pitch.y_energy); + float final_pitch_strength = + (best_pitch.y_energy <= best_pitch.xy) + ? 1.f + : best_pitch.xy / (best_pitch.y_energy + 1.f); + final_pitch_strength = std::min(best_pitch.strength, final_pitch_strength); int final_pitch_period_48kHz = std::max( kMinPitch48kHz, - PitchPseudoInterpolationLagPitchBuf(best_pitch.period_24kHz, pitch_buf)); + PitchPseudoInterpolationLagPitchBuf(best_pitch.period, pitch_buffer)); - return {final_pitch_period_48kHz, final_pitch_gain}; + return {final_pitch_period_48kHz, final_pitch_strength}; } } // namespace rnn_vad diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h index 2cc5ce6af..0af55f8e6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h @@ -14,10 +14,10 @@ #include #include +#include #include "api/array_view.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" -#include "modules/audio_processing/agc2/rnn_vad/pitch_info.h" namespace webrtc { namespace rnn_vad { @@ -26,50 +26,82 @@ namespace rnn_vad { void Decimate2x(rtc::ArrayView src, rtc::ArrayView dst); -// Computes a gain threshold for a candidate pitch period given the initial and -// the previous pitch period and gain estimates and the pitch period ratio used -// to derive the candidate pitch period from the initial period. -float ComputePitchGainThreshold(int candidate_pitch_period, - int pitch_period_ratio, - int initial_pitch_period, - float initial_pitch_gain, - int prev_pitch_period, - float prev_pitch_gain); - -// Computes the sum of squared samples for every sliding frame in the pitch -// buffer. 
|yy_values| indexes are lags. +// Key concepts and keywords used below in this file. // -// The pitch buffer is structured as depicted below: -// |.........|...........| -// a b -// The part on the left, named "a" contains the oldest samples, whereas "b" the -// most recent ones. The size of "a" corresponds to the maximum pitch period, -// that of "b" to the frame size (e.g., 16 ms and 20 ms respectively). -void ComputeSlidingFrameSquareEnergies( - rtc::ArrayView pitch_buf, - rtc::ArrayView yy_values); +// The pitch estimation relies on a pitch buffer, which is an array-like data +// structured designed as follows: +// +// |....A....|.....B.....| +// +// The part on the left, named `A` contains the oldest samples, whereas `B` +// contains the most recent ones. The size of `A` corresponds to the maximum +// pitch period, that of `B` to the analysis frame size (e.g., 16 ms and 20 ms +// respectively). +// +// Pitch estimation is essentially based on the analysis of two 20 ms frames +// extracted from the pitch buffer. One frame, called `x`, is kept fixed and +// corresponds to `B` - i.e., the most recent 20 ms. The other frame, called +// `y`, is extracted from different parts of the buffer instead. +// +// The offset between `x` and `y` corresponds to a specific pitch period. +// For instance, if `y` is positioned at the beginning of the pitch buffer, then +// the cross-correlation between `x` and `y` can be used as an indication of the +// strength for the maximum pitch. +// +// Such an offset can be encoded in two ways: +// - As a lag, which is the index in the pitch buffer for the first item in `y` +// - As an inverted lag, which is the number of samples from the beginning of +// `x` and the end of `y` +// +// |---->| lag +// |....A....|.....B.....| +// |<--| inverted lag +// |.....y.....| `y` 20 ms frame +// +// The inverted lag has the advantage of being directly proportional to the +// corresponding pitch period. 
-// Given the auto-correlation coefficients stored according to -// ComputePitchAutoCorrelation() (i.e., using inverted lags), returns the best -// and the second best pitch periods. -std::array FindBestPitchPeriods( - rtc::ArrayView auto_corr, - rtc::ArrayView pitch_buf, - size_t max_pitch_period); +// Computes the sum of squared samples for every sliding frame `y` in the pitch +// buffer. The indexes of `y_energy` are inverted lags. +void ComputeSlidingFrameSquareEnergies24kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView y_energy); -// Refines the pitch period estimation given the pitch buffer |pitch_buf| and -// the initial pitch period estimation |inv_lags|. Returns an inverted lag at -// 48 kHz. -size_t RefinePitchPeriod48kHz( - rtc::ArrayView pitch_buf, - rtc::ArrayView inv_lags); +// Top-2 pitch period candidates. Unit: number of samples - i.e., inverted lags. +struct CandidatePitchPeriods { + int best; + int second_best; +}; -// Refines the pitch period estimation and compute the pitch gain. Returns the -// refined pitch estimation data at 48 kHz. -PitchInfo CheckLowerPitchPeriodsAndComputePitchGain( - rtc::ArrayView pitch_buf, +// Computes the candidate pitch periods at 12 kHz given a view on the 12 kHz +// pitch buffer and the auto-correlation values (having inverted lags as +// indexes). +CandidatePitchPeriods ComputePitchPeriod12kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView auto_correlation); + +// Computes the pitch period at 48 kHz given a view on the 24 kHz pitch buffer, +// the energies for the sliding frames `y` at 24 kHz and the pitch period +// candidates at 24 kHz (encoded as inverted lag). 
+int ComputePitchPeriod48kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView y_energy, + CandidatePitchPeriods pitch_candidates_24kHz); + +struct PitchInfo { + int period; + float strength; +}; + +// Computes the pitch period at 48 kHz searching in an extended pitch range +// given a view on the 24 kHz pitch buffer, the energies for the sliding frames +// `y` at 24 kHz, the initial 48 kHz estimation (computed by +// `ComputePitchPeriod48kHz()`) and the last estimated pitch. +PitchInfo ComputeExtendedPitchPeriod48kHz( + rtc::ArrayView pitch_buffer, + rtc::ArrayView y_energy, int initial_pitch_period_48kHz, - PitchInfo prev_pitch_48kHz); + PitchInfo last_pitch_48kHz); } // namespace rnn_vad } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/ring_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/ring_buffer.h index 294b0c0ba..f0270af91 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/ring_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/ring_buffer.h @@ -21,7 +21,7 @@ namespace webrtc { namespace rnn_vad { // Ring buffer for N arrays of type T each one with size S. -template +template class RingBuffer { static_assert(S > 0, ""); static_assert(N > 0, ""); @@ -45,11 +45,10 @@ class RingBuffer { // Return an array view onto the array with a given delay. A view on the last // and least recently push array is returned when |delay| is 0 and N - 1 // respectively. 
- rtc::ArrayView GetArrayView(size_t delay) const { - const int delay_int = static_cast(delay); - RTC_DCHECK_LE(0, delay_int); - RTC_DCHECK_LT(delay_int, N); - int offset = tail_ - 1 - delay_int; + rtc::ArrayView GetArrayView(int delay) const { + RTC_DCHECK_LE(0, delay); + RTC_DCHECK_LT(delay, N); + int offset = tail_ - 1 - delay; if (offset < 0) offset += N; return {buffer_.data() + S * offset, S}; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc index 55a51ffa4..2072a6854 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc @@ -26,6 +26,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/rnnoise/src/rnn_activations.h" #include "third_party/rnnoise/src/rnn_vad_weights.h" @@ -77,15 +78,16 @@ std::vector GetScaledParams(rtc::ArrayView params) { // Casts and scales |weights| and re-arranges the layout. std::vector GetPreprocessedFcWeights( rtc::ArrayView weights, - size_t output_size) { + int output_size) { if (output_size == 1) { return GetScaledParams(weights); } // Transpose, scale and cast. - const size_t input_size = rtc::CheckedDivExact(weights.size(), output_size); + const int input_size = rtc::CheckedDivExact( + rtc::dchecked_cast(weights.size()), output_size); std::vector w(weights.size()); - for (size_t o = 0; o < output_size; ++o) { - for (size_t i = 0; i < input_size; ++i) { + for (int o = 0; o < output_size; ++o) { + for (int i = 0; i < input_size; ++i) { w[o * input_size + i] = rnnoise::kWeightsScale * static_cast(weights[i * output_size + o]); } @@ -93,7 +95,7 @@ std::vector GetPreprocessedFcWeights( return w; } -constexpr size_t kNumGruGates = 3; // Update, reset, output. +constexpr int kNumGruGates = 3; // Update, reset, output. 
// TODO(bugs.chromium.org/10480): Hard-coded optimized layout and remove this // function to improve setup time. @@ -101,17 +103,17 @@ constexpr size_t kNumGruGates = 3; // Update, reset, output. // It works both for weights, recurrent weights and bias. std::vector GetPreprocessedGruTensor( rtc::ArrayView tensor_src, - size_t output_size) { + int output_size) { // Transpose, cast and scale. // |n| is the size of the first dimension of the 3-dim tensor |weights|. - const size_t n = - rtc::CheckedDivExact(tensor_src.size(), output_size * kNumGruGates); - const size_t stride_src = kNumGruGates * output_size; - const size_t stride_dst = n * output_size; + const int n = rtc::CheckedDivExact(rtc::dchecked_cast(tensor_src.size()), + output_size * kNumGruGates); + const int stride_src = kNumGruGates * output_size; + const int stride_dst = n * output_size; std::vector tensor_dst(tensor_src.size()); - for (size_t g = 0; g < kNumGruGates; ++g) { - for (size_t o = 0; o < output_size; ++o) { - for (size_t i = 0; i < n; ++i) { + for (int g = 0; g < kNumGruGates; ++g) { + for (int o = 0; o < output_size; ++o) { + for (int i = 0; i < n; ++i) { tensor_dst[g * stride_dst + o * n + i] = rnnoise::kWeightsScale * static_cast( @@ -122,28 +124,28 @@ std::vector GetPreprocessedGruTensor( return tensor_dst; } -void ComputeGruUpdateResetGates(size_t input_size, - size_t output_size, +void ComputeGruUpdateResetGates(int input_size, + int output_size, rtc::ArrayView weights, rtc::ArrayView recurrent_weights, rtc::ArrayView bias, rtc::ArrayView input, rtc::ArrayView state, rtc::ArrayView gate) { - for (size_t o = 0; o < output_size; ++o) { + for (int o = 0; o < output_size; ++o) { gate[o] = bias[o]; - for (size_t i = 0; i < input_size; ++i) { + for (int i = 0; i < input_size; ++i) { gate[o] += input[i] * weights[o * input_size + i]; } - for (size_t s = 0; s < output_size; ++s) { + for (int s = 0; s < output_size; ++s) { gate[o] += state[s] * recurrent_weights[o * output_size + s]; } gate[o] = 
SigmoidApproximated(gate[o]); } } -void ComputeGruOutputGate(size_t input_size, - size_t output_size, +void ComputeGruOutputGate(int input_size, + int output_size, rtc::ArrayView weights, rtc::ArrayView recurrent_weights, rtc::ArrayView bias, @@ -151,12 +153,12 @@ void ComputeGruOutputGate(size_t input_size, rtc::ArrayView state, rtc::ArrayView reset, rtc::ArrayView gate) { - for (size_t o = 0; o < output_size; ++o) { + for (int o = 0; o < output_size; ++o) { gate[o] = bias[o]; - for (size_t i = 0; i < input_size; ++i) { + for (int i = 0; i < input_size; ++i) { gate[o] += input[i] * weights[o * input_size + i]; } - for (size_t s = 0; s < output_size; ++s) { + for (int s = 0; s < output_size; ++s) { gate[o] += state[s] * recurrent_weights[o * output_size + s] * reset[s]; } gate[o] = RectifiedLinearUnit(gate[o]); @@ -164,8 +166,8 @@ void ComputeGruOutputGate(size_t input_size, } // Gated recurrent unit (GRU) layer un-optimized implementation. -void ComputeGruLayerOutput(size_t input_size, - size_t output_size, +void ComputeGruLayerOutput(int input_size, + int output_size, rtc::ArrayView input, rtc::ArrayView weights, rtc::ArrayView recurrent_weights, @@ -173,8 +175,8 @@ void ComputeGruLayerOutput(size_t input_size, rtc::ArrayView state) { RTC_DCHECK_EQ(input_size, input.size()); // Stride and offset used to read parameter arrays. - const size_t stride_in = input_size * output_size; - const size_t stride_out = output_size * output_size; + const int stride_in = input_size * output_size; + const int stride_out = output_size * output_size; // Update gate. std::array update; @@ -198,7 +200,7 @@ void ComputeGruLayerOutput(size_t input_size, bias.subview(2 * output_size, output_size), input, state, reset, output); // Update output through the update gates and update the state. 
- for (size_t o = 0; o < output_size; ++o) { + for (int o = 0; o < output_size; ++o) { output[o] = update[o] * state[o] + (1.f - update[o]) * output[o]; state[o] = output[o]; } @@ -206,8 +208,8 @@ void ComputeGruLayerOutput(size_t input_size, // Fully connected layer un-optimized implementation. void ComputeFullyConnectedLayerOutput( - size_t input_size, - size_t output_size, + int input_size, + int output_size, rtc::ArrayView input, rtc::ArrayView bias, rtc::ArrayView weights, @@ -216,11 +218,11 @@ void ComputeFullyConnectedLayerOutput( RTC_DCHECK_EQ(input.size(), input_size); RTC_DCHECK_EQ(bias.size(), output_size); RTC_DCHECK_EQ(weights.size(), input_size * output_size); - for (size_t o = 0; o < output_size; ++o) { + for (int o = 0; o < output_size; ++o) { output[o] = bias[o]; // TODO(bugs.chromium.org/9076): Benchmark how different layouts for // |weights_| change the performance across different platforms. - for (size_t i = 0; i < input_size; ++i) { + for (int i = 0; i < input_size; ++i) { output[o] += input[i] * weights[o * input_size + i]; } output[o] = activation_function(output[o]); @@ -230,8 +232,8 @@ void ComputeFullyConnectedLayerOutput( #if defined(WEBRTC_ARCH_X86_FAMILY) // Fully connected layer SSE2 implementation. 
void ComputeFullyConnectedLayerOutputSse2( - size_t input_size, - size_t output_size, + int input_size, + int output_size, rtc::ArrayView input, rtc::ArrayView bias, rtc::ArrayView weights, @@ -240,16 +242,16 @@ void ComputeFullyConnectedLayerOutputSse2( RTC_DCHECK_EQ(input.size(), input_size); RTC_DCHECK_EQ(bias.size(), output_size); RTC_DCHECK_EQ(weights.size(), input_size * output_size); - const size_t input_size_by_4 = input_size >> 2; - const size_t offset = input_size & ~3; + const int input_size_by_4 = input_size >> 2; + const int offset = input_size & ~3; __m128 sum_wx_128; const float* v = reinterpret_cast(&sum_wx_128); - for (size_t o = 0; o < output_size; ++o) { + for (int o = 0; o < output_size; ++o) { // Perform 128 bit vector operations. sum_wx_128 = _mm_set1_ps(0); const float* x_p = input.data(); const float* w_p = weights.data() + o * input_size; - for (size_t i = 0; i < input_size_by_4; ++i, x_p += 4, w_p += 4) { + for (int i = 0; i < input_size_by_4; ++i, x_p += 4, w_p += 4) { sum_wx_128 = _mm_add_ps(sum_wx_128, _mm_mul_ps(_mm_loadu_ps(x_p), _mm_loadu_ps(w_p))); } @@ -266,8 +268,8 @@ void ComputeFullyConnectedLayerOutputSse2( } // namespace FullyConnectedLayer::FullyConnectedLayer( - const size_t input_size, - const size_t output_size, + const int input_size, + const int output_size, const rtc::ArrayView bias, const rtc::ArrayView weights, rtc::FunctionView activation_function, @@ -316,8 +318,8 @@ void FullyConnectedLayer::ComputeOutput(rtc::ArrayView input) { } GatedRecurrentLayer::GatedRecurrentLayer( - const size_t input_size, - const size_t output_size, + const int input_size, + const int output_size, const rtc::ArrayView bias, const rtc::ArrayView weights, const rtc::ArrayView recurrent_weights, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h index 58274b2e1..5b44f5304 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h @@ -29,19 +29,19 @@ namespace rnn_vad { // over-allocate space for fully-connected layers output vectors (implemented as // std::array). The value should equal the number of units of the largest // fully-connected layer. -constexpr size_t kFullyConnectedLayersMaxUnits = 24; +constexpr int kFullyConnectedLayersMaxUnits = 24; // Maximum number of units for a recurrent layer. This value is used to // over-allocate space for recurrent layers state vectors (implemented as // std::array). The value should equal the number of units of the largest // recurrent layer. -constexpr size_t kRecurrentLayersMaxUnits = 24; +constexpr int kRecurrentLayersMaxUnits = 24; // Fully-connected layer. class FullyConnectedLayer { public: - FullyConnectedLayer(size_t input_size, - size_t output_size, + FullyConnectedLayer(int input_size, + int output_size, rtc::ArrayView bias, rtc::ArrayView weights, rtc::FunctionView activation_function, @@ -49,16 +49,16 @@ class FullyConnectedLayer { FullyConnectedLayer(const FullyConnectedLayer&) = delete; FullyConnectedLayer& operator=(const FullyConnectedLayer&) = delete; ~FullyConnectedLayer(); - size_t input_size() const { return input_size_; } - size_t output_size() const { return output_size_; } + int input_size() const { return input_size_; } + int output_size() const { return output_size_; } Optimization optimization() const { return optimization_; } rtc::ArrayView GetOutput() const; // Computes the fully-connected layer output. 
void ComputeOutput(rtc::ArrayView input); private: - const size_t input_size_; - const size_t output_size_; + const int input_size_; + const int output_size_; const std::vector bias_; const std::vector weights_; rtc::FunctionView activation_function_; @@ -72,8 +72,8 @@ class FullyConnectedLayer { // activation functions for the update/reset and output gates respectively. class GatedRecurrentLayer { public: - GatedRecurrentLayer(size_t input_size, - size_t output_size, + GatedRecurrentLayer(int input_size, + int output_size, rtc::ArrayView bias, rtc::ArrayView weights, rtc::ArrayView recurrent_weights, @@ -81,8 +81,8 @@ class GatedRecurrentLayer { GatedRecurrentLayer(const GatedRecurrentLayer&) = delete; GatedRecurrentLayer& operator=(const GatedRecurrentLayer&) = delete; ~GatedRecurrentLayer(); - size_t input_size() const { return input_size_; } - size_t output_size() const { return output_size_; } + int input_size() const { return input_size_; } + int output_size() const { return output_size_; } Optimization optimization() const { return optimization_; } rtc::ArrayView GetOutput() const; void Reset(); @@ -90,8 +90,8 @@ class GatedRecurrentLayer { void ComputeOutput(rtc::ArrayView input); private: - const size_t input_size_; - const size_t output_size_; + const int input_size_; + const int output_size_; const std::vector bias_; const std::vector weights_; const std::vector recurrent_weights_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc index c5293bedc..8b12b60c5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc @@ -20,6 +20,7 @@ #include "modules/audio_processing/agc2/rnn_vad/features_extraction.h" #include "modules/audio_processing/agc2/rnn_vad/rnn.h" #include "rtc_base/logging.h" +#include 
"rtc_base/numerics/safe_compare.h" ABSL_FLAG(std::string, i, "", "Path to the input wav file"); ABSL_FLAG(std::string, f, "", "Path to the output features file"); @@ -56,7 +57,7 @@ int main(int argc, char* argv[]) { } // Initialize. - const size_t frame_size_10ms = + const int frame_size_10ms = rtc::CheckedDivExact(wav_reader.sample_rate(), 100); std::vector samples_10ms; samples_10ms.resize(frame_size_10ms); @@ -69,9 +70,9 @@ int main(int argc, char* argv[]) { // Compute VAD probabilities. while (true) { // Read frame at the input sample rate. - const auto read_samples = + const size_t read_samples = wav_reader.ReadSamples(frame_size_10ms, samples_10ms.data()); - if (read_samples < frame_size_10ms) { + if (rtc::SafeLt(read_samples, frame_size_10ms)) { break; // EOF. } // Resample input. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h index 75d3d9bc0..a7402788c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h @@ -29,7 +29,7 @@ namespace rnn_vad { // values are written at the end of the buffer. // The class also provides a view on the most recent M values, where 0 < M <= S // and by default M = N. -template +template class SequenceBuffer { static_assert(N <= S, "The new chunk size cannot be larger than the sequence buffer " @@ -45,8 +45,8 @@ class SequenceBuffer { SequenceBuffer(const SequenceBuffer&) = delete; SequenceBuffer& operator=(const SequenceBuffer&) = delete; ~SequenceBuffer() = default; - size_t size() const { return S; } - size_t chunks_size() const { return N; } + int size() const { return S; } + int chunks_size() const { return N; } // Sets the sequence buffer values to zero. void Reset() { std::fill(buffer_.begin(), buffer_.end(), 0); } // Returns a view on the whole buffer. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features.cc index 81e3339d7..96086babb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features.cc @@ -16,6 +16,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" namespace webrtc { namespace rnn_vad { @@ -32,11 +33,11 @@ void UpdateCepstralDifferenceStats( RTC_DCHECK(sym_matrix_buf); // Compute the new cepstral distance stats. std::array distances; - for (size_t i = 0; i < kCepstralCoeffsHistorySize - 1; ++i) { - const size_t delay = i + 1; + for (int i = 0; i < kCepstralCoeffsHistorySize - 1; ++i) { + const int delay = i + 1; auto old_cepstral_coeffs = ring_buf.GetArrayView(delay); distances[i] = 0.f; - for (size_t k = 0; k < kNumBands; ++k) { + for (int k = 0; k < kNumBands; ++k) { const float c = new_cepstral_coeffs[k] - old_cepstral_coeffs[k]; distances[i] += c * c; } @@ -48,9 +49,9 @@ void UpdateCepstralDifferenceStats( // Computes the first half of the Vorbis window. std::array ComputeScaledHalfVorbisWindow( float scaling = 1.f) { - constexpr size_t kHalfSize = kFrameSize20ms24kHz / 2; + constexpr int kHalfSize = kFrameSize20ms24kHz / 2; std::array half_window{}; - for (size_t i = 0; i < kHalfSize; ++i) { + for (int i = 0; i < kHalfSize; ++i) { half_window[i] = scaling * std::sin(0.5 * kPi * std::sin(0.5 * kPi * (i + 0.5) / kHalfSize) * @@ -71,8 +72,8 @@ void ComputeWindowedForwardFft( RTC_DCHECK_EQ(frame.size(), 2 * half_window.size()); // Apply windowing. 
auto in = fft_input_buffer->GetView(); - for (size_t i = 0, j = kFrameSize20ms24kHz - 1; i < half_window.size(); - ++i, --j) { + for (int i = 0, j = kFrameSize20ms24kHz - 1; + rtc::SafeLt(i, half_window.size()); ++i, --j) { in[i] = frame[i] * half_window[i]; in[j] = frame[j] * half_window[i]; } @@ -162,7 +163,7 @@ void SpectralFeaturesExtractor::ComputeAvgAndDerivatives( RTC_DCHECK_EQ(average.size(), first_derivative.size()); RTC_DCHECK_EQ(first_derivative.size(), second_derivative.size()); RTC_DCHECK_LE(average.size(), curr.size()); - for (size_t i = 0; i < average.size(); ++i) { + for (int i = 0; rtc::SafeLt(i, average.size()); ++i) { // Average, kernel: [1, 1, 1]. average[i] = curr[i] + prev1[i] + prev2[i]; // First derivative, kernel: [1, 0, - 1]. @@ -178,7 +179,7 @@ void SpectralFeaturesExtractor::ComputeNormalizedCepstralCorrelation( reference_frame_fft_->GetConstView(), lagged_frame_fft_->GetConstView(), bands_cross_corr_); // Normalize. - for (size_t i = 0; i < bands_cross_corr_.size(); ++i) { + for (int i = 0; rtc::SafeLt(i, bands_cross_corr_.size()); ++i) { bands_cross_corr_[i] = bands_cross_corr_[i] / std::sqrt(0.001f + reference_frame_bands_energy_[i] * @@ -194,9 +195,9 @@ void SpectralFeaturesExtractor::ComputeNormalizedCepstralCorrelation( float SpectralFeaturesExtractor::ComputeVariability() const { // Compute cepstral variability score. float variability = 0.f; - for (size_t delay1 = 0; delay1 < kCepstralCoeffsHistorySize; ++delay1) { + for (int delay1 = 0; delay1 < kCepstralCoeffsHistorySize; ++delay1) { float min_dist = std::numeric_limits::max(); - for (size_t delay2 = 0; delay2 < kCepstralCoeffsHistorySize; ++delay2) { + for (int delay2 = 0; delay2 < kCepstralCoeffsHistorySize; ++delay2) { if (delay1 == delay2) // The distance would be 0. 
continue; min_dist = diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc index 29192a08f..91c0086fc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc @@ -15,6 +15,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" namespace webrtc { namespace rnn_vad { @@ -105,9 +106,9 @@ void SpectralCorrelator::ComputeCrossCorrelation( RTC_DCHECK_EQ(x[1], 0.f) << "The Nyquist coefficient must be zeroed."; RTC_DCHECK_EQ(y[1], 0.f) << "The Nyquist coefficient must be zeroed."; constexpr auto kOpusScaleNumBins24kHz20ms = GetOpusScaleNumBins24kHz20ms(); - size_t k = 0; // Next Fourier coefficient index. + int k = 0; // Next Fourier coefficient index. cross_corr[0] = 0.f; - for (size_t i = 0; i < kOpusBands24kHz - 1; ++i) { + for (int i = 0; i < kOpusBands24kHz - 1; ++i) { cross_corr[i + 1] = 0.f; for (int j = 0; j < kOpusScaleNumBins24kHz20ms[i]; ++j) { // Band size. const float v = x[2 * k] * y[2 * k] + x[2 * k + 1] * y[2 * k + 1]; @@ -137,11 +138,11 @@ void ComputeSmoothedLogMagnitudeSpectrum( return x; }; // Smoothing over the bands for which the band energy is defined. - for (size_t i = 0; i < bands_energy.size(); ++i) { + for (int i = 0; rtc::SafeLt(i, bands_energy.size()); ++i) { log_bands_energy[i] = smooth(std::log10(kOneByHundred + bands_energy[i])); } // Smoothing over the remaining bands (zero energy). 
- for (size_t i = bands_energy.size(); i < kNumBands; ++i) { + for (int i = bands_energy.size(); i < kNumBands; ++i) { log_bands_energy[i] = smooth(kLogOneByHundred); } } @@ -149,8 +150,8 @@ void ComputeSmoothedLogMagnitudeSpectrum( std::array ComputeDctTable() { std::array dct_table; const double k = std::sqrt(0.5); - for (size_t i = 0; i < kNumBands; ++i) { - for (size_t j = 0; j < kNumBands; ++j) + for (int i = 0; i < kNumBands; ++i) { + for (int j = 0; j < kNumBands; ++j) dct_table[i * kNumBands + j] = std::cos((i + 0.5) * j * kPi / kNumBands); dct_table[i * kNumBands] *= k; } @@ -173,9 +174,9 @@ void ComputeDct(rtc::ArrayView in, RTC_DCHECK_LE(in.size(), kNumBands); RTC_DCHECK_LE(1, out.size()); RTC_DCHECK_LE(out.size(), in.size()); - for (size_t i = 0; i < out.size(); ++i) { + for (int i = 0; rtc::SafeLt(i, out.size()); ++i) { out[i] = 0.f; - for (size_t j = 0; j < in.size(); ++j) { + for (int j = 0; rtc::SafeLt(j, in.size()); ++j) { out[i] += in[j] * dct_table[j * kNumBands + i]; } // TODO(bugs.webrtc.org/10480): Scaling factor in the DCT table. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h index ed4caad02..aa7b1c6a4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h @@ -25,7 +25,7 @@ namespace rnn_vad { // At a sample rate of 24 kHz, the last 3 Opus bands are beyond the Nyquist // frequency. However, band #19 gets the contributions from band #18 because // of the symmetric triangular filter with peak response at 12 kHz. 
-constexpr size_t kOpusBands24kHz = 20; +constexpr int kOpusBands24kHz = 20; static_assert(kOpusBands24kHz < kNumBands, "The number of bands at 24 kHz must be less than those defined " "in the Opus scale at 48 kHz."); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h index f0282aaed..dd3b62a1a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h @@ -18,6 +18,7 @@ #include "api/array_view.h" #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" namespace webrtc { namespace rnn_vad { @@ -29,7 +30,7 @@ namespace rnn_vad { // removed when one of the two corresponding items that have been compared is // removed from the ring buffer. It is assumed that the comparison is symmetric // and that comparing an item with itself is not needed. -template +template class SymmetricMatrixBuffer { static_assert(S > 2, ""); @@ -55,9 +56,9 @@ class SymmetricMatrixBuffer { // column left. std::memmove(buf_.data(), buf_.data() + S, (buf_.size() - S) * sizeof(T)); // Copy new values in the last column in the right order. - for (size_t i = 0; i < values.size(); ++i) { - const size_t index = (S - 1 - i) * (S - 1) - 1; - RTC_DCHECK_LE(static_cast(0), index); + for (int i = 0; rtc::SafeLt(i, values.size()); ++i) { + const int index = (S - 1 - i) * (S - 1) - 1; + RTC_DCHECK_GE(index, 0); RTC_DCHECK_LT(index, buf_.size()); buf_[index] = values[i]; } @@ -65,9 +66,9 @@ class SymmetricMatrixBuffer { // Reads the value that corresponds to comparison of two items in the ring // buffer having delay |delay1| and |delay2|. The two arguments must not be // equal and both must be in {0, ..., S - 1}. 
- T GetValue(size_t delay1, size_t delay2) const { - int row = S - 1 - static_cast(delay1); - int col = S - 1 - static_cast(delay2); + T GetValue(int delay1, int delay2) const { + int row = S - 1 - delay1; + int col = S - 1 - delay2; RTC_DCHECK_NE(row, col) << "The diagonal cannot be accessed."; if (row > col) std::swap(row, col); // Swap to access the upper-right triangular part. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc index 1a8e1a2ee..24bbf13e3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc @@ -10,9 +10,11 @@ #include "modules/audio_processing/agc2/rnn_vad/test_utils.h" +#include #include #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" #include "rtc_base/system/arch.h" #include "system_wrappers/include/cpu_features_wrapper.h" #include "test/gtest.h" @@ -24,7 +26,7 @@ namespace test { namespace { using ReaderPairType = - std::pair>, const size_t>; + std::pair>, const int>; } // namespace @@ -33,7 +35,7 @@ using webrtc::test::ResourcePath; void ExpectEqualFloatArray(rtc::ArrayView expected, rtc::ArrayView computed) { ASSERT_EQ(expected.size(), computed.size()); - for (size_t i = 0; i < expected.size(); ++i) { + for (int i = 0; rtc::SafeLt(i, expected.size()); ++i) { SCOPED_TRACE(i); EXPECT_FLOAT_EQ(expected[i], computed[i]); } @@ -43,14 +45,14 @@ void ExpectNearAbsolute(rtc::ArrayView expected, rtc::ArrayView computed, float tolerance) { ASSERT_EQ(expected.size(), computed.size()); - for (size_t i = 0; i < expected.size(); ++i) { + for (int i = 0; rtc::SafeLt(i, expected.size()); ++i) { SCOPED_TRACE(i); EXPECT_NEAR(expected[i], computed[i], tolerance); } } -std::pair>, const size_t> -CreatePcmSamplesReader(const size_t frame_length) { +std::pair>, const int> 
+CreatePcmSamplesReader(const int frame_length) { auto ptr = std::make_unique>( test::ResourcePath("audio_processing/agc2/rnn_vad/samples", "pcm"), frame_length); @@ -59,14 +61,14 @@ CreatePcmSamplesReader(const size_t frame_length) { } ReaderPairType CreatePitchBuffer24kHzReader() { - constexpr size_t cols = 864; + constexpr int cols = 864; auto ptr = std::make_unique>( ResourcePath("audio_processing/agc2/rnn_vad/pitch_buf_24k", "dat"), cols); return {std::move(ptr), rtc::CheckedDivExact(ptr->data_length(), cols)}; } ReaderPairType CreateLpResidualAndPitchPeriodGainReader() { - constexpr size_t num_lp_residual_coeffs = 864; + constexpr int num_lp_residual_coeffs = 864; auto ptr = std::make_unique>( ResourcePath("audio_processing/agc2/rnn_vad/pitch_lp_res", "dat"), num_lp_residual_coeffs); @@ -83,8 +85,12 @@ ReaderPairType CreateVadProbsReader() { PitchTestData::PitchTestData() { BinaryFileReader test_data_reader( ResourcePath("audio_processing/agc2/rnn_vad/pitch_search_int", "dat"), - static_cast(1396)); + 1396); test_data_reader.ReadChunk(test_data_); + // Reverse the order of the squared energy values. + // Required after the WebRTC CL 191703 which switched to forward computation. 
+ std::reverse(test_data_.begin() + kBufSize24kHz, + test_data_.begin() + kBufSize24kHz + kNumPitchBufSquareEnergies); } PitchTestData::~PitchTestData() = default; @@ -109,7 +115,7 @@ bool IsOptimizationAvailable(Optimization optimization) { switch (optimization) { case Optimization::kSse2: #if defined(WEBRTC_ARCH_X86_FAMILY) - return WebRtc_GetCPUInfo(kSSE2) != 0; + return GetCPUInfo(kSSE2) != 0; #else return false; #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h index db155e6a7..23e642be8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h @@ -24,6 +24,7 @@ #include "api/array_view.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" namespace webrtc { namespace rnn_vad { @@ -47,7 +48,7 @@ void ExpectNearAbsolute(rtc::ArrayView expected, template class BinaryFileReader { public: - explicit BinaryFileReader(const std::string& file_path, size_t chunk_size = 0) + BinaryFileReader(const std::string& file_path, int chunk_size = 0) : is_(file_path, std::ios::binary | std::ios::ate), data_length_(is_.tellg() / sizeof(T)), chunk_size_(chunk_size) { @@ -58,7 +59,7 @@ class BinaryFileReader { BinaryFileReader(const BinaryFileReader&) = delete; BinaryFileReader& operator=(const BinaryFileReader&) = delete; ~BinaryFileReader() = default; - size_t data_length() const { return data_length_; } + int data_length() const { return data_length_; } bool ReadValue(D* dst) { if (std::is_same::value) { is_.read(reinterpret_cast(dst), sizeof(T)); @@ -72,7 +73,7 @@ class BinaryFileReader { // If |chunk_size| was specified in the ctor, it will check that the size of // |dst| equals |chunk_size|. 
bool ReadChunk(rtc::ArrayView dst) { - RTC_DCHECK((chunk_size_ == 0) || (chunk_size_ == dst.size())); + RTC_DCHECK((chunk_size_ == 0) || rtc::SafeEq(chunk_size_, dst.size())); const std::streamsize bytes_to_read = dst.size() * sizeof(T); if (std::is_same::value) { is_.read(reinterpret_cast(dst.data()), bytes_to_read); @@ -83,13 +84,13 @@ class BinaryFileReader { } return is_.gcount() == bytes_to_read; } - void SeekForward(size_t items) { is_.seekg(items * sizeof(T), is_.cur); } + void SeekForward(int items) { is_.seekg(items * sizeof(T), is_.cur); } void SeekBeginning() { is_.seekg(0, is_.beg); } private: std::ifstream is_; - const size_t data_length_; - const size_t chunk_size_; + const int data_length_; + const int chunk_size_; std::vector buf_; }; @@ -117,22 +118,22 @@ class BinaryFileWriter { // pointer and the second the number of chunks that can be read from the file. // Creates a reader for the PCM samples that casts from S16 to float and reads // chunks with length |frame_length|. -std::pair>, const size_t> -CreatePcmSamplesReader(const size_t frame_length); +std::pair>, const int> +CreatePcmSamplesReader(const int frame_length); // Creates a reader for the pitch buffer content at 24 kHz. -std::pair>, const size_t> +std::pair>, const int> CreatePitchBuffer24kHzReader(); // Creates a reader for the the LP residual coefficients and the pitch period // and gain values. -std::pair>, const size_t> +std::pair>, const int> CreateLpResidualAndPitchPeriodGainReader(); // Creates a reader for the VAD probabilities. 
-std::pair>, const size_t> +std::pair>, const int> CreateVadProbsReader(); -constexpr size_t kNumPitchBufAutoCorrCoeffs = 147; -constexpr size_t kNumPitchBufSquareEnergies = 385; -constexpr size_t kPitchTestDataSize = +constexpr int kNumPitchBufAutoCorrCoeffs = 147; +constexpr int kNumPitchBufSquareEnergies = 385; +constexpr int kPitchTestDataSize = kBufSize24kHz + kNumPitchBufSquareEnergies + kNumPitchBufAutoCorrCoeffs; // Class to retrieve a test pitch buffer content and the expected output for the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc index 6d777ffdb..b64fcdb71 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc @@ -10,96 +10,112 @@ #include "modules/audio_processing/agc2/saturation_protector.h" -#include -#include - #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/numerics/safe_minmax.h" namespace webrtc { - namespace { -void ShiftBuffer(std::array* buffer_) { - // Move everything one element back. - std::copy(buffer_->begin() + 1, buffer_->end(), buffer_->begin()); -} + +constexpr float kMinLevelDbfs = -90.f; + +// Min/max margins are based on speech crest-factor. +constexpr float kMinMarginDb = 12.f; +constexpr float kMaxMarginDb = 25.f; + +using saturation_protector_impl::RingBuffer; + } // namespace -SaturationProtector::PeakEnveloper::PeakEnveloper() = default; - -void SaturationProtector::PeakEnveloper::Process(float frame_peak_dbfs) { - // Update the delayed buffer and the current superframe peak. 
- current_superframe_peak_dbfs_ = - std::max(current_superframe_peak_dbfs_, frame_peak_dbfs); - speech_time_in_estimate_ms_ += kFrameDurationMs; - if (speech_time_in_estimate_ms_ > kPeakEnveloperSuperFrameLengthMs) { - speech_time_in_estimate_ms_ = 0; - const bool buffer_full = elements_in_buffer_ == kPeakEnveloperBufferSize; - if (buffer_full) { - ShiftBuffer(&peak_delay_buffer_); - *peak_delay_buffer_.rbegin() = current_superframe_peak_dbfs_; - } else { - peak_delay_buffer_[elements_in_buffer_] = current_superframe_peak_dbfs_; - elements_in_buffer_++; +bool RingBuffer::operator==(const RingBuffer& b) const { + RTC_DCHECK_LE(size_, buffer_.size()); + RTC_DCHECK_LE(b.size_, b.buffer_.size()); + if (size_ != b.size_) { + return false; + } + for (int i = 0, i0 = FrontIndex(), i1 = b.FrontIndex(); i < size_; + ++i, ++i0, ++i1) { + if (buffer_[i0 % buffer_.size()] != b.buffer_[i1 % b.buffer_.size()]) { + return false; } - current_superframe_peak_dbfs_ = -90.f; + } + return true; +} + +void RingBuffer::Reset() { + next_ = 0; + size_ = 0; +} + +void RingBuffer::PushBack(float v) { + RTC_DCHECK_GE(next_, 0); + RTC_DCHECK_GE(size_, 0); + RTC_DCHECK_LT(next_, buffer_.size()); + RTC_DCHECK_LE(size_, buffer_.size()); + buffer_[next_++] = v; + if (rtc::SafeEq(next_, buffer_.size())) { + next_ = 0; + } + if (rtc::SafeLt(size_, buffer_.size())) { + size_++; } } -float SaturationProtector::PeakEnveloper::Query() const { - float result; - if (elements_in_buffer_ > 0) { - result = peak_delay_buffer_[0]; +absl::optional RingBuffer::Front() const { + if (size_ == 0) { + return absl::nullopt; + } + RTC_DCHECK_LT(FrontIndex(), buffer_.size()); + return buffer_[FrontIndex()]; +} + +bool SaturationProtectorState::operator==( + const SaturationProtectorState& b) const { + return margin_db == b.margin_db && peak_delay_buffer == b.peak_delay_buffer && + max_peaks_dbfs == b.max_peaks_dbfs && + time_since_push_ms == b.time_since_push_ms; +} + +void ResetSaturationProtectorState(float 
initial_margin_db, + SaturationProtectorState& state) { + state.margin_db = initial_margin_db; + state.peak_delay_buffer.Reset(); + state.max_peaks_dbfs = kMinLevelDbfs; + state.time_since_push_ms = 0; +} + +void UpdateSaturationProtectorState(float speech_peak_dbfs, + float speech_level_dbfs, + SaturationProtectorState& state) { + // Get the max peak over `kPeakEnveloperSuperFrameLengthMs` ms. + state.max_peaks_dbfs = std::max(state.max_peaks_dbfs, speech_peak_dbfs); + state.time_since_push_ms += kFrameDurationMs; + if (rtc::SafeGt(state.time_since_push_ms, kPeakEnveloperSuperFrameLengthMs)) { + // Push `max_peaks_dbfs` back into the ring buffer. + state.peak_delay_buffer.PushBack(state.max_peaks_dbfs); + // Reset. + state.max_peaks_dbfs = kMinLevelDbfs; + state.time_since_push_ms = 0; + } + + // Update margin by comparing the estimated speech level and the delayed max + // speech peak power. + // TODO(alessiob): Check with aleloi@ why we use a delay and how to tune it. + const float delayed_peak_dbfs = + state.peak_delay_buffer.Front().value_or(state.max_peaks_dbfs); + const float difference_db = delayed_peak_dbfs - speech_level_dbfs; + if (difference_db > state.margin_db) { + // Attack. 
+ state.margin_db = + state.margin_db * kSaturationProtectorAttackConstant + + difference_db * (1.f - kSaturationProtectorAttackConstant); } else { - result = current_superframe_peak_dbfs_; - } - return result; -} - -SaturationProtector::SaturationProtector(ApmDataDumper* apm_data_dumper) - : SaturationProtector(apm_data_dumper, GetExtraSaturationMarginOffsetDb()) { -} - -SaturationProtector::SaturationProtector(ApmDataDumper* apm_data_dumper, - float extra_saturation_margin_db) - : apm_data_dumper_(apm_data_dumper), - last_margin_(GetInitialSaturationMarginDb()), - extra_saturation_margin_db_(extra_saturation_margin_db) {} - -void SaturationProtector::UpdateMargin( - const VadWithLevel::LevelAndProbability& vad_data, - float last_speech_level_estimate) { - peak_enveloper_.Process(vad_data.speech_peak_dbfs); - const float delayed_peak_dbfs = peak_enveloper_.Query(); - const float difference_db = delayed_peak_dbfs - last_speech_level_estimate; - - if (last_margin_ < difference_db) { - last_margin_ = last_margin_ * kSaturationProtectorAttackConstant + - difference_db * (1.f - kSaturationProtectorAttackConstant); - } else { - last_margin_ = last_margin_ * kSaturationProtectorDecayConstant + - difference_db * (1.f - kSaturationProtectorDecayConstant); + // Decay. 
+ state.margin_db = state.margin_db * kSaturationProtectorDecayConstant + + difference_db * (1.f - kSaturationProtectorDecayConstant); } - last_margin_ = rtc::SafeClamp(last_margin_, 12.f, 25.f); -} - -float SaturationProtector::LastMargin() const { - return last_margin_ + extra_saturation_margin_db_; -} - -void SaturationProtector::Reset() { - peak_enveloper_ = PeakEnveloper(); -} - -void SaturationProtector::DebugDumpEstimate() const { - if (apm_data_dumper_) { - apm_data_dumper_->DumpRaw( - "agc2_adaptive_saturation_protector_delayed_peak_dbfs", - peak_enveloper_.Query()); - apm_data_dumper_->DumpRaw("agc2_adaptive_saturation_margin_db", - last_margin_); - } + state.margin_db = + rtc::SafeClamp(state.margin_db, kMinMarginDb, kMaxMarginDb); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h index e63746907..88be91a79 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h @@ -13,59 +13,70 @@ #include +#include "absl/types/optional.h" #include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/agc2/vad_with_level.h" +#include "rtc_base/numerics/safe_compare.h" namespace webrtc { +namespace saturation_protector_impl { -class ApmDataDumper; - -class SaturationProtector { +// Ring buffer which only supports (i) push back and (ii) read oldest item. +class RingBuffer { public: - explicit SaturationProtector(ApmDataDumper* apm_data_dumper); + bool operator==(const RingBuffer& b) const; + inline bool operator!=(const RingBuffer& b) const { return !(*this == b); } - SaturationProtector(ApmDataDumper* apm_data_dumper, - float extra_saturation_margin_db); + // Maximum number of values that the buffer can contain. 
+ int Capacity() const { return buffer_.size(); } + // Number of values in the buffer. + int Size() const { return size_; } - // Update and return margin estimate. This method should be called - // whenever a frame is reliably classified as 'speech'. - // - // Returned value is in DB scale. - void UpdateMargin(const VadWithLevel::LevelAndProbability& vad_data, - float last_speech_level_estimate_dbfs); - - // Returns latest computed margin. Used in cases when speech is not - // detected. - float LastMargin() const; - - // Resets the internal memory. void Reset(); - - void DebugDumpEstimate() const; + // Pushes back `v`. If the buffer is full, the oldest value is replaced. + void PushBack(float v); + // Returns the oldest item in the buffer. Returns an empty value if the + // buffer is empty. + absl::optional Front() const; private: - // Computes a delayed envelope of peaks. - class PeakEnveloper { - public: - PeakEnveloper(); - void Process(float frame_peak_dbfs); - - float Query() const; - - private: - size_t speech_time_in_estimate_ms_ = 0; - float current_superframe_peak_dbfs_ = -90.f; - size_t elements_in_buffer_ = 0; - std::array peak_delay_buffer_ = {}; - }; - - ApmDataDumper* apm_data_dumper_; - - float last_margin_; - PeakEnveloper peak_enveloper_; - const float extra_saturation_margin_db_; + inline int FrontIndex() const { + return rtc::SafeEq(size_, buffer_.size()) ? next_ : 0; + } + // `buffer_` has `size_` elements (up to the size of `buffer_`) and `next_` is + // the position where the next new value is written in `buffer_`. + std::array buffer_; + int next_ = 0; + int size_ = 0; }; +} // namespace saturation_protector_impl + +// Saturation protector state. Exposed publicly for check-pointing and restore +// ops. +struct SaturationProtectorState { + bool operator==(const SaturationProtectorState& s) const; + inline bool operator!=(const SaturationProtectorState& s) const { + return !(*this == s); + } + + float margin_db; // Recommended margin. 
+ saturation_protector_impl::RingBuffer peak_delay_buffer; + float max_peaks_dbfs; + int time_since_push_ms; // Time since the last ring buffer push operation. +}; + +// Resets the saturation protector state. +void ResetSaturationProtectorState(float initial_margin_db, + SaturationProtectorState& state); + +// Updates `state` by analyzing the estimated speech level `speech_level_dbfs` +// and the peak power `speech_peak_dbfs` for an observed frame which is +// reliably classified as "speech". `state` must not be modified without calling +// this function. +void UpdateSaturationProtectorState(float speech_peak_dbfs, + float speech_level_dbfs, + SaturationProtectorState& state); + } // namespace webrtc #endif // MODULES_AUDIO_PROCESSING_AGC2_SATURATION_PROTECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc index 38334f7ec..a06413d16 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc @@ -26,7 +26,7 @@ namespace { bool IsSse2Available() { #if defined(WEBRTC_ARCH_X86_FAMILY) - return WebRtc_GetCPUInfo(kSSE2) != 0; + return GetCPUInfo(kSSE2) != 0; #else return false; #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.h index ae288ae77..20cce920f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.h @@ -18,7 +18,6 @@ #include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h" #include "modules/audio_processing/agc2/down_sampler.h" #include "modules/audio_processing/agc2/noise_spectrum_estimator.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { 
@@ -30,6 +29,11 @@ class SignalClassifier { enum class SignalType { kNonStationary, kStationary }; explicit SignalClassifier(ApmDataDumper* data_dumper); + + SignalClassifier() = delete; + SignalClassifier(const SignalClassifier&) = delete; + SignalClassifier& operator=(const SignalClassifier&) = delete; + ~SignalClassifier(); void Initialize(int sample_rate_hz); @@ -39,6 +43,11 @@ class SignalClassifier { class FrameExtender { public: FrameExtender(size_t frame_size, size_t extended_frame_size); + + FrameExtender() = delete; + FrameExtender(const FrameExtender&) = delete; + FrameExtender& operator=(const FrameExtender&) = delete; + ~FrameExtender(); void ExtendFrame(rtc::ArrayView x, @@ -46,8 +55,6 @@ class SignalClassifier { private: std::vector x_old_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameExtender); }; ApmDataDumper* const data_dumper_; @@ -59,7 +66,6 @@ class SignalClassifier { int consistent_classification_counter_; SignalType last_signal_type_; const OouraFft ooura_fft_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(SignalClassifier); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc index d4ec2ced9..3dbb55732 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc @@ -16,55 +16,99 @@ #include "api/array_view.h" #include "common_audio/include/audio_util.h" +#include "common_audio/resampler/include/push_resampler.h" +#include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" +#include "modules/audio_processing/agc2/rnn_vad/features_extraction.h" +#include "modules/audio_processing/agc2/rnn_vad/rnn.h" +#include "rtc_base/checks.h" namespace webrtc { - namespace { -float ProcessForPeak(AudioFrameView frame) { - float current_max = 0; - for (const 
auto& x : frame.channel(0)) { - current_max = std::max(std::fabs(x), current_max); + +using VoiceActivityDetector = VadLevelAnalyzer::VoiceActivityDetector; + +// Default VAD that combines a resampler and the RNN VAD. +// Computes the speech probability on the first channel. +class Vad : public VoiceActivityDetector { + public: + Vad() = default; + Vad(const Vad&) = delete; + Vad& operator=(const Vad&) = delete; + ~Vad() = default; + + float ComputeProbability(AudioFrameView frame) override { + // The source number of channels is 1, because we always use the 1st + // channel. + resampler_.InitializeIfNeeded( + /*sample_rate_hz=*/static_cast(frame.samples_per_channel() * 100), + rnn_vad::kSampleRate24kHz, + /*num_channels=*/1); + + std::array work_frame; + // Feed the 1st channel to the resampler. + resampler_.Resample(frame.channel(0).data(), frame.samples_per_channel(), + work_frame.data(), rnn_vad::kFrameSize10ms24kHz); + + std::array feature_vector; + const bool is_silence = features_extractor_.CheckSilenceComputeFeatures( + work_frame, feature_vector); + return rnn_vad_.ComputeVadProbability(feature_vector, is_silence); + } + + private: + PushResampler resampler_; + rnn_vad::FeaturesExtractor features_extractor_; + rnn_vad::RnnBasedVad rnn_vad_; +}; + +// Returns an updated version of `p_old` by using instant decay and the given +// `attack` on a new VAD probability value `p_new`. +float SmoothedVadProbability(float p_old, float p_new, float attack) { + RTC_DCHECK_GT(attack, 0.f); + RTC_DCHECK_LE(attack, 1.f); + if (p_new < p_old || attack == 1.f) { + // Instant decay (or no smoothing). + return p_new; + } else { + // Attack phase. 
+ return attack * p_new + (1.f - attack) * p_old; } - return current_max; } -float ProcessForRms(AudioFrameView frame) { - float rms = 0; - for (const auto& x : frame.channel(0)) { - rms += x * x; - } - return std::sqrt(rms / frame.samples_per_channel()); -} } // namespace -VadWithLevel::VadWithLevel() = default; -VadWithLevel::~VadWithLevel() = default; +VadLevelAnalyzer::VadLevelAnalyzer() + : VadLevelAnalyzer(kDefaultSmoothedVadProbabilityAttack, + std::make_unique()) {} -VadWithLevel::LevelAndProbability VadWithLevel::AnalyzeFrame( - AudioFrameView frame) { - SetSampleRate(static_cast(frame.samples_per_channel() * 100)); - std::array work_frame; - // Feed the 1st channel to the resampler. - resampler_.Resample(frame.channel(0).data(), frame.samples_per_channel(), - work_frame.data(), rnn_vad::kFrameSize10ms24kHz); +VadLevelAnalyzer::VadLevelAnalyzer(float vad_probability_attack) + : VadLevelAnalyzer(vad_probability_attack, std::make_unique()) {} - std::array feature_vector; - - const bool is_silence = features_extractor_.CheckSilenceComputeFeatures( - work_frame, feature_vector); - const float vad_probability = - rnn_vad_.ComputeVadProbability(feature_vector, is_silence); - return LevelAndProbability(vad_probability, - FloatS16ToDbfs(ProcessForRms(frame)), - FloatS16ToDbfs(ProcessForPeak(frame))); +VadLevelAnalyzer::VadLevelAnalyzer(float vad_probability_attack, + std::unique_ptr vad) + : vad_(std::move(vad)), vad_probability_attack_(vad_probability_attack) { + RTC_DCHECK(vad_); } -void VadWithLevel::SetSampleRate(int sample_rate_hz) { - // The source number of channels in 1, because we always use the 1st - // channel. - resampler_.InitializeIfNeeded(sample_rate_hz, rnn_vad::kSampleRate24kHz, - 1 /* num_channels */); +VadLevelAnalyzer::~VadLevelAnalyzer() = default; + +VadLevelAnalyzer::Result VadLevelAnalyzer::AnalyzeFrame( + AudioFrameView frame) { + // Compute levels. 
+ float peak = 0.f; + float rms = 0.f; + for (const auto& x : frame.channel(0)) { + peak = std::max(std::fabs(x), peak); + rms += x * x; + } + // Compute smoothed speech probability. + vad_probability_ = SmoothedVadProbability( + /*p_old=*/vad_probability_, /*p_new=*/vad_->ComputeProbability(frame), + vad_probability_attack_); + return {vad_probability_, + FloatS16ToDbfs(std::sqrt(rms / frame.samples_per_channel())), + FloatS16ToDbfs(peak)}; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h index b0ad868d4..ce72cdc75 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h @@ -11,36 +11,46 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_VAD_WITH_LEVEL_H_ #define MODULES_AUDIO_PROCESSING_AGC2_VAD_WITH_LEVEL_H_ -#include "common_audio/resampler/include/push_resampler.h" -#include "modules/audio_processing/agc2/rnn_vad/features_extraction.h" -#include "modules/audio_processing/agc2/rnn_vad/rnn.h" +#include + #include "modules/audio_processing/include/audio_frame_view.h" namespace webrtc { -class VadWithLevel { + +// Class to analyze voice activity and audio levels. +class VadLevelAnalyzer { public: - struct LevelAndProbability { - constexpr LevelAndProbability(float prob, float rms, float peak) - : speech_probability(prob), - speech_rms_dbfs(rms), - speech_peak_dbfs(peak) {} - LevelAndProbability() = default; - float speech_probability = 0; - float speech_rms_dbfs = 0; // Root mean square in decibels to full-scale. - float speech_peak_dbfs = 0; + struct Result { + float speech_probability; // Range: [0, 1]. + float rms_dbfs; // Root mean square power (dBFS). + float peak_dbfs; // Peak power (dBFS). }; - VadWithLevel(); - ~VadWithLevel(); + // Voice Activity Detector (VAD) interface. 
+ class VoiceActivityDetector { + public: + virtual ~VoiceActivityDetector() = default; + // Analyzes an audio frame and returns the speech probability. + virtual float ComputeProbability(AudioFrameView frame) = 0; + }; - LevelAndProbability AnalyzeFrame(AudioFrameView frame); + // Ctor. Uses the default VAD. + VadLevelAnalyzer(); + explicit VadLevelAnalyzer(float vad_probability_attack); + // Ctor. Uses a custom `vad`. + VadLevelAnalyzer(float vad_probability_attack, + std::unique_ptr vad); + VadLevelAnalyzer(const VadLevelAnalyzer&) = delete; + VadLevelAnalyzer& operator=(const VadLevelAnalyzer&) = delete; + ~VadLevelAnalyzer(); + + // Computes the speech probability and the level for `frame`. + Result AnalyzeFrame(AudioFrameView frame); private: - void SetSampleRate(int sample_rate_hz); - - rnn_vad::RnnBasedVad rnn_vad_; - rnn_vad::FeaturesExtractor features_extractor_; - PushResampler resampler_; + std::unique_ptr vad_; + const float vad_probability_attack_; + float vad_probability_ = 0.f; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc index e89bbecc6..f55c9158f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc @@ -35,16 +35,10 @@ AudioProcessing* AudioProcessingBuilder::Create(const webrtc::Config& config) { #else // Standard implementation. 
- AudioProcessingImpl* apm = new rtc::RefCountedObject( + return new rtc::RefCountedObject( config, std::move(capture_post_processing_), std::move(render_pre_processing_), std::move(echo_control_factory_), std::move(echo_detector_), std::move(capture_analyzer_)); - if (apm->Initialize() != AudioProcessing::kNoError) { - delete apm; - apm = nullptr; - } - return apm; - #endif } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc index b155bdbad..37112f088 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc @@ -114,6 +114,7 @@ GainControl::Mode Agc1ConfigModeToInterfaceMode( case Agc1Config::kFixedDigital: return GainControl::kFixedDigital; } + RTC_CHECK_NOTREACHED(); } // Maximum lengths that frame of samples being passed from the render side to @@ -125,6 +126,7 @@ static const size_t kMaxAllowedValuesOfSamplesPerFrame = 480; // TODO(peah): Decrease this once we properly handle hugely unbalanced // reverse and forward call numbers. static const size_t kMaxNumFramesToBuffer = 100; + } // namespace // Throughout webrtc, it's assumed that success is represented by zero. @@ -307,6 +309,8 @@ AudioProcessingImpl::AudioProcessingImpl( config_.gain_controller1.analog_gain_controller.enable_digital_adaptive = !config.Get().digital_adaptive_disabled; #endif + + Initialize(); } AudioProcessingImpl::~AudioProcessingImpl() = default; @@ -315,7 +319,8 @@ int AudioProcessingImpl::Initialize() { // Run in a single-threaded manner during initialization. 
MutexLock lock_render(&mutex_render_); MutexLock lock_capture(&mutex_capture_); - return InitializeLocked(); + InitializeLocked(); + return kNoError; } int AudioProcessingImpl::Initialize(int capture_input_sample_rate_hz, @@ -356,7 +361,7 @@ int AudioProcessingImpl::MaybeInitializeRender( return InitializeLocked(processing_config); } -int AudioProcessingImpl::InitializeLocked() { +void AudioProcessingImpl::InitializeLocked() { UpdateActiveSubmoduleStates(); const int render_audiobuffer_sample_rate_hz = @@ -425,7 +430,6 @@ int AudioProcessingImpl::InitializeLocked() { if (aec_dump_) { aec_dump_->WriteInitMessage(formats_.api_format, rtc::TimeUTCMillis()); } - return kNoError; } int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { @@ -519,7 +523,8 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { capture_nonlocked_.capture_processing_format.sample_rate_hz(); } - return InitializeLocked(); + InitializeLocked(); + return kNoError; } void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { @@ -542,34 +547,10 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { config_.echo_canceller.mobile_mode != config.echo_canceller.mobile_mode; const bool agc1_config_changed = - config_.gain_controller1.enabled != config.gain_controller1.enabled || - config_.gain_controller1.mode != config.gain_controller1.mode || - config_.gain_controller1.target_level_dbfs != - config.gain_controller1.target_level_dbfs || - config_.gain_controller1.compression_gain_db != - config.gain_controller1.compression_gain_db || - config_.gain_controller1.enable_limiter != - config.gain_controller1.enable_limiter || - config_.gain_controller1.analog_level_minimum != - config.gain_controller1.analog_level_minimum || - config_.gain_controller1.analog_level_maximum != - config.gain_controller1.analog_level_maximum || - config_.gain_controller1.analog_gain_controller.enabled != - 
config.gain_controller1.analog_gain_controller.enabled || - config_.gain_controller1.analog_gain_controller.startup_min_volume != - config.gain_controller1.analog_gain_controller.startup_min_volume || - config_.gain_controller1.analog_gain_controller.clipped_level_min != - config.gain_controller1.analog_gain_controller.clipped_level_min || - config_.gain_controller1.analog_gain_controller - .enable_agc2_level_estimator != - config.gain_controller1.analog_gain_controller - .enable_agc2_level_estimator || - config_.gain_controller1.analog_gain_controller.enable_digital_adaptive != - config.gain_controller1.analog_gain_controller - .enable_digital_adaptive; + config_.gain_controller1 != config.gain_controller1; const bool agc2_config_changed = - config_.gain_controller2.enabled != config.gain_controller2.enabled; + config_.gain_controller2 != config.gain_controller2; const bool voice_detection_config_changed = config_.voice_detection.enabled != config.voice_detection.enabled; @@ -608,10 +589,8 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { const bool config_ok = GainController2::Validate(config_.gain_controller2); if (!config_ok) { - RTC_LOG(LS_ERROR) << "AudioProcessing module config error\n" - "Gain Controller 2: " - << GainController2::ToString(config_.gain_controller2) - << "\nReverting to default parameter set"; + RTC_LOG(LS_ERROR) + << "Invalid Gain Controller 2 config; using the default config."; config_.gain_controller2 = AudioProcessing::Config::GainController2(); } @@ -638,9 +617,6 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { } } -// TODO(webrtc:5298): Remove. 
-void AudioProcessingImpl::SetExtraOptions(const webrtc::Config& config) {} - void AudioProcessingImpl::OverrideSubmoduleCreationForTesting( const ApmSubmoduleCreationOverrides& overrides) { MutexLock lock(&mutex_capture_); @@ -705,6 +681,7 @@ void AudioProcessingImpl::SetRuntimeSetting(RuntimeSetting setting) { case RuntimeSetting::Type::kCapturePreGain: case RuntimeSetting::Type::kCaptureCompressionGain: case RuntimeSetting::Type::kCaptureFixedPostGain: + case RuntimeSetting::Type::kCaptureOutputUsed: capture_runtime_settings_enqueuer_.Enqueue(setting); return; case RuntimeSetting::Type::kPlayoutVolumeChange: @@ -731,15 +708,18 @@ AudioProcessingImpl::RuntimeSettingEnqueuer::~RuntimeSettingEnqueuer() = void AudioProcessingImpl::RuntimeSettingEnqueuer::Enqueue( RuntimeSetting setting) { - size_t remaining_attempts = 10; + int remaining_attempts = 10; while (!runtime_settings_.Insert(&setting) && remaining_attempts-- > 0) { RuntimeSetting setting_to_discard; - if (runtime_settings_.Remove(&setting_to_discard)) + if (runtime_settings_.Remove(&setting_to_discard)) { RTC_LOG(LS_ERROR) << "The runtime settings queue is full. Oldest setting discarded."; + } } - if (remaining_attempts == 0) + if (remaining_attempts == 0) { + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.ApmRuntimeSettingCannotEnqueue", 1); RTC_LOG(LS_ERROR) << "Cannot enqueue a new runtime setting."; + } } int AudioProcessingImpl::MaybeInitializeCapture( @@ -865,6 +845,10 @@ void AudioProcessingImpl::HandleCaptureRuntimeSettings() { case RuntimeSetting::Type::kNotSpecified: RTC_NOTREACHED(); break; + case RuntimeSetting::Type::kCaptureOutputUsed: + // TODO(b/154437967): Add support for reducing complexity when it is + // known that the capture output will not be used. 
+ break; } } } @@ -886,6 +870,7 @@ void AudioProcessingImpl::HandleRenderRuntimeSettings() { case RuntimeSetting::Type::kCapturePreGain: // fall-through case RuntimeSetting::Type::kCaptureCompressionGain: // fall-through case RuntimeSetting::Type::kCaptureFixedPostGain: // fall-through + case RuntimeSetting::Type::kCaptureOutputUsed: // fall-through case RuntimeSetting::Type::kNotSpecified: RTC_NOTREACHED(); break; @@ -1491,8 +1476,8 @@ bool AudioProcessingImpl::GetLinearAecOutput( rtc::ArrayView channel_view = rtc::ArrayView(linear_aec_buffer->channels_const()[ch], linear_aec_buffer->num_frames()); - std::copy(channel_view.begin(), channel_view.end(), - linear_output[ch].begin()); + FloatS16ToFloat(channel_view.data(), channel_view.size(), + linear_output[ch].data()); } return true; } @@ -1844,9 +1829,8 @@ void AudioProcessingImpl::InitializeNoiseSuppressor() { return NsConfig::SuppressionLevel::k18dB; case NoiseSuppresionConfig::kVeryHigh: return NsConfig::SuppressionLevel::k21dB; - default: - RTC_NOTREACHED(); } + RTC_CHECK_NOTREACHED(); }; NsConfig cfg; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h index dfd5f63c5..d0eec0eec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h @@ -72,7 +72,6 @@ class AudioProcessingImpl : public AudioProcessing { ChannelLayout render_input_layout) override; int Initialize(const ProcessingConfig& processing_config) override; void ApplyConfig(const AudioProcessing::Config& config) override; - void SetExtraOptions(const webrtc::Config& config) override; bool CreateAndAttachAecDump(const std::string& file_name, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue) override; @@ -140,7 +139,7 @@ class AudioProcessingImpl : public AudioProcessing { protected: // Overridden in a mock. 
- virtual int InitializeLocked() + virtual void InitializeLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_); private: diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc index b15a266cd..6561bebc6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc @@ -65,8 +65,7 @@ void GainController2::NotifyAnalogLevel(int level) { void GainController2::ApplyConfig( const AudioProcessing::Config::GainController2& config) { - RTC_DCHECK(Validate(config)) - << " the invalid config was " << ToString(config); + RTC_DCHECK(Validate(config)); config_ = config; if (config.fixed_digital.gain_db != config_.fixed_digital.gain_db) { @@ -84,40 +83,19 @@ void GainController2::ApplyConfig( bool GainController2::Validate( const AudioProcessing::Config::GainController2& config) { - return config.fixed_digital.gain_db >= 0.f && - config.fixed_digital.gain_db < 50.f && - config.adaptive_digital.extra_saturation_margin_db >= 0.f && - config.adaptive_digital.extra_saturation_margin_db <= 100.f; -} - -std::string GainController2::ToString( - const AudioProcessing::Config::GainController2& config) { - rtc::StringBuilder ss; - std::string adaptive_digital_level_estimator; - using LevelEstimatorType = - AudioProcessing::Config::GainController2::LevelEstimator; - switch (config.adaptive_digital.level_estimator) { - case LevelEstimatorType::kRms: - adaptive_digital_level_estimator = "RMS"; - break; - case LevelEstimatorType::kPeak: - adaptive_digital_level_estimator = "peak"; - break; - } - // clang-format off - // clang formatting doesn't respect custom nested style. - ss << "{" - "enabled: " << (config.enabled ? 
"true" : "false") << ", " - "fixed_digital: {gain_db: " << config.fixed_digital.gain_db << "}, " - "adaptive_digital: {" - "enabled: " - << (config.adaptive_digital.enabled ? "true" : "false") << ", " - "level_estimator: " << adaptive_digital_level_estimator << ", " - "extra_saturation_margin_db:" - << config.adaptive_digital.extra_saturation_margin_db << "}" - "}"; - // clang-format on - return ss.Release(); + const auto& fixed = config.fixed_digital; + const auto& adaptive = config.adaptive_digital; + return fixed.gain_db >= 0.f && fixed.gain_db < 50.f && + adaptive.vad_probability_attack > 0.f && + adaptive.vad_probability_attack <= 1.f && + adaptive.level_estimator_adjacent_speech_frames_threshold >= 1 && + adaptive.initial_saturation_margin_db >= 0.f && + adaptive.initial_saturation_margin_db <= 100.f && + adaptive.extra_saturation_margin_db >= 0.f && + adaptive.extra_saturation_margin_db <= 100.f && + adaptive.gain_applier_adjacent_speech_frames_threshold >= 1 && + adaptive.max_gain_change_db_per_second > 0.f && + adaptive.max_output_noise_level_dbfs <= 0.f; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h index 7ed310ebf..da27fdcc6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h @@ -38,8 +38,6 @@ class GainController2 { void ApplyConfig(const AudioProcessing::Config::GainController2& config); static bool Validate(const AudioProcessing::Config::GainController2& config); - static std::string ToString( - const AudioProcessing::Config::GainController2& config); private: static int instance_count_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc index 88544159a..04336b611 100644 
--- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc @@ -16,6 +16,9 @@ namespace webrtc { namespace { +using Agc1Config = AudioProcessing::Config::GainController1; +using Agc2Config = AudioProcessing::Config::GainController2; + std::string NoiseSuppressionLevelToString( const AudioProcessing::Config::NoiseSuppression::Level& level) { switch (level) { @@ -28,28 +31,30 @@ std::string NoiseSuppressionLevelToString( case AudioProcessing::Config::NoiseSuppression::Level::kVeryHigh: return "VeryHigh"; } + RTC_CHECK_NOTREACHED(); } -std::string GainController1ModeToString( - const AudioProcessing::Config::GainController1::Mode& mode) { +std::string GainController1ModeToString(const Agc1Config::Mode& mode) { switch (mode) { - case AudioProcessing::Config::GainController1::Mode::kAdaptiveAnalog: + case Agc1Config::Mode::kAdaptiveAnalog: return "AdaptiveAnalog"; - case AudioProcessing::Config::GainController1::Mode::kAdaptiveDigital: + case Agc1Config::Mode::kAdaptiveDigital: return "AdaptiveDigital"; - case AudioProcessing::Config::GainController1::Mode::kFixedDigital: + case Agc1Config::Mode::kFixedDigital: return "FixedDigital"; } + RTC_CHECK_NOTREACHED(); } std::string GainController2LevelEstimatorToString( - const AudioProcessing::Config::GainController2::LevelEstimator& level) { + const Agc2Config::LevelEstimator& level) { switch (level) { - case AudioProcessing::Config::GainController2::LevelEstimator::kRms: + case Agc2Config::LevelEstimator::kRms: return "Rms"; - case AudioProcessing::Config::GainController2::LevelEstimator::kPeak: + case Agc2Config::LevelEstimator::kPeak: return "Peak"; } + RTC_CHECK_NOTREACHED(); } int GetDefaultMaxInternalRate() { @@ -70,20 +75,60 @@ void CustomProcessing::SetRuntimeSetting( AudioProcessing::Config::Pipeline::Pipeline() : maximum_internal_processing_rate(GetDefaultMaxInternalRate()) {} +bool 
Agc1Config::operator==(const Agc1Config& rhs) const { + const auto& analog_lhs = analog_gain_controller; + const auto& analog_rhs = rhs.analog_gain_controller; + return enabled == rhs.enabled && mode == rhs.mode && + target_level_dbfs == rhs.target_level_dbfs && + compression_gain_db == rhs.compression_gain_db && + enable_limiter == rhs.enable_limiter && + analog_level_minimum == rhs.analog_level_minimum && + analog_level_maximum == rhs.analog_level_maximum && + analog_lhs.enabled == analog_rhs.enabled && + analog_lhs.startup_min_volume == analog_rhs.startup_min_volume && + analog_lhs.clipped_level_min == analog_rhs.clipped_level_min && + analog_lhs.enable_agc2_level_estimator == + analog_rhs.enable_agc2_level_estimator && + analog_lhs.enable_digital_adaptive == + analog_rhs.enable_digital_adaptive; +} + +bool Agc2Config::operator==(const Agc2Config& rhs) const { + const auto& adaptive_lhs = adaptive_digital; + const auto& adaptive_rhs = rhs.adaptive_digital; + + return enabled == rhs.enabled && + fixed_digital.gain_db == rhs.fixed_digital.gain_db && + adaptive_lhs.enabled == adaptive_rhs.enabled && + adaptive_lhs.vad_probability_attack == + adaptive_rhs.vad_probability_attack && + adaptive_lhs.level_estimator == adaptive_rhs.level_estimator && + adaptive_lhs.level_estimator_adjacent_speech_frames_threshold == + adaptive_rhs.level_estimator_adjacent_speech_frames_threshold && + adaptive_lhs.use_saturation_protector == + adaptive_rhs.use_saturation_protector && + adaptive_lhs.initial_saturation_margin_db == + adaptive_rhs.initial_saturation_margin_db && + adaptive_lhs.extra_saturation_margin_db == + adaptive_rhs.extra_saturation_margin_db && + adaptive_lhs.gain_applier_adjacent_speech_frames_threshold == + adaptive_rhs.gain_applier_adjacent_speech_frames_threshold && + adaptive_lhs.max_gain_change_db_per_second == + adaptive_rhs.max_gain_change_db_per_second && + adaptive_lhs.max_output_noise_level_dbfs == + adaptive_rhs.max_output_noise_level_dbfs; +} + std::string 
AudioProcessing::Config::ToString() const { - char buf[1024]; + char buf[2048]; rtc::SimpleStringBuilder builder(buf); builder << "AudioProcessing::Config{ " "pipeline: {" "maximum_internal_processing_rate: " << pipeline.maximum_internal_processing_rate << ", multi_channel_render: " << pipeline.multi_channel_render - << ", " - ", multi_channel_capture: " - << pipeline.multi_channel_capture - << "}, " - "pre_amplifier: { enabled: " - << pre_amplifier.enabled + << ", multi_channel_capture: " << pipeline.multi_channel_capture + << "}, pre_amplifier: { enabled: " << pre_amplifier.enabled << ", fixed_gain_factor: " << pre_amplifier.fixed_gain_factor << " }, high_pass_filter: { enabled: " << high_pass_filter.enabled << " }, echo_canceller: { enabled: " << echo_canceller.enabled @@ -106,18 +151,29 @@ std::string AudioProcessing::Config::ToString() const { << " }, gain_controller2: { enabled: " << gain_controller2.enabled << ", fixed_digital: { gain_db: " << gain_controller2.fixed_digital.gain_db - << " }, adaptive_digital: { enabled: " - << gain_controller2.adaptive_digital.enabled << ", level_estimator: " + << "}, adaptive_digital: { enabled: " + << gain_controller2.adaptive_digital.enabled + << ", level_estimator: { type: " << GainController2LevelEstimatorToString( gain_controller2.adaptive_digital.level_estimator) - << ", use_saturation_protector: " - << gain_controller2.adaptive_digital.use_saturation_protector + << ", adjacent_speech_frames_threshold: " + << gain_controller2.adaptive_digital + .level_estimator_adjacent_speech_frames_threshold + << ", initial_saturation_margin_db: " + << gain_controller2.adaptive_digital.initial_saturation_margin_db << ", extra_saturation_margin_db: " << gain_controller2.adaptive_digital.extra_saturation_margin_db + << "}, gain_applier: { adjacent_speech_frames_threshold: " + << gain_controller2.adaptive_digital + .gain_applier_adjacent_speech_frames_threshold + << ", max_gain_change_db_per_second: " + << 
gain_controller2.adaptive_digital.max_gain_change_db_per_second + << ", max_output_noise_level_dbfs: " + << gain_controller2.adaptive_digital.max_output_noise_level_dbfs << " } }, residual_echo_detector: { enabled: " << residual_echo_detector.enabled << " }, level_estimation: { enabled: " << level_estimation.enabled - << " } }"; + << " }}}"; return builder.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h index d84318f2a..e85ac0c63 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h @@ -31,6 +31,7 @@ #include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/audio_processing/include/config.h" #include "rtc_base/arraysize.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/file_wrapper.h" @@ -104,7 +105,7 @@ struct ExperimentalAgc { // AudioProcessing::Config::TransientSuppression. // // Use to enable experimental noise suppression. It can be set in the -// constructor or using AudioProcessing::SetExtraOptions(). +// constructor. // TODO(webrtc:5298): Remove. struct ExperimentalNs { ExperimentalNs() : enabled(false) {} @@ -273,6 +274,11 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // HAL. // Recommended to be enabled on the client-side. struct GainController1 { + bool operator==(const GainController1& rhs) const; + bool operator!=(const GainController1& rhs) const { + return !(*this == rhs); + } + bool enabled = false; enum Mode { // Adaptive mode intended for use if an analog volume control is @@ -337,6 +343,11 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // first applies a fixed gain. 
The adaptive digital AGC can be turned off by // setting |adaptive_digital_mode=false|. struct GainController2 { + bool operator==(const GainController2& rhs) const; + bool operator!=(const GainController2& rhs) const { + return !(*this == rhs); + } + enum LevelEstimator { kRms, kPeak }; bool enabled = false; struct { @@ -344,9 +355,16 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { } fixed_digital; struct { bool enabled = false; + float vad_probability_attack = 1.f; LevelEstimator level_estimator = kRms; + int level_estimator_adjacent_speech_frames_threshold = 1; + // TODO(crbug.com/webrtc/7494): Remove `use_saturation_protector`. bool use_saturation_protector = true; + float initial_saturation_margin_db = 20.f; float extra_saturation_margin_db = 2.f; + int gain_applier_adjacent_speech_frames_threshold = 1; + float max_gain_change_db_per_second = 3.f; + float max_output_noise_level_dbfs = -50.f; } adaptive_digital; } gain_controller2; @@ -384,7 +402,8 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { kCaptureFixedPostGain, kPlayoutVolumeChange, kCustomRenderProcessingRuntimeSetting, - kPlayoutAudioDeviceChange + kPlayoutAudioDeviceChange, + kCaptureOutputUsed }; // Play-out audio device properties. @@ -434,6 +453,10 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { return {Type::kCustomRenderProcessingRuntimeSetting, payload}; } + static RuntimeSetting CreateCaptureOutputUsedSetting(bool payload) { + return {Type::kCaptureOutputUsed, payload}; + } + Type type() const { return type_; } // Getters do not return a value but instead modify the argument to protect // from implicit casting. 
@@ -445,6 +468,10 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { RTC_DCHECK(value); *value = value_.int_value; } + void GetBool(bool* value) const { + RTC_DCHECK(value); + *value = value_.bool_value; + } void GetPlayoutAudioDeviceInfo(PlayoutAudioDeviceInfo* value) const { RTC_DCHECK(value); *value = value_.playout_audio_device_info; @@ -463,6 +490,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { U(PlayoutAudioDeviceInfo value) : playout_audio_device_info(value) {} float float_value; int int_value; + bool bool_value; PlayoutAudioDeviceInfo playout_audio_device_info; } value_; }; @@ -478,6 +506,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // rate and number of channels) have changed. Passing updated parameters // directly to |ProcessStream()| and |ProcessReverseStream()| is permissible. // If the parameters are known at init-time though, they may be provided. + // TODO(webrtc:5298): Change to return void. virtual int Initialize() = 0; // The int16 interfaces require: @@ -505,10 +534,6 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // over the parameters in the audio processing module and is likely to change. virtual void ApplyConfig(const Config& config) = 0; - // Pass down additional options which don't have explicit setters. This - // ensures the options are applied immediately. - virtual void SetExtraOptions(const webrtc::Config& config) = 0; - // TODO(ajm): Only intended for internal use. Make private and friend the // necessary classes? 
virtual int proc_sample_rate_hz() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/config.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/config.h index 8a245864e..7fab17831 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/config.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/config.h @@ -13,7 +13,6 @@ #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -105,7 +104,6 @@ class RTC_EXPORT Config { typedef std::map OptionMap; OptionMap options_; - // RTC_DISALLOW_COPY_AND_ASSIGN Config(const Config&); void operator=(const Config&); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h index 562b23f7d..2055f7e51 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h @@ -88,10 +88,6 @@ class MockAudioProcessing : public ::testing::NiceMock { (const ProcessingConfig& processing_config), (override)); MOCK_METHOD(void, ApplyConfig, (const Config& config), (override)); - MOCK_METHOD(void, - SetExtraOptions, - (const webrtc::Config& config), - (override)); MOCK_METHOD(int, proc_sample_rate_hz, (), (const, override)); MOCK_METHOD(int, proc_split_sample_rate_hz, (), (const, override)); MOCK_METHOD(size_t, num_input_channels, (), (const, override)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h index 17a5c8771..1824fdd2a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h @@ -26,7 +26,6 
@@ #include "common_audio/wav_file.h" #include "rtc_base/checks.h" #endif -#include "rtc_base/constructor_magic.h" // Check to verify that the define is properly set. #if !defined(WEBRTC_APM_DEBUG_DUMP) || \ @@ -52,6 +51,10 @@ class ApmDataDumper { // instances of the code. explicit ApmDataDumper(int instance_index); + ApmDataDumper() = delete; + ApmDataDumper(const ApmDataDumper&) = delete; + ApmDataDumper& operator=(const ApmDataDumper&) = delete; + ~ApmDataDumper(); // Activates or deactivate the dumping functionality. @@ -277,7 +280,6 @@ class ApmDataDumper { int num_channels, WavFile::SampleFormat format); #endif - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(ApmDataDumper); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h index 25f5a3be7..74650dc82 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -26,7 +26,6 @@ #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/inter_arrival.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" @@ -78,6 +77,11 @@ class DelayBasedBwe { explicit DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, RtcEventLog* event_log, NetworkStatePredictor* network_state_predictor); + + DelayBasedBwe() = delete; + DelayBasedBwe(const DelayBasedBwe&) = delete; + DelayBasedBwe& operator=(const DelayBasedBwe&) = delete; + virtual ~DelayBasedBwe(); Result IncomingPacketFeedbackVector( @@ -143,7 +147,6 @@ class DelayBasedBwe { bool has_once_detected_overuse_; BandwidthUsage prev_state_; bool alr_limited_backoff_enabled_; - 
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DelayBasedBwe); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index b8be0982d..0a0b1801f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -96,7 +96,8 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, key_value_config_) : nullptr), bandwidth_estimation_( - std::make_unique(event_log_)), + std::make_unique(key_value_config_, + event_log_)), alr_detector_( std::make_unique(key_value_config_, config.event_log)), probe_bitrate_estimator_(new ProbeBitrateEstimator(config.event_log)), diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h index 1e4dcf62e..6dd70c896 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -33,7 +33,6 @@ #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" #include "modules/congestion_controller/goog_cc/probe_controller.h" #include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" @@ -48,6 +47,11 @@ class GoogCcNetworkController : public NetworkControllerInterface { public: GoogCcNetworkController(NetworkControllerConfig config, GoogCcConfig goog_cc_config); + + GoogCcNetworkController() = delete; + GoogCcNetworkController(const GoogCcNetworkController&) 
= delete; + GoogCcNetworkController& operator=(const GoogCcNetworkController&) = delete; + ~GoogCcNetworkController() override; // NetworkControllerInterface @@ -137,8 +141,6 @@ class GoogCcNetworkController : public NetworkControllerInterface { bool previously_in_alr_ = false; absl::optional current_data_window_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(GoogCcNetworkController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc index d2ae52840..4ca75bf26 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -19,6 +19,8 @@ #include "absl/strings/match.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/time_delta.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "rtc_base/checks.h" @@ -153,19 +155,24 @@ DataRate LinkCapacityTracker::estimate() const { return DataRate::BitsPerSec(capacity_estimate_bps_); } -RttBasedBackoff::RttBasedBackoff() - : rtt_limit_("limit", TimeDelta::Seconds(3)), +RttBasedBackoff::RttBasedBackoff(const WebRtcKeyValueConfig* key_value_config) + : disabled_("Disabled"), + configured_limit_("limit", TimeDelta::Seconds(3)), drop_fraction_("fraction", 0.8), drop_interval_("interval", TimeDelta::Seconds(1)), bandwidth_floor_("floor", DataRate::KilobitsPerSec(5)), + rtt_limit_(TimeDelta::PlusInfinity()), // By initializing this to plus infinity, we make sure that we never // trigger rtt backoff unless packet feedback is enabled. 
last_propagation_rtt_update_(Timestamp::PlusInfinity()), last_propagation_rtt_(TimeDelta::Zero()), last_packet_sent_(Timestamp::MinusInfinity()) { - ParseFieldTrial( - {&rtt_limit_, &drop_fraction_, &drop_interval_, &bandwidth_floor_}, - field_trial::FindFullName("WebRTC-Bwe-MaxRttLimit")); + ParseFieldTrial({&disabled_, &configured_limit_, &drop_fraction_, + &drop_interval_, &bandwidth_floor_}, + key_value_config->Lookup("WebRTC-Bwe-MaxRttLimit")); + if (!disabled_) { + rtt_limit_ = configured_limit_.Get(); + } } void RttBasedBackoff::UpdatePropagationRtt(Timestamp at_time, @@ -186,8 +193,11 @@ TimeDelta RttBasedBackoff::CorrectedRtt(Timestamp at_time) const { RttBasedBackoff::~RttBasedBackoff() = default; -SendSideBandwidthEstimation::SendSideBandwidthEstimation(RtcEventLog* event_log) - : lost_packets_since_last_loss_update_(0), +SendSideBandwidthEstimation::SendSideBandwidthEstimation( + const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log) + : rtt_backoff_(key_value_config), + lost_packets_since_last_loss_update_(0), expected_packets_since_last_loss_update_(0), current_target_(DataRate::Zero()), last_logged_target_(DataRate::Zero()), diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h index 241ec8c84..a13800b7f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -21,6 +21,7 @@ #include "absl/types/optional.h" #include "api/transport/network_types.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -54,17 +55,19 @@ class LinkCapacityTracker { class RttBasedBackoff { public: - RttBasedBackoff(); + explicit 
RttBasedBackoff(const WebRtcKeyValueConfig* key_value_config); ~RttBasedBackoff(); void UpdatePropagationRtt(Timestamp at_time, TimeDelta propagation_rtt); TimeDelta CorrectedRtt(Timestamp at_time) const; - FieldTrialParameter rtt_limit_; + FieldTrialFlag disabled_; + FieldTrialParameter configured_limit_; FieldTrialParameter drop_fraction_; FieldTrialParameter drop_interval_; FieldTrialParameter bandwidth_floor_; public: + TimeDelta rtt_limit_; Timestamp last_propagation_rtt_update_; TimeDelta last_propagation_rtt_; Timestamp last_packet_sent_; @@ -73,7 +76,8 @@ class RttBasedBackoff { class SendSideBandwidthEstimation { public: SendSideBandwidthEstimation() = delete; - explicit SendSideBandwidthEstimation(RtcEventLog* event_log); + SendSideBandwidthEstimation(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log); ~SendSideBandwidthEstimation(); void OnRouteChange(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc deleted file mode 100644 index 52baab06c..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/congestion_controller/goog_cc/test/goog_cc_printer.h" - -#include - -#include - -#include "absl/types/optional.h" -#include "modules/congestion_controller/goog_cc/alr_detector.h" -#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" -#include "modules/congestion_controller/goog_cc/trendline_estimator.h" -#include "modules/remote_bitrate_estimator/aimd_rate_control.h" -#include "rtc_base/checks.h" - -namespace webrtc { -namespace { -void WriteTypedValue(RtcEventLogOutput* out, int value) { - LogWriteFormat(out, "%i", value); -} -void WriteTypedValue(RtcEventLogOutput* out, double value) { - LogWriteFormat(out, "%.6f", value); -} -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { - LogWriteFormat(out, "%.0f", value ? value->bytes_per_sec() : NAN); -} -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { - LogWriteFormat(out, "%.0f", value ? value->bytes() : NAN); -} -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { - LogWriteFormat(out, "%.3f", value ? value->seconds() : NAN); -} -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { - LogWriteFormat(out, "%.3f", value ? 
value->seconds() : NAN); -} - -template -class TypedFieldLogger : public FieldLogger { - public: - TypedFieldLogger(std::string name, F&& getter) - : name_(std::move(name)), getter_(std::forward(getter)) {} - const std::string& name() const override { return name_; } - void WriteValue(RtcEventLogOutput* out) override { - WriteTypedValue(out, getter_()); - } - - private: - std::string name_; - F getter_; -}; - -template -FieldLogger* Log(std::string name, F&& getter) { - return new TypedFieldLogger(std::move(name), std::forward(getter)); -} - -} // namespace -GoogCcStatePrinter::GoogCcStatePrinter() { - for (auto* logger : CreateLoggers()) { - loggers_.emplace_back(logger); - } -} - -std::deque GoogCcStatePrinter::CreateLoggers() { - auto stable_estimate = [this] { - return DataRate::KilobitsPerSec( - controller_->delay_based_bwe_->rate_control_.link_capacity_ - .estimate_kbps_.value_or(-INFINITY)); - }; - auto rate_control_state = [this] { - return static_cast( - controller_->delay_based_bwe_->rate_control_.rate_control_state_); - }; - auto trend = [this] { - return reinterpret_cast( - controller_->delay_based_bwe_->active_delay_detector_); - }; - auto acknowledged_rate = [this] { - return controller_->acknowledged_bitrate_estimator_->bitrate(); - }; - auto loss_cont = [&] { - return &controller_->bandwidth_estimation_ - ->loss_based_bandwidth_estimation_; - }; - std::deque loggers({ - Log("time", [=] { return target_.at_time; }), - Log("rtt", [=] { return target_.network_estimate.round_trip_time; }), - Log("target", [=] { return target_.target_rate; }), - Log("stable_target", [=] { return target_.stable_target_rate; }), - Log("pacing", [=] { return pacing_.data_rate(); }), - Log("padding", [=] { return pacing_.pad_rate(); }), - Log("window", [=] { return congestion_window_; }), - Log("rate_control_state", [=] { return rate_control_state(); }), - Log("stable_estimate", [=] { return stable_estimate(); }), - Log("trendline", [=] { return trend()->prev_trend_; }), - 
Log("trendline_modified_offset", - [=] { return trend()->prev_modified_trend_; }), - Log("trendline_offset_threshold", [=] { return trend()->threshold_; }), - Log("acknowledged_rate", [=] { return acknowledged_rate(); }), - Log("est_capacity", [=] { return est_.link_capacity; }), - Log("est_capacity_dev", [=] { return est_.link_capacity_std_dev; }), - Log("est_capacity_min", [=] { return est_.link_capacity_min; }), - Log("est_cross_traffic", [=] { return est_.cross_traffic_ratio; }), - Log("est_cross_delay", [=] { return est_.cross_delay_rate; }), - Log("est_spike_delay", [=] { return est_.spike_delay_rate; }), - Log("est_pre_buffer", [=] { return est_.pre_link_buffer_delay; }), - Log("est_post_buffer", [=] { return est_.post_link_buffer_delay; }), - Log("est_propagation", [=] { return est_.propagation_delay; }), - Log("loss_ratio", [=] { return loss_cont()->last_loss_ratio_; }), - Log("loss_average", [=] { return loss_cont()->average_loss_; }), - Log("loss_average_max", [=] { return loss_cont()->average_loss_max_; }), - Log("loss_thres_inc", - [=] { return loss_cont()->loss_increase_threshold(); }), - Log("loss_thres_dec", - [=] { return loss_cont()->loss_decrease_threshold(); }), - Log("loss_dec_rate", [=] { return loss_cont()->decreased_bitrate(); }), - Log("loss_based_rate", [=] { return loss_cont()->loss_based_bitrate_; }), - Log("loss_ack_rate", - [=] { return loss_cont()->acknowledged_bitrate_max_; }), - Log("data_window", [=] { return controller_->current_data_window_; }), - Log("pushback_target", - [=] { return controller_->last_pushback_target_rate_; }), - }); - return loggers; -} -GoogCcStatePrinter::~GoogCcStatePrinter() = default; - -void GoogCcStatePrinter::PrintHeaders(RtcEventLogOutput* log) { - int ix = 0; - for (const auto& logger : loggers_) { - if (ix++) - log->Write(" "); - log->Write(logger->name()); - } - log->Write("\n"); - log->Flush(); -} - -void GoogCcStatePrinter::PrintState(RtcEventLogOutput* log, - GoogCcNetworkController* controller, 
- Timestamp at_time) { - controller_ = controller; - auto state_update = controller_->GetNetworkState(at_time); - target_ = state_update.target_rate.value(); - pacing_ = state_update.pacer_config.value(); - if (state_update.congestion_window) - congestion_window_ = *state_update.congestion_window; - if (controller_->network_estimator_) { - est_ = controller_->network_estimator_->GetCurrentEstimate().value_or( - NetworkStateEstimate()); - } - - int ix = 0; - for (const auto& logger : loggers_) { - if (ix++) - log->Write(" "); - logger->WriteValue(log); - } - - log->Write("\n"); - log->Flush(); -} - -GoogCcDebugFactory::GoogCcDebugFactory() - : GoogCcDebugFactory(GoogCcFactoryConfig()) {} - -GoogCcDebugFactory::GoogCcDebugFactory(GoogCcFactoryConfig config) - : GoogCcNetworkControllerFactory(std::move(config)) {} - -std::unique_ptr GoogCcDebugFactory::Create( - NetworkControllerConfig config) { - RTC_CHECK(controller_ == nullptr); - auto controller = GoogCcNetworkControllerFactory::Create(config); - controller_ = static_cast(controller.get()); - return controller; -} - -void GoogCcDebugFactory::PrintState(const Timestamp at_time) { - if (controller_ && log_writer_) { - printer_.PrintState(log_writer_.get(), controller_, at_time); - } -} - -void GoogCcDebugFactory::AttachWriter( - std::unique_ptr log_writer) { - if (log_writer) { - log_writer_ = std::move(log_writer); - printer_.PrintHeaders(log_writer_.get()); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h deleted file mode 100644 index 3eee7814c..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ -#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ - -#include -#include -#include - -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/transport/goog_cc_factory.h" -#include "api/transport/network_control.h" -#include "api/transport/network_types.h" -#include "api/units/timestamp.h" -#include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" -#include "test/logging/log_writer.h" - -namespace webrtc { - -class FieldLogger { - public: - virtual ~FieldLogger() = default; - virtual const std::string& name() const = 0; - virtual void WriteValue(RtcEventLogOutput* out) = 0; -}; - -class GoogCcStatePrinter { - public: - GoogCcStatePrinter(); - GoogCcStatePrinter(const GoogCcStatePrinter&) = delete; - GoogCcStatePrinter& operator=(const GoogCcStatePrinter&) = delete; - ~GoogCcStatePrinter(); - - void PrintHeaders(RtcEventLogOutput* log); - void PrintState(RtcEventLogOutput* log, - GoogCcNetworkController* controller, - Timestamp at_time); - - private: - std::deque CreateLoggers(); - std::deque> loggers_; - - GoogCcNetworkController* controller_ = nullptr; - TargetTransferRate target_; - PacerConfig pacing_; - DataSize congestion_window_ = DataSize::PlusInfinity(); - NetworkStateEstimate est_; -}; - -class GoogCcDebugFactory : public GoogCcNetworkControllerFactory { - public: - GoogCcDebugFactory(); - explicit GoogCcDebugFactory(GoogCcFactoryConfig config); - std::unique_ptr Create( - NetworkControllerConfig config) override; - - void PrintState(const Timestamp at_time); - - void AttachWriter(std::unique_ptr log_writer); - - 
private: - GoogCcStatePrinter printer_; - GoogCcNetworkController* controller_ = nullptr; - std::unique_ptr log_writer_; -}; -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h index 6cd8be39a..034f2e951 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -18,7 +18,6 @@ #include "api/transport/network_control.h" #include "modules/include/module.h" #include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -66,6 +65,11 @@ class ReceiveSideCongestionController : public CallStatsObserver, public: WrappingBitrateEstimator(RemoteBitrateObserver* observer, Clock* clock); + WrappingBitrateEstimator() = delete; + WrappingBitrateEstimator(const WrappingBitrateEstimator&) = delete; + WrappingBitrateEstimator& operator=(const WrappingBitrateEstimator&) = + delete; + ~WrappingBitrateEstimator() override; void IncomingPacket(int64_t arrival_time_ms, @@ -96,8 +100,6 @@ class ReceiveSideCongestionController : public CallStatsObserver, bool using_absolute_send_time_; uint32_t packets_since_absolute_send_time_; int min_bitrate_bps_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WrappingBitrateEstimator); }; const FieldTrialBasedConfig field_trial_config_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/OWNERS deleted file mode 100644 index eaa671cb7..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/OWNERS +++ /dev/null @@ -1,2 
+0,0 @@ -jamiewalch@chromium.org -sergeyu@chromium.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc deleted file mode 100644 index ca3a89f49..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h" - -#include - -#include - -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/desktop_region.h" -#include "rtc_base/checks.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { - -BlankDetectorDesktopCapturerWrapper::BlankDetectorDesktopCapturerWrapper( - std::unique_ptr capturer, - RgbaColor blank_pixel) - : capturer_(std::move(capturer)), blank_pixel_(blank_pixel) { - RTC_DCHECK(capturer_); -} - -BlankDetectorDesktopCapturerWrapper::~BlankDetectorDesktopCapturerWrapper() = - default; - -void BlankDetectorDesktopCapturerWrapper::Start( - DesktopCapturer::Callback* callback) { - callback_ = callback; - capturer_->Start(this); -} - -void BlankDetectorDesktopCapturerWrapper::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) { - capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); -} - -void BlankDetectorDesktopCapturerWrapper::CaptureFrame() { - RTC_DCHECK(callback_); - capturer_->CaptureFrame(); -} - -void 
BlankDetectorDesktopCapturerWrapper::SetExcludedWindow(WindowId window) { - capturer_->SetExcludedWindow(window); -} - -bool BlankDetectorDesktopCapturerWrapper::GetSourceList(SourceList* sources) { - return capturer_->GetSourceList(sources); -} - -bool BlankDetectorDesktopCapturerWrapper::SelectSource(SourceId id) { - return capturer_->SelectSource(id); -} - -bool BlankDetectorDesktopCapturerWrapper::FocusOnSelectedSource() { - return capturer_->FocusOnSelectedSource(); -} - -bool BlankDetectorDesktopCapturerWrapper::IsOccluded(const DesktopVector& pos) { - return capturer_->IsOccluded(pos); -} - -void BlankDetectorDesktopCapturerWrapper::OnCaptureResult( - Result result, - std::unique_ptr frame) { - RTC_DCHECK(callback_); - if (result != Result::SUCCESS || non_blank_frame_received_) { - callback_->OnCaptureResult(result, std::move(frame)); - return; - } - - RTC_DCHECK(frame); - - // If nothing has been changed in current frame, we do not need to check it - // again. - if (!frame->updated_region().is_empty() || is_first_frame_) { - last_frame_is_blank_ = IsBlankFrame(*frame); - is_first_frame_ = false; - } - RTC_HISTOGRAM_BOOLEAN("WebRTC.DesktopCapture.BlankFrameDetected", - last_frame_is_blank_); - if (!last_frame_is_blank_) { - non_blank_frame_received_ = true; - callback_->OnCaptureResult(Result::SUCCESS, std::move(frame)); - return; - } - - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, - std::unique_ptr()); -} - -bool BlankDetectorDesktopCapturerWrapper::IsBlankFrame( - const DesktopFrame& frame) const { - // We will check 7489 pixels for a frame with 1024 x 768 resolution. - for (int i = 0; i < frame.size().width() * frame.size().height(); i += 105) { - const int x = i % frame.size().width(); - const int y = i / frame.size().width(); - if (!IsBlankPixel(frame, x, y)) { - return false; - } - } - - // We are verifying the pixel in the center as well. 
- return IsBlankPixel(frame, frame.size().width() / 2, - frame.size().height() / 2); -} - -bool BlankDetectorDesktopCapturerWrapper::IsBlankPixel( - const DesktopFrame& frame, - int x, - int y) const { - uint8_t* pixel_data = frame.GetFrameDataAtPos(DesktopVector(x, y)); - return RgbaColor(pixel_data) == blank_pixel_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h deleted file mode 100644 index 46ba5257f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_BLANK_DETECTOR_DESKTOP_CAPTURER_WRAPPER_H_ -#define MODULES_DESKTOP_CAPTURE_BLANK_DETECTOR_DESKTOP_CAPTURER_WRAPPER_H_ - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/rgba_color.h" -#include "modules/desktop_capture/shared_memory.h" - -namespace webrtc { - -// A DesktopCapturer wrapper detects the return value of its owned -// DesktopCapturer implementation. If sampled pixels returned by the -// DesktopCapturer implementation all equal to the blank pixel, this wrapper -// returns ERROR_TEMPORARY. If the DesktopCapturer implementation fails for too -// many times, this wrapper returns ERROR_PERMANENT. 
-class BlankDetectorDesktopCapturerWrapper final - : public DesktopCapturer, - public DesktopCapturer::Callback { - public: - // Creates BlankDetectorDesktopCapturerWrapper. BlankDesktopCapturerWrapper - // takes ownership of |capturer|. The |blank_pixel| is the unmodified color - // returned by the |capturer|. - BlankDetectorDesktopCapturerWrapper(std::unique_ptr capturer, - RgbaColor blank_pixel); - ~BlankDetectorDesktopCapturerWrapper() override; - - // DesktopCapturer interface. - void Start(DesktopCapturer::Callback* callback) override; - void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) override; - void CaptureFrame() override; - void SetExcludedWindow(WindowId window) override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - private: - // DesktopCapturer::Callback interface. - void OnCaptureResult(Result result, - std::unique_ptr frame) override; - - bool IsBlankFrame(const DesktopFrame& frame) const; - - // Detects whether pixel at (x, y) equals to |blank_pixel_|. - bool IsBlankPixel(const DesktopFrame& frame, int x, int y) const; - - const std::unique_ptr capturer_; - const RgbaColor blank_pixel_; - - // Whether a non-blank frame has been received. - bool non_blank_frame_received_ = false; - - // Whether the last frame is blank. - bool last_frame_is_blank_ = false; - - // Whether current frame is the first frame. 
- bool is_first_frame_ = true; - - DesktopCapturer::Callback* callback_ = nullptr; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_BLANK_DETECTOR_DESKTOP_CAPTURER_WRAPPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/capture_result_desktop_capturer_wrapper.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/capture_result_desktop_capturer_wrapper.cc deleted file mode 100644 index e1d4b993e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/capture_result_desktop_capturer_wrapper.cc +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/capture_result_desktop_capturer_wrapper.h" - -#include -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -CaptureResultDesktopCapturerWrapper::CaptureResultDesktopCapturerWrapper( - std::unique_ptr base_capturer, - ResultObserver* observer) - : DesktopCapturerWrapper(std::move(base_capturer)), observer_(observer) { - RTC_DCHECK(observer_); -} - -CaptureResultDesktopCapturerWrapper::~CaptureResultDesktopCapturerWrapper() = - default; - -void CaptureResultDesktopCapturerWrapper::Start(Callback* callback) { - if ((callback_ == nullptr) != (callback == nullptr)) { - if (callback) { - callback_ = callback; - base_capturer_->Start(this); - } else { - base_capturer_->Start(nullptr); - } - } - callback_ = callback; -} - -void CaptureResultDesktopCapturerWrapper::OnCaptureResult( - Result result, - std::unique_ptr frame) { - observer_->Observe(&result, &frame); - callback_->OnCaptureResult(result, std::move(frame)); -} - -} // namespace 
webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/capture_result_desktop_capturer_wrapper.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/capture_result_desktop_capturer_wrapper.h deleted file mode 100644 index 6d1d49a5e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/capture_result_desktop_capturer_wrapper.h +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_CAPTURE_RESULT_DESKTOP_CAPTURER_WRAPPER_H_ -#define MODULES_DESKTOP_CAPTURE_CAPTURE_RESULT_DESKTOP_CAPTURER_WRAPPER_H_ - -#include - -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_capturer_wrapper.h" -#include "modules/desktop_capture/desktop_frame.h" - -namespace webrtc { - -// A DesktopCapturerWrapper implementation to capture the result of -// |base_capturer|. Derived classes are expected to provide a ResultObserver -// implementation to observe the DesktopFrame returned by |base_capturer_|. -class CaptureResultDesktopCapturerWrapper : public DesktopCapturerWrapper, - public DesktopCapturer::Callback { - public: - using Callback = DesktopCapturer::Callback; - - // Provides a way to let derived classes or clients to modify the result - // returned by |base_capturer_|. - class ResultObserver { - public: - ResultObserver(); - virtual ~ResultObserver(); - - virtual void Observe(Result* result, - std::unique_ptr* frame) = 0; - }; - - // |observer| must outlive this instance and can be |this|. 
|observer| is - // guaranteed to be executed only after the constructor and before the - // destructor. - CaptureResultDesktopCapturerWrapper( - std::unique_ptr base_capturer, - ResultObserver* observer); - - ~CaptureResultDesktopCapturerWrapper() override; - - // DesktopCapturer implementations. - void Start(Callback* callback) final; - - private: - // DesktopCapturer::Callback implementation. - void OnCaptureResult(Result result, - std::unique_ptr frame) final; - - ResultObserver* const observer_; - Callback* callback_ = nullptr; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_CAPTURE_RESULT_DESKTOP_CAPTURER_WRAPPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropped_desktop_frame.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropped_desktop_frame.cc deleted file mode 100644 index 1ab035507..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropped_desktop_frame.cc +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/cropped_desktop_frame.h" - -#include -#include - -#include "modules/desktop_capture/desktop_region.h" -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -// A DesktopFrame that is a sub-rect of another DesktopFrame. 
-class CroppedDesktopFrame : public DesktopFrame { - public: - CroppedDesktopFrame(std::unique_ptr frame, - const DesktopRect& rect); - - private: - const std::unique_ptr frame_; - - RTC_DISALLOW_COPY_AND_ASSIGN(CroppedDesktopFrame); -}; - -std::unique_ptr CreateCroppedDesktopFrame( - std::unique_ptr frame, - const DesktopRect& rect) { - RTC_DCHECK(frame); - - if (!DesktopRect::MakeSize(frame->size()).ContainsRect(rect)) { - return nullptr; - } - - if (frame->size().equals(rect.size())) { - return frame; - } - - return std::unique_ptr( - new CroppedDesktopFrame(std::move(frame), rect)); -} - -CroppedDesktopFrame::CroppedDesktopFrame(std::unique_ptr frame, - const DesktopRect& rect) - : DesktopFrame(rect.size(), - frame->stride(), - frame->GetFrameDataAtPos(rect.top_left()), - frame->shared_memory()), - frame_(std::move(frame)) { - MoveFrameInfoFrom(frame_.get()); - set_top_left(frame_->top_left().add(rect.top_left())); - mutable_updated_region()->IntersectWith(rect); - mutable_updated_region()->Translate(-rect.left(), -rect.top()); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropped_desktop_frame.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropped_desktop_frame.h deleted file mode 100644 index bbc3c86ca..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropped_desktop_frame.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_CROPPED_DESKTOP_FRAME_H_ -#define MODULES_DESKTOP_CAPTURE_CROPPED_DESKTOP_FRAME_H_ - -#include - -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// Creates a DesktopFrame to contain only the area of |rect| in the original -// |frame|. -// |frame| should not be nullptr. |rect| is in |frame| coordinate, i.e. -// |frame|->top_left() does not impact the area of |rect|. -// Returns nullptr frame if |rect| is not contained by the bounds of |frame|. -std::unique_ptr RTC_EXPORT -CreateCroppedDesktopFrame(std::unique_ptr frame, - const DesktopRect& rect); - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_CROPPED_DESKTOP_FRAME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer.cc deleted file mode 100644 index bd1ba4631..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer.cc +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/cropping_window_capturer.h" - -#include - -#include - -#include "modules/desktop_capture/cropped_desktop_frame.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -CroppingWindowCapturer::CroppingWindowCapturer( - const DesktopCaptureOptions& options) - : options_(options), - callback_(NULL), - window_capturer_(DesktopCapturer::CreateRawWindowCapturer(options)), - selected_window_(kNullWindowId), - excluded_window_(kNullWindowId) {} - -CroppingWindowCapturer::~CroppingWindowCapturer() {} - -void CroppingWindowCapturer::Start(DesktopCapturer::Callback* callback) { - callback_ = callback; - window_capturer_->Start(callback); -} - -void CroppingWindowCapturer::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) { - window_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); -} - -void CroppingWindowCapturer::CaptureFrame() { - if (ShouldUseScreenCapturer()) { - if (!screen_capturer_.get()) { - screen_capturer_ = DesktopCapturer::CreateRawScreenCapturer(options_); - if (excluded_window_) { - screen_capturer_->SetExcludedWindow(excluded_window_); - } - screen_capturer_->Start(this); - } - screen_capturer_->CaptureFrame(); - } else { - window_capturer_->CaptureFrame(); - } -} - -void CroppingWindowCapturer::SetExcludedWindow(WindowId window) { - excluded_window_ = window; - if (screen_capturer_.get()) { - screen_capturer_->SetExcludedWindow(window); - } -} - -bool CroppingWindowCapturer::GetSourceList(SourceList* sources) { - return window_capturer_->GetSourceList(sources); -} - -bool CroppingWindowCapturer::SelectSource(SourceId id) { - if (window_capturer_->SelectSource(id)) { - selected_window_ = id; - return true; - } - return false; -} - -bool CroppingWindowCapturer::FocusOnSelectedSource() { - return window_capturer_->FocusOnSelectedSource(); -} - -void CroppingWindowCapturer::OnCaptureResult( - DesktopCapturer::Result result, - std::unique_ptr screen_frame) { - if 
(!ShouldUseScreenCapturer()) { - RTC_LOG(LS_INFO) << "Window no longer on top when ScreenCapturer finishes"; - window_capturer_->CaptureFrame(); - return; - } - - if (result != Result::SUCCESS) { - RTC_LOG(LS_WARNING) << "ScreenCapturer failed to capture a frame"; - callback_->OnCaptureResult(result, nullptr); - return; - } - - DesktopRect window_rect = GetWindowRectInVirtualScreen(); - if (window_rect.is_empty()) { - RTC_LOG(LS_WARNING) << "Window rect is empty"; - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); - return; - } - - callback_->OnCaptureResult( - Result::SUCCESS, - CreateCroppedDesktopFrame(std::move(screen_frame), window_rect)); -} - -bool CroppingWindowCapturer::IsOccluded(const DesktopVector& pos) { - // Returns true if either capturer returns true. - if (window_capturer_->IsOccluded(pos)) { - return true; - } - if (screen_capturer_ != nullptr && screen_capturer_->IsOccluded(pos)) { - return true; - } - return false; -} - -#if !defined(WEBRTC_WIN) -// CroppingWindowCapturer is implemented only for windows. On other platforms -// the regular window capturer is used. -// static -std::unique_ptr CroppingWindowCapturer::CreateCapturer( - const DesktopCaptureOptions& options) { - return DesktopCapturer::CreateWindowCapturer(options); -} -#endif - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer.h deleted file mode 100644 index 272a19697..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer.h +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_CROPPING_WINDOW_CAPTURER_H_ -#define MODULES_DESKTOP_CAPTURE_CROPPING_WINDOW_CAPTURER_H_ - -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/shared_memory.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// WindowCapturer implementation that uses a screen capturer to capture the -// whole screen and crops the video frame to the window area when the captured -// window is on top. -class RTC_EXPORT CroppingWindowCapturer : public DesktopCapturer, - public DesktopCapturer::Callback { - public: - static std::unique_ptr CreateCapturer( - const DesktopCaptureOptions& options); - - ~CroppingWindowCapturer() override; - - // DesktopCapturer implementation. - void Start(DesktopCapturer::Callback* callback) override; - void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) override; - void CaptureFrame() override; - void SetExcludedWindow(WindowId window) override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - // DesktopCapturer::Callback implementation, passed to |screen_capturer_| to - // intercept the capture result. - void OnCaptureResult(DesktopCapturer::Result result, - std::unique_ptr frame) override; - - protected: - explicit CroppingWindowCapturer(const DesktopCaptureOptions& options); - - // The platform implementation should override these methods. - - // Returns true if it is OK to capture the whole screen and crop to the - // selected window, i.e. 
the selected window is opaque, rectangular, and not - // occluded. - virtual bool ShouldUseScreenCapturer() = 0; - - // Returns the window area relative to the top left of the virtual screen - // within the bounds of the virtual screen. This function should return the - // DesktopRect in full desktop coordinates, i.e. the top-left monitor starts - // from (0, 0). - virtual DesktopRect GetWindowRectInVirtualScreen() = 0; - - WindowId selected_window() const { return selected_window_; } - WindowId excluded_window() const { return excluded_window_; } - DesktopCapturer* window_capturer() const { return window_capturer_.get(); } - - private: - DesktopCaptureOptions options_; - DesktopCapturer::Callback* callback_; - std::unique_ptr window_capturer_; - std::unique_ptr screen_capturer_; - SourceId selected_window_; - WindowId excluded_window_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_CROPPING_WINDOW_CAPTURER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer_win.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer_win.cc deleted file mode 100644 index 6e53ca352..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/cropping_window_capturer_win.cc +++ /dev/null @@ -1,300 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/cropping_window_capturer.h" -#include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" -#include "modules/desktop_capture/win/screen_capture_utils.h" -#include "modules/desktop_capture/win/selected_window_context.h" -#include "modules/desktop_capture/win/window_capture_utils.h" -#include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" -#include "rtc_base/win32.h" - -namespace webrtc { - -namespace { - -// Used to pass input data for verifying the selected window is on top. -struct TopWindowVerifierContext : public SelectedWindowContext { - TopWindowVerifierContext(HWND selected_window, - HWND excluded_window, - DesktopRect selected_window_rect, - WindowCaptureHelperWin* window_capture_helper) - : SelectedWindowContext(selected_window, - selected_window_rect, - window_capture_helper), - excluded_window(excluded_window) { - RTC_DCHECK_NE(selected_window, excluded_window); - } - - // Determines whether the selected window is on top (not occluded by any - // windows except for those it owns or any excluded window). - bool IsTopWindow() { - if (!IsSelectedWindowValid()) { - return false; - } - - // Enumerate all top-level windows above the selected window in Z-order, - // checking whether any overlaps it. This uses FindWindowEx rather than - // EnumWindows because the latter excludes certain system windows (e.g. the - // Start menu & other taskbar menus) that should be detected here to avoid - // inadvertent capture. - int num_retries = 0; - while (true) { - HWND hwnd = nullptr; - while ((hwnd = FindWindowEx(nullptr, hwnd, nullptr, nullptr))) { - if (hwnd == selected_window()) { - // Windows are enumerated in top-down Z-order, so we can stop - // enumerating upon reaching the selected window & report it's on top. - return true; - } - - // Ignore the excluded window. - if (hwnd == excluded_window) { - continue; - } - - // Ignore windows that aren't visible on the current desktop. 
- if (!window_capture_helper()->IsWindowVisibleOnCurrentDesktop(hwnd)) { - continue; - } - - // Ignore Chrome notification windows, especially the notification for - // the ongoing window sharing. Notes: - // - This only works with notifications from Chrome, not other Apps. - // - All notifications from Chrome will be ignored. - // - This may cause part or whole of notification window being cropped - // into the capturing of the target window if there is overlapping. - if (window_capture_helper()->IsWindowChromeNotification(hwnd)) { - continue; - } - - // Ignore windows owned by the selected window since we want to capture - // them. - if (IsWindowOwnedBySelectedWindow(hwnd)) { - continue; - } - - // Check whether this window intersects with the selected window. - if (IsWindowOverlappingSelectedWindow(hwnd)) { - // If intersection is not empty, the selected window is not on top. - return false; - } - } - - DWORD lastError = GetLastError(); - if (lastError == ERROR_SUCCESS) { - // The enumeration completed successfully without finding the selected - // window (which may have been closed). - RTC_LOG(LS_WARNING) << "Failed to find selected window (only expected " - "if it was closed)"; - RTC_DCHECK(!IsWindow(selected_window())); - return false; - } else if (lastError == ERROR_INVALID_WINDOW_HANDLE) { - // This error may occur if a window is closed around the time it's - // enumerated; retry the enumeration in this case up to 10 times - // (this should be a rare race & unlikely to recur). - if (++num_retries <= 10) { - RTC_LOG(LS_WARNING) << "Enumeration failed due to race with a window " - "closing; retrying - retry #" - << num_retries; - continue; - } else { - RTC_LOG(LS_ERROR) - << "Exhausted retry allowance around window enumeration failures " - "due to races with windows closing"; - } - } - - // The enumeration failed with an unexpected error (or more repeats of - // an infrequently-expected error than anticipated). 
After logging this & - // firing an assert when enabled, report that the selected window isn't - // topmost to avoid inadvertent capture of other windows. - RTC_LOG(LS_ERROR) << "Failed to enumerate windows: " << lastError; - RTC_DCHECK(false); - return false; - } - } - - const HWND excluded_window; -}; - -class CroppingWindowCapturerWin : public CroppingWindowCapturer { - public: - explicit CroppingWindowCapturerWin(const DesktopCaptureOptions& options) - : CroppingWindowCapturer(options), - full_screen_window_detector_(options.full_screen_window_detector()) {} - - void CaptureFrame() override; - - private: - bool ShouldUseScreenCapturer() override; - DesktopRect GetWindowRectInVirtualScreen() override; - - // Returns either selected by user sourceId or sourceId provided by - // FullScreenWindowDetector - WindowId GetWindowToCapture() const; - - // The region from GetWindowRgn in the desktop coordinate if the region is - // rectangular, or the rect from GetWindowRect if the region is not set. 
- DesktopRect window_region_rect_; - - WindowCaptureHelperWin window_capture_helper_; - - rtc::scoped_refptr full_screen_window_detector_; -}; - -void CroppingWindowCapturerWin::CaptureFrame() { - DesktopCapturer* win_capturer = window_capturer(); - if (win_capturer) { - // Update the list of available sources and override source to capture if - // FullScreenWindowDetector returns not zero - if (full_screen_window_detector_) { - full_screen_window_detector_->UpdateWindowListIfNeeded( - selected_window(), - [win_capturer](DesktopCapturer::SourceList* sources) { - return win_capturer->GetSourceList(sources); - }); - } - win_capturer->SelectSource(GetWindowToCapture()); - } - - CroppingWindowCapturer::CaptureFrame(); -} - -bool CroppingWindowCapturerWin::ShouldUseScreenCapturer() { - if (!rtc::IsWindows8OrLater() && window_capture_helper_.IsAeroEnabled()) { - return false; - } - - const HWND selected = reinterpret_cast(GetWindowToCapture()); - // Check if the window is visible on current desktop. - if (!window_capture_helper_.IsWindowVisibleOnCurrentDesktop(selected)) { - return false; - } - - // Check if the window is a translucent layered window. - const LONG window_ex_style = GetWindowLong(selected, GWL_EXSTYLE); - if (window_ex_style & WS_EX_LAYERED) { - COLORREF color_ref_key = 0; - BYTE alpha = 0; - DWORD flags = 0; - - // GetLayeredWindowAttributes fails if the window was setup with - // UpdateLayeredWindow. We have no way to know the opacity of the window in - // that case. This happens for Stiky Note (crbug/412726). - if (!GetLayeredWindowAttributes(selected, &color_ref_key, &alpha, &flags)) - return false; - - // UpdateLayeredWindow is the only way to set per-pixel alpha and will cause - // the previous GetLayeredWindowAttributes to fail. So we only need to check - // the window wide color key or alpha. 
- if ((flags & LWA_COLORKEY) || ((flags & LWA_ALPHA) && (alpha < 255))) { - return false; - } - } - - if (!GetWindowRect(selected, &window_region_rect_)) { - return false; - } - - DesktopRect content_rect; - if (!GetWindowContentRect(selected, &content_rect)) { - return false; - } - - DesktopRect region_rect; - // Get the window region and check if it is rectangular. - const int region_type = - GetWindowRegionTypeWithBoundary(selected, ®ion_rect); - - // Do not use the screen capturer if the region is empty or not rectangular. - if (region_type == COMPLEXREGION || region_type == NULLREGION) { - return false; - } - - if (region_type == SIMPLEREGION) { - // The |region_rect| returned from GetRgnBox() is always in window - // coordinate. - region_rect.Translate(window_region_rect_.left(), - window_region_rect_.top()); - // MSDN: The window region determines the area *within* the window where the - // system permits drawing. - // https://msdn.microsoft.com/en-us/library/windows/desktop/dd144950(v=vs.85).aspx. - // - // |region_rect| should always be inside of |window_region_rect_|. So after - // the intersection, |window_region_rect_| == |region_rect|. If so, what's - // the point of the intersecting operations? Why cannot we directly retrieve - // |window_region_rect_| from GetWindowRegionTypeWithBoundary() function? - // TODO(zijiehe): Figure out the purpose of these intersections. - window_region_rect_.IntersectWith(region_rect); - content_rect.IntersectWith(region_rect); - } - - // Check if the client area is out of the screen area. When the window is - // maximized, only its client area is visible in the screen, the border will - // be hidden. So we are using |content_rect| here. - if (!GetFullscreenRect().ContainsRect(content_rect)) { - return false; - } - - // Check if the window is occluded by any other window, excluding the child - // windows, context menus, and |excluded_window_|. 
- // |content_rect| is preferred, see the comments on - // IsWindowIntersectWithSelectedWindow(). - TopWindowVerifierContext context(selected, - reinterpret_cast(excluded_window()), - content_rect, &window_capture_helper_); - return context.IsTopWindow(); -} - -DesktopRect CroppingWindowCapturerWin::GetWindowRectInVirtualScreen() { - TRACE_EVENT0("webrtc", - "CroppingWindowCapturerWin::GetWindowRectInVirtualScreen"); - DesktopRect window_rect; - HWND hwnd = reinterpret_cast(GetWindowToCapture()); - if (!GetCroppedWindowRect(hwnd, /*avoid_cropping_border*/ false, &window_rect, - /*original_rect*/ nullptr)) { - RTC_LOG(LS_WARNING) << "Failed to get window info: " << GetLastError(); - return window_rect; - } - window_rect.IntersectWith(window_region_rect_); - - // Convert |window_rect| to be relative to the top-left of the virtual screen. - DesktopRect screen_rect(GetFullscreenRect()); - window_rect.IntersectWith(screen_rect); - window_rect.Translate(-screen_rect.left(), -screen_rect.top()); - return window_rect; -} - -WindowId CroppingWindowCapturerWin::GetWindowToCapture() const { - const auto selected_source = selected_window(); - const auto full_screen_source = - full_screen_window_detector_ - ? full_screen_window_detector_->FindFullScreenWindow(selected_source) - : 0; - return full_screen_source ? 
full_screen_source : selected_source; -} - -} // namespace - -// static -std::unique_ptr CroppingWindowCapturer::CreateCapturer( - const DesktopCaptureOptions& options) { - std::unique_ptr capturer( - new CroppingWindowCapturerWin(options)); - if (capturer && options.detect_updated_region()) { - capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); - } - - return capturer; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc deleted file mode 100644 index f282c1d50..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/desktop_and_cursor_composer.h" - -#include -#include - -#include -#include - -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/mouse_cursor.h" -#include "modules/desktop_capture/mouse_cursor_monitor.h" -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -namespace { - -// Helper function that blends one image into another. Source image must be -// pre-multiplied with the alpha channel. Destination is assumed to be opaque. 
-void AlphaBlend(uint8_t* dest, - int dest_stride, - const uint8_t* src, - int src_stride, - const DesktopSize& size) { - for (int y = 0; y < size.height(); ++y) { - for (int x = 0; x < size.width(); ++x) { - uint32_t base_alpha = 255 - src[x * DesktopFrame::kBytesPerPixel + 3]; - if (base_alpha == 255) { - continue; - } else if (base_alpha == 0) { - memcpy(dest + x * DesktopFrame::kBytesPerPixel, - src + x * DesktopFrame::kBytesPerPixel, - DesktopFrame::kBytesPerPixel); - } else { - dest[x * DesktopFrame::kBytesPerPixel] = - dest[x * DesktopFrame::kBytesPerPixel] * base_alpha / 255 + - src[x * DesktopFrame::kBytesPerPixel]; - dest[x * DesktopFrame::kBytesPerPixel + 1] = - dest[x * DesktopFrame::kBytesPerPixel + 1] * base_alpha / 255 + - src[x * DesktopFrame::kBytesPerPixel + 1]; - dest[x * DesktopFrame::kBytesPerPixel + 2] = - dest[x * DesktopFrame::kBytesPerPixel + 2] * base_alpha / 255 + - src[x * DesktopFrame::kBytesPerPixel + 2]; - } - } - src += src_stride; - dest += dest_stride; - } -} - -// DesktopFrame wrapper that draws mouse on a frame and restores original -// content before releasing the underlying frame. -class DesktopFrameWithCursor : public DesktopFrame { - public: - // Takes ownership of |frame|. 
- DesktopFrameWithCursor(std::unique_ptr frame, - const MouseCursor& cursor, - const DesktopVector& position, - const DesktopRect& previous_cursor_rect, - bool cursor_changed); - ~DesktopFrameWithCursor() override; - - DesktopRect cursor_rect() const { return cursor_rect_; } - - private: - const std::unique_ptr original_frame_; - - DesktopVector restore_position_; - std::unique_ptr restore_frame_; - DesktopRect cursor_rect_; - - RTC_DISALLOW_COPY_AND_ASSIGN(DesktopFrameWithCursor); -}; - -DesktopFrameWithCursor::DesktopFrameWithCursor( - std::unique_ptr frame, - const MouseCursor& cursor, - const DesktopVector& position, - const DesktopRect& previous_cursor_rect, - bool cursor_changed) - : DesktopFrame(frame->size(), - frame->stride(), - frame->data(), - frame->shared_memory()), - original_frame_(std::move(frame)) { - MoveFrameInfoFrom(original_frame_.get()); - - DesktopVector image_pos = position.subtract(cursor.hotspot()); - cursor_rect_ = DesktopRect::MakeSize(cursor.image()->size()); - cursor_rect_.Translate(image_pos); - DesktopVector cursor_origin = cursor_rect_.top_left(); - cursor_rect_.IntersectWith(DesktopRect::MakeSize(size())); - - if (!previous_cursor_rect.equals(cursor_rect_)) { - mutable_updated_region()->AddRect(cursor_rect_); - mutable_updated_region()->AddRect(previous_cursor_rect); - } else if (cursor_changed) { - mutable_updated_region()->AddRect(cursor_rect_); - } - - if (cursor_rect_.is_empty()) - return; - - // Copy original screen content under cursor to |restore_frame_|. - restore_position_ = cursor_rect_.top_left(); - restore_frame_.reset(new BasicDesktopFrame(cursor_rect_.size())); - restore_frame_->CopyPixelsFrom(*this, cursor_rect_.top_left(), - DesktopRect::MakeSize(restore_frame_->size())); - - // Blit the cursor. 
- uint8_t* cursor_rect_data = - reinterpret_cast(data()) + cursor_rect_.top() * stride() + - cursor_rect_.left() * DesktopFrame::kBytesPerPixel; - DesktopVector origin_shift = cursor_rect_.top_left().subtract(cursor_origin); - AlphaBlend(cursor_rect_data, stride(), - cursor.image()->data() + - origin_shift.y() * cursor.image()->stride() + - origin_shift.x() * DesktopFrame::kBytesPerPixel, - cursor.image()->stride(), cursor_rect_.size()); -} - -DesktopFrameWithCursor::~DesktopFrameWithCursor() { - // Restore original content of the frame. - if (restore_frame_) { - DesktopRect target_rect = DesktopRect::MakeSize(restore_frame_->size()); - target_rect.Translate(restore_position_); - CopyPixelsFrom(restore_frame_->data(), restore_frame_->stride(), - target_rect); - } -} - -} // namespace - -DesktopAndCursorComposer::DesktopAndCursorComposer( - std::unique_ptr desktop_capturer, - const DesktopCaptureOptions& options) - : DesktopAndCursorComposer(desktop_capturer.release(), - MouseCursorMonitor::Create(options).release()) {} - -DesktopAndCursorComposer::DesktopAndCursorComposer( - DesktopCapturer* desktop_capturer, - MouseCursorMonitor* mouse_monitor) - : desktop_capturer_(desktop_capturer), mouse_monitor_(mouse_monitor) { - RTC_DCHECK(desktop_capturer_); -} - -DesktopAndCursorComposer::~DesktopAndCursorComposer() = default; - -std::unique_ptr -DesktopAndCursorComposer::CreateWithoutMouseCursorMonitor( - std::unique_ptr desktop_capturer) { - return std::unique_ptr( - new DesktopAndCursorComposer(desktop_capturer.release(), nullptr)); -} - -void DesktopAndCursorComposer::Start(DesktopCapturer::Callback* callback) { - callback_ = callback; - if (mouse_monitor_) - mouse_monitor_->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION); - desktop_capturer_->Start(this); -} - -void DesktopAndCursorComposer::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) { - desktop_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); -} - -void 
DesktopAndCursorComposer::CaptureFrame() { - if (mouse_monitor_) - mouse_monitor_->Capture(); - desktop_capturer_->CaptureFrame(); -} - -void DesktopAndCursorComposer::SetExcludedWindow(WindowId window) { - desktop_capturer_->SetExcludedWindow(window); -} - -bool DesktopAndCursorComposer::GetSourceList(SourceList* sources) { - return desktop_capturer_->GetSourceList(sources); -} - -bool DesktopAndCursorComposer::SelectSource(SourceId id) { - return desktop_capturer_->SelectSource(id); -} - -bool DesktopAndCursorComposer::FocusOnSelectedSource() { - return desktop_capturer_->FocusOnSelectedSource(); -} - -bool DesktopAndCursorComposer::IsOccluded(const DesktopVector& pos) { - return desktop_capturer_->IsOccluded(pos); -} - -void DesktopAndCursorComposer::OnCaptureResult( - DesktopCapturer::Result result, - std::unique_ptr frame) { - if (frame && cursor_) { - if (frame->rect().Contains(cursor_position_) && - !desktop_capturer_->IsOccluded(cursor_position_)) { - DesktopVector relative_position = - cursor_position_.subtract(frame->top_left()); -#if defined(WEBRTC_MAC) - // On OSX, the logical(DIP) and physical coordinates are used mixingly. - // For example, the captured cursor has its size in physical pixels(2x) - // and location in logical(DIP) pixels on Retina monitor. This will cause - // problem when the desktop is mixed with Retina and non-Retina monitors. - // So we use DIP pixel for all location info and compensate with the scale - // factor of current frame to the |relative_position|. 
- const float scale = frame->scale_factor(); - relative_position.set(relative_position.x() * scale, - relative_position.y() * scale); -#endif - auto frame_with_cursor = std::make_unique( - std::move(frame), *cursor_, relative_position, previous_cursor_rect_, - cursor_changed_); - previous_cursor_rect_ = frame_with_cursor->cursor_rect(); - cursor_changed_ = false; - frame = std::move(frame_with_cursor); - } - } - - callback_->OnCaptureResult(result, std::move(frame)); -} - -void DesktopAndCursorComposer::OnMouseCursor(MouseCursor* cursor) { - cursor_changed_ = true; - cursor_.reset(cursor); -} - -void DesktopAndCursorComposer::OnMouseCursorPosition( - const DesktopVector& position) { - cursor_position_ = position; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h deleted file mode 100644 index 8f95721ec..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_ - -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/mouse_cursor.h" -#include "modules/desktop_capture/mouse_cursor_monitor.h" -#include "modules/desktop_capture/shared_memory.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// A wrapper for DesktopCapturer that also captures mouse using specified -// MouseCursorMonitor and renders it on the generated streams. -class RTC_EXPORT DesktopAndCursorComposer - : public DesktopCapturer, - public DesktopCapturer::Callback, - public MouseCursorMonitor::Callback { - public: - // Creates a new composer that captures mouse cursor using - // MouseCursorMonitor::Create(options) and renders it into the frames - // generated by |desktop_capturer|. - DesktopAndCursorComposer(std::unique_ptr desktop_capturer, - const DesktopCaptureOptions& options); - - ~DesktopAndCursorComposer() override; - - // Creates a new composer that relies on an external source for cursor shape - // and position information via the MouseCursorMonitor::Callback interface. - static std::unique_ptr - CreateWithoutMouseCursorMonitor( - std::unique_ptr desktop_capturer); - - // DesktopCapturer interface. 
- void Start(DesktopCapturer::Callback* callback) override; - void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) override; - void CaptureFrame() override; - void SetExcludedWindow(WindowId window) override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - // MouseCursorMonitor::Callback interface. - void OnMouseCursor(MouseCursor* cursor) override; - void OnMouseCursorPosition(const DesktopVector& position) override; - - private: - // Allows test cases to use a fake MouseCursorMonitor implementation. - friend class DesktopAndCursorComposerTest; - - // Constructor to delegate both deprecated and new constructors and allows - // test cases to use a fake MouseCursorMonitor implementation. - DesktopAndCursorComposer(DesktopCapturer* desktop_capturer, - MouseCursorMonitor* mouse_monitor); - - // DesktopCapturer::Callback interface. - void OnCaptureResult(DesktopCapturer::Result result, - std::unique_ptr frame) override; - - const std::unique_ptr desktop_capturer_; - const std::unique_ptr mouse_monitor_; - - DesktopCapturer::Callback* callback_; - - std::unique_ptr cursor_; - DesktopVector cursor_position_; - DesktopRect previous_cursor_rect_; - bool cursor_changed_ = false; - - RTC_DISALLOW_COPY_AND_ASSIGN(DesktopAndCursorComposer); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_options.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_options.cc deleted file mode 100644 index c89896d5f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_options.cc +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/desktop_capture_options.h" -#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) -#include "modules/desktop_capture/mac/full_screen_mac_application_handler.h" -#elif defined(WEBRTC_WIN) -#include "modules/desktop_capture/win/full_screen_win_application_handler.h" -#endif - -namespace webrtc { - -DesktopCaptureOptions::DesktopCaptureOptions() {} -DesktopCaptureOptions::DesktopCaptureOptions( - const DesktopCaptureOptions& options) = default; -DesktopCaptureOptions::DesktopCaptureOptions(DesktopCaptureOptions&& options) = - default; -DesktopCaptureOptions::~DesktopCaptureOptions() {} - -DesktopCaptureOptions& DesktopCaptureOptions::operator=( - const DesktopCaptureOptions& options) = default; -DesktopCaptureOptions& DesktopCaptureOptions::operator=( - DesktopCaptureOptions&& options) = default; - -// static -DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() { - DesktopCaptureOptions result; -#if defined(WEBRTC_USE_X11) - result.set_x_display(SharedXDisplay::CreateDefault()); -#endif -#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - result.set_configuration_monitor(new DesktopConfigurationMonitor()); - result.set_full_screen_window_detector( - new FullScreenWindowDetector(CreateFullScreenMacApplicationHandler)); -#elif defined(WEBRTC_WIN) - result.set_full_screen_window_detector( - new FullScreenWindowDetector(CreateFullScreenWinApplicationHandler)); -#endif - return result; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_options.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_options.h deleted file mode 
100644 index 521c80b5c..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_options.h +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_ - -#include "api/scoped_refptr.h" -#include "rtc_base/system/rtc_export.h" - -#if defined(WEBRTC_USE_X11) -#include "modules/desktop_capture/linux/shared_x_display.h" -#endif - -#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) -#include "modules/desktop_capture/mac/desktop_configuration_monitor.h" -#endif - -#include "modules/desktop_capture/full_screen_window_detector.h" - -namespace webrtc { - -// An object that stores initialization parameters for screen and window -// capturers. -class RTC_EXPORT DesktopCaptureOptions { - public: - // Returns instance of DesktopCaptureOptions with default parameters. On Linux - // also initializes X window connection. x_display() will be set to null if - // X11 connection failed (e.g. DISPLAY isn't set). 
- static DesktopCaptureOptions CreateDefault(); - - DesktopCaptureOptions(); - DesktopCaptureOptions(const DesktopCaptureOptions& options); - DesktopCaptureOptions(DesktopCaptureOptions&& options); - ~DesktopCaptureOptions(); - - DesktopCaptureOptions& operator=(const DesktopCaptureOptions& options); - DesktopCaptureOptions& operator=(DesktopCaptureOptions&& options); - -#if defined(WEBRTC_USE_X11) - SharedXDisplay* x_display() const { return x_display_; } - void set_x_display(rtc::scoped_refptr x_display) { - x_display_ = x_display; - } -#endif - -#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - // TODO(zijiehe): Remove both DesktopConfigurationMonitor and - // FullScreenChromeWindowDetector out of DesktopCaptureOptions. It's not - // reasonable for external consumers to set these two parameters. - DesktopConfigurationMonitor* configuration_monitor() const { - return configuration_monitor_; - } - // If nullptr is set, ScreenCapturer won't work and WindowCapturer may return - // inaccurate result from IsOccluded() function. - void set_configuration_monitor( - rtc::scoped_refptr m) { - configuration_monitor_ = m; - } - - bool allow_iosurface() const { return allow_iosurface_; } - void set_allow_iosurface(bool allow) { allow_iosurface_ = allow; } -#endif - - FullScreenWindowDetector* full_screen_window_detector() const { - return full_screen_window_detector_; - } - void set_full_screen_window_detector( - rtc::scoped_refptr detector) { - full_screen_window_detector_ = detector; - } - - // Flag indicating that the capturer should use screen change notifications. - // Enables/disables use of XDAMAGE in the X11 capturer. - bool use_update_notifications() const { return use_update_notifications_; } - void set_use_update_notifications(bool use_update_notifications) { - use_update_notifications_ = use_update_notifications; - } - - // Flag indicating if desktop effects (e.g. Aero) should be disabled when the - // capturer is active. Currently used only on Windows. 
- bool disable_effects() const { return disable_effects_; } - void set_disable_effects(bool disable_effects) { - disable_effects_ = disable_effects; - } - - // Flag that should be set if the consumer uses updated_region() and the - // capturer should try to provide correct updated_region() for the frames it - // generates (e.g. by comparing each frame with the previous one). - bool detect_updated_region() const { return detect_updated_region_; } - void set_detect_updated_region(bool detect_updated_region) { - detect_updated_region_ = detect_updated_region; - } - -#if defined(WEBRTC_WIN) - bool allow_use_magnification_api() const { - return allow_use_magnification_api_; - } - void set_allow_use_magnification_api(bool allow) { - allow_use_magnification_api_ = allow; - } - // Allowing directx based capturer or not, this capturer works on windows 7 - // with platform update / windows 8 or upper. - bool allow_directx_capturer() const { return allow_directx_capturer_; } - void set_allow_directx_capturer(bool enabled) { - allow_directx_capturer_ = enabled; - } - - // Flag that may be set to allow use of the cropping window capturer (which - // captures the screen & crops that to the window region in some cases). An - // advantage of using this is significantly higher capture frame rates than - // capturing the window directly. A disadvantage of using this is the - // possibility of capturing unrelated content (e.g. overlapping windows that - // aren't detected properly, or neighboring regions when moving/resizing the - // captured window). Note: this flag influences the behavior of calls to - // DesktopCapturer::CreateWindowCapturer; calls to - // CroppingWindowCapturer::CreateCapturer ignore the flag (treat it as true). 
- bool allow_cropping_window_capturer() const { - return allow_cropping_window_capturer_; - } - void set_allow_cropping_window_capturer(bool allow) { - allow_cropping_window_capturer_ = allow; - } -#endif - -#if defined(WEBRTC_USE_PIPEWIRE) - bool allow_pipewire() const { return allow_pipewire_; } - void set_allow_pipewire(bool allow) { allow_pipewire_ = allow; } -#endif - - private: -#if defined(WEBRTC_USE_X11) - rtc::scoped_refptr x_display_; -#endif - -#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - rtc::scoped_refptr configuration_monitor_; - bool allow_iosurface_ = false; -#endif - - rtc::scoped_refptr full_screen_window_detector_; - -#if defined(WEBRTC_WIN) - bool allow_use_magnification_api_ = false; - bool allow_directx_capturer_ = false; - bool allow_cropping_window_capturer_ = false; -#endif -#if defined(WEBRTC_USE_X11) - bool use_update_notifications_ = false; -#else - bool use_update_notifications_ = true; -#endif - bool disable_effects_ = true; - bool detect_updated_region_ = false; -#if defined(WEBRTC_USE_PIPEWIRE) - bool allow_pipewire_ = false; -#endif -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_types.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_types.h deleted file mode 100644 index 5031cbf3a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capture_types.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_ - -#include - -namespace webrtc { - -// Type used to identify windows on the desktop. Values are platform-specific: -// - On Windows: HWND cast to intptr_t. -// - On Linux (with X11): X11 Window (unsigned long) type cast to intptr_t. -// - On OSX: integer window number. -typedef intptr_t WindowId; - -const WindowId kNullWindowId = 0; - -// Type used to identify screens on the desktop. Values are platform-specific: -// - On Windows: integer display device index. -// - On OSX: CGDirectDisplayID cast to intptr_t. -// - On Linux (with X11): TBD. -// On Windows, ScreenId is implementation dependent: sending a ScreenId from one -// implementation to another usually won't work correctly. -typedef intptr_t ScreenId; - -// The screen id corresponds to all screen combined together. -const ScreenId kFullDesktopScreenId = -1; - -const ScreenId kInvalidScreenId = -2; - -// An integer to attach to each DesktopFrame to differentiate the generator of -// the frame. -namespace DesktopCapturerId { -constexpr uint32_t CreateFourCC(char a, char b, char c, char d) { - return ((static_cast(a)) | (static_cast(b) << 8) | - (static_cast(c) << 16) | (static_cast(d) << 24)); -} - -constexpr uint32_t kUnknown = 0; -constexpr uint32_t kScreenCapturerWinGdi = CreateFourCC('G', 'D', 'I', ' '); -constexpr uint32_t kScreenCapturerWinDirectx = CreateFourCC('D', 'X', 'G', 'I'); -} // namespace DesktopCapturerId - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer.cc deleted file mode 100644 index 61926a602..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer.cc +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/desktop_capturer.h" - -#include -#include - -#include -#include - -#include "modules/desktop_capture/cropping_window_capturer.h" -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" - -namespace webrtc { - -DesktopCapturer::~DesktopCapturer() = default; - -void DesktopCapturer::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) {} - -void DesktopCapturer::SetExcludedWindow(WindowId window) {} - -bool DesktopCapturer::GetSourceList(SourceList* sources) { - return true; -} - -bool DesktopCapturer::SelectSource(SourceId id) { - return false; -} - -bool DesktopCapturer::FocusOnSelectedSource() { - return false; -} - -bool DesktopCapturer::IsOccluded(const DesktopVector& pos) { - return false; -} - -// static -std::unique_ptr DesktopCapturer::CreateWindowCapturer( - const DesktopCaptureOptions& options) { -#if defined(WEBRTC_WIN) - if (options.allow_cropping_window_capturer()) { - return CroppingWindowCapturer::CreateCapturer(options); - } -#endif // defined(WEBRTC_WIN) - - std::unique_ptr capturer = CreateRawWindowCapturer(options); - if (capturer && options.detect_updated_region()) { - capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); - } - - return capturer; -} - -// static -std::unique_ptr DesktopCapturer::CreateScreenCapturer( - const DesktopCaptureOptions& options) { - std::unique_ptr capturer = CreateRawScreenCapturer(options); - if (capturer && options.detect_updated_region()) { - capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); - } - - 
return capturer; -} - -#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) -bool DesktopCapturer::IsRunningUnderWayland() { - const char* xdg_session_type = getenv("XDG_SESSION_TYPE"); - if (!xdg_session_type || strncmp(xdg_session_type, "wayland", 7) != 0) - return false; - - if (!(getenv("WAYLAND_DISPLAY"))) - return false; - - return true; -} -#endif // defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer.h deleted file mode 100644 index 64339c0ca..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer.h +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_H_ - -#include -#include - -#include -#include -#include -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/shared_memory.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -class DesktopCaptureOptions; -class DesktopFrame; - -// Abstract interface for screen and window capturers. -class RTC_EXPORT DesktopCapturer { - public: - enum class Result { - // The frame was captured successfully. - SUCCESS, - - // There was a temporary error. The caller should continue calling - // CaptureFrame(), in the expectation that it will eventually recover. 
- ERROR_TEMPORARY, - - // Capture has failed and will keep failing if the caller tries calling - // CaptureFrame() again. - ERROR_PERMANENT, - - MAX_VALUE = ERROR_PERMANENT - }; - - // Interface that must be implemented by the DesktopCapturer consumers. - class Callback { - public: - // Called after a frame has been captured. |frame| is not nullptr if and - // only if |result| is SUCCESS. - virtual void OnCaptureResult(Result result, - std::unique_ptr frame) = 0; - - protected: - virtual ~Callback() {} - }; - - typedef intptr_t SourceId; - - static_assert(std::is_same::value, - "SourceId should be a same type as ScreenId."); - - struct Source { - // The unique id to represent a Source of current DesktopCapturer. - SourceId id; - - // Title of the window or screen in UTF-8 encoding, maybe empty. This field - // should not be used to identify a source. - std::string title; - }; - - typedef std::vector SourceList; - - virtual ~DesktopCapturer(); - - // Called at the beginning of a capturing session. |callback| must remain - // valid until capturer is destroyed. - virtual void Start(Callback* callback) = 0; - - // Sets SharedMemoryFactory that will be used to create buffers for the - // captured frames. The factory can be invoked on a thread other than the one - // where CaptureFrame() is called. It will be destroyed on the same thread. - // Shared memory is currently supported only by some DesktopCapturer - // implementations. - virtual void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory); - - // Captures next frame, and involve callback provided by Start() function. - // Pending capture requests are canceled when DesktopCapturer is deleted. - virtual void CaptureFrame() = 0; - - // Sets the window to be excluded from the captured image in the future - // Capture calls. Used to exclude the screenshare notification window for - // screen capturing. 
- virtual void SetExcludedWindow(WindowId window); - - // TODO(zijiehe): Following functions should be pure virtual. The default - // implementations are for backward compatibility only. Remove default - // implementations once all DesktopCapturer implementations in Chromium have - // implemented these functions. - - // Gets a list of sources current capturer supports. Returns false in case of - // a failure. - // For DesktopCapturer implementations to capture screens, this function - // should return monitors. - // For DesktopCapturer implementations to capture windows, this function - // should only return root windows owned by applications. - virtual bool GetSourceList(SourceList* sources); - - // Selects a source to be captured. Returns false in case of a failure (e.g. - // if there is no source with the specified type and id.) - virtual bool SelectSource(SourceId id); - - // Brings the selected source to the front and sets the input focus on it. - // Returns false in case of a failure or no source has been selected or the - // implementation does not support this functionality. - virtual bool FocusOnSelectedSource(); - - // Returns true if the |pos| on the selected source is covered by other - // elements on the display, and is not visible to the users. - // |pos| is in full desktop coordinates, i.e. the top-left monitor always - // starts from (0, 0). - // The return value if |pos| is out of the scope of the source is undefined. - virtual bool IsOccluded(const DesktopVector& pos); - - // Creates a DesktopCapturer instance which targets to capture windows. - static std::unique_ptr CreateWindowCapturer( - const DesktopCaptureOptions& options); - - // Creates a DesktopCapturer instance which targets to capture screens. 
- static std::unique_ptr CreateScreenCapturer( - const DesktopCaptureOptions& options); - -#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - static bool IsRunningUnderWayland(); -#endif // defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - - protected: - // CroppingWindowCapturer needs to create raw capturers without wrappers, so - // the following two functions are protected. - - // Creates a platform specific DesktopCapturer instance which targets to - // capture windows. - static std::unique_ptr CreateRawWindowCapturer( - const DesktopCaptureOptions& options); - - // Creates a platform specific DesktopCapturer instance which targets to - // capture screens. - static std::unique_ptr CreateRawScreenCapturer( - const DesktopCaptureOptions& options); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.cc deleted file mode 100644 index 4e80f3052..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.cc +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" - -#include -#include - -#include - -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/desktop_region.h" -#include "modules/desktop_capture/differ_block.h" -#include "rtc_base/checks.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { - -namespace { - -// Returns true if (0, 0) - (|width|, |height|) vector in |old_buffer| and -// |new_buffer| are equal. |width| should be less than 32 -// (defined by kBlockSize), otherwise BlockDifference() should be used. -bool PartialBlockDifference(const uint8_t* old_buffer, - const uint8_t* new_buffer, - int width, - int height, - int stride) { - RTC_DCHECK_LT(width, kBlockSize); - const int width_bytes = width * DesktopFrame::kBytesPerPixel; - for (int i = 0; i < height; i++) { - if (memcmp(old_buffer, new_buffer, width_bytes) != 0) { - return true; - } - old_buffer += stride; - new_buffer += stride; - } - return false; -} - -// Compares columns in the range of [|left|, |right|), in a row in the -// range of [|top|, |top| + |height|), starts from |old_buffer| and -// |new_buffer|, and outputs updated regions into |output|. |stride| is the -// DesktopFrame::stride(). -void CompareRow(const uint8_t* old_buffer, - const uint8_t* new_buffer, - const int left, - const int right, - const int top, - const int bottom, - const int stride, - DesktopRegion* const output) { - const int block_x_offset = kBlockSize * DesktopFrame::kBytesPerPixel; - const int width = right - left; - const int height = bottom - top; - const int block_count = (width - 1) / kBlockSize; - const int last_block_width = width - block_count * kBlockSize; - RTC_DCHECK_GT(last_block_width, 0); - RTC_DCHECK_LE(last_block_width, kBlockSize); - - // The first block-column in a continuous dirty area in current block-row. 
- int first_dirty_x_block = -1; - - // We always need to add dirty area into |output| in the last block, so handle - // it separatedly. - for (int x = 0; x < block_count; x++) { - if (BlockDifference(old_buffer, new_buffer, height, stride)) { - if (first_dirty_x_block == -1) { - // This is the first dirty block in a continuous dirty area. - first_dirty_x_block = x; - } - } else if (first_dirty_x_block != -1) { - // The block on the left is the last dirty block in a continuous - // dirty area. - output->AddRect( - DesktopRect::MakeLTRB(first_dirty_x_block * kBlockSize + left, top, - x * kBlockSize + left, bottom)); - first_dirty_x_block = -1; - } - old_buffer += block_x_offset; - new_buffer += block_x_offset; - } - - bool last_block_diff; - if (last_block_width < kBlockSize) { - // The last one is a partial vector. - last_block_diff = PartialBlockDifference(old_buffer, new_buffer, - last_block_width, height, stride); - } else { - last_block_diff = BlockDifference(old_buffer, new_buffer, height, stride); - } - if (last_block_diff) { - if (first_dirty_x_block == -1) { - first_dirty_x_block = block_count; - } - output->AddRect(DesktopRect::MakeLTRB( - first_dirty_x_block * kBlockSize + left, top, right, bottom)); - } else if (first_dirty_x_block != -1) { - output->AddRect( - DesktopRect::MakeLTRB(first_dirty_x_block * kBlockSize + left, top, - block_count * kBlockSize + left, bottom)); - } -} - -// Compares |rect| area in |old_frame| and |new_frame|, and outputs dirty -// regions into |output|. 
-void CompareFrames(const DesktopFrame& old_frame, - const DesktopFrame& new_frame, - DesktopRect rect, - DesktopRegion* const output) { - RTC_DCHECK(old_frame.size().equals(new_frame.size())); - RTC_DCHECK_EQ(old_frame.stride(), new_frame.stride()); - rect.IntersectWith(DesktopRect::MakeSize(old_frame.size())); - - const int y_block_count = (rect.height() - 1) / kBlockSize; - const int last_y_block_height = rect.height() - y_block_count * kBlockSize; - // Offset from the start of one block-row to the next. - const int block_y_stride = old_frame.stride() * kBlockSize; - const uint8_t* prev_block_row_start = - old_frame.GetFrameDataAtPos(rect.top_left()); - const uint8_t* curr_block_row_start = - new_frame.GetFrameDataAtPos(rect.top_left()); - - int top = rect.top(); - // The last row may have a different height, so we handle it separately. - for (int y = 0; y < y_block_count; y++) { - CompareRow(prev_block_row_start, curr_block_row_start, rect.left(), - rect.right(), top, top + kBlockSize, old_frame.stride(), output); - top += kBlockSize; - prev_block_row_start += block_y_stride; - curr_block_row_start += block_y_stride; - } - CompareRow(prev_block_row_start, curr_block_row_start, rect.left(), - rect.right(), top, top + last_y_block_height, old_frame.stride(), - output); -} - -} // namespace - -DesktopCapturerDifferWrapper::DesktopCapturerDifferWrapper( - std::unique_ptr base_capturer) - : base_capturer_(std::move(base_capturer)) { - RTC_DCHECK(base_capturer_); -} - -DesktopCapturerDifferWrapper::~DesktopCapturerDifferWrapper() {} - -void DesktopCapturerDifferWrapper::Start(DesktopCapturer::Callback* callback) { - callback_ = callback; - base_capturer_->Start(this); -} - -void DesktopCapturerDifferWrapper::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) { - base_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); -} - -void DesktopCapturerDifferWrapper::CaptureFrame() { - base_capturer_->CaptureFrame(); -} - -void 
DesktopCapturerDifferWrapper::SetExcludedWindow(WindowId window) { - base_capturer_->SetExcludedWindow(window); -} - -bool DesktopCapturerDifferWrapper::GetSourceList(SourceList* sources) { - return base_capturer_->GetSourceList(sources); -} - -bool DesktopCapturerDifferWrapper::SelectSource(SourceId id) { - return base_capturer_->SelectSource(id); -} - -bool DesktopCapturerDifferWrapper::FocusOnSelectedSource() { - return base_capturer_->FocusOnSelectedSource(); -} - -bool DesktopCapturerDifferWrapper::IsOccluded(const DesktopVector& pos) { - return base_capturer_->IsOccluded(pos); -} - -void DesktopCapturerDifferWrapper::OnCaptureResult( - Result result, - std::unique_ptr input_frame) { - int64_t start_time_nanos = rtc::TimeNanos(); - if (!input_frame) { - callback_->OnCaptureResult(result, nullptr); - return; - } - RTC_DCHECK(result == Result::SUCCESS); - - std::unique_ptr frame = - SharedDesktopFrame::Wrap(std::move(input_frame)); - if (last_frame_ && (last_frame_->size().width() != frame->size().width() || - last_frame_->size().height() != frame->size().height() || - last_frame_->stride() != frame->stride())) { - last_frame_.reset(); - } - - if (last_frame_) { - DesktopRegion hints; - hints.Swap(frame->mutable_updated_region()); - for (DesktopRegion::Iterator it(hints); !it.IsAtEnd(); it.Advance()) { - CompareFrames(*last_frame_, *frame, it.rect(), - frame->mutable_updated_region()); - } - } else { - frame->mutable_updated_region()->SetRect( - DesktopRect::MakeSize(frame->size())); - } - last_frame_ = frame->Share(); - - frame->set_capture_time_ms(frame->capture_time_ms() + - (rtc::TimeNanos() - start_time_nanos) / - rtc::kNumNanosecsPerMillisec); - callback_->OnCaptureResult(result, std::move(frame)); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.h deleted file mode 100644 index 
1f70cef18..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_differ_wrapper.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_DIFFER_WRAPPER_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_DIFFER_WRAPPER_H_ - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/shared_desktop_frame.h" -#include "modules/desktop_capture/shared_memory.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// DesktopCapturer wrapper that calculates updated_region() by comparing frames -// content. This class always expects the underlying DesktopCapturer -// implementation returns a superset of updated regions in DestkopFrame. If a -// DesktopCapturer implementation does not know the updated region, it should -// set updated_region() to full frame. -// -// This class marks entire frame as updated if the frame size or frame stride -// has been changed. -class RTC_EXPORT DesktopCapturerDifferWrapper - : public DesktopCapturer, - public DesktopCapturer::Callback { - public: - // Creates a DesktopCapturerDifferWrapper with a DesktopCapturer - // implementation, and takes its ownership. - explicit DesktopCapturerDifferWrapper( - std::unique_ptr base_capturer); - - ~DesktopCapturerDifferWrapper() override; - - // DesktopCapturer interface. 
- void Start(DesktopCapturer::Callback* callback) override; - void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) override; - void CaptureFrame() override; - void SetExcludedWindow(WindowId window) override; - bool GetSourceList(SourceList* screens) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - private: - // DesktopCapturer::Callback interface. - void OnCaptureResult(Result result, - std::unique_ptr frame) override; - - const std::unique_ptr base_capturer_; - DesktopCapturer::Callback* callback_; - std::unique_ptr last_frame_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_DIFFER_WRAPPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_wrapper.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_wrapper.cc deleted file mode 100644 index 4bbdd6c94..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_wrapper.cc +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/desktop_capturer_wrapper.h" - -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -DesktopCapturerWrapper::DesktopCapturerWrapper( - std::unique_ptr base_capturer) - : base_capturer_(std::move(base_capturer)) { - RTC_DCHECK(base_capturer_); -} - -DesktopCapturerWrapper::~DesktopCapturerWrapper() = default; - -void DesktopCapturerWrapper::Start(Callback* callback) { - base_capturer_->Start(callback); -} - -void DesktopCapturerWrapper::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) { - base_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); -} - -void DesktopCapturerWrapper::CaptureFrame() { - base_capturer_->CaptureFrame(); -} - -void DesktopCapturerWrapper::SetExcludedWindow(WindowId window) { - base_capturer_->SetExcludedWindow(window); -} - -bool DesktopCapturerWrapper::GetSourceList(SourceList* sources) { - return base_capturer_->GetSourceList(sources); -} - -bool DesktopCapturerWrapper::SelectSource(SourceId id) { - return base_capturer_->SelectSource(id); -} - -bool DesktopCapturerWrapper::FocusOnSelectedSource() { - return base_capturer_->FocusOnSelectedSource(); -} - -bool DesktopCapturerWrapper::IsOccluded(const DesktopVector& pos) { - return base_capturer_->IsOccluded(pos); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_wrapper.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_wrapper.h deleted file mode 100644 index e0f50d79e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_capturer_wrapper.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_WRAPPER_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_WRAPPER_H_ - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/shared_memory.h" - -namespace webrtc { - -// Wraps a DesktopCapturer and forwards all the function calls to it. -class DesktopCapturerWrapper : public DesktopCapturer { - public: - explicit DesktopCapturerWrapper( - std::unique_ptr base_capturer); - ~DesktopCapturerWrapper() override; - - // DesktopCapturer implementations. - void Start(Callback* callback) override; - void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) override; - void CaptureFrame() override; - void SetExcludedWindow(WindowId window) override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - protected: - // Guaranteed to be valid. - const std::unique_ptr base_capturer_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_WRAPPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame.cc deleted file mode 100644 index fd10dd5d2..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame.cc +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/desktop_frame.h" - -#include - -#include -#include -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -DesktopFrame::DesktopFrame(DesktopSize size, - int stride, - uint8_t* data, - SharedMemory* shared_memory) - : data_(data), - shared_memory_(shared_memory), - size_(size), - stride_(stride), - capture_time_ms_(0), - capturer_id_(DesktopCapturerId::kUnknown) { - RTC_DCHECK(size_.width() >= 0); - RTC_DCHECK(size_.height() >= 0); -} - -DesktopFrame::~DesktopFrame() = default; - -void DesktopFrame::CopyPixelsFrom(const uint8_t* src_buffer, - int src_stride, - const DesktopRect& dest_rect) { - RTC_CHECK(DesktopRect::MakeSize(size()).ContainsRect(dest_rect)); - - uint8_t* dest = GetFrameDataAtPos(dest_rect.top_left()); - for (int y = 0; y < dest_rect.height(); ++y) { - memcpy(dest, src_buffer, DesktopFrame::kBytesPerPixel * dest_rect.width()); - src_buffer += src_stride; - dest += stride(); - } -} - -void DesktopFrame::CopyPixelsFrom(const DesktopFrame& src_frame, - const DesktopVector& src_pos, - const DesktopRect& dest_rect) { - RTC_CHECK(DesktopRect::MakeSize(src_frame.size()) - .ContainsRect( - DesktopRect::MakeOriginSize(src_pos, dest_rect.size()))); - - CopyPixelsFrom(src_frame.GetFrameDataAtPos(src_pos), src_frame.stride(), - dest_rect); -} - -bool DesktopFrame::CopyIntersectingPixelsFrom(const DesktopFrame& src_frame, - double horizontal_scale, - double vertical_scale) { - const DesktopVector& origin = top_left(); - const DesktopVector& src_frame_origin = src_frame.top_left(); - - DesktopVector src_frame_offset = src_frame_origin.subtract(origin); - - // Determine the intersection, first adjusting its origin to account for any - // DPI scaling. 
- DesktopRect intersection_rect = src_frame.rect(); - if (horizontal_scale != 1.0 || vertical_scale != 1.0) { - DesktopVector origin_adjustment( - static_cast( - std::round((horizontal_scale - 1.0) * src_frame_offset.x())), - static_cast( - std::round((vertical_scale - 1.0) * src_frame_offset.y()))); - - intersection_rect.Translate(origin_adjustment); - - src_frame_offset = src_frame_offset.add(origin_adjustment); - } - - intersection_rect.IntersectWith(rect()); - if (intersection_rect.is_empty()) { - return false; - } - - // Translate the intersection rect to be relative to the outer rect. - intersection_rect.Translate(-origin.x(), -origin.y()); - - // Determine source position for the copy (offsets of outer frame from - // source origin, if positive). - int32_t src_pos_x = std::max(0, -src_frame_offset.x()); - int32_t src_pos_y = std::max(0, -src_frame_offset.y()); - - CopyPixelsFrom(src_frame, DesktopVector(src_pos_x, src_pos_y), - intersection_rect); - return true; -} - -DesktopRect DesktopFrame::rect() const { - const float scale = scale_factor(); - // Only scale the size. - return DesktopRect::MakeXYWH(top_left().x(), top_left().y(), - size().width() / scale, size().height() / scale); -} - -float DesktopFrame::scale_factor() const { - float scale = 1.0f; - -#if defined(WEBRTC_MAC) - // At least on Windows the logical and physical pixel are the same - // See http://crbug.com/948362. 
- if (!dpi().is_zero() && dpi().x() == dpi().y()) - scale = dpi().x() / kStandardDPI; -#endif - - return scale; -} - -uint8_t* DesktopFrame::GetFrameDataAtPos(const DesktopVector& pos) const { - return data() + stride() * pos.y() + DesktopFrame::kBytesPerPixel * pos.x(); -} - -void DesktopFrame::CopyFrameInfoFrom(const DesktopFrame& other) { - set_dpi(other.dpi()); - set_capture_time_ms(other.capture_time_ms()); - set_capturer_id(other.capturer_id()); - *mutable_updated_region() = other.updated_region(); - set_top_left(other.top_left()); - set_icc_profile(other.icc_profile()); -} - -void DesktopFrame::MoveFrameInfoFrom(DesktopFrame* other) { - set_dpi(other->dpi()); - set_capture_time_ms(other->capture_time_ms()); - set_capturer_id(other->capturer_id()); - mutable_updated_region()->Swap(other->mutable_updated_region()); - set_top_left(other->top_left()); - set_icc_profile(other->icc_profile()); -} - -BasicDesktopFrame::BasicDesktopFrame(DesktopSize size) - : DesktopFrame(size, - kBytesPerPixel * size.width(), - new uint8_t[kBytesPerPixel * size.width() * size.height()](), - nullptr) {} - -BasicDesktopFrame::~BasicDesktopFrame() { - delete[] data_; -} - -// static -DesktopFrame* BasicDesktopFrame::CopyOf(const DesktopFrame& frame) { - DesktopFrame* result = new BasicDesktopFrame(frame.size()); - for (int y = 0; y < frame.size().height(); ++y) { - memcpy(result->data() + y * result->stride(), - frame.data() + y * frame.stride(), - frame.size().width() * kBytesPerPixel); - } - result->CopyFrameInfoFrom(frame); - return result; -} - -// static -std::unique_ptr SharedMemoryDesktopFrame::Create( - DesktopSize size, - SharedMemoryFactory* shared_memory_factory) { - RTC_DCHECK(shared_memory_factory); - - size_t buffer_size = size.height() * size.width() * kBytesPerPixel; - std::unique_ptr shared_memory = - shared_memory_factory->CreateSharedMemory(buffer_size); - if (!shared_memory) - return nullptr; - - return std::make_unique( - size, size.width() * kBytesPerPixel, 
std::move(shared_memory)); -} - -SharedMemoryDesktopFrame::SharedMemoryDesktopFrame(DesktopSize size, - int stride, - SharedMemory* shared_memory) - : DesktopFrame(size, - stride, - reinterpret_cast(shared_memory->data()), - shared_memory) {} - -SharedMemoryDesktopFrame::SharedMemoryDesktopFrame( - DesktopSize size, - int stride, - std::unique_ptr shared_memory) - : SharedMemoryDesktopFrame(size, stride, shared_memory.release()) {} - -SharedMemoryDesktopFrame::~SharedMemoryDesktopFrame() { - delete shared_memory_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame.h deleted file mode 100644 index 3a18b7852..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame.h +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_H_ - -#include - -#include -#include - -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/desktop_region.h" -#include "modules/desktop_capture/shared_memory.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -const float kStandardDPI = 96.0f; - -// DesktopFrame represents a video frame captured from the screen. -class RTC_EXPORT DesktopFrame { - public: - // DesktopFrame objects always hold RGBA data. 
- static const int kBytesPerPixel = 4; - - virtual ~DesktopFrame(); - - // Returns the rectangle in full desktop coordinates to indicate it covers - // the area of top_left() to top_letf() + size() / scale_factor(). - DesktopRect rect() const; - - // Returns the scale factor from DIPs to physical pixels of the frame. - // Assumes same scale in both X and Y directions at present. - float scale_factor() const; - - // Size of the frame. In physical coordinates, mapping directly from the - // underlying buffer. - const DesktopSize& size() const { return size_; } - - // The top-left of the frame in full desktop coordinates. E.g. the top left - // monitor should start from (0, 0). The desktop coordinates may be scaled by - // OS, but this is always consistent with the MouseCursorMonitor. - const DesktopVector& top_left() const { return top_left_; } - void set_top_left(const DesktopVector& top_left) { top_left_ = top_left; } - - // Distance in the buffer between two neighboring rows in bytes. - int stride() const { return stride_; } - - // Data buffer used for the frame. - uint8_t* data() const { return data_; } - - // SharedMemory used for the buffer or NULL if memory is allocated on the - // heap. The result is guaranteed to be deleted only after the frame is - // deleted (classes that inherit from DesktopFrame must ensure it). - SharedMemory* shared_memory() const { return shared_memory_; } - - // Indicates region of the screen that has changed since the previous frame. - const DesktopRegion& updated_region() const { return updated_region_; } - DesktopRegion* mutable_updated_region() { return &updated_region_; } - - // DPI of the screen being captured. May be set to zero, e.g. if DPI is - // unknown. - const DesktopVector& dpi() const { return dpi_; } - void set_dpi(const DesktopVector& dpi) { dpi_ = dpi; } - - // Time taken to capture the frame in milliseconds. 
- int64_t capture_time_ms() const { return capture_time_ms_; } - void set_capture_time_ms(int64_t time_ms) { capture_time_ms_ = time_ms; } - - // Copies pixels from a buffer or another frame. |dest_rect| rect must lay - // within bounds of this frame. - void CopyPixelsFrom(const uint8_t* src_buffer, - int src_stride, - const DesktopRect& dest_rect); - void CopyPixelsFrom(const DesktopFrame& src_frame, - const DesktopVector& src_pos, - const DesktopRect& dest_rect); - - // Copies pixels from another frame, with the copied & overwritten regions - // representing the intersection between the two frames. Returns true if - // pixels were copied, or false if there's no intersection. The scale factors - // represent the ratios between pixel space & offset coordinate space (e.g. - // 2.0 would indicate the frames are scaled down by 50% for display, so any - // offset between their origins should be doubled). - bool CopyIntersectingPixelsFrom(const DesktopFrame& src_frame, - double horizontal_scale, - double vertical_scale); - - // A helper to return the data pointer of a frame at the specified position. - uint8_t* GetFrameDataAtPos(const DesktopVector& pos) const; - - // The DesktopCapturer implementation which generates current DesktopFrame. - // Not all DesktopCapturer implementations set this field; it's set to - // kUnknown by default. - uint32_t capturer_id() const { return capturer_id_; } - void set_capturer_id(uint32_t capturer_id) { capturer_id_ = capturer_id; } - - // Copies various information from |other|. Anything initialized in - // constructor are not copied. - // This function is usually used when sharing a source DesktopFrame with - // several clients: the original DesktopFrame should be kept unchanged. For - // example, BasicDesktopFrame::CopyOf() and SharedDesktopFrame::Share(). - void CopyFrameInfoFrom(const DesktopFrame& other); - - // Copies various information from |other|. Anything initialized in - // constructor are not copied. 
Not like CopyFrameInfoFrom() function, this - // function uses swap or move constructor to avoid data copy. It won't break - // the |other|, but some of its information may be missing after this - // operation. E.g. other->updated_region_; - // This function is usually used when wrapping a DesktopFrame: the wrapper - // instance takes the ownership of |other|, so other components cannot access - // |other| anymore. For example, CroppedDesktopFrame and - // DesktopFrameWithCursor. - void MoveFrameInfoFrom(DesktopFrame* other); - - // Set and get the ICC profile of the frame data pixels. Useful to build the - // a ColorSpace object from clients of webrtc library like chromium. The - // format of an ICC profile is defined in the following specification - // http://www.color.org/specification/ICC1v43_2010-12.pdf. - const std::vector& icc_profile() const { return icc_profile_; } - void set_icc_profile(const std::vector& icc_profile) { - icc_profile_ = icc_profile; - } - - protected: - DesktopFrame(DesktopSize size, - int stride, - uint8_t* data, - SharedMemory* shared_memory); - - // Ownership of the buffers is defined by the classes that inherit from this - // class. They must guarantee that the buffer is not deleted before the frame - // is deleted. - uint8_t* const data_; - SharedMemory* const shared_memory_; - - private: - const DesktopSize size_; - const int stride_; - - DesktopRegion updated_region_; - DesktopVector top_left_; - DesktopVector dpi_; - int64_t capture_time_ms_; - uint32_t capturer_id_; - std::vector icc_profile_; - - RTC_DISALLOW_COPY_AND_ASSIGN(DesktopFrame); -}; - -// A DesktopFrame that stores data in the heap. -class RTC_EXPORT BasicDesktopFrame : public DesktopFrame { - public: - // The entire data buffer used for the frame is initialized with zeros. - explicit BasicDesktopFrame(DesktopSize size); - - ~BasicDesktopFrame() override; - - // Creates a BasicDesktopFrame that contains copy of |frame|. 
- // TODO(zijiehe): Return std::unique_ptr - static DesktopFrame* CopyOf(const DesktopFrame& frame); - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(BasicDesktopFrame); -}; - -// A DesktopFrame that stores data in shared memory. -class RTC_EXPORT SharedMemoryDesktopFrame : public DesktopFrame { - public: - // May return nullptr if |shared_memory_factory| failed to create a - // SharedMemory instance. - // |shared_memory_factory| should not be nullptr. - static std::unique_ptr Create( - DesktopSize size, - SharedMemoryFactory* shared_memory_factory); - - // Takes ownership of |shared_memory|. - // Deprecated, use the next constructor. - SharedMemoryDesktopFrame(DesktopSize size, - int stride, - SharedMemory* shared_memory); - - // Preferred. - SharedMemoryDesktopFrame(DesktopSize size, - int stride, - std::unique_ptr shared_memory); - - ~SharedMemoryDesktopFrame() override; - - private: - // Avoid unexpected order of parameter evaluation. - // Executing both std::unique_ptr::operator->() and - // std::unique_ptr::release() in the member initializer list is not safe. - // Depends on the order of parameter evaluation, - // std::unique_ptr::operator->() may trigger assertion failure if it has - // been evaluated after std::unique_ptr::release(). By using this - // constructor, std::unique_ptr::operator->() won't be involved anymore. - SharedMemoryDesktopFrame(DesktopRect rect, - int stride, - SharedMemory* shared_memory); - - RTC_DISALLOW_COPY_AND_ASSIGN(SharedMemoryDesktopFrame); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_generator.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_generator.cc deleted file mode 100644 index 64ba86c67..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_generator.cc +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/desktop_frame_generator.h" - -#include -#include - -#include - -#include "modules/desktop_capture/rgba_color.h" -#include "rtc_base/checks.h" -#include "rtc_base/random.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { - -namespace { - -// Sets |updated_region| to |frame|. If |enlarge_updated_region| is -// true, this function will randomly enlarge each DesktopRect in -// |updated_region|. But the enlarged DesktopRegion won't excceed the -// frame->size(). If |add_random_updated_region| is true, several random -// rectangles will also be included in |frame|. -void SetUpdatedRegion(DesktopFrame* frame, - const DesktopRegion& updated_region, - bool enlarge_updated_region, - int enlarge_range, - bool add_random_updated_region) { - const DesktopRect screen_rect = DesktopRect::MakeSize(frame->size()); - Random random(rtc::TimeMicros()); - frame->mutable_updated_region()->Clear(); - for (DesktopRegion::Iterator it(updated_region); !it.IsAtEnd(); - it.Advance()) { - DesktopRect rect = it.rect(); - if (enlarge_updated_region && enlarge_range > 0) { - rect.Extend(random.Rand(enlarge_range), random.Rand(enlarge_range), - random.Rand(enlarge_range), random.Rand(enlarge_range)); - rect.IntersectWith(screen_rect); - } - frame->mutable_updated_region()->AddRect(rect); - } - - if (add_random_updated_region) { - for (int i = random.Rand(10); i >= 0; i--) { - // At least a 1 x 1 updated region. 
- const int left = random.Rand(0, frame->size().width() - 2); - const int top = random.Rand(0, frame->size().height() - 2); - const int right = random.Rand(left + 1, frame->size().width()); - const int bottom = random.Rand(top + 1, frame->size().height()); - frame->mutable_updated_region()->AddRect( - DesktopRect::MakeLTRB(left, top, right, bottom)); - } - } -} - -// Paints pixels in |rect| of |frame| to |color|. -void PaintRect(DesktopFrame* frame, DesktopRect rect, RgbaColor rgba_color) { - static_assert(DesktopFrame::kBytesPerPixel == sizeof(uint32_t), - "kBytesPerPixel should be 4."); - RTC_DCHECK_GE(frame->size().width(), rect.right()); - RTC_DCHECK_GE(frame->size().height(), rect.bottom()); - uint32_t color = rgba_color.ToUInt32(); - uint8_t* row = frame->GetFrameDataAtPos(rect.top_left()); - for (int i = 0; i < rect.height(); i++) { - uint32_t* column = reinterpret_cast(row); - for (int j = 0; j < rect.width(); j++) { - column[j] = color; - } - row += frame->stride(); - } -} - -// Paints pixels in |region| of |frame| to |color|. 
-void PaintRegion(DesktopFrame* frame, - DesktopRegion* region, - RgbaColor rgba_color) { - region->IntersectWith(DesktopRect::MakeSize(frame->size())); - for (DesktopRegion::Iterator it(*region); !it.IsAtEnd(); it.Advance()) { - PaintRect(frame, it.rect(), rgba_color); - } -} - -} // namespace - -DesktopFrameGenerator::DesktopFrameGenerator() {} -DesktopFrameGenerator::~DesktopFrameGenerator() {} - -DesktopFramePainter::DesktopFramePainter() {} -DesktopFramePainter::~DesktopFramePainter() {} - -PainterDesktopFrameGenerator::PainterDesktopFrameGenerator() - : size_(1024, 768), - return_frame_(true), - provide_updated_region_hints_(false), - enlarge_updated_region_(false), - enlarge_range_(20), - add_random_updated_region_(false), - painter_(nullptr) {} -PainterDesktopFrameGenerator::~PainterDesktopFrameGenerator() {} - -std::unique_ptr PainterDesktopFrameGenerator::GetNextFrame( - SharedMemoryFactory* factory) { - if (!return_frame_) { - return nullptr; - } - - std::unique_ptr frame = std::unique_ptr( - factory ? 
SharedMemoryDesktopFrame::Create(size_, factory).release() - : new BasicDesktopFrame(size_)); - if (painter_) { - DesktopRegion updated_region; - if (!painter_->Paint(frame.get(), &updated_region)) { - return nullptr; - } - - if (provide_updated_region_hints_) { - SetUpdatedRegion(frame.get(), updated_region, enlarge_updated_region_, - enlarge_range_, add_random_updated_region_); - } else { - frame->mutable_updated_region()->SetRect( - DesktopRect::MakeSize(frame->size())); - } - } - - return frame; -} - -DesktopSize* PainterDesktopFrameGenerator::size() { - return &size_; -} - -void PainterDesktopFrameGenerator::set_return_frame(bool return_frame) { - return_frame_ = return_frame; -} - -void PainterDesktopFrameGenerator::set_provide_updated_region_hints( - bool provide_updated_region_hints) { - provide_updated_region_hints_ = provide_updated_region_hints; -} - -void PainterDesktopFrameGenerator::set_enlarge_updated_region( - bool enlarge_updated_region) { - enlarge_updated_region_ = enlarge_updated_region; -} - -void PainterDesktopFrameGenerator::set_enlarge_range(int enlarge_range) { - enlarge_range_ = enlarge_range; -} - -void PainterDesktopFrameGenerator::set_add_random_updated_region( - bool add_random_updated_region) { - add_random_updated_region_ = add_random_updated_region; -} - -void PainterDesktopFrameGenerator::set_desktop_frame_painter( - DesktopFramePainter* painter) { - painter_ = painter; -} - -BlackWhiteDesktopFramePainter::BlackWhiteDesktopFramePainter() {} -BlackWhiteDesktopFramePainter::~BlackWhiteDesktopFramePainter() {} - -DesktopRegion* BlackWhiteDesktopFramePainter::updated_region() { - return &updated_region_; -} - -bool BlackWhiteDesktopFramePainter::Paint(DesktopFrame* frame, - DesktopRegion* updated_region) { - RTC_DCHECK(updated_region->is_empty()); - memset(frame->data(), 0, frame->stride() * frame->size().height()); - PaintRegion(frame, &updated_region_, RgbaColor(0xFFFFFFFF)); - updated_region_.Swap(updated_region); - return true; -} 
- -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_generator.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_generator.h deleted file mode 100644 index c8ac5a9ae..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_generator.h +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_GENERATOR_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_GENERATOR_H_ - -#include - -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/desktop_region.h" -#include "modules/desktop_capture/shared_memory.h" - -namespace webrtc { - -// An interface to generate a DesktopFrame. -class DesktopFrameGenerator { - public: - DesktopFrameGenerator(); - virtual ~DesktopFrameGenerator(); - - virtual std::unique_ptr GetNextFrame( - SharedMemoryFactory* factory) = 0; -}; - -// An interface to paint a DesktopFrame. This interface is used by -// PainterDesktopFrameGenerator. -class DesktopFramePainter { - public: - DesktopFramePainter(); - virtual ~DesktopFramePainter(); - - virtual bool Paint(DesktopFrame* frame, DesktopRegion* updated_region) = 0; -}; - -// An implementation of DesktopFrameGenerator to take care about the -// DesktopFrame size, filling updated_region(), etc, but leaves the real -// painting work to a DesktopFramePainter implementation. 
-class PainterDesktopFrameGenerator final : public DesktopFrameGenerator { - public: - PainterDesktopFrameGenerator(); - ~PainterDesktopFrameGenerator() override; - - std::unique_ptr GetNextFrame( - SharedMemoryFactory* factory) override; - - // Sets the size of the frame which will be returned in next GetNextFrame() - // call. - DesktopSize* size(); - - // Decides whether BaseDesktopFrameGenerator returns a frame in next Capture() - // callback. If return_frame_ is true, BaseDesktopFrameGenerator will create a - // frame according to both size_ and SharedMemoryFactory input, and uses - // Paint() function to paint it. - void set_return_frame(bool return_frame); - - // Decides whether MockScreenCapturer returns a frame with updated regions. - // MockScreenCapturer will keep DesktopFrame::updated_region() empty if this - // field is false. - void set_provide_updated_region_hints(bool provide_updated_region_hints); - - // Decides whether MockScreenCapturer randomly enlarges updated regions in the - // DesktopFrame. Set this field to true to simulate an inaccurate updated - // regions' return from OS APIs. - void set_enlarge_updated_region(bool enlarge_updated_region); - - // The range to enlarge a updated region if |enlarge_updated_region_| is true. - // If this field is less than zero, it will be treated as zero, and - // |enlarge_updated_region_| will be ignored. - void set_enlarge_range(int enlarge_range); - - // Decides whether BaseDesktopFrameGenerator randomly add some updated regions - // in the DesktopFrame. Set this field to true to simulate an inaccurate - // updated regions' return from OS APIs. - void set_add_random_updated_region(bool add_random_updated_region); - - // Sets the painter object to do the real painting work, if no |painter_| has - // been set to this instance, the DesktopFrame returned by GetNextFrame() - // function will keep in an undefined but valid state. - // PainterDesktopFrameGenerator does not take ownership of the |painter|. 
- void set_desktop_frame_painter(DesktopFramePainter* painter); - - private: - DesktopSize size_; - bool return_frame_; - bool provide_updated_region_hints_; - bool enlarge_updated_region_; - int enlarge_range_; - bool add_random_updated_region_; - DesktopFramePainter* painter_; -}; - -// An implementation of DesktopFramePainter to paint black on -// mutable_updated_region(), and white elsewhere. -class BlackWhiteDesktopFramePainter final : public DesktopFramePainter { - public: - BlackWhiteDesktopFramePainter(); - ~BlackWhiteDesktopFramePainter() override; - - // The black regions of the frame which will be returned in next Paint() - // call. BlackWhiteDesktopFramePainter will draw a white frame, with black - // in the updated_region_. Each Paint() call will consume updated_region_. - DesktopRegion* updated_region(); - - // DesktopFramePainter interface. - bool Paint(DesktopFrame* frame, DesktopRegion* updated_region) override; - - private: - DesktopRegion updated_region_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_GENERATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_rotation.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_rotation.cc deleted file mode 100644 index 5e9928d1f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_rotation.cc +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/desktop_frame_rotation.h" - -#include "rtc_base/checks.h" -#include "third_party/libyuv/include/libyuv/rotate_argb.h" - -namespace webrtc { - -namespace { - -libyuv::RotationMode ToLibyuvRotationMode(Rotation rotation) { - switch (rotation) { - case Rotation::CLOCK_WISE_0: - return libyuv::kRotate0; - case Rotation::CLOCK_WISE_90: - return libyuv::kRotate90; - case Rotation::CLOCK_WISE_180: - return libyuv::kRotate180; - case Rotation::CLOCK_WISE_270: - return libyuv::kRotate270; - } - RTC_NOTREACHED(); - return libyuv::kRotate0; -} - -DesktopRect RotateAndOffsetRect(DesktopRect rect, - DesktopSize size, - Rotation rotation, - DesktopVector offset) { - DesktopRect result = RotateRect(rect, size, rotation); - result.Translate(offset); - return result; -} - -} // namespace - -Rotation ReverseRotation(Rotation rotation) { - switch (rotation) { - case Rotation::CLOCK_WISE_0: - return rotation; - case Rotation::CLOCK_WISE_90: - return Rotation::CLOCK_WISE_270; - case Rotation::CLOCK_WISE_180: - return Rotation::CLOCK_WISE_180; - case Rotation::CLOCK_WISE_270: - return Rotation::CLOCK_WISE_90; - } - RTC_NOTREACHED(); - return Rotation::CLOCK_WISE_0; -} - -DesktopSize RotateSize(DesktopSize size, Rotation rotation) { - switch (rotation) { - case Rotation::CLOCK_WISE_0: - case Rotation::CLOCK_WISE_180: - return size; - case Rotation::CLOCK_WISE_90: - case Rotation::CLOCK_WISE_270: - return DesktopSize(size.height(), size.width()); - } - RTC_NOTREACHED(); - return DesktopSize(); -} - -DesktopRect RotateRect(DesktopRect rect, DesktopSize size, Rotation rotation) { - switch (rotation) { - case Rotation::CLOCK_WISE_0: - return rect; - case Rotation::CLOCK_WISE_90: - return DesktopRect::MakeXYWH(size.height() - rect.bottom(), rect.left(), - rect.height(), rect.width()); - case Rotation::CLOCK_WISE_180: - return DesktopRect::MakeXYWH(size.width() - rect.right(), - size.height() - rect.bottom(), rect.width(), - rect.height()); - case 
Rotation::CLOCK_WISE_270: - return DesktopRect::MakeXYWH(rect.top(), size.width() - rect.right(), - rect.height(), rect.width()); - } - RTC_NOTREACHED(); - return DesktopRect(); -} - -void RotateDesktopFrame(const DesktopFrame& source, - const DesktopRect& source_rect, - const Rotation& rotation, - const DesktopVector& target_offset, - DesktopFrame* target) { - RTC_DCHECK(target); - RTC_DCHECK(DesktopRect::MakeSize(source.size()).ContainsRect(source_rect)); - // The rectangle in |target|. - const DesktopRect target_rect = - RotateAndOffsetRect(source_rect, source.size(), rotation, target_offset); - RTC_DCHECK(DesktopRect::MakeSize(target->size()).ContainsRect(target_rect)); - - if (target_rect.is_empty()) { - return; - } - - int result = libyuv::ARGBRotate( - source.GetFrameDataAtPos(source_rect.top_left()), source.stride(), - target->GetFrameDataAtPos(target_rect.top_left()), target->stride(), - source_rect.width(), source_rect.height(), - ToLibyuvRotationMode(rotation)); - RTC_DCHECK_EQ(result, 0); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_rotation.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_rotation.h deleted file mode 100644 index 72bb1a651..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_rotation.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_ROTATION_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_ROTATION_H_ - -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" - -namespace webrtc { - -// Represents the rotation of a DesktopFrame. -enum class Rotation { - CLOCK_WISE_0, - CLOCK_WISE_90, - CLOCK_WISE_180, - CLOCK_WISE_270, -}; - -// Rotates input DesktopFrame |source|, copies pixel in an unrotated rectangle -// |source_rect| into the target rectangle of another DesktopFrame |target|. -// Target rectangle here is the rotated |source_rect| plus |target_offset|. -// |rotation| specifies |source| to |target| rotation. |source_rect| is in -// |source| coordinate. |target_offset| is in |target| coordinate. -// This function triggers check failure if |source| does not cover the -// |source_rect|, or |target| does not cover the rotated |rect|. -void RotateDesktopFrame(const DesktopFrame& source, - const DesktopRect& source_rect, - const Rotation& rotation, - const DesktopVector& target_offset, - DesktopFrame* target); - -// Returns a reverse rotation of |rotation|. -Rotation ReverseRotation(Rotation rotation); - -// Returns a rotated DesktopSize of |size|. -DesktopSize RotateSize(DesktopSize size, Rotation rotation); - -// Returns a rotated DesktopRect of |rect|. The |size| represents the size of -// the DesktopFrame which |rect| belongs in. -DesktopRect RotateRect(DesktopRect rect, DesktopSize size, Rotation rotation); - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_ROTATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_win.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_win.cc deleted file mode 100644 index 58ebac91d..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_win.cc +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/desktop_frame_win.h" - -#include - -#include "rtc_base/logging.h" - -namespace webrtc { - -DesktopFrameWin::DesktopFrameWin(DesktopSize size, - int stride, - uint8_t* data, - std::unique_ptr shared_memory, - HBITMAP bitmap) - : DesktopFrame(size, stride, data, shared_memory.get()), - bitmap_(bitmap), - owned_shared_memory_(std::move(shared_memory)) {} - -DesktopFrameWin::~DesktopFrameWin() { - DeleteObject(bitmap_); -} - -// static -std::unique_ptr DesktopFrameWin::Create( - DesktopSize size, - SharedMemoryFactory* shared_memory_factory, - HDC hdc) { - int bytes_per_row = size.width() * kBytesPerPixel; - int buffer_size = bytes_per_row * size.height(); - - // Describe a device independent bitmap (DIB) that is the size of the desktop. 
- BITMAPINFO bmi = {}; - bmi.bmiHeader.biHeight = -size.height(); - bmi.bmiHeader.biWidth = size.width(); - bmi.bmiHeader.biPlanes = 1; - bmi.bmiHeader.biBitCount = DesktopFrameWin::kBytesPerPixel * 8; - bmi.bmiHeader.biSize = sizeof(bmi.bmiHeader); - bmi.bmiHeader.biSizeImage = bytes_per_row * size.height(); - - std::unique_ptr shared_memory; - HANDLE section_handle = nullptr; - if (shared_memory_factory) { - shared_memory = shared_memory_factory->CreateSharedMemory(buffer_size); - section_handle = shared_memory->handle(); - } - void* data = nullptr; - HBITMAP bitmap = - CreateDIBSection(hdc, &bmi, DIB_RGB_COLORS, &data, section_handle, 0); - if (!bitmap) { - RTC_LOG(LS_WARNING) << "Failed to allocate new window frame " - << GetLastError(); - return nullptr; - } - - return std::unique_ptr( - new DesktopFrameWin(size, bytes_per_row, reinterpret_cast(data), - std::move(shared_memory), bitmap)); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_win.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_win.h deleted file mode 100644 index 73e864868..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_frame_win.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_WIN_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_WIN_H_ - -#include - -#include - -#include "modules/desktop_capture/desktop_frame.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -// DesktopFrame implementation used by screen and window captures on Windows. -// Frame data is stored in a GDI bitmap. -class DesktopFrameWin : public DesktopFrame { - public: - ~DesktopFrameWin() override; - - static std::unique_ptr - Create(DesktopSize size, SharedMemoryFactory* shared_memory_factory, HDC hdc); - - HBITMAP bitmap() { return bitmap_; } - - private: - DesktopFrameWin(DesktopSize size, - int stride, - uint8_t* data, - std::unique_ptr shared_memory, - HBITMAP bitmap); - - HBITMAP bitmap_; - std::unique_ptr owned_shared_memory_; - - RTC_DISALLOW_COPY_AND_ASSIGN(DesktopFrameWin); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_FRAME_WIN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_geometry.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_geometry.cc deleted file mode 100644 index e0a5d7af8..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_geometry.cc +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/desktop_geometry.h" - -#include -#include - -namespace webrtc { - -bool DesktopRect::Contains(const DesktopVector& point) const { - return point.x() >= left() && point.x() < right() && point.y() >= top() && - point.y() < bottom(); -} - -bool DesktopRect::ContainsRect(const DesktopRect& rect) const { - return rect.left() >= left() && rect.right() <= right() && - rect.top() >= top() && rect.bottom() <= bottom(); -} - -void DesktopRect::IntersectWith(const DesktopRect& rect) { - left_ = std::max(left(), rect.left()); - top_ = std::max(top(), rect.top()); - right_ = std::min(right(), rect.right()); - bottom_ = std::min(bottom(), rect.bottom()); - if (is_empty()) { - left_ = 0; - top_ = 0; - right_ = 0; - bottom_ = 0; - } -} - -void DesktopRect::UnionWith(const DesktopRect& rect) { - if (is_empty()) { - *this = rect; - return; - } - - if (rect.is_empty()) { - return; - } - - left_ = std::min(left(), rect.left()); - top_ = std::min(top(), rect.top()); - right_ = std::max(right(), rect.right()); - bottom_ = std::max(bottom(), rect.bottom()); -} - -void DesktopRect::Translate(int32_t dx, int32_t dy) { - left_ += dx; - top_ += dy; - right_ += dx; - bottom_ += dy; -} - -void DesktopRect::Extend(int32_t left_offset, - int32_t top_offset, - int32_t right_offset, - int32_t bottom_offset) { - left_ -= left_offset; - top_ -= top_offset; - right_ += right_offset; - bottom_ += bottom_offset; -} - -void DesktopRect::Scale(double horizontal, double vertical) { - right_ += static_cast(std::round(width() * (horizontal - 1))); - bottom_ += static_cast(std::round(height() * (vertical - 1))); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_geometry.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_geometry.h deleted file mode 100644 index 09ebefda9..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_geometry.h +++ /dev/null @@ -1,169 +0,0 @@ -/* - * 
Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_GEOMETRY_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_GEOMETRY_H_ - -#include - -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// A vector in the 2D integer space. E.g. can be used to represent screen DPI. -class DesktopVector { - public: - DesktopVector() : x_(0), y_(0) {} - DesktopVector(int32_t x, int32_t y) : x_(x), y_(y) {} - - int32_t x() const { return x_; } - int32_t y() const { return y_; } - bool is_zero() const { return x_ == 0 && y_ == 0; } - - bool equals(const DesktopVector& other) const { - return x_ == other.x_ && y_ == other.y_; - } - - void set(int32_t x, int32_t y) { - x_ = x; - y_ = y; - } - - DesktopVector add(const DesktopVector& other) const { - return DesktopVector(x() + other.x(), y() + other.y()); - } - DesktopVector subtract(const DesktopVector& other) const { - return DesktopVector(x() - other.x(), y() - other.y()); - } - - DesktopVector operator-() const { return DesktopVector(-x_, -y_); } - - private: - int32_t x_; - int32_t y_; -}; - -// Type used to represent screen/window size. 
-class DesktopSize { - public: - DesktopSize() : width_(0), height_(0) {} - DesktopSize(int32_t width, int32_t height) : width_(width), height_(height) {} - - int32_t width() const { return width_; } - int32_t height() const { return height_; } - - bool is_empty() const { return width_ <= 0 || height_ <= 0; } - - bool equals(const DesktopSize& other) const { - return width_ == other.width_ && height_ == other.height_; - } - - void set(int32_t width, int32_t height) { - width_ = width; - height_ = height; - } - - private: - int32_t width_; - int32_t height_; -}; - -// Represents a rectangle on the screen. -class RTC_EXPORT DesktopRect { - public: - static DesktopRect MakeSize(const DesktopSize& size) { - return DesktopRect(0, 0, size.width(), size.height()); - } - static DesktopRect MakeWH(int32_t width, int32_t height) { - return DesktopRect(0, 0, width, height); - } - static DesktopRect MakeXYWH(int32_t x, - int32_t y, - int32_t width, - int32_t height) { - return DesktopRect(x, y, x + width, y + height); - } - static DesktopRect MakeLTRB(int32_t left, - int32_t top, - int32_t right, - int32_t bottom) { - return DesktopRect(left, top, right, bottom); - } - static DesktopRect MakeOriginSize(const DesktopVector& origin, - const DesktopSize& size) { - return MakeXYWH(origin.x(), origin.y(), size.width(), size.height()); - } - - DesktopRect() : left_(0), top_(0), right_(0), bottom_(0) {} - - int32_t left() const { return left_; } - int32_t top() const { return top_; } - int32_t right() const { return right_; } - int32_t bottom() const { return bottom_; } - int32_t width() const { return right_ - left_; } - int32_t height() const { return bottom_ - top_; } - - void set_width(int32_t width) { right_ = left_ + width; } - void set_height(int32_t height) { bottom_ = top_ + height; } - - DesktopVector top_left() const { return DesktopVector(left_, top_); } - DesktopSize size() const { return DesktopSize(width(), height()); } - - bool is_empty() const { return left_ >= 
right_ || top_ >= bottom_; } - - bool equals(const DesktopRect& other) const { - return left_ == other.left_ && top_ == other.top_ && - right_ == other.right_ && bottom_ == other.bottom_; - } - - // Returns true if |point| lies within the rectangle boundaries. - bool Contains(const DesktopVector& point) const; - - // Returns true if |rect| lies within the boundaries of this rectangle. - bool ContainsRect(const DesktopRect& rect) const; - - // Finds intersection with |rect|. - void IntersectWith(const DesktopRect& rect); - - // Extends the rectangle to cover |rect|. If |this| is empty, replaces |this| - // with |rect|; if |rect| is empty, this function takes no effect. - void UnionWith(const DesktopRect& rect); - - // Adds (dx, dy) to the position of the rectangle. - void Translate(int32_t dx, int32_t dy); - void Translate(DesktopVector d) { Translate(d.x(), d.y()); } - - // Enlarges current DesktopRect by subtracting |left_offset| and |top_offset| - // from |left_| and |top_|, and adding |right_offset| and |bottom_offset| to - // |right_| and |bottom_|. This function does not normalize the result, so - // |left_| and |top_| may be less than zero or larger than |right_| and - // |bottom_|. - void Extend(int32_t left_offset, - int32_t top_offset, - int32_t right_offset, - int32_t bottom_offset); - - // Scales current DesktopRect. This function does not impact the |top_| and - // |left_|. 
- void Scale(double horizontal, double vertical); - - private: - DesktopRect(int32_t left, int32_t top, int32_t right, int32_t bottom) - : left_(left), top_(top), right_(right), bottom_(bottom) {} - - int32_t left_; - int32_t top_; - int32_t right_; - int32_t bottom_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_GEOMETRY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_region.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_region.cc deleted file mode 100644 index befbcc6f4..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_region.cc +++ /dev/null @@ -1,567 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/desktop_region.h" - -#include - -#include -#include - -namespace webrtc { - -DesktopRegion::RowSpan::RowSpan(int32_t left, int32_t right) - : left(left), right(right) {} - -DesktopRegion::Row::Row(const Row&) = default; -DesktopRegion::Row::Row(Row&&) = default; - -DesktopRegion::Row::Row(int32_t top, int32_t bottom) - : top(top), bottom(bottom) {} - -DesktopRegion::Row::~Row() {} - -DesktopRegion::DesktopRegion() {} - -DesktopRegion::DesktopRegion(const DesktopRect& rect) { - AddRect(rect); -} - -DesktopRegion::DesktopRegion(const DesktopRect* rects, int count) { - AddRects(rects, count); -} - -DesktopRegion::DesktopRegion(const DesktopRegion& other) { - *this = other; -} - -DesktopRegion::~DesktopRegion() { - Clear(); -} - -DesktopRegion& DesktopRegion::operator=(const DesktopRegion& other) { - Clear(); - rows_ = other.rows_; - for (Rows::iterator it = rows_.begin(); it != rows_.end(); ++it) { - // Copy each row. - Row* row = it->second; - it->second = new Row(*row); - } - return *this; -} - -bool DesktopRegion::Equals(const DesktopRegion& region) const { - // Iterate over rows of the tow regions and compare each row. - Rows::const_iterator it1 = rows_.begin(); - Rows::const_iterator it2 = region.rows_.begin(); - while (it1 != rows_.end()) { - if (it2 == region.rows_.end() || it1->first != it2->first || - it1->second->top != it2->second->top || - it1->second->bottom != it2->second->bottom || - it1->second->spans != it2->second->spans) { - return false; - } - ++it1; - ++it2; - } - return it2 == region.rows_.end(); -} - -void DesktopRegion::Clear() { - for (Rows::iterator row = rows_.begin(); row != rows_.end(); ++row) { - delete row->second; - } - rows_.clear(); -} - -void DesktopRegion::SetRect(const DesktopRect& rect) { - Clear(); - AddRect(rect); -} - -void DesktopRegion::AddRect(const DesktopRect& rect) { - if (rect.is_empty()) - return; - - // Top of the part of the |rect| that hasn't been inserted yet. 
Increased as - // we iterate over the rows until it reaches |rect.bottom()|. - int top = rect.top(); - - // Iterate over all rows that may intersect with |rect| and add new rows when - // necessary. - Rows::iterator row = rows_.upper_bound(top); - while (top < rect.bottom()) { - if (row == rows_.end() || top < row->second->top) { - // If |top| is above the top of the current |row| then add a new row above - // the current one. - int32_t bottom = rect.bottom(); - if (row != rows_.end() && row->second->top < bottom) - bottom = row->second->top; - row = rows_.insert(row, Rows::value_type(bottom, new Row(top, bottom))); - } else if (top > row->second->top) { - // If the |top| falls in the middle of the |row| then split |row| into - // two, at |top|, and leave |row| referring to the lower of the two, - // ready to insert a new span into. - assert(top <= row->second->bottom); - Rows::iterator new_row = rows_.insert( - row, Rows::value_type(top, new Row(row->second->top, top))); - row->second->top = top; - new_row->second->spans = row->second->spans; - } - - if (rect.bottom() < row->second->bottom) { - // If the bottom of the |rect| falls in the middle of the |row| split - // |row| into two, at |top|, and leave |row| referring to the upper of - // the two, ready to insert a new span into. - Rows::iterator new_row = rows_.insert( - row, Rows::value_type(rect.bottom(), new Row(top, rect.bottom()))); - row->second->top = rect.bottom(); - new_row->second->spans = row->second->spans; - row = new_row; - } - - // Add a new span to the current row. - AddSpanToRow(row->second, rect.left(), rect.right()); - top = row->second->bottom; - - MergeWithPrecedingRow(row); - - // Move to the next row. 
- ++row; - } - - if (row != rows_.end()) - MergeWithPrecedingRow(row); -} - -void DesktopRegion::AddRects(const DesktopRect* rects, int count) { - for (int i = 0; i < count; ++i) { - AddRect(rects[i]); - } -} - -void DesktopRegion::MergeWithPrecedingRow(Rows::iterator row) { - assert(row != rows_.end()); - - if (row != rows_.begin()) { - Rows::iterator previous_row = row; - previous_row--; - - // If |row| and |previous_row| are next to each other and contain the same - // set of spans then they can be merged. - if (previous_row->second->bottom == row->second->top && - previous_row->second->spans == row->second->spans) { - row->second->top = previous_row->second->top; - delete previous_row->second; - rows_.erase(previous_row); - } - } -} - -void DesktopRegion::AddRegion(const DesktopRegion& region) { - // TODO(sergeyu): This function is not optimized - potentially it can iterate - // over rows of the two regions similar to how it works in Intersect(). - for (Iterator it(region); !it.IsAtEnd(); it.Advance()) { - AddRect(it.rect()); - } -} - -void DesktopRegion::Intersect(const DesktopRegion& region1, - const DesktopRegion& region2) { - Clear(); - - Rows::const_iterator it1 = region1.rows_.begin(); - Rows::const_iterator end1 = region1.rows_.end(); - Rows::const_iterator it2 = region2.rows_.begin(); - Rows::const_iterator end2 = region2.rows_.end(); - if (it1 == end1 || it2 == end2) - return; - - while (it1 != end1 && it2 != end2) { - // Arrange for |it1| to always be the top-most of the rows. - if (it2->second->top < it1->second->top) { - std::swap(it1, it2); - std::swap(end1, end2); - } - - // Skip |it1| if it doesn't intersect |it2| at all. - if (it1->second->bottom <= it2->second->top) { - ++it1; - continue; - } - - // Top of the |it1| row is above the top of |it2|, so top of the - // intersection is always the top of |it2|. 
- int32_t top = it2->second->top; - int32_t bottom = std::min(it1->second->bottom, it2->second->bottom); - - Rows::iterator new_row = rows_.insert( - rows_.end(), Rows::value_type(bottom, new Row(top, bottom))); - IntersectRows(it1->second->spans, it2->second->spans, - &new_row->second->spans); - if (new_row->second->spans.empty()) { - delete new_row->second; - rows_.erase(new_row); - } else { - MergeWithPrecedingRow(new_row); - } - - // If |it1| was completely consumed, move to the next one. - if (it1->second->bottom == bottom) - ++it1; - // If |it2| was completely consumed, move to the next one. - if (it2->second->bottom == bottom) - ++it2; - } -} - -// static -void DesktopRegion::IntersectRows(const RowSpanSet& set1, - const RowSpanSet& set2, - RowSpanSet* output) { - RowSpanSet::const_iterator it1 = set1.begin(); - RowSpanSet::const_iterator end1 = set1.end(); - RowSpanSet::const_iterator it2 = set2.begin(); - RowSpanSet::const_iterator end2 = set2.end(); - assert(it1 != end1 && it2 != end2); - - do { - // Arrange for |it1| to always be the left-most of the spans. - if (it2->left < it1->left) { - std::swap(it1, it2); - std::swap(end1, end2); - } - - // Skip |it1| if it doesn't intersect |it2| at all. - if (it1->right <= it2->left) { - ++it1; - continue; - } - - int32_t left = it2->left; - int32_t right = std::min(it1->right, it2->right); - assert(left < right); - - output->push_back(RowSpan(left, right)); - - // If |it1| was completely consumed, move to the next one. - if (it1->right == right) - ++it1; - // If |it2| was completely consumed, move to the next one. 
- if (it2->right == right) - ++it2; - } while (it1 != end1 && it2 != end2); -} - -void DesktopRegion::IntersectWith(const DesktopRegion& region) { - DesktopRegion old_region; - Swap(&old_region); - Intersect(old_region, region); -} - -void DesktopRegion::IntersectWith(const DesktopRect& rect) { - DesktopRegion region; - region.AddRect(rect); - IntersectWith(region); -} - -void DesktopRegion::Subtract(const DesktopRegion& region) { - if (region.rows_.empty()) - return; - - // |row_b| refers to the current row being subtracted. - Rows::const_iterator row_b = region.rows_.begin(); - - // Current vertical position at which subtraction is happening. - int top = row_b->second->top; - - // |row_a| refers to the current row we are subtracting from. Skip all rows - // above |top|. - Rows::iterator row_a = rows_.upper_bound(top); - - // Step through rows of the both regions subtracting content of |row_b| from - // |row_a|. - while (row_a != rows_.end() && row_b != region.rows_.end()) { - // Skip |row_a| if it doesn't intersect with the |row_b|. - if (row_a->second->bottom <= top) { - // Each output row is merged with previously-processed rows before further - // rows are processed. - MergeWithPrecedingRow(row_a); - ++row_a; - continue; - } - - if (top > row_a->second->top) { - // If |top| falls in the middle of |row_a| then split |row_a| into two, at - // |top|, and leave |row_a| referring to the lower of the two, ready to - // subtract spans from. - assert(top <= row_a->second->bottom); - Rows::iterator new_row = rows_.insert( - row_a, Rows::value_type(top, new Row(row_a->second->top, top))); - row_a->second->top = top; - new_row->second->spans = row_a->second->spans; - } else if (top < row_a->second->top) { - // If the |top| is above |row_a| then skip the range between |top| and - // top of |row_a| because it's empty. 
- top = row_a->second->top; - if (top >= row_b->second->bottom) { - ++row_b; - if (row_b != region.rows_.end()) - top = row_b->second->top; - continue; - } - } - - if (row_b->second->bottom < row_a->second->bottom) { - // If the bottom of |row_b| falls in the middle of the |row_a| split - // |row_a| into two, at |top|, and leave |row_a| referring to the upper of - // the two, ready to subtract spans from. - int bottom = row_b->second->bottom; - Rows::iterator new_row = - rows_.insert(row_a, Rows::value_type(bottom, new Row(top, bottom))); - row_a->second->top = bottom; - new_row->second->spans = row_a->second->spans; - row_a = new_row; - } - - // At this point the vertical range covered by |row_a| lays within the - // range covered by |row_b|. Subtract |row_b| spans from |row_a|. - RowSpanSet new_spans; - SubtractRows(row_a->second->spans, row_b->second->spans, &new_spans); - new_spans.swap(row_a->second->spans); - top = row_a->second->bottom; - - if (top >= row_b->second->bottom) { - ++row_b; - if (row_b != region.rows_.end()) - top = row_b->second->top; - } - - // Check if the row is empty after subtraction and delete it. Otherwise move - // to the next one. - if (row_a->second->spans.empty()) { - Rows::iterator row_to_delete = row_a; - ++row_a; - delete row_to_delete->second; - rows_.erase(row_to_delete); - } else { - MergeWithPrecedingRow(row_a); - ++row_a; - } - } - - if (row_a != rows_.end()) - MergeWithPrecedingRow(row_a); -} - -void DesktopRegion::Subtract(const DesktopRect& rect) { - DesktopRegion region; - region.AddRect(rect); - Subtract(region); -} - -void DesktopRegion::Translate(int32_t dx, int32_t dy) { - Rows new_rows; - - for (Rows::iterator it = rows_.begin(); it != rows_.end(); ++it) { - Row* row = it->second; - - row->top += dy; - row->bottom += dy; - - if (dx != 0) { - // Translate each span. 
- for (RowSpanSet::iterator span = row->spans.begin(); - span != row->spans.end(); ++span) { - span->left += dx; - span->right += dx; - } - } - - if (dy != 0) - new_rows.insert(new_rows.end(), Rows::value_type(row->bottom, row)); - } - - if (dy != 0) - new_rows.swap(rows_); -} - -void DesktopRegion::Swap(DesktopRegion* region) { - rows_.swap(region->rows_); -} - -// static -bool DesktopRegion::CompareSpanRight(const RowSpan& r, int32_t value) { - return r.right < value; -} - -// static -bool DesktopRegion::CompareSpanLeft(const RowSpan& r, int32_t value) { - return r.left < value; -} - -// static -void DesktopRegion::AddSpanToRow(Row* row, int left, int right) { - // First check if the new span is located to the right of all existing spans. - // This is an optimization to avoid binary search in the case when rectangles - // are inserted sequentially from left to right. - if (row->spans.empty() || left > row->spans.back().right) { - row->spans.push_back(RowSpan(left, right)); - return; - } - - // Find the first span that ends at or after |left|. - RowSpanSet::iterator start = std::lower_bound( - row->spans.begin(), row->spans.end(), left, CompareSpanRight); - assert(start < row->spans.end()); - - // Find the first span that starts after |right|. - RowSpanSet::iterator end = - std::lower_bound(start, row->spans.end(), right + 1, CompareSpanLeft); - if (end == row->spans.begin()) { - // There are no overlaps. Just insert the new span at the beginning. - row->spans.insert(row->spans.begin(), RowSpan(left, right)); - return; - } - - // Move end to the left, so that it points the last span that ends at or - // before |right|. - end--; - - // At this point [start, end] is the range of spans that intersect with the - // new one. - if (end < start) { - // There are no overlaps. Just insert the new span at the correct position. 
- row->spans.insert(start, RowSpan(left, right)); - return; - } - - left = std::min(left, start->left); - right = std::max(right, end->right); - - // Replace range [start, end] with the new span. - *start = RowSpan(left, right); - ++start; - ++end; - if (start < end) - row->spans.erase(start, end); -} - -// static -bool DesktopRegion::IsSpanInRow(const Row& row, const RowSpan& span) { - // Find the first span that starts at or after |span.left| and then check if - // it's the same span. - RowSpanSet::const_iterator it = std::lower_bound( - row.spans.begin(), row.spans.end(), span.left, CompareSpanLeft); - return it != row.spans.end() && *it == span; -} - -// static -void DesktopRegion::SubtractRows(const RowSpanSet& set_a, - const RowSpanSet& set_b, - RowSpanSet* output) { - assert(!set_a.empty() && !set_b.empty()); - - RowSpanSet::const_iterator it_b = set_b.begin(); - - // Iterate over all spans in |set_a| adding parts of it that do not intersect - // with |set_b| to the |output|. - for (RowSpanSet::const_iterator it_a = set_a.begin(); it_a != set_a.end(); - ++it_a) { - // If there is no intersection then append the current span and continue. - if (it_b == set_b.end() || it_a->right < it_b->left) { - output->push_back(*it_a); - continue; - } - - // Iterate over |set_b| spans that may intersect with |it_a|. 
- int pos = it_a->left; - while (it_b != set_b.end() && it_b->left < it_a->right) { - if (it_b->left > pos) - output->push_back(RowSpan(pos, it_b->left)); - if (it_b->right > pos) { - pos = it_b->right; - if (pos >= it_a->right) - break; - } - ++it_b; - } - if (pos < it_a->right) - output->push_back(RowSpan(pos, it_a->right)); - } -} - -DesktopRegion::Iterator::Iterator(const DesktopRegion& region) - : region_(region), - row_(region.rows_.begin()), - previous_row_(region.rows_.end()) { - if (!IsAtEnd()) { - assert(row_->second->spans.size() > 0); - row_span_ = row_->second->spans.begin(); - UpdateCurrentRect(); - } -} - -DesktopRegion::Iterator::~Iterator() {} - -bool DesktopRegion::Iterator::IsAtEnd() const { - return row_ == region_.rows_.end(); -} - -void DesktopRegion::Iterator::Advance() { - assert(!IsAtEnd()); - - while (true) { - ++row_span_; - if (row_span_ == row_->second->spans.end()) { - previous_row_ = row_; - ++row_; - if (row_ != region_.rows_.end()) { - assert(row_->second->spans.size() > 0); - row_span_ = row_->second->spans.begin(); - } - } - - if (IsAtEnd()) - return; - - // If the same span exists on the previous row then skip it, as we've - // already returned this span merged into the previous one, via - // UpdateCurrentRect(). - if (previous_row_ != region_.rows_.end() && - previous_row_->second->bottom == row_->second->top && - IsSpanInRow(*previous_row_->second, *row_span_)) { - continue; - } - - break; - } - - assert(!IsAtEnd()); - UpdateCurrentRect(); -} - -void DesktopRegion::Iterator::UpdateCurrentRect() { - // Merge the current rectangle with the matching spans from later rows. 
- int bottom; - Rows::const_iterator bottom_row = row_; - Rows::const_iterator previous; - do { - bottom = bottom_row->second->bottom; - previous = bottom_row; - ++bottom_row; - } while (bottom_row != region_.rows_.end() && - previous->second->bottom == bottom_row->second->top && - IsSpanInRow(*bottom_row->second, *row_span_)); - rect_ = DesktopRect::MakeLTRB(row_span_->left, row_->second->top, - row_span_->right, bottom); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_region.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_region.h deleted file mode 100644 index 1aa95d097..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/desktop_region.h +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_REGION_H_ -#define MODULES_DESKTOP_CAPTURE_DESKTOP_REGION_H_ - -#include - -#include -#include - -#include "modules/desktop_capture/desktop_geometry.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// DesktopRegion represents a region of the screen or window. -// -// Internally each region is stored as a set of rows where each row contains one -// or more rectangles aligned vertically. -class RTC_EXPORT DesktopRegion { - private: - // The following private types need to be declared first because they are used - // in the public Iterator. - - // RowSpan represents a horizontal span withing a single row. - struct RowSpan { - RowSpan(int32_t left, int32_t right); - - // Used by std::vector<>. 
- bool operator==(const RowSpan& that) const { - return left == that.left && right == that.right; - } - - int32_t left; - int32_t right; - }; - - typedef std::vector RowSpanSet; - - // Row represents a single row of a region. A row is set of rectangles that - // have the same vertical position. - struct Row { - Row(const Row&); - Row(Row&&); - Row(int32_t top, int32_t bottom); - ~Row(); - - int32_t top; - int32_t bottom; - - RowSpanSet spans; - }; - - // Type used to store list of rows in the region. The bottom position of row - // is used as the key so that rows are always ordered by their position. The - // map stores pointers to make Translate() more efficient. - typedef std::map Rows; - - public: - // Iterator that can be used to iterate over rectangles of a DesktopRegion. - // The region must not be mutated while the iterator is used. - class RTC_EXPORT Iterator { - public: - explicit Iterator(const DesktopRegion& target); - ~Iterator(); - - bool IsAtEnd() const; - void Advance(); - - const DesktopRect& rect() const { return rect_; } - - private: - const DesktopRegion& region_; - - // Updates |rect_| based on the current |row_| and |row_span_|. If - // |row_span_| matches spans on consecutive rows then they are also merged - // into |rect_|, to generate more efficient output. - void UpdateCurrentRect(); - - Rows::const_iterator row_; - Rows::const_iterator previous_row_; - RowSpanSet::const_iterator row_span_; - DesktopRect rect_; - }; - - DesktopRegion(); - explicit DesktopRegion(const DesktopRect& rect); - DesktopRegion(const DesktopRect* rects, int count); - DesktopRegion(const DesktopRegion& other); - ~DesktopRegion(); - - DesktopRegion& operator=(const DesktopRegion& other); - - bool is_empty() const { return rows_.empty(); } - - bool Equals(const DesktopRegion& region) const; - - // Reset the region to be empty. - void Clear(); - - // Reset region to contain just |rect|. 
- void SetRect(const DesktopRect& rect); - - // Adds specified rect(s) or region to the region. - void AddRect(const DesktopRect& rect); - void AddRects(const DesktopRect* rects, int count); - void AddRegion(const DesktopRegion& region); - - // Finds intersection of two regions and stores them in the current region. - void Intersect(const DesktopRegion& region1, const DesktopRegion& region2); - - // Same as above but intersects content of the current region with |region|. - void IntersectWith(const DesktopRegion& region); - - // Clips the region by the |rect|. - void IntersectWith(const DesktopRect& rect); - - // Subtracts |region| from the current content of the region. - void Subtract(const DesktopRegion& region); - - // Subtracts |rect| from the current content of the region. - void Subtract(const DesktopRect& rect); - - // Adds (dx, dy) to the position of the region. - void Translate(int32_t dx, int32_t dy); - - void Swap(DesktopRegion* region); - - private: - // Comparison functions used for std::lower_bound(). Compare left or right - // edges withs a given |value|. - static bool CompareSpanLeft(const RowSpan& r, int32_t value); - static bool CompareSpanRight(const RowSpan& r, int32_t value); - - // Adds a new span to the row, coalescing spans if necessary. - static void AddSpanToRow(Row* row, int32_t left, int32_t right); - - // Returns true if the |span| exists in the given |row|. - static bool IsSpanInRow(const Row& row, const RowSpan& rect); - - // Calculates the intersection of two sets of spans. - static void IntersectRows(const RowSpanSet& set1, - const RowSpanSet& set2, - RowSpanSet* output); - - static void SubtractRows(const RowSpanSet& set_a, - const RowSpanSet& set_b, - RowSpanSet* output); - - // Merges |row| with the row above it if they contain the same spans. Doesn't - // do anything if called with |row| set to rows_.begin() (i.e. first row of - // the region). If the rows were merged |row| remains a valid iterator to the - // merged row. 
- void MergeWithPrecedingRow(Rows::iterator row); - - Rows rows_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_REGION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_block.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_block.cc deleted file mode 100644 index dd9ab457e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_block.cc +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/differ_block.h" - -#include - -#include "modules/desktop_capture/differ_vector_sse2.h" -#include "rtc_base/system/arch.h" -#include "system_wrappers/include/cpu_features_wrapper.h" - -namespace webrtc { - -namespace { - -bool VectorDifference_C(const uint8_t* image1, const uint8_t* image2) { - return memcmp(image1, image2, kBlockSize * kBytesPerPixel) != 0; -} - -} // namespace - -bool VectorDifference(const uint8_t* image1, const uint8_t* image2) { - static bool (*diff_proc)(const uint8_t*, const uint8_t*) = nullptr; - - if (!diff_proc) { -#if defined(WEBRTC_ARCH_ARM_FAMILY) || defined(WEBRTC_ARCH_MIPS_FAMILY) - // For ARM and MIPS processors, always use C version. - // TODO(hclam): Implement a NEON version. - diff_proc = &VectorDifference_C; -#else - bool have_sse2 = WebRtc_GetCPUInfo(kSSE2) != 0; - // For x86 processors, check if SSE2 is supported. 
- if (have_sse2 && kBlockSize == 32) { - diff_proc = &VectorDifference_SSE2_W32; - } else if (have_sse2 && kBlockSize == 16) { - diff_proc = &VectorDifference_SSE2_W16; - } else { - diff_proc = &VectorDifference_C; - } -#endif - } - - return diff_proc(image1, image2); -} - -bool BlockDifference(const uint8_t* image1, - const uint8_t* image2, - int height, - int stride) { - for (int i = 0; i < height; i++) { - if (VectorDifference(image1, image2)) { - return true; - } - image1 += stride; - image2 += stride; - } - return false; -} - -bool BlockDifference(const uint8_t* image1, const uint8_t* image2, int stride) { - return BlockDifference(image1, image2, kBlockSize, stride); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_block.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_block.h deleted file mode 100644 index 2019ae03f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_block.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_DIFFER_BLOCK_H_ -#define MODULES_DESKTOP_CAPTURE_DIFFER_BLOCK_H_ - -#include - -namespace webrtc { - -// Size (in pixels) of each square block used for diffing. This must be a -// multiple of sizeof(uint64)/8. -const int kBlockSize = 32; - -// Format: BGRA 32 bit. -const int kBytesPerPixel = 4; - -// Low level function to compare 2 vectors of pixels of size kBlockSize. Returns -// whether the blocks differ. 
-bool VectorDifference(const uint8_t* image1, const uint8_t* image2); - -// Low level function to compare 2 blocks of pixels of size -// (kBlockSize, |height|). Returns whether the blocks differ. -bool BlockDifference(const uint8_t* image1, - const uint8_t* image2, - int height, - int stride); - -// Low level function to compare 2 blocks of pixels of size -// (kBlockSize, kBlockSize). Returns whether the blocks differ. -bool BlockDifference(const uint8_t* image1, const uint8_t* image2, int stride); - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DIFFER_BLOCK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_vector_sse2.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_vector_sse2.cc deleted file mode 100644 index 1c8b602d7..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_vector_sse2.cc +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/differ_vector_sse2.h" - -#if defined(_MSC_VER) -#include -#else -#include -#include -#endif - -namespace webrtc { - -extern bool VectorDifference_SSE2_W16(const uint8_t* image1, - const uint8_t* image2) { - __m128i acc = _mm_setzero_si128(); - __m128i v0; - __m128i v1; - __m128i sad; - const __m128i* i1 = reinterpret_cast(image1); - const __m128i* i2 = reinterpret_cast(image2); - v0 = _mm_loadu_si128(i1); - v1 = _mm_loadu_si128(i2); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 1); - v1 = _mm_loadu_si128(i2 + 1); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 2); - v1 = _mm_loadu_si128(i2 + 2); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 3); - v1 = _mm_loadu_si128(i2 + 3); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - - // This essential means sad = acc >> 64. We only care about the lower 16 - // bits. 
- sad = _mm_shuffle_epi32(acc, 0xEE); - sad = _mm_adds_epu16(sad, acc); - return _mm_cvtsi128_si32(sad) != 0; -} - -extern bool VectorDifference_SSE2_W32(const uint8_t* image1, - const uint8_t* image2) { - __m128i acc = _mm_setzero_si128(); - __m128i v0; - __m128i v1; - __m128i sad; - const __m128i* i1 = reinterpret_cast(image1); - const __m128i* i2 = reinterpret_cast(image2); - v0 = _mm_loadu_si128(i1); - v1 = _mm_loadu_si128(i2); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 1); - v1 = _mm_loadu_si128(i2 + 1); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 2); - v1 = _mm_loadu_si128(i2 + 2); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 3); - v1 = _mm_loadu_si128(i2 + 3); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 4); - v1 = _mm_loadu_si128(i2 + 4); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 5); - v1 = _mm_loadu_si128(i2 + 5); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 6); - v1 = _mm_loadu_si128(i2 + 6); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - v0 = _mm_loadu_si128(i1 + 7); - v1 = _mm_loadu_si128(i2 + 7); - sad = _mm_sad_epu8(v0, v1); - acc = _mm_adds_epu16(acc, sad); - - // This essential means sad = acc >> 64. We only care about the lower 16 - // bits. 
- sad = _mm_shuffle_epi32(acc, 0xEE); - sad = _mm_adds_epu16(sad, acc); - return _mm_cvtsi128_si32(sad) != 0; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_vector_sse2.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_vector_sse2.h deleted file mode 100644 index a3c297eb9..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/differ_vector_sse2.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This header file is used only differ_block.h. It defines the SSE2 rountines -// for finding vector difference. - -#ifndef MODULES_DESKTOP_CAPTURE_DIFFER_VECTOR_SSE2_H_ -#define MODULES_DESKTOP_CAPTURE_DIFFER_VECTOR_SSE2_H_ - -#include - -namespace webrtc { - -// Find vector difference of dimension 16. -extern bool VectorDifference_SSE2_W16(const uint8_t* image1, - const uint8_t* image2); - -// Find vector difference of dimension 32. -extern bool VectorDifference_SSE2_W32(const uint8_t* image1, - const uint8_t* image2); - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_DIFFER_VECTOR_SSE2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fake_desktop_capturer.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fake_desktop_capturer.cc deleted file mode 100644 index 83e360b0d..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fake_desktop_capturer.cc +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/fake_desktop_capturer.h" - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_frame.h" - -namespace webrtc { - -FakeDesktopCapturer::FakeDesktopCapturer() = default; -FakeDesktopCapturer::~FakeDesktopCapturer() = default; - -void FakeDesktopCapturer::set_result(DesktopCapturer::Result result) { - result_ = result; -} - -int FakeDesktopCapturer::num_frames_captured() const { - return num_frames_captured_; -} - -int FakeDesktopCapturer::num_capture_attempts() const { - return num_capture_attempts_; -} - -// Uses the |generator| provided as DesktopFrameGenerator, FakeDesktopCapturer -// does -// not take the ownership of |generator|. 
-void FakeDesktopCapturer::set_frame_generator( - DesktopFrameGenerator* generator) { - generator_ = generator; -} - -void FakeDesktopCapturer::Start(DesktopCapturer::Callback* callback) { - callback_ = callback; -} - -void FakeDesktopCapturer::CaptureFrame() { - num_capture_attempts_++; - if (generator_) { - if (result_ != DesktopCapturer::Result::SUCCESS) { - callback_->OnCaptureResult(result_, nullptr); - return; - } - - std::unique_ptr frame( - generator_->GetNextFrame(shared_memory_factory_.get())); - if (frame) { - num_frames_captured_++; - callback_->OnCaptureResult(result_, std::move(frame)); - } else { - callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_TEMPORARY, - nullptr); - } - return; - } - callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_PERMANENT, nullptr); -} - -void FakeDesktopCapturer::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) { - shared_memory_factory_ = std::move(shared_memory_factory); -} - -bool FakeDesktopCapturer::GetSourceList(DesktopCapturer::SourceList* sources) { - sources->push_back({kWindowId, "A-Fake-DesktopCapturer-Window"}); - sources->push_back({kScreenId}); - return true; -} - -bool FakeDesktopCapturer::SelectSource(DesktopCapturer::SourceId id) { - return id == kWindowId || id == kScreenId || id == kFullDesktopScreenId; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fake_desktop_capturer.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fake_desktop_capturer.h deleted file mode 100644 index 2aa000a17..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fake_desktop_capturer.h +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_FAKE_DESKTOP_CAPTURER_H_ -#define MODULES_DESKTOP_CAPTURE_FAKE_DESKTOP_CAPTURER_H_ - -#include - -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame_generator.h" -#include "modules/desktop_capture/shared_memory.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// A fake implementation of DesktopCapturer or its derived interfaces to -// generate DesktopFrame for testing purpose. -// -// Consumers can provide a FrameGenerator instance to generate instances of -// DesktopFrame to return for each Capture() function call. -// If no FrameGenerator provided, FakeDesktopCapturer will always return a -// nullptr DesktopFrame. -// -// Double buffering is guaranteed by the FrameGenerator. FrameGenerator -// implements in desktop_frame_generator.h guarantee double buffering, they -// creates a new instance of DesktopFrame each time. -class RTC_EXPORT FakeDesktopCapturer : public DesktopCapturer { - public: - FakeDesktopCapturer(); - ~FakeDesktopCapturer() override; - - // Decides the result which will be returned in next Capture() callback. - void set_result(DesktopCapturer::Result result); - - // Uses the |generator| provided as DesktopFrameGenerator, FakeDesktopCapturer - // does not take the ownership of |generator|. - void set_frame_generator(DesktopFrameGenerator* generator); - - // Count of DesktopFrame(s) have been returned by this instance. This field - // would never be negative. - int num_frames_captured() const; - - // Count of CaptureFrame() calls have been made. This field would never be - // negative. 
- int num_capture_attempts() const; - - // DesktopCapturer interface - void Start(DesktopCapturer::Callback* callback) override; - void CaptureFrame() override; - void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) override; - bool GetSourceList(DesktopCapturer::SourceList* sources) override; - bool SelectSource(DesktopCapturer::SourceId id) override; - - private: - static constexpr DesktopCapturer::SourceId kWindowId = 1378277495; - static constexpr DesktopCapturer::SourceId kScreenId = 1378277496; - - DesktopCapturer::Callback* callback_ = nullptr; - std::unique_ptr shared_memory_factory_; - DesktopCapturer::Result result_ = Result::SUCCESS; - DesktopFrameGenerator* generator_ = nullptr; - int num_frames_captured_ = 0; - int num_capture_attempts_ = 0; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_FAKE_DESKTOP_CAPTURER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fallback_desktop_capturer_wrapper.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fallback_desktop_capturer_wrapper.cc deleted file mode 100644 index 206791ca7..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fallback_desktop_capturer_wrapper.cc +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/fallback_desktop_capturer_wrapper.h" - -#include - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/thread_checker.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { - -namespace { - -// Implementation to share a SharedMemoryFactory between DesktopCapturer -// instances. This class is designed for synchronized DesktopCapturer -// implementations only. -class SharedMemoryFactoryProxy : public SharedMemoryFactory { - public: - // Users should maintain the lifetime of |factory| to ensure it overlives - // current instance. - static std::unique_ptr Create( - SharedMemoryFactory* factory); - ~SharedMemoryFactoryProxy() override; - - // Forwards CreateSharedMemory() calls to |factory_|. Users should always call - // this function in one thread. Users should not call this function after the - // SharedMemoryFactory which current instance created from has been destroyed. - std::unique_ptr CreateSharedMemory(size_t size) override; - - private: - explicit SharedMemoryFactoryProxy(SharedMemoryFactory* factory); - - SharedMemoryFactory* factory_ = nullptr; - rtc::ThreadChecker thread_checker_; -}; - -} // namespace - -SharedMemoryFactoryProxy::SharedMemoryFactoryProxy( - SharedMemoryFactory* factory) { - RTC_DCHECK(factory); - factory_ = factory; -} - -// static -std::unique_ptr SharedMemoryFactoryProxy::Create( - SharedMemoryFactory* factory) { - return std::unique_ptr( - new SharedMemoryFactoryProxy(factory)); -} - -SharedMemoryFactoryProxy::~SharedMemoryFactoryProxy() = default; - -std::unique_ptr SharedMemoryFactoryProxy::CreateSharedMemory( - size_t size) { - RTC_DCHECK(thread_checker_.IsCurrent()); - return factory_->CreateSharedMemory(size); -} - -FallbackDesktopCapturerWrapper::FallbackDesktopCapturerWrapper( - std::unique_ptr main_capturer, - std::unique_ptr secondary_capturer) - : main_capturer_(std::move(main_capturer)), - secondary_capturer_(std::move(secondary_capturer)) { - 
RTC_DCHECK(main_capturer_); - RTC_DCHECK(secondary_capturer_); -} - -FallbackDesktopCapturerWrapper::~FallbackDesktopCapturerWrapper() = default; - -void FallbackDesktopCapturerWrapper::Start( - DesktopCapturer::Callback* callback) { - callback_ = callback; - // FallbackDesktopCapturerWrapper catchs the callback of the main capturer, - // and checks its return value to decide whether the secondary capturer should - // be involved. - main_capturer_->Start(this); - // For the secondary capturer, we do not have a backup plan anymore, so - // FallbackDesktopCapturerWrapper won't check its return value any more. It - // will directly return to the input |callback|. - secondary_capturer_->Start(callback); -} - -void FallbackDesktopCapturerWrapper::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) { - shared_memory_factory_ = std::move(shared_memory_factory); - if (shared_memory_factory_) { - main_capturer_->SetSharedMemoryFactory( - SharedMemoryFactoryProxy::Create(shared_memory_factory_.get())); - secondary_capturer_->SetSharedMemoryFactory( - SharedMemoryFactoryProxy::Create(shared_memory_factory_.get())); - } else { - main_capturer_->SetSharedMemoryFactory( - std::unique_ptr()); - secondary_capturer_->SetSharedMemoryFactory( - std::unique_ptr()); - } -} - -void FallbackDesktopCapturerWrapper::CaptureFrame() { - RTC_DCHECK(callback_); - if (main_capturer_permanent_error_) { - secondary_capturer_->CaptureFrame(); - } else { - main_capturer_->CaptureFrame(); - } -} - -void FallbackDesktopCapturerWrapper::SetExcludedWindow(WindowId window) { - main_capturer_->SetExcludedWindow(window); - secondary_capturer_->SetExcludedWindow(window); -} - -bool FallbackDesktopCapturerWrapper::GetSourceList(SourceList* sources) { - if (main_capturer_permanent_error_) { - return secondary_capturer_->GetSourceList(sources); - } - return main_capturer_->GetSourceList(sources); -} - -bool FallbackDesktopCapturerWrapper::SelectSource(SourceId id) { - if 
(main_capturer_permanent_error_) { - return secondary_capturer_->SelectSource(id); - } - const bool main_capturer_result = main_capturer_->SelectSource(id); - RTC_HISTOGRAM_BOOLEAN( - "WebRTC.DesktopCapture.PrimaryCapturerSelectSourceError", - main_capturer_result); - if (!main_capturer_result) { - main_capturer_permanent_error_ = true; - } - - return secondary_capturer_->SelectSource(id); -} - -bool FallbackDesktopCapturerWrapper::FocusOnSelectedSource() { - if (main_capturer_permanent_error_) { - return secondary_capturer_->FocusOnSelectedSource(); - } - return main_capturer_->FocusOnSelectedSource() || - secondary_capturer_->FocusOnSelectedSource(); -} - -bool FallbackDesktopCapturerWrapper::IsOccluded(const DesktopVector& pos) { - // Returns true if either capturer returns true. - if (main_capturer_permanent_error_) { - return secondary_capturer_->IsOccluded(pos); - } - return main_capturer_->IsOccluded(pos) || - secondary_capturer_->IsOccluded(pos); -} - -void FallbackDesktopCapturerWrapper::OnCaptureResult( - Result result, - std::unique_ptr frame) { - RTC_DCHECK(callback_); - RTC_HISTOGRAM_BOOLEAN("WebRTC.DesktopCapture.PrimaryCapturerError", - result != Result::SUCCESS); - RTC_HISTOGRAM_BOOLEAN("WebRTC.DesktopCapture.PrimaryCapturerPermanentError", - result == Result::ERROR_PERMANENT); - if (result == Result::SUCCESS) { - callback_->OnCaptureResult(result, std::move(frame)); - return; - } - - if (result == Result::ERROR_PERMANENT) { - main_capturer_permanent_error_ = true; - } - secondary_capturer_->CaptureFrame(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fallback_desktop_capturer_wrapper.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fallback_desktop_capturer_wrapper.h deleted file mode 100644 index 2855eae7e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/fallback_desktop_capturer_wrapper.h +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project 
authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_FALLBACK_DESKTOP_CAPTURER_WRAPPER_H_ -#define MODULES_DESKTOP_CAPTURE_FALLBACK_DESKTOP_CAPTURER_WRAPPER_H_ - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/shared_memory.h" - -namespace webrtc { - -// A DesktopCapturer wrapper owns two DesktopCapturer implementations. If the -// main DesktopCapturer fails, it uses the secondary one instead. Two capturers -// are expected to return same SourceList, and the meaning of each SourceId is -// identical, otherwise FallbackDesktopCapturerWrapper may return frames from -// different sources. Using asynchronized DesktopCapturer implementations with -// SharedMemoryFactory is not supported, and may result crash or assertion -// failure. -class FallbackDesktopCapturerWrapper final : public DesktopCapturer, - public DesktopCapturer::Callback { - public: - FallbackDesktopCapturerWrapper( - std::unique_ptr main_capturer, - std::unique_ptr secondary_capturer); - ~FallbackDesktopCapturerWrapper() override; - - // DesktopCapturer interface. 
- void Start(DesktopCapturer::Callback* callback) override; - void SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) override; - void CaptureFrame() override; - void SetExcludedWindow(WindowId window) override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - private: - // DesktopCapturer::Callback interface. - void OnCaptureResult(Result result, - std::unique_ptr frame) override; - - const std::unique_ptr main_capturer_; - const std::unique_ptr secondary_capturer_; - std::unique_ptr shared_memory_factory_; - bool main_capturer_permanent_error_ = false; - DesktopCapturer::Callback* callback_ = nullptr; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_FALLBACK_DESKTOP_CAPTURER_WRAPPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_application_handler.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_application_handler.cc deleted file mode 100644 index e0975570b..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_application_handler.cc +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/full_screen_application_handler.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -FullScreenApplicationHandler::FullScreenApplicationHandler( - DesktopCapturer::SourceId sourceId) - : source_id_(sourceId) {} - -DesktopCapturer::SourceId FullScreenApplicationHandler::FindFullScreenWindow( - const DesktopCapturer::SourceList&, - int64_t) const { - return 0; -} - -DesktopCapturer::SourceId FullScreenApplicationHandler::GetSourceId() const { - return source_id_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_application_handler.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_application_handler.h deleted file mode 100644 index 849cb2c76..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_application_handler.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_FULL_SCREEN_APPLICATION_HANDLER_H_ -#define MODULES_DESKTOP_CAPTURE_FULL_SCREEN_APPLICATION_HANDLER_H_ - -#include -#include "modules/desktop_capture/desktop_capturer.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -// Base class for application specific handler to check criteria for switch to -// full-screen mode and find if possible the full-screen window to share. -// Supposed to be created and owned by platform specific -// FullScreenWindowDetector. 
-class FullScreenApplicationHandler { - public: - virtual ~FullScreenApplicationHandler() {} - - explicit FullScreenApplicationHandler(DesktopCapturer::SourceId sourceId); - - // Returns the full-screen window in place of the original window if all the - // criteria are met, or 0 if no such window found. - virtual DesktopCapturer::SourceId FindFullScreenWindow( - const DesktopCapturer::SourceList& window_list, - int64_t timestamp) const; - - // Returns source id of original window associated with - // FullScreenApplicationHandler - DesktopCapturer::SourceId GetSourceId() const; - - private: - const DesktopCapturer::SourceId source_id_; - - RTC_DISALLOW_COPY_AND_ASSIGN(FullScreenApplicationHandler); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_FULL_SCREEN_APPLICATION_HANDLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_window_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_window_detector.cc deleted file mode 100644 index d0bc9c7ca..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_window_detector.cc +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/full_screen_window_detector.h" -#include "modules/desktop_capture/full_screen_application_handler.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { - -FullScreenWindowDetector::FullScreenWindowDetector( - ApplicationHandlerFactory application_handler_factory) - : application_handler_factory_(application_handler_factory), - last_update_time_ms_(0), - previous_source_id_(0), - no_handler_source_id_(0) {} - -DesktopCapturer::SourceId FullScreenWindowDetector::FindFullScreenWindow( - DesktopCapturer::SourceId original_source_id) { - if (app_handler_ == nullptr || - app_handler_->GetSourceId() != original_source_id) { - return 0; - } - return app_handler_->FindFullScreenWindow(window_list_, last_update_time_ms_); -} - -void FullScreenWindowDetector::UpdateWindowListIfNeeded( - DesktopCapturer::SourceId original_source_id, - rtc::FunctionView get_sources) { - const bool skip_update = previous_source_id_ != original_source_id; - previous_source_id_ = original_source_id; - - // Here is an attempt to avoid redundant creating application handler in case - // when an instance of WindowCapturer is used to generate a thumbnail to show - // in picker by calling SelectSource and CaptureFrame for every available - // source. 
- if (skip_update) { - return; - } - - CreateApplicationHandlerIfNeeded(original_source_id); - if (app_handler_ == nullptr) { - // There is no FullScreenApplicationHandler specific for - // current application - return; - } - - constexpr int64_t kUpdateIntervalMs = 500; - - if ((rtc::TimeMillis() - last_update_time_ms_) <= kUpdateIntervalMs) { - return; - } - - DesktopCapturer::SourceList window_list; - if (get_sources(&window_list)) { - last_update_time_ms_ = rtc::TimeMillis(); - window_list_.swap(window_list); - } -} - -void FullScreenWindowDetector::CreateApplicationHandlerIfNeeded( - DesktopCapturer::SourceId source_id) { - if (no_handler_source_id_ == source_id) { - return; - } - - if (app_handler_ == nullptr || app_handler_->GetSourceId() != source_id) { - app_handler_ = application_handler_factory_ - ? application_handler_factory_(source_id) - : nullptr; - } - - if (app_handler_ == nullptr) { - no_handler_source_id_ = source_id; - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_window_detector.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_window_detector.h deleted file mode 100644 index 46fb607b7..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/full_screen_window_detector.h +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_FULL_SCREEN_WINDOW_DETECTOR_H_ -#define MODULES_DESKTOP_CAPTURE_FULL_SCREEN_WINDOW_DETECTOR_H_ - -#include -#include "api/function_view.h" -#include "api/ref_counted_base.h" -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/full_screen_application_handler.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -// This is a way to handle switch to full-screen mode for application in some -// specific cases: -// - Chrome on MacOS creates a new window in full-screen mode to -// show a tab full-screen and minimizes the old window. -// - PowerPoint creates new windows in full-screen mode when user goes to -// presentation mode (Slide Show Window, Presentation Window). -// -// To continue capturing in these cases, we try to find the new full-screen -// window using criteria provided by application specific -// FullScreenApplicationHandler. - -class FullScreenWindowDetector : public rtc::RefCountedBase { - public: - using ApplicationHandlerFactory = - std::function( - DesktopCapturer::SourceId sourceId)>; - - FullScreenWindowDetector( - ApplicationHandlerFactory application_handler_factory); - - // Returns the full-screen window in place of the original window if all the - // criteria provided by FullScreenApplicationHandler are met, or 0 if no such - // window found. 
- DesktopCapturer::SourceId FindFullScreenWindow( - DesktopCapturer::SourceId original_source_id); - - // The caller should call this function periodically, implementation will - // update internal state no often than twice per second - void UpdateWindowListIfNeeded( - DesktopCapturer::SourceId original_source_id, - rtc::FunctionView get_sources); - - static rtc::scoped_refptr - CreateFullScreenWindowDetector(); - - protected: - std::unique_ptr app_handler_; - - private: - void CreateApplicationHandlerIfNeeded(DesktopCapturer::SourceId source_id); - - ApplicationHandlerFactory application_handler_factory_; - - int64_t last_update_time_ms_; - DesktopCapturer::SourceId previous_source_id_; - - // Save the source id when we fail to create an instance of - // CreateApplicationHandlerIfNeeded to avoid redundant attempt to do it again. - DesktopCapturer::SourceId no_handler_source_id_; - - DesktopCapturer::SourceList window_list_; - RTC_DISALLOW_COPY_AND_ASSIGN(FullScreenWindowDetector); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_FULL_SCREEN_WINDOW_DETECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.cc deleted file mode 100644 index 2640e93aa..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.cc +++ /dev/null @@ -1,890 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/base_capturer_pipewire.h" - -#include -#include -#include -#include -#include -#include - -#include -#include - -#include "absl/memory/memory.h" -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -#if defined(WEBRTC_DLOPEN_PIPEWIRE) -#include "modules/desktop_capture/linux/pipewire_stubs.h" - -using modules_desktop_capture_linux::InitializeStubs; -using modules_desktop_capture_linux::kModulePipewire; -using modules_desktop_capture_linux::StubPathMap; -#endif // defined(WEBRTC_DLOPEN_PIPEWIRE) - -namespace webrtc { - -const char kDesktopBusName[] = "org.freedesktop.portal.Desktop"; -const char kDesktopObjectPath[] = "/org/freedesktop/portal/desktop"; -const char kDesktopRequestObjectPath[] = - "/org/freedesktop/portal/desktop/request"; -const char kSessionInterfaceName[] = "org.freedesktop.portal.Session"; -const char kRequestInterfaceName[] = "org.freedesktop.portal.Request"; -const char kScreenCastInterfaceName[] = "org.freedesktop.portal.ScreenCast"; - -const int kBytesPerPixel = 4; - -#if defined(WEBRTC_DLOPEN_PIPEWIRE) -const char kPipeWireLib[] = "libpipewire-0.2.so.1"; -#endif - -// static -void BaseCapturerPipeWire::OnStateChanged(void* data, - pw_remote_state old_state, - pw_remote_state state, - const char* error_message) { - BaseCapturerPipeWire* that = static_cast(data); - RTC_DCHECK(that); - - switch (state) { - case PW_REMOTE_STATE_ERROR: - RTC_LOG(LS_ERROR) << "PipeWire remote state error: " << error_message; - break; - case PW_REMOTE_STATE_CONNECTED: - RTC_LOG(LS_INFO) << "PipeWire remote state: connected."; - that->CreateReceivingStream(); - break; - case PW_REMOTE_STATE_CONNECTING: - RTC_LOG(LS_INFO) << "PipeWire remote state: connecting."; - break; - case PW_REMOTE_STATE_UNCONNECTED: - RTC_LOG(LS_INFO) << "PipeWire remote state: unconnected."; - break; - } -} - -// static 
-void BaseCapturerPipeWire::OnStreamStateChanged(void* data, - pw_stream_state old_state, - pw_stream_state state, - const char* error_message) { - BaseCapturerPipeWire* that = static_cast(data); - RTC_DCHECK(that); - - switch (state) { - case PW_STREAM_STATE_ERROR: - RTC_LOG(LS_ERROR) << "PipeWire stream state error: " << error_message; - break; - case PW_STREAM_STATE_CONFIGURE: - pw_stream_set_active(that->pw_stream_, true); - break; - case PW_STREAM_STATE_UNCONNECTED: - case PW_STREAM_STATE_CONNECTING: - case PW_STREAM_STATE_READY: - case PW_STREAM_STATE_PAUSED: - case PW_STREAM_STATE_STREAMING: - break; - } -} - -// static -void BaseCapturerPipeWire::OnStreamFormatChanged(void* data, - const struct spa_pod* format) { - BaseCapturerPipeWire* that = static_cast(data); - RTC_DCHECK(that); - - RTC_LOG(LS_INFO) << "PipeWire stream format changed."; - - if (!format) { - pw_stream_finish_format(that->pw_stream_, /*res=*/0, /*params=*/nullptr, - /*n_params=*/0); - return; - } - - that->spa_video_format_ = new spa_video_info_raw(); - spa_format_video_raw_parse(format, that->spa_video_format_, - &that->pw_type_->format_video); - - auto width = that->spa_video_format_->size.width; - auto height = that->spa_video_format_->size.height; - auto stride = SPA_ROUND_UP_N(width * kBytesPerPixel, 4); - auto size = height * stride; - - uint8_t buffer[1024] = {}; - auto builder = spa_pod_builder{buffer, sizeof(buffer)}; - - // Setup buffers and meta header for new format. 
- const struct spa_pod* params[2]; - params[0] = reinterpret_cast(spa_pod_builder_object( - &builder, - // id to enumerate buffer requirements - that->pw_core_type_->param.idBuffers, - that->pw_core_type_->param_buffers.Buffers, - // Size: specified as integer (i) and set to specified size - ":", that->pw_core_type_->param_buffers.size, "i", size, - // Stride: specified as integer (i) and set to specified stride - ":", that->pw_core_type_->param_buffers.stride, "i", stride, - // Buffers: specifies how many buffers we want to deal with, set as - // integer (i) where preferred number is 8, then allowed number is defined - // as range (r) from min and max values and it is undecided (u) to allow - // negotiation - ":", that->pw_core_type_->param_buffers.buffers, "iru", 8, - SPA_POD_PROP_MIN_MAX(1, 32), - // Align: memory alignment of the buffer, set as integer (i) to specified - // value - ":", that->pw_core_type_->param_buffers.align, "i", 16)); - params[1] = reinterpret_cast(spa_pod_builder_object( - &builder, - // id to enumerate supported metadata - that->pw_core_type_->param.idMeta, that->pw_core_type_->param_meta.Meta, - // Type: specified as id or enum (I) - ":", that->pw_core_type_->param_meta.type, "I", - that->pw_core_type_->meta.Header, - // Size: size of the metadata, specified as integer (i) - ":", that->pw_core_type_->param_meta.size, "i", - sizeof(struct spa_meta_header))); - - pw_stream_finish_format(that->pw_stream_, /*res=*/0, params, /*n_params=*/2); -} - -// static -void BaseCapturerPipeWire::OnStreamProcess(void* data) { - BaseCapturerPipeWire* that = static_cast(data); - RTC_DCHECK(that); - - pw_buffer* buf = nullptr; - - if (!(buf = pw_stream_dequeue_buffer(that->pw_stream_))) { - return; - } - - that->HandleBuffer(buf); - - pw_stream_queue_buffer(that->pw_stream_, buf); -} - -BaseCapturerPipeWire::BaseCapturerPipeWire(CaptureSourceType source_type) - : capture_source_type_(source_type) {} - -BaseCapturerPipeWire::~BaseCapturerPipeWire() { - if 
(pw_main_loop_) { - pw_thread_loop_stop(pw_main_loop_); - } - - if (pw_type_) { - delete pw_type_; - } - - if (spa_video_format_) { - delete spa_video_format_; - } - - if (pw_stream_) { - pw_stream_destroy(pw_stream_); - } - - if (pw_remote_) { - pw_remote_destroy(pw_remote_); - } - - if (pw_core_) { - pw_core_destroy(pw_core_); - } - - if (pw_main_loop_) { - pw_thread_loop_destroy(pw_main_loop_); - } - - if (pw_loop_) { - pw_loop_destroy(pw_loop_); - } - - if (current_frame_) { - free(current_frame_); - } - - if (start_request_signal_id_) { - g_dbus_connection_signal_unsubscribe(connection_, start_request_signal_id_); - } - if (sources_request_signal_id_) { - g_dbus_connection_signal_unsubscribe(connection_, - sources_request_signal_id_); - } - if (session_request_signal_id_) { - g_dbus_connection_signal_unsubscribe(connection_, - session_request_signal_id_); - } - - if (session_handle_) { - GDBusMessage* message = g_dbus_message_new_method_call( - kDesktopBusName, session_handle_, kSessionInterfaceName, "Close"); - if (message) { - GError* error = nullptr; - g_dbus_connection_send_message(connection_, message, - G_DBUS_SEND_MESSAGE_FLAGS_NONE, - /*out_serial=*/nullptr, &error); - if (error) { - RTC_LOG(LS_ERROR) << "Failed to close the session: " << error->message; - g_error_free(error); - } - g_object_unref(message); - } - } - - g_free(start_handle_); - g_free(sources_handle_); - g_free(session_handle_); - g_free(portal_handle_); - - if (cancellable_) { - g_cancellable_cancel(cancellable_); - g_object_unref(cancellable_); - cancellable_ = nullptr; - } - - if (proxy_) { - g_object_unref(proxy_); - proxy_ = nullptr; - } -} - -void BaseCapturerPipeWire::InitPortal() { - cancellable_ = g_cancellable_new(); - g_dbus_proxy_new_for_bus( - G_BUS_TYPE_SESSION, G_DBUS_PROXY_FLAGS_NONE, /*info=*/nullptr, - kDesktopBusName, kDesktopObjectPath, kScreenCastInterfaceName, - cancellable_, - reinterpret_cast(OnProxyRequested), this); -} - -void 
BaseCapturerPipeWire::InitPipeWire() { -#if defined(WEBRTC_DLOPEN_PIPEWIRE) - StubPathMap paths; - - // Check if the PipeWire library is available. - paths[kModulePipewire].push_back(kPipeWireLib); - if (!InitializeStubs(paths)) { - RTC_LOG(LS_ERROR) << "Failed to load the PipeWire library and symbols."; - portal_init_failed_ = true; - return; - } -#endif // defined(WEBRTC_DLOPEN_PIPEWIRE) - - pw_init(/*argc=*/nullptr, /*argc=*/nullptr); - - pw_loop_ = pw_loop_new(/*properties=*/nullptr); - pw_main_loop_ = pw_thread_loop_new(pw_loop_, "pipewire-main-loop"); - - pw_core_ = pw_core_new(pw_loop_, /*properties=*/nullptr); - pw_core_type_ = pw_core_get_type(pw_core_); - pw_remote_ = pw_remote_new(pw_core_, nullptr, /*user_data_size=*/0); - - InitPipeWireTypes(); - - // Initialize event handlers, remote end and stream-related. - pw_remote_events_.version = PW_VERSION_REMOTE_EVENTS; - pw_remote_events_.state_changed = &OnStateChanged; - - pw_stream_events_.version = PW_VERSION_STREAM_EVENTS; - pw_stream_events_.state_changed = &OnStreamStateChanged; - pw_stream_events_.format_changed = &OnStreamFormatChanged; - pw_stream_events_.process = &OnStreamProcess; - - pw_remote_add_listener(pw_remote_, &spa_remote_listener_, &pw_remote_events_, - this); - pw_remote_connect_fd(pw_remote_, pw_fd_); - - if (pw_thread_loop_start(pw_main_loop_) < 0) { - RTC_LOG(LS_ERROR) << "Failed to start main PipeWire loop"; - portal_init_failed_ = true; - } - - RTC_LOG(LS_INFO) << "PipeWire remote opened."; -} - -void BaseCapturerPipeWire::InitPipeWireTypes() { - spa_type_map* map = pw_core_type_->map; - pw_type_ = new PipeWireType(); - - spa_type_media_type_map(map, &pw_type_->media_type); - spa_type_media_subtype_map(map, &pw_type_->media_subtype); - spa_type_format_video_map(map, &pw_type_->format_video); - spa_type_video_format_map(map, &pw_type_->video_format); -} - -void BaseCapturerPipeWire::CreateReceivingStream() { - spa_rectangle pwMinScreenBounds = spa_rectangle{1, 1}; - spa_rectangle 
pwScreenBounds = - spa_rectangle{static_cast(desktop_size_.width()), - static_cast(desktop_size_.height())}; - - spa_fraction pwFrameRateMin = spa_fraction{0, 1}; - spa_fraction pwFrameRateMax = spa_fraction{60, 1}; - - pw_properties* reuseProps = - pw_properties_new_string("pipewire.client.reuse=1"); - pw_stream_ = pw_stream_new(pw_remote_, "webrtc-consume-stream", reuseProps); - - uint8_t buffer[1024] = {}; - const spa_pod* params[1]; - spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)}; - params[0] = reinterpret_cast(spa_pod_builder_object( - &builder, - // id to enumerate formats - pw_core_type_->param.idEnumFormat, pw_core_type_->spa_format, "I", - pw_type_->media_type.video, "I", pw_type_->media_subtype.raw, - // Video format: specified as id or enum (I), preferred format is BGRx, - // then allowed formats are enumerated (e) and the format is undecided (u) - // to allow negotiation - ":", pw_type_->format_video.format, "Ieu", pw_type_->video_format.BGRx, - SPA_POD_PROP_ENUM(2, pw_type_->video_format.RGBx, - pw_type_->video_format.BGRx), - // Video size: specified as rectangle (R), preferred size is specified as - // first parameter, then allowed size is defined as range (r) from min and - // max values and the format is undecided (u) to allow negotiation - ":", pw_type_->format_video.size, "Rru", &pwScreenBounds, 2, - &pwMinScreenBounds, &pwScreenBounds, - // Frame rate: specified as fraction (F) and set to minimum frame rate - // value - ":", pw_type_->format_video.framerate, "F", &pwFrameRateMin, - // Max frame rate: specified as fraction (F), preferred frame rate is set - // to maximum value, then allowed frame rate is defined as range (r) from - // min and max values and it is undecided (u) to allow negotiation - ":", pw_type_->format_video.max_framerate, "Fru", &pwFrameRateMax, 2, - &pwFrameRateMin, &pwFrameRateMax)); - - pw_stream_add_listener(pw_stream_, &spa_stream_listener_, &pw_stream_events_, - this); - pw_stream_flags flags = 
static_cast( - PW_STREAM_FLAG_AUTOCONNECT | PW_STREAM_FLAG_INACTIVE | - PW_STREAM_FLAG_MAP_BUFFERS); - if (pw_stream_connect(pw_stream_, PW_DIRECTION_INPUT, /*port_path=*/nullptr, - flags, params, - /*n_params=*/1) != 0) { - RTC_LOG(LS_ERROR) << "Could not connect receiving stream."; - portal_init_failed_ = true; - return; - } -} - -void BaseCapturerPipeWire::HandleBuffer(pw_buffer* buffer) { - spa_buffer* spaBuffer = buffer->buffer; - void* src = nullptr; - - if (!(src = spaBuffer->datas[0].data)) { - return; - } - - uint32_t maxSize = spaBuffer->datas[0].maxsize; - int32_t srcStride = spaBuffer->datas[0].chunk->stride; - if (srcStride != (desktop_size_.width() * kBytesPerPixel)) { - RTC_LOG(LS_ERROR) << "Got buffer with stride different from screen stride: " - << srcStride - << " != " << (desktop_size_.width() * kBytesPerPixel); - portal_init_failed_ = true; - return; - } - - if (!current_frame_) { - current_frame_ = static_cast(malloc(maxSize)); - } - RTC_DCHECK(current_frame_ != nullptr); - - // If both sides decided to go with the RGBx format we need to convert it to - // BGRx to match color format expected by WebRTC. 
- if (spa_video_format_->format == pw_type_->video_format.RGBx) { - uint8_t* tempFrame = static_cast(malloc(maxSize)); - std::memcpy(tempFrame, src, maxSize); - ConvertRGBxToBGRx(tempFrame, maxSize); - std::memcpy(current_frame_, tempFrame, maxSize); - free(tempFrame); - } else { - std::memcpy(current_frame_, src, maxSize); - } -} - -void BaseCapturerPipeWire::ConvertRGBxToBGRx(uint8_t* frame, uint32_t size) { - // Change color format for KDE KWin which uses RGBx and not BGRx - for (uint32_t i = 0; i < size; i += 4) { - uint8_t tempR = frame[i]; - uint8_t tempB = frame[i + 2]; - frame[i] = tempB; - frame[i + 2] = tempR; - } -} - -guint BaseCapturerPipeWire::SetupRequestResponseSignal( - const gchar* object_path, - GDBusSignalCallback callback) { - return g_dbus_connection_signal_subscribe( - connection_, kDesktopBusName, kRequestInterfaceName, "Response", - object_path, /*arg0=*/nullptr, G_DBUS_SIGNAL_FLAGS_NO_MATCH_RULE, - callback, this, /*user_data_free_func=*/nullptr); -} - -// static -void BaseCapturerPipeWire::OnProxyRequested(GObject* /*object*/, - GAsyncResult* result, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - GError* error = nullptr; - GDBusProxy *proxy = g_dbus_proxy_new_finish(result, &error); - if (!proxy) { - if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED)) - return; - RTC_LOG(LS_ERROR) << "Failed to create a proxy for the screen cast portal: " - << error->message; - g_error_free(error); - that->portal_init_failed_ = true; - return; - } - that->proxy_ = proxy; - that->connection_ = g_dbus_proxy_get_connection(that->proxy_); - - RTC_LOG(LS_INFO) << "Created proxy for the screen cast portal."; - that->SessionRequest(); -} - -// static -gchar* BaseCapturerPipeWire::PrepareSignalHandle(GDBusConnection* connection, - const gchar* token) { - gchar* sender = g_strdup(g_dbus_connection_get_unique_name(connection) + 1); - for (int i = 0; sender[i]; i++) { - if (sender[i] == '.') { - 
sender[i] = '_'; - } - } - - gchar* handle = g_strconcat(kDesktopRequestObjectPath, "/", sender, "/", - token, /*end of varargs*/ nullptr); - g_free(sender); - - return handle; -} - -void BaseCapturerPipeWire::SessionRequest() { - GVariantBuilder builder; - gchar* variant_string; - - g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT); - variant_string = - g_strdup_printf("webrtc_session%d", g_random_int_range(0, G_MAXINT)); - g_variant_builder_add(&builder, "{sv}", "session_handle_token", - g_variant_new_string(variant_string)); - g_free(variant_string); - variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT)); - g_variant_builder_add(&builder, "{sv}", "handle_token", - g_variant_new_string(variant_string)); - - portal_handle_ = PrepareSignalHandle(connection_, variant_string); - session_request_signal_id_ = SetupRequestResponseSignal( - portal_handle_, OnSessionRequestResponseSignal); - g_free(variant_string); - - RTC_LOG(LS_INFO) << "Screen cast session requested."; - g_dbus_proxy_call( - proxy_, "CreateSession", g_variant_new("(a{sv})", &builder), - G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, cancellable_, - reinterpret_cast(OnSessionRequested), this); -} - -// static -void BaseCapturerPipeWire::OnSessionRequested(GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - GError* error = nullptr; - GVariant* variant = g_dbus_proxy_call_finish(proxy, result, &error); - if (!variant) { - if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED)) - return; - RTC_LOG(LS_ERROR) << "Failed to create a screen cast session: " - << error->message; - g_error_free(error); - that->portal_init_failed_ = true; - return; - } - RTC_LOG(LS_INFO) << "Initializing the screen cast session."; - - gchar* handle = nullptr; - g_variant_get_child(variant, 0, "o", &handle); - g_variant_unref(variant); - if (!handle) { - RTC_LOG(LS_ERROR) << "Failed to initialize the screen 
cast session."; - if (that->session_request_signal_id_) { - g_dbus_connection_signal_unsubscribe(that->connection_, - that->session_request_signal_id_); - that->session_request_signal_id_ = 0; - } - that->portal_init_failed_ = true; - return; - } - - g_free(handle); - - RTC_LOG(LS_INFO) << "Subscribing to the screen cast session."; -} - -// static -void BaseCapturerPipeWire::OnSessionRequestResponseSignal( - GDBusConnection* connection, - const gchar* sender_name, - const gchar* object_path, - const gchar* interface_name, - const gchar* signal_name, - GVariant* parameters, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - RTC_LOG(LS_INFO) - << "Received response for the screen cast session subscription."; - - guint32 portal_response; - GVariant* response_data; - g_variant_get(parameters, "(u@a{sv})", &portal_response, &response_data); - g_variant_lookup(response_data, "session_handle", "s", - &that->session_handle_); - g_variant_unref(response_data); - - if (!that->session_handle_ || portal_response) { - RTC_LOG(LS_ERROR) - << "Failed to request the screen cast session subscription."; - that->portal_init_failed_ = true; - return; - } - - that->SourcesRequest(); -} - -void BaseCapturerPipeWire::SourcesRequest() { - GVariantBuilder builder; - gchar* variant_string; - - g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT); - // We want to record monitor content. - g_variant_builder_add(&builder, "{sv}", "types", - g_variant_new_uint32(capture_source_type_)); - // We don't want to allow selection of multiple sources. 
- g_variant_builder_add(&builder, "{sv}", "multiple", - g_variant_new_boolean(false)); - variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT)); - g_variant_builder_add(&builder, "{sv}", "handle_token", - g_variant_new_string(variant_string)); - - sources_handle_ = PrepareSignalHandle(connection_, variant_string); - sources_request_signal_id_ = SetupRequestResponseSignal( - sources_handle_, OnSourcesRequestResponseSignal); - g_free(variant_string); - - RTC_LOG(LS_INFO) << "Requesting sources from the screen cast session."; - g_dbus_proxy_call( - proxy_, "SelectSources", - g_variant_new("(oa{sv})", session_handle_, &builder), - G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, cancellable_, - reinterpret_cast(OnSourcesRequested), this); -} - -// static -void BaseCapturerPipeWire::OnSourcesRequested(GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - GError* error = nullptr; - GVariant* variant = g_dbus_proxy_call_finish(proxy, result, &error); - if (!variant) { - if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED)) - return; - RTC_LOG(LS_ERROR) << "Failed to request the sources: " << error->message; - g_error_free(error); - that->portal_init_failed_ = true; - return; - } - - RTC_LOG(LS_INFO) << "Sources requested from the screen cast session."; - - gchar* handle = nullptr; - g_variant_get_child(variant, 0, "o", &handle); - g_variant_unref(variant); - if (!handle) { - RTC_LOG(LS_ERROR) << "Failed to initialize the screen cast session."; - if (that->sources_request_signal_id_) { - g_dbus_connection_signal_unsubscribe(that->connection_, - that->sources_request_signal_id_); - that->sources_request_signal_id_ = 0; - } - that->portal_init_failed_ = true; - return; - } - - g_free(handle); - - RTC_LOG(LS_INFO) << "Subscribed to sources signal."; -} - -// static -void BaseCapturerPipeWire::OnSourcesRequestResponseSignal( - GDBusConnection* connection, - const 
gchar* sender_name, - const gchar* object_path, - const gchar* interface_name, - const gchar* signal_name, - GVariant* parameters, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - RTC_LOG(LS_INFO) << "Received sources signal from session."; - - guint32 portal_response; - g_variant_get(parameters, "(u@a{sv})", &portal_response, nullptr); - if (portal_response) { - RTC_LOG(LS_ERROR) - << "Failed to select sources for the screen cast session."; - that->portal_init_failed_ = true; - return; - } - - that->StartRequest(); -} - -void BaseCapturerPipeWire::StartRequest() { - GVariantBuilder builder; - gchar* variant_string; - - g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT); - variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT)); - g_variant_builder_add(&builder, "{sv}", "handle_token", - g_variant_new_string(variant_string)); - - start_handle_ = PrepareSignalHandle(connection_, variant_string); - start_request_signal_id_ = - SetupRequestResponseSignal(start_handle_, OnStartRequestResponseSignal); - g_free(variant_string); - - // "Identifier for the application window", this is Wayland, so not "x11:...". 
- const gchar parent_window[] = ""; - - RTC_LOG(LS_INFO) << "Starting the screen cast session."; - g_dbus_proxy_call( - proxy_, "Start", - g_variant_new("(osa{sv})", session_handle_, parent_window, &builder), - G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, cancellable_, - reinterpret_cast(OnStartRequested), this); -} - -// static -void BaseCapturerPipeWire::OnStartRequested(GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - GError* error = nullptr; - GVariant* variant = g_dbus_proxy_call_finish(proxy, result, &error); - if (!variant) { - if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED)) - return; - RTC_LOG(LS_ERROR) << "Failed to start the screen cast session: " - << error->message; - g_error_free(error); - that->portal_init_failed_ = true; - return; - } - - RTC_LOG(LS_INFO) << "Initializing the start of the screen cast session."; - - gchar* handle = nullptr; - g_variant_get_child(variant, 0, "o", &handle); - g_variant_unref(variant); - if (!handle) { - RTC_LOG(LS_ERROR) - << "Failed to initialize the start of the screen cast session."; - if (that->start_request_signal_id_) { - g_dbus_connection_signal_unsubscribe(that->connection_, - that->start_request_signal_id_); - that->start_request_signal_id_ = 0; - } - that->portal_init_failed_ = true; - return; - } - - g_free(handle); - - RTC_LOG(LS_INFO) << "Subscribed to the start signal."; -} - -// static -void BaseCapturerPipeWire::OnStartRequestResponseSignal( - GDBusConnection* connection, - const gchar* sender_name, - const gchar* object_path, - const gchar* interface_name, - const gchar* signal_name, - GVariant* parameters, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - RTC_LOG(LS_INFO) << "Start signal received."; - guint32 portal_response; - GVariant* response_data; - GVariantIter* iter = nullptr; - g_variant_get(parameters, "(u@a{sv})", &portal_response, 
&response_data); - if (portal_response || !response_data) { - RTC_LOG(LS_ERROR) << "Failed to start the screen cast session."; - that->portal_init_failed_ = true; - return; - } - - // Array of PipeWire streams. See - // https://github.com/flatpak/xdg-desktop-portal/blob/master/data/org.freedesktop.portal.ScreenCast.xml - // documentation for . - if (g_variant_lookup(response_data, "streams", "a(ua{sv})", &iter)) { - GVariant* variant; - - while (g_variant_iter_next(iter, "@(ua{sv})", &variant)) { - guint32 stream_id; - gint32 width; - gint32 height; - GVariant* options; - - g_variant_get(variant, "(u@a{sv})", &stream_id, &options); - RTC_DCHECK(options != nullptr); - - g_variant_lookup(options, "size", "(ii)", &width, &height); - - that->desktop_size_.set(width, height); - - g_variant_unref(options); - g_variant_unref(variant); - } - } - g_variant_iter_free(iter); - g_variant_unref(response_data); - - that->OpenPipeWireRemote(); -} - -void BaseCapturerPipeWire::OpenPipeWireRemote() { - GVariantBuilder builder; - g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT); - - RTC_LOG(LS_INFO) << "Opening the PipeWire remote."; - - g_dbus_proxy_call_with_unix_fd_list( - proxy_, "OpenPipeWireRemote", - g_variant_new("(oa{sv})", session_handle_, &builder), - G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, /*fd_list=*/nullptr, - cancellable_, - reinterpret_cast(OnOpenPipeWireRemoteRequested), - this); -} - -// static -void BaseCapturerPipeWire::OnOpenPipeWireRemoteRequested( - GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data) { - BaseCapturerPipeWire* that = static_cast(user_data); - RTC_DCHECK(that); - - GError* error = nullptr; - GUnixFDList* outlist = nullptr; - GVariant* variant = g_dbus_proxy_call_with_unix_fd_list_finish( - proxy, &outlist, result, &error); - if (!variant) { - if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED)) - return; - RTC_LOG(LS_ERROR) << "Failed to open the PipeWire remote: " - << error->message; - g_error_free(error); - 
that->portal_init_failed_ = true; - return; - } - - gint32 index; - g_variant_get(variant, "(h)", &index); - - if ((that->pw_fd_ = g_unix_fd_list_get(outlist, index, &error)) == -1) { - RTC_LOG(LS_ERROR) << "Failed to get file descriptor from the list: " - << error->message; - g_error_free(error); - g_variant_unref(variant); - that->portal_init_failed_ = true; - return; - } - - g_variant_unref(variant); - g_object_unref(outlist); - - that->InitPipeWire(); -} - -void BaseCapturerPipeWire::Start(Callback* callback) { - RTC_DCHECK(!callback_); - RTC_DCHECK(callback); - - InitPortal(); - - callback_ = callback; -} - -void BaseCapturerPipeWire::CaptureFrame() { - if (portal_init_failed_) { - callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr); - return; - } - - if (!current_frame_) { - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); - return; - } - - std::unique_ptr result(new BasicDesktopFrame(desktop_size_)); - result->CopyPixelsFrom( - current_frame_, (desktop_size_.width() * kBytesPerPixel), - DesktopRect::MakeWH(desktop_size_.width(), desktop_size_.height())); - if (!result) { - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); - return; - } - - // TODO(julien.isorce): http://crbug.com/945468. Set the icc profile on the - // frame, see ScreenCapturerX11::CaptureFrame. - - callback_->OnCaptureResult(Result::SUCCESS, std::move(result)); -} - -bool BaseCapturerPipeWire::GetSourceList(SourceList* sources) { - RTC_DCHECK(sources->size() == 0); - // List of available screens is already presented by the xdg-desktop-portal. - // But we have to add an empty source as the code expects it. - sources->push_back({0}); - return true; -} - -bool BaseCapturerPipeWire::SelectSource(SourceId id) { - // Screen selection is handled by the xdg-desktop-portal. 
- return true; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.h deleted file mode 100644 index f28d7a558..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.h +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_BASE_CAPTURER_PIPEWIRE_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_BASE_CAPTURER_PIPEWIRE_H_ - -#include -#define typeof __typeof__ -#include -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -class PipeWireType { - public: - spa_type_media_type media_type; - spa_type_media_subtype media_subtype; - spa_type_format_video format_video; - spa_type_video_format video_format; -}; - -class BaseCapturerPipeWire : public DesktopCapturer { - public: - enum CaptureSourceType { Screen = 1, Window }; - - explicit BaseCapturerPipeWire(CaptureSourceType source_type); - ~BaseCapturerPipeWire() override; - - // DesktopCapturer interface. 
- void Start(Callback* delegate) override; - void CaptureFrame() override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - - private: - // PipeWire types --> - pw_core* pw_core_ = nullptr; - pw_type* pw_core_type_ = nullptr; - pw_stream* pw_stream_ = nullptr; - pw_remote* pw_remote_ = nullptr; - pw_loop* pw_loop_ = nullptr; - pw_thread_loop* pw_main_loop_ = nullptr; - PipeWireType* pw_type_ = nullptr; - - spa_hook spa_stream_listener_ = {}; - spa_hook spa_remote_listener_ = {}; - - pw_stream_events pw_stream_events_ = {}; - pw_remote_events pw_remote_events_ = {}; - - spa_video_info_raw* spa_video_format_ = nullptr; - - gint32 pw_fd_ = -1; - - CaptureSourceType capture_source_type_ = - BaseCapturerPipeWire::CaptureSourceType::Screen; - - // <-- end of PipeWire types - - GDBusConnection* connection_ = nullptr; - GDBusProxy* proxy_ = nullptr; - GCancellable *cancellable_ = nullptr; - gchar* portal_handle_ = nullptr; - gchar* session_handle_ = nullptr; - gchar* sources_handle_ = nullptr; - gchar* start_handle_ = nullptr; - guint session_request_signal_id_ = 0; - guint sources_request_signal_id_ = 0; - guint start_request_signal_id_ = 0; - - DesktopSize desktop_size_ = {}; - DesktopCaptureOptions options_ = {}; - - uint8_t* current_frame_ = nullptr; - Callback* callback_ = nullptr; - - bool portal_init_failed_ = false; - - void InitPortal(); - void InitPipeWire(); - void InitPipeWireTypes(); - - void CreateReceivingStream(); - void HandleBuffer(pw_buffer* buffer); - - void ConvertRGBxToBGRx(uint8_t* frame, uint32_t size); - - static void OnStateChanged(void* data, - pw_remote_state old_state, - pw_remote_state state, - const char* error); - static void OnStreamStateChanged(void* data, - pw_stream_state old_state, - pw_stream_state state, - const char* error_message); - - static void OnStreamFormatChanged(void* data, const struct spa_pod* format); - static void OnStreamProcess(void* data); - static void OnNewBuffer(void* 
data, uint32_t id); - - guint SetupRequestResponseSignal(const gchar* object_path, - GDBusSignalCallback callback); - - static void OnProxyRequested(GObject* object, - GAsyncResult* result, - gpointer user_data); - - static gchar* PrepareSignalHandle(GDBusConnection* connection, - const gchar* token); - - void SessionRequest(); - static void OnSessionRequested(GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data); - static void OnSessionRequestResponseSignal(GDBusConnection* connection, - const gchar* sender_name, - const gchar* object_path, - const gchar* interface_name, - const gchar* signal_name, - GVariant* parameters, - gpointer user_data); - - void SourcesRequest(); - static void OnSourcesRequested(GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data); - static void OnSourcesRequestResponseSignal(GDBusConnection* connection, - const gchar* sender_name, - const gchar* object_path, - const gchar* interface_name, - const gchar* signal_name, - GVariant* parameters, - gpointer user_data); - - void StartRequest(); - static void OnStartRequested(GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data); - static void OnStartRequestResponseSignal(GDBusConnection* connection, - const gchar* sender_name, - const gchar* object_path, - const gchar* interface_name, - const gchar* signal_name, - GVariant* parameters, - gpointer user_data); - - void OpenPipeWireRemote(); - static void OnOpenPipeWireRemoteRequested(GDBusProxy *proxy, - GAsyncResult* result, - gpointer user_data); - - RTC_DISALLOW_COPY_AND_ASSIGN(BaseCapturerPipeWire); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_BASE_CAPTURER_PIPEWIRE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/mouse_cursor_monitor_x11.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/mouse_cursor_monitor_x11.cc deleted file mode 100644 index e3668a507..000000000 --- 
a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/mouse_cursor_monitor_x11.cc +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/linux/mouse_cursor_monitor_x11.h" - -#include -#include -#include -#include -#include - -#include -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/linux/x_error_trap.h" -#include "modules/desktop_capture/mouse_cursor.h" -#include "modules/desktop_capture/mouse_cursor_monitor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace { - -// WindowCapturer returns window IDs of X11 windows with WM_STATE attribute. -// These windows may not be immediate children of the root window, because -// window managers may re-parent them to add decorations. However, -// XQueryPointer() expects to be passed children of the root. This function -// searches up the list of the windows to find the root child that corresponds -// to |window|. -Window GetTopLevelWindow(Display* display, Window window) { - while (true) { - // If the window is in WithdrawnState then look at all of its children. 
- ::Window root, parent; - ::Window* children; - unsigned int num_children; - if (!XQueryTree(display, window, &root, &parent, &children, - &num_children)) { - RTC_LOG(LS_ERROR) << "Failed to query for child windows although window" - "does not have a valid WM_STATE."; - return None; - } - if (children) - XFree(children); - - if (parent == root) - break; - - window = parent; - } - - return window; -} - -} // namespace - -namespace webrtc { - -MouseCursorMonitorX11::MouseCursorMonitorX11( - const DesktopCaptureOptions& options, - Window window) - : x_display_(options.x_display()), - callback_(NULL), - mode_(SHAPE_AND_POSITION), - window_(window), - have_xfixes_(false), - xfixes_event_base_(-1), - xfixes_error_base_(-1) { - // Set a default initial cursor shape in case XFixes is not present. - const int kSize = 5; - std::unique_ptr default_cursor( - new BasicDesktopFrame(DesktopSize(kSize, kSize))); - const uint8_t pixels[kSize * kSize] = { - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, - 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff, - 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; - uint8_t* ptr = default_cursor->data(); - for (int y = 0; y < kSize; ++y) { - for (int x = 0; x < kSize; ++x) { - *ptr++ = pixels[kSize * y + x]; - *ptr++ = pixels[kSize * y + x]; - *ptr++ = pixels[kSize * y + x]; - *ptr++ = 0xff; - } - } - DesktopVector hotspot(2, 2); - cursor_shape_.reset(new MouseCursor(default_cursor.release(), hotspot)); -} - -MouseCursorMonitorX11::~MouseCursorMonitorX11() { - if (have_xfixes_) { - x_display_->RemoveEventHandler(xfixes_event_base_ + XFixesCursorNotify, - this); - } -} - -void MouseCursorMonitorX11::Init(Callback* callback, Mode mode) { - // Init can be called only once per instance of MouseCursorMonitor. 
- RTC_DCHECK(!callback_); - RTC_DCHECK(callback); - - callback_ = callback; - mode_ = mode; - - have_xfixes_ = - XFixesQueryExtension(display(), &xfixes_event_base_, &xfixes_error_base_); - - if (have_xfixes_) { - // Register for changes to the cursor shape. - XFixesSelectCursorInput(display(), window_, XFixesDisplayCursorNotifyMask); - x_display_->AddEventHandler(xfixes_event_base_ + XFixesCursorNotify, this); - - CaptureCursor(); - } else { - RTC_LOG(LS_INFO) << "X server does not support XFixes."; - } -} - -void MouseCursorMonitorX11::Capture() { - RTC_DCHECK(callback_); - - // Process X11 events in case XFixes has sent cursor notification. - x_display_->ProcessPendingXEvents(); - - // cursor_shape_| is set only if we were notified of a cursor shape change. - if (cursor_shape_.get()) - callback_->OnMouseCursor(cursor_shape_.release()); - - // Get cursor position if necessary. - if (mode_ == SHAPE_AND_POSITION) { - int root_x; - int root_y; - int win_x; - int win_y; - Window root_window; - Window child_window; - unsigned int mask; - - XErrorTrap error_trap(display()); - Bool result = XQueryPointer(display(), window_, &root_window, &child_window, - &root_x, &root_y, &win_x, &win_y, &mask); - CursorState state; - if (!result || error_trap.GetLastErrorAndDisable() != 0) { - state = OUTSIDE; - } else { - // In screen mode (window_ == root_window) the mouse is always inside. - // XQueryPointer() sets |child_window| to None if the cursor is outside - // |window_|. - state = - (window_ == root_window || child_window != None) ? INSIDE : OUTSIDE; - } - - // As the comments to GetTopLevelWindow() above indicate, in window capture, - // the cursor position capture happens in |window_|, while the frame catpure - // happens in |child_window|. These two windows are not alwyas same, as - // window manager may add some decorations to the |window_|. So translate - // the coordinate in |window_| to the coordinate space of |child_window|. 
- if (window_ != root_window && state == INSIDE) { - int translated_x, translated_y; - Window unused; - if (XTranslateCoordinates(display(), window_, child_window, win_x, win_y, - &translated_x, &translated_y, &unused)) { - win_x = translated_x; - win_y = translated_y; - } - } - - // X11 always starts the coordinate from (0, 0), so we do not need to - // translate here. - callback_->OnMouseCursorPosition(DesktopVector(root_x, root_y)); - } -} - -bool MouseCursorMonitorX11::HandleXEvent(const XEvent& event) { - if (have_xfixes_ && event.type == xfixes_event_base_ + XFixesCursorNotify) { - const XFixesCursorNotifyEvent* cursor_event = - reinterpret_cast(&event); - if (cursor_event->subtype == XFixesDisplayCursorNotify) { - CaptureCursor(); - } - // Return false, even if the event has been handled, because there might be - // other listeners for cursor notifications. - } - return false; -} - -void MouseCursorMonitorX11::CaptureCursor() { - RTC_DCHECK(have_xfixes_); - - XFixesCursorImage* img; - { - XErrorTrap error_trap(display()); - img = XFixesGetCursorImage(display()); - if (!img || error_trap.GetLastErrorAndDisable() != 0) - return; - } - - std::unique_ptr image( - new BasicDesktopFrame(DesktopSize(img->width, img->height))); - - // Xlib stores 32-bit data in longs, even if longs are 64-bits long. 
- unsigned long* src = img->pixels; - uint32_t* dst = reinterpret_cast(image->data()); - uint32_t* dst_end = dst + (img->width * img->height); - while (dst < dst_end) { - *dst++ = static_cast(*src++); - } - - DesktopVector hotspot(std::min(img->width, img->xhot), - std::min(img->height, img->yhot)); - - XFree(img); - - cursor_shape_.reset(new MouseCursor(image.release(), hotspot)); -} - -// static -MouseCursorMonitor* MouseCursorMonitorX11::CreateForWindow( - const DesktopCaptureOptions& options, - WindowId window) { - if (!options.x_display()) - return NULL; - window = GetTopLevelWindow(options.x_display()->display(), window); - if (window == None) - return NULL; - return new MouseCursorMonitorX11(options, window); -} - -MouseCursorMonitor* MouseCursorMonitorX11::CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen) { - if (!options.x_display()) - return NULL; - return new MouseCursorMonitorX11( - options, DefaultRootWindow(options.x_display()->display())); -} - -std::unique_ptr MouseCursorMonitorX11::Create( - const DesktopCaptureOptions& options) { - return std::unique_ptr( - CreateForScreen(options, kFullDesktopScreenId)); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/mouse_cursor_monitor_x11.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/mouse_cursor_monitor_x11.h deleted file mode 100644 index 9603341a2..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/mouse_cursor_monitor_x11.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_MOUSE_CURSOR_MONITOR_X11_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_MOUSE_CURSOR_MONITOR_X11_H_ - -#include - -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/linux/shared_x_display.h" -#include "modules/desktop_capture/mouse_cursor.h" -#include "modules/desktop_capture/mouse_cursor_monitor.h" - -namespace webrtc { - -class MouseCursorMonitorX11 : public MouseCursorMonitor, - public SharedXDisplay::XEventHandler { - public: - MouseCursorMonitorX11(const DesktopCaptureOptions& options, Window window); - ~MouseCursorMonitorX11() override; - - static MouseCursorMonitor* CreateForWindow( - const DesktopCaptureOptions& options, - WindowId window); - static MouseCursorMonitor* CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen); - static std::unique_ptr Create( - const DesktopCaptureOptions& options); - - void Init(Callback* callback, Mode mode) override; - void Capture() override; - - private: - // SharedXDisplay::XEventHandler interface. - bool HandleXEvent(const XEvent& event) override; - - Display* display() { return x_display_->display(); } - - // Captures current cursor shape and stores it in |cursor_shape_|. 
- void CaptureCursor(); - - rtc::scoped_refptr x_display_; - Callback* callback_; - Mode mode_; - Window window_; - - bool have_xfixes_; - int xfixes_event_base_; - int xfixes_error_base_; - - std::unique_ptr cursor_shape_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_MOUSE_CURSOR_MONITOR_X11_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.cc deleted file mode 100644 index fe672140c..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.cc +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/linux/screen_capturer_pipewire.h" - -#include - - -namespace webrtc { - -ScreenCapturerPipeWire::ScreenCapturerPipeWire() - : BaseCapturerPipeWire(BaseCapturerPipeWire::CaptureSourceType::Screen) {} -ScreenCapturerPipeWire::~ScreenCapturerPipeWire() {} - -// static -std::unique_ptr -ScreenCapturerPipeWire::CreateRawScreenCapturer( - const DesktopCaptureOptions& options) { - return std::make_unique(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.h deleted file mode 100644 index 66dcd680e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_PIPEWIRE_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_PIPEWIRE_H_ - -#include - -#include "modules/desktop_capture/linux/base_capturer_pipewire.h" - -namespace webrtc { - -class ScreenCapturerPipeWire : public BaseCapturerPipeWire { - public: - ScreenCapturerPipeWire(); - ~ScreenCapturerPipeWire() override; - - static std::unique_ptr CreateRawScreenCapturer( - const DesktopCaptureOptions& options); - - RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCapturerPipeWire); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_PIPEWIRE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_x11.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_x11.cc deleted file mode 100644 index 1b1707141..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_x11.cc +++ /dev/null @@ -1,480 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/screen_capturer_x11.h" - -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/linux/x_server_pixel_buffer.h" -#include "modules/desktop_capture/screen_capture_frame_queue.h" -#include "modules/desktop_capture/screen_capturer_helper.h" -#include "modules/desktop_capture/shared_desktop_frame.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/sanitizer.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/trace_event.h" - -namespace webrtc { - -ScreenCapturerX11::ScreenCapturerX11() { - helper_.SetLogGridSize(4); -} - -ScreenCapturerX11::~ScreenCapturerX11() { - options_.x_display()->RemoveEventHandler(ConfigureNotify, this); - if (use_damage_) { - options_.x_display()->RemoveEventHandler(damage_event_base_ + XDamageNotify, - this); - } - if (use_randr_) { - options_.x_display()->RemoveEventHandler( - randr_event_base_ + RRScreenChangeNotify, this); - } - DeinitXlib(); -} - -bool ScreenCapturerX11::Init(const DesktopCaptureOptions& options) { - TRACE_EVENT0("webrtc", "ScreenCapturerX11::Init"); - options_ = options; - - atom_cache_ = std::make_unique(display()); - - root_window_ = RootWindow(display(), DefaultScreen(display())); - if (root_window_ == BadValue) { - RTC_LOG(LS_ERROR) << "Unable to get the root window"; - DeinitXlib(); - return false; - } - - gc_ = XCreateGC(display(), root_window_, 0, NULL); - if (gc_ == NULL) { - RTC_LOG(LS_ERROR) << "Unable to get graphics context"; - DeinitXlib(); - return false; - } - - options_.x_display()->AddEventHandler(ConfigureNotify, this); - - // Check for XFixes extension. 
This is required for cursor shape - // notifications, and for our use of XDamage. - if (XFixesQueryExtension(display(), &xfixes_event_base_, - &xfixes_error_base_)) { - has_xfixes_ = true; - } else { - RTC_LOG(LS_INFO) << "X server does not support XFixes."; - } - - // Register for changes to the dimensions of the root window. - XSelectInput(display(), root_window_, StructureNotifyMask); - - if (!x_server_pixel_buffer_.Init(atom_cache_.get(), - DefaultRootWindow(display()))) { - RTC_LOG(LS_ERROR) << "Failed to initialize pixel buffer."; - return false; - } - - if (options_.use_update_notifications()) { - InitXDamage(); - } - - InitXrandr(); - - // Default source set here so that selected_monitor_rect_ is sized correctly. - SelectSource(kFullDesktopScreenId); - - return true; -} - -void ScreenCapturerX11::InitXDamage() { - // Our use of XDamage requires XFixes. - if (!has_xfixes_) { - return; - } - - // Check for XDamage extension. - if (!XDamageQueryExtension(display(), &damage_event_base_, - &damage_error_base_)) { - RTC_LOG(LS_INFO) << "X server does not support XDamage."; - return; - } - - // TODO(lambroslambrou): Disable DAMAGE in situations where it is known - // to fail, such as when Desktop Effects are enabled, with graphics - // drivers (nVidia, ATI) that fail to report DAMAGE notifications - // properly. - - // Request notifications every time the screen becomes damaged. - damage_handle_ = - XDamageCreate(display(), root_window_, XDamageReportNonEmpty); - if (!damage_handle_) { - RTC_LOG(LS_ERROR) << "Unable to initialize XDamage."; - return; - } - - // Create an XFixes server-side region to collate damage into. 
- damage_region_ = XFixesCreateRegion(display(), 0, 0); - if (!damage_region_) { - XDamageDestroy(display(), damage_handle_); - RTC_LOG(LS_ERROR) << "Unable to create XFixes region."; - return; - } - - options_.x_display()->AddEventHandler(damage_event_base_ + XDamageNotify, - this); - - use_damage_ = true; - RTC_LOG(LS_INFO) << "Using XDamage extension."; -} - -RTC_NO_SANITIZE("cfi-icall") -void ScreenCapturerX11::InitXrandr() { - int major_version = 0; - int minor_version = 0; - int error_base_ignored = 0; - if (XRRQueryExtension(display(), &randr_event_base_, &error_base_ignored) && - XRRQueryVersion(display(), &major_version, &minor_version)) { - if (major_version > 1 || (major_version == 1 && minor_version >= 5)) { - // Dynamically link XRRGetMonitors and XRRFreeMonitors as a workaround - // to avoid a dependency issue with Debian 8. - get_monitors_ = reinterpret_cast( - dlsym(RTLD_DEFAULT, "XRRGetMonitors")); - free_monitors_ = reinterpret_cast( - dlsym(RTLD_DEFAULT, "XRRFreeMonitors")); - if (get_monitors_ && free_monitors_) { - use_randr_ = true; - RTC_LOG(LS_INFO) << "Using XRandR extension v" << major_version << '.' 
- << minor_version << '.'; - monitors_ = - get_monitors_(display(), root_window_, true, &num_monitors_); - - // Register for screen change notifications - XRRSelectInput(display(), root_window_, RRScreenChangeNotifyMask); - options_.x_display()->AddEventHandler( - randr_event_base_ + RRScreenChangeNotify, this); - } else { - RTC_LOG(LS_ERROR) << "Unable to link XRandR monitor functions."; - } - } else { - RTC_LOG(LS_ERROR) << "XRandR entension is older than v1.5."; - } - } else { - RTC_LOG(LS_ERROR) << "X server does not support XRandR."; - } -} - -RTC_NO_SANITIZE("cfi-icall") -void ScreenCapturerX11::UpdateMonitors() { - if (monitors_) { - free_monitors_(monitors_); - monitors_ = nullptr; - } - - monitors_ = get_monitors_(display(), root_window_, true, &num_monitors_); - - if (selected_monitor_name_) { - if (selected_monitor_name_ == static_cast(kFullDesktopScreenId)) { - selected_monitor_rect_ = - DesktopRect::MakeSize(x_server_pixel_buffer_.window_size()); - return; - } - - for (int i = 0; i < num_monitors_; ++i) { - XRRMonitorInfo& m = monitors_[i]; - if (selected_monitor_name_ == m.name) { - RTC_LOG(LS_INFO) << "XRandR monitor " << m.name << " rect updated."; - selected_monitor_rect_ = - DesktopRect::MakeXYWH(m.x, m.y, m.width, m.height); - return; - } - } - - // The selected monitor is not connected anymore - RTC_LOG(LS_INFO) << "XRandR selected monitor " << selected_monitor_name_ - << " lost."; - selected_monitor_rect_ = DesktopRect::MakeWH(0, 0); - } -} - -void ScreenCapturerX11::Start(Callback* callback) { - RTC_DCHECK(!callback_); - RTC_DCHECK(callback); - - callback_ = callback; -} - -void ScreenCapturerX11::CaptureFrame() { - TRACE_EVENT0("webrtc", "ScreenCapturerX11::CaptureFrame"); - int64_t capture_start_time_nanos = rtc::TimeNanos(); - - queue_.MoveToNextFrame(); - RTC_DCHECK(!queue_.current_frame() || !queue_.current_frame()->IsShared()); - - // Process XEvents for XDamage and cursor shape tracking. 
- options_.x_display()->ProcessPendingXEvents(); - - // ProcessPendingXEvents() may call ScreenConfigurationChanged() which - // reinitializes |x_server_pixel_buffer_|. Check if the pixel buffer is still - // in a good shape. - if (!x_server_pixel_buffer_.is_initialized()) { - // We failed to initialize pixel buffer. - RTC_LOG(LS_ERROR) << "Pixel buffer is not initialized."; - callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr); - return; - } - - // Allocate the current frame buffer only if it is not already allocated. - // Note that we can't reallocate other buffers at this point, since the caller - // may still be reading from them. - if (!queue_.current_frame()) { - std::unique_ptr frame( - new BasicDesktopFrame(selected_monitor_rect_.size())); - - // We set the top-left of the frame so the mouse cursor will be composited - // properly, and our frame buffer will not be overrun while blitting. - frame->set_top_left(selected_monitor_rect_.top_left()); - queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(std::move(frame))); - } - - std::unique_ptr result = CaptureScreen(); - if (!result) { - RTC_LOG(LS_WARNING) << "Temporarily failed to capture screen."; - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); - return; - } - - last_invalid_region_ = result->updated_region(); - result->set_capture_time_ms((rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec); - callback_->OnCaptureResult(Result::SUCCESS, std::move(result)); -} - -bool ScreenCapturerX11::GetSourceList(SourceList* sources) { - RTC_DCHECK(sources->size() == 0); - if (!use_randr_) { - sources->push_back({}); - return true; - } - - // Ensure that |monitors_| is updated with changes that may have happened - // between calls to GetSourceList(). 
- options_.x_display()->ProcessPendingXEvents(); - - for (int i = 0; i < num_monitors_; ++i) { - XRRMonitorInfo& m = monitors_[i]; - char* monitor_title = XGetAtomName(display(), m.name); - - // Note name is an X11 Atom used to id the monitor. - sources->push_back({static_cast(m.name), monitor_title}); - XFree(monitor_title); - } - - return true; -} - -bool ScreenCapturerX11::SelectSource(SourceId id) { - // Prevent the reuse of any frame buffers allocated for a previously selected - // source. This is required to stop crashes, or old data from appearing in - // a captured frame, when the new source is sized differently then the source - // that was selected at the time a reused frame buffer was created. - queue_.Reset(); - - if (!use_randr_ || id == kFullDesktopScreenId) { - selected_monitor_name_ = kFullDesktopScreenId; - selected_monitor_rect_ = - DesktopRect::MakeSize(x_server_pixel_buffer_.window_size()); - return true; - } - - for (int i = 0; i < num_monitors_; ++i) { - if (id == static_cast(monitors_[i].name)) { - RTC_LOG(LS_INFO) << "XRandR selected source: " << id; - XRRMonitorInfo& m = monitors_[i]; - selected_monitor_name_ = m.name; - selected_monitor_rect_ = - DesktopRect::MakeXYWH(m.x, m.y, m.width, m.height); - return true; - } - } - return false; -} - -bool ScreenCapturerX11::HandleXEvent(const XEvent& event) { - if (use_damage_ && (event.type == damage_event_base_ + XDamageNotify)) { - const XDamageNotifyEvent* damage_event = - reinterpret_cast(&event); - if (damage_event->damage != damage_handle_) - return false; - RTC_DCHECK(damage_event->level == XDamageReportNonEmpty); - return true; - } else if (use_randr_ && - event.type == randr_event_base_ + RRScreenChangeNotify) { - XRRUpdateConfiguration(const_cast(&event)); - UpdateMonitors(); - RTC_LOG(LS_INFO) << "XRandR screen change event received."; - return true; - } else if (event.type == ConfigureNotify) { - ScreenConfigurationChanged(); - return true; - } - return false; -} - -std::unique_ptr 
ScreenCapturerX11::CaptureScreen() { - std::unique_ptr frame = queue_.current_frame()->Share(); - RTC_DCHECK(selected_monitor_rect_.size().equals(frame->size())); - - // Pass the screen size to the helper, so it can clip the invalid region if it - // expands that region to a grid. - helper_.set_size_most_recent(x_server_pixel_buffer_.window_size()); - - // In the DAMAGE case, ensure the frame is up-to-date with the previous frame - // if any. If there isn't a previous frame, that means a screen-resolution - // change occurred, and |invalid_rects| will be updated to include the whole - // screen. - if (use_damage_ && queue_.previous_frame()) - SynchronizeFrame(); - - DesktopRegion* updated_region = frame->mutable_updated_region(); - - x_server_pixel_buffer_.Synchronize(); - if (use_damage_ && queue_.previous_frame()) { - // Atomically fetch and clear the damage region. - XDamageSubtract(display(), damage_handle_, None, damage_region_); - int rects_num = 0; - XRectangle bounds; - XRectangle* rects = XFixesFetchRegionAndBounds(display(), damage_region_, - &rects_num, &bounds); - for (int i = 0; i < rects_num; ++i) { - updated_region->AddRect(DesktopRect::MakeXYWH( - rects[i].x, rects[i].y, rects[i].width, rects[i].height)); - } - XFree(rects); - helper_.InvalidateRegion(*updated_region); - - // Capture the damaged portions of the desktop. - helper_.TakeInvalidRegion(updated_region); - updated_region->IntersectWith(selected_monitor_rect_); - - for (DesktopRegion::Iterator it(*updated_region); !it.IsAtEnd(); - it.Advance()) { - if (!x_server_pixel_buffer_.CaptureRect(it.rect(), frame.get())) - return nullptr; - } - } else { - // Doing full-screen polling, or this is the first capture after a - // screen-resolution change. In either case, need a full-screen capture. 
- if (!x_server_pixel_buffer_.CaptureRect(selected_monitor_rect_, - frame.get())) { - return nullptr; - } - updated_region->SetRect(selected_monitor_rect_); - } - - return std::move(frame); -} - -void ScreenCapturerX11::ScreenConfigurationChanged() { - TRACE_EVENT0("webrtc", "ScreenCapturerX11::ScreenConfigurationChanged"); - // Make sure the frame buffers will be reallocated. - queue_.Reset(); - - helper_.ClearInvalidRegion(); - if (!x_server_pixel_buffer_.Init(atom_cache_.get(), - DefaultRootWindow(display()))) { - RTC_LOG(LS_ERROR) << "Failed to initialize pixel buffer after screen " - "configuration change."; - } - - if (!use_randr_) { - selected_monitor_rect_ = - DesktopRect::MakeSize(x_server_pixel_buffer_.window_size()); - } -} - -void ScreenCapturerX11::SynchronizeFrame() { - // Synchronize the current buffer with the previous one since we do not - // capture the entire desktop. Note that encoder may be reading from the - // previous buffer at this time so thread access complaints are false - // positives. - - // TODO(hclam): We can reduce the amount of copying here by subtracting - // |capturer_helper_|s region from |last_invalid_region_|. 
- // http://crbug.com/92354 - RTC_DCHECK(queue_.previous_frame()); - - DesktopFrame* current = queue_.current_frame(); - DesktopFrame* last = queue_.previous_frame(); - RTC_DCHECK(current != last); - for (DesktopRegion::Iterator it(last_invalid_region_); !it.IsAtEnd(); - it.Advance()) { - if (selected_monitor_rect_.ContainsRect(it.rect())) { - DesktopRect r = it.rect(); - r.Translate(-selected_monitor_rect_.top_left()); - current->CopyPixelsFrom(*last, r.top_left(), r); - } - } -} - -RTC_NO_SANITIZE("cfi-icall") -void ScreenCapturerX11::DeinitXlib() { - if (monitors_) { - free_monitors_(monitors_); - monitors_ = nullptr; - } - - if (gc_) { - XFreeGC(display(), gc_); - gc_ = nullptr; - } - - x_server_pixel_buffer_.Release(); - - if (display()) { - if (damage_handle_) { - XDamageDestroy(display(), damage_handle_); - damage_handle_ = 0; - } - - if (damage_region_) { - XFixesDestroyRegion(display(), damage_region_); - damage_region_ = 0; - } - } -} - -// static -std::unique_ptr ScreenCapturerX11::CreateRawScreenCapturer( - const DesktopCaptureOptions& options) { - if (!options.x_display()) - return nullptr; - - std::unique_ptr capturer(new ScreenCapturerX11()); - if (!capturer.get()->Init(options)) { - return nullptr; - } - - return std::move(capturer); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_x11.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_x11.h deleted file mode 100644 index b19e2e46e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/screen_capturer_x11.h +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_X11_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_X11_H_ - -#include -#include -#include -#include -#include - -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_region.h" -#include "modules/desktop_capture/linux/shared_x_display.h" -#include "modules/desktop_capture/linux/x_atom_cache.h" -#include "modules/desktop_capture/linux/x_server_pixel_buffer.h" -#include "modules/desktop_capture/screen_capture_frame_queue.h" -#include "modules/desktop_capture/screen_capturer_helper.h" -#include "modules/desktop_capture/shared_desktop_frame.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -// A class to perform video frame capturing for Linux on X11. -// -// If XDamage is used, this class sets DesktopFrame::updated_region() according -// to the areas reported by XDamage. Otherwise this class does not detect -// DesktopFrame::updated_region(), the field is always set to the entire frame -// rectangle. ScreenCapturerDifferWrapper should be used if that functionality -// is necessary. -class ScreenCapturerX11 : public DesktopCapturer, - public SharedXDisplay::XEventHandler { - public: - ScreenCapturerX11(); - ~ScreenCapturerX11() override; - - static std::unique_ptr CreateRawScreenCapturer( - const DesktopCaptureOptions& options); - - // TODO(ajwong): Do we really want this to be synchronous? - bool Init(const DesktopCaptureOptions& options); - - // DesktopCapturer interface. 
- void Start(Callback* delegate) override; - void CaptureFrame() override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - - private: - Display* display() { return options_.x_display()->display(); } - - // SharedXDisplay::XEventHandler interface. - bool HandleXEvent(const XEvent& event) override; - - void InitXDamage(); - void InitXrandr(); - void UpdateMonitors(); - - // Capture screen pixels to the current buffer in the queue. In the DAMAGE - // case, the ScreenCapturerHelper already holds the list of invalid rectangles - // from HandleXEvent(). In the non-DAMAGE case, this captures the - // whole screen, then calculates some invalid rectangles that include any - // differences between this and the previous capture. - std::unique_ptr CaptureScreen(); - - // Called when the screen configuration is changed. - void ScreenConfigurationChanged(); - - // Synchronize the current buffer with |last_buffer_|, by copying pixels from - // the area of |last_invalid_rects|. - // Note this only works on the assumption that kNumBuffers == 2, as - // |last_invalid_rects| holds the differences from the previous buffer and - // the one prior to that (which will then be the current buffer). - void SynchronizeFrame(); - - void DeinitXlib(); - - DesktopCaptureOptions options_; - - Callback* callback_ = nullptr; - - // X11 graphics context. - GC gc_ = nullptr; - Window root_window_ = BadValue; - - // XRandR 1.5 monitors. - bool use_randr_ = false; - int randr_event_base_ = 0; - XRRMonitorInfo* monitors_ = nullptr; - int num_monitors_ = 0; - DesktopRect selected_monitor_rect_; - // selected_monitor_name_ will be changed to kFullDesktopScreenId - // by a call to SelectSource() at the end of Init() because - // selected_monitor_rect_ should be updated as well. - // Setting it to kFullDesktopScreenId here might be misleading. 
- Atom selected_monitor_name_ = 0; - typedef XRRMonitorInfo* (*get_monitors_func)(Display*, Window, Bool, int*); - typedef void (*free_monitors_func)(XRRMonitorInfo*); - get_monitors_func get_monitors_ = nullptr; - free_monitors_func free_monitors_ = nullptr; - - // XFixes. - bool has_xfixes_ = false; - int xfixes_event_base_ = -1; - int xfixes_error_base_ = -1; - - // XDamage information. - bool use_damage_ = false; - Damage damage_handle_ = 0; - int damage_event_base_ = -1; - int damage_error_base_ = -1; - XserverRegion damage_region_ = 0; - - // Access to the X Server's pixel buffer. - XServerPixelBuffer x_server_pixel_buffer_; - - // A thread-safe list of invalid rectangles, and the size of the most - // recently captured screen. - ScreenCapturerHelper helper_; - - // Queue of the frames buffers. - ScreenCaptureFrameQueue queue_; - - // Invalid region from the previous capture. This is used to synchronize the - // current with the last buffer used. - DesktopRegion last_invalid_region_; - - std::unique_ptr atom_cache_; - - RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCapturerX11); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_X11_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/shared_x_display.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/shared_x_display.cc deleted file mode 100644 index f0b35f62d..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/shared_x_display.cc +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/shared_x_display.h" - -#include -#include - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -SharedXDisplay::SharedXDisplay(Display* display) : display_(display) { - RTC_DCHECK(display_); -} - -SharedXDisplay::~SharedXDisplay() { - RTC_DCHECK(event_handlers_.empty()); - XCloseDisplay(display_); -} - -// static -rtc::scoped_refptr SharedXDisplay::Create( - const std::string& display_name) { - Display* display = - XOpenDisplay(display_name.empty() ? NULL : display_name.c_str()); - if (!display) { - RTC_LOG(LS_ERROR) << "Unable to open display"; - return NULL; - } - return new SharedXDisplay(display); -} - -// static -rtc::scoped_refptr SharedXDisplay::CreateDefault() { - return Create(std::string()); -} - -void SharedXDisplay::AddEventHandler(int type, XEventHandler* handler) { - event_handlers_[type].push_back(handler); -} - -void SharedXDisplay::RemoveEventHandler(int type, XEventHandler* handler) { - EventHandlersMap::iterator handlers = event_handlers_.find(type); - if (handlers == event_handlers_.end()) - return; - - std::vector::iterator new_end = - std::remove(handlers->second.begin(), handlers->second.end(), handler); - handlers->second.erase(new_end, handlers->second.end()); - - // Check if no handlers left for this event. - if (handlers->second.empty()) - event_handlers_.erase(handlers); -} - -void SharedXDisplay::ProcessPendingXEvents() { - // Hold reference to |this| to prevent it from being destroyed while - // processing events. - rtc::scoped_refptr self(this); - - // Find the number of events that are outstanding "now." We don't just loop - // on XPending because we want to guarantee this terminates. 
- int events_to_process = XPending(display()); - XEvent e; - - for (int i = 0; i < events_to_process; i++) { - XNextEvent(display(), &e); - EventHandlersMap::iterator handlers = event_handlers_.find(e.type); - if (handlers == event_handlers_.end()) - continue; - for (std::vector::iterator it = handlers->second.begin(); - it != handlers->second.end(); ++it) { - if ((*it)->HandleXEvent(e)) - break; - } - } -} - -void SharedXDisplay::IgnoreXServerGrabs() { - int test_event_base = 0; - int test_error_base = 0; - int major = 0; - int minor = 0; - if (XTestQueryExtension(display(), &test_event_base, &test_error_base, &major, - &minor)) { - XTestGrabControl(display(), true); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/shared_x_display.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/shared_x_display.h deleted file mode 100644 index 64c498c13..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/shared_x_display.h +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_SHARED_X_DISPLAY_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_SHARED_X_DISPLAY_H_ - -#include -#include -#include - -#include "api/ref_counted_base.h" -#include "api/scoped_refptr.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/system/rtc_export.h" - -// Including Xlib.h will involve evil defines (Bool, Status, True, False), which -// easily conflict with other headers. 
-typedef struct _XDisplay Display; -typedef union _XEvent XEvent; - -namespace webrtc { - -// A ref-counted object to store XDisplay connection. -class RTC_EXPORT SharedXDisplay : public rtc::RefCountedBase { - public: - class XEventHandler { - public: - virtual ~XEventHandler() {} - - // Processes XEvent. Returns true if the event has been handled. - virtual bool HandleXEvent(const XEvent& event) = 0; - }; - - // Takes ownership of |display|. - explicit SharedXDisplay(Display* display); - - // Creates a new X11 Display for the |display_name|. NULL is returned if X11 - // connection failed. Equivalent to CreateDefault() when |display_name| is - // empty. - static rtc::scoped_refptr Create( - const std::string& display_name); - - // Creates X11 Display connection for the default display (e.g. specified in - // DISPLAY). NULL is returned if X11 connection failed. - static rtc::scoped_refptr CreateDefault(); - - Display* display() { return display_; } - - // Adds a new event |handler| for XEvent's of |type|. - void AddEventHandler(int type, XEventHandler* handler); - - // Removes event |handler| added using |AddEventHandler|. Doesn't do anything - // if |handler| is not registered. - void RemoveEventHandler(int type, XEventHandler* handler); - - // Processes pending XEvents, calling corresponding event handlers. 
- void ProcessPendingXEvents(); - - void IgnoreXServerGrabs(); - - protected: - ~SharedXDisplay() override; - - private: - typedef std::map > EventHandlersMap; - - Display* display_; - - EventHandlersMap event_handlers_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SharedXDisplay); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_SHARED_X_DISPLAY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.cc deleted file mode 100644 index b4559156d..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.cc +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/window_capturer_pipewire.h" - -#include - - -namespace webrtc { - -WindowCapturerPipeWire::WindowCapturerPipeWire() - : BaseCapturerPipeWire(BaseCapturerPipeWire::CaptureSourceType::Window) {} -WindowCapturerPipeWire::~WindowCapturerPipeWire() {} - -// static -std::unique_ptr -WindowCapturerPipeWire::CreateRawWindowCapturer( - const DesktopCaptureOptions& options) { - return std::make_unique(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.h deleted file mode 100644 index 7f184ef29..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_PIPEWIRE_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_PIPEWIRE_H_ - -#include - -#include "modules/desktop_capture/linux/base_capturer_pipewire.h" - -namespace webrtc { - -class WindowCapturerPipeWire : public BaseCapturerPipeWire { - public: - WindowCapturerPipeWire(); - ~WindowCapturerPipeWire() override; - - static std::unique_ptr CreateRawWindowCapturer( - const DesktopCaptureOptions& options); - - RTC_DISALLOW_COPY_AND_ASSIGN(WindowCapturerPipeWire); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_PIPEWIRE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_x11.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_x11.cc deleted file mode 100644 index 94f9f92c8..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_x11.cc +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/window_capturer_x11.h" - -#include -#include -#include -#include - -#include -#include -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_region.h" -#include "modules/desktop_capture/linux/shared_x_display.h" -#include "modules/desktop_capture/linux/window_finder_x11.h" -#include "modules/desktop_capture/linux/window_list_utils.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" - -namespace webrtc { - -WindowCapturerX11::WindowCapturerX11(const DesktopCaptureOptions& options) - : x_display_(options.x_display()), - atom_cache_(display()), - window_finder_(&atom_cache_) { - int event_base, error_base, major_version, minor_version; - if (XCompositeQueryExtension(display(), &event_base, &error_base) && - XCompositeQueryVersion(display(), &major_version, &minor_version) && - // XCompositeNameWindowPixmap() requires version 0.2 - (major_version > 0 || minor_version >= 2)) { - has_composite_extension_ = true; - } else { - RTC_LOG(LS_INFO) << "Xcomposite extension not available or too old."; - } - - x_display_->AddEventHandler(ConfigureNotify, this); -} - -WindowCapturerX11::~WindowCapturerX11() { - x_display_->RemoveEventHandler(ConfigureNotify, this); -} - -bool WindowCapturerX11::GetSourceList(SourceList* sources) { - return GetWindowList(&atom_cache_, [this, sources](::Window window) { - Source w; - w.id = window; - if (this->GetWindowTitle(window, &w.title)) { - sources->push_back(w); - } - return true; - }); -} - -bool WindowCapturerX11::SelectSource(SourceId id) { - if (!x_server_pixel_buffer_.Init(&atom_cache_, id)) - return false; - - // Tell the X server to send us window resizing events. 
- XSelectInput(display(), id, StructureNotifyMask); - - selected_window_ = id; - - // In addition to needing X11 server-side support for Xcomposite, it actually - // needs to be turned on for the window. If the user has modern - // hardware/drivers but isn't using a compositing window manager, that won't - // be the case. Here we automatically turn it on. - - // Redirect drawing to an offscreen buffer (ie, turn on compositing). X11 - // remembers who has requested this and will turn it off for us when we exit. - XCompositeRedirectWindow(display(), id, CompositeRedirectAutomatic); - - return true; -} - -bool WindowCapturerX11::FocusOnSelectedSource() { - if (!selected_window_) - return false; - - unsigned int num_children; - ::Window* children; - ::Window parent; - ::Window root; - // Find the root window to pass event to. - int status = XQueryTree(display(), selected_window_, &root, &parent, - &children, &num_children); - if (status == 0) { - RTC_LOG(LS_ERROR) << "Failed to query for the root window."; - return false; - } - - if (children) - XFree(children); - - XRaiseWindow(display(), selected_window_); - - // Some window managers (e.g., metacity in GNOME) consider it illegal to - // raise a window without also giving it input focus with - // _NET_ACTIVE_WINDOW, so XRaiseWindow() on its own isn't enough. - Atom atom = XInternAtom(display(), "_NET_ACTIVE_WINDOW", True); - if (atom != None) { - XEvent xev; - xev.xclient.type = ClientMessage; - xev.xclient.serial = 0; - xev.xclient.send_event = True; - xev.xclient.window = selected_window_; - xev.xclient.message_type = atom; - - // The format member is set to 8, 16, or 32 and specifies whether the - // data should be viewed as a list of bytes, shorts, or longs. 
- xev.xclient.format = 32; - - memset(xev.xclient.data.l, 0, sizeof(xev.xclient.data.l)); - - XSendEvent(display(), root, False, - SubstructureRedirectMask | SubstructureNotifyMask, &xev); - } - XFlush(display()); - return true; -} - -void WindowCapturerX11::Start(Callback* callback) { - RTC_DCHECK(!callback_); - RTC_DCHECK(callback); - - callback_ = callback; -} - -void WindowCapturerX11::CaptureFrame() { - TRACE_EVENT0("webrtc", "WindowCapturerX11::CaptureFrame"); - - if (!x_server_pixel_buffer_.IsWindowValid()) { - RTC_LOG(LS_ERROR) << "The window is no longer valid."; - callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr); - return; - } - - x_display_->ProcessPendingXEvents(); - - if (!has_composite_extension_) { - // Without the Xcomposite extension we capture when the whole window is - // visible on screen and not covered by any other window. This is not - // something we want so instead, just bail out. - RTC_LOG(LS_ERROR) << "No Xcomposite extension detected."; - callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr); - return; - } - - if (GetWindowState(&atom_cache_, selected_window_) == IconicState) { - // Window is in minimized. Return a 1x1 frame as same as OSX/Win does. 
- std::unique_ptr frame( - new BasicDesktopFrame(DesktopSize(1, 1))); - callback_->OnCaptureResult(Result::SUCCESS, std::move(frame)); - return; - } - - std::unique_ptr frame( - new BasicDesktopFrame(x_server_pixel_buffer_.window_size())); - - x_server_pixel_buffer_.Synchronize(); - if (!x_server_pixel_buffer_.CaptureRect(DesktopRect::MakeSize(frame->size()), - frame.get())) { - RTC_LOG(LS_WARNING) << "Temporarily failed to capture winodw."; - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); - return; - } - - frame->mutable_updated_region()->SetRect( - DesktopRect::MakeSize(frame->size())); - frame->set_top_left(x_server_pixel_buffer_.window_rect().top_left()); - - callback_->OnCaptureResult(Result::SUCCESS, std::move(frame)); -} - -bool WindowCapturerX11::IsOccluded(const DesktopVector& pos) { - return window_finder_.GetWindowUnderPoint(pos) != - static_cast(selected_window_); -} - -bool WindowCapturerX11::HandleXEvent(const XEvent& event) { - if (event.type == ConfigureNotify) { - XConfigureEvent xce = event.xconfigure; - if (xce.window == selected_window_) { - if (!DesktopRectFromXAttributes(xce).equals( - x_server_pixel_buffer_.window_rect())) { - if (!x_server_pixel_buffer_.Init(&atom_cache_, selected_window_)) { - RTC_LOG(LS_ERROR) - << "Failed to initialize pixel buffer after resizing."; - } - } - } - } - - // Always returns false, so other observers can still receive the events. 
- return false; -} - -bool WindowCapturerX11::GetWindowTitle(::Window window, std::string* title) { - int status; - bool result = false; - XTextProperty window_name; - window_name.value = nullptr; - if (window) { - status = XGetWMName(display(), window, &window_name); - if (status && window_name.value && window_name.nitems) { - int cnt; - char** list = nullptr; - status = - Xutf8TextPropertyToTextList(display(), &window_name, &list, &cnt); - if (status >= Success && cnt && *list) { - if (cnt > 1) { - RTC_LOG(LS_INFO) << "Window has " << cnt - << " text properties, only using the first one."; - } - *title = *list; - result = true; - } - if (list) - XFreeStringList(list); - } - if (window_name.value) - XFree(window_name.value); - } - return result; -} - -// static -std::unique_ptr WindowCapturerX11::CreateRawWindowCapturer( - const DesktopCaptureOptions& options) { - if (!options.x_display()) - return nullptr; - return std::unique_ptr(new WindowCapturerX11(options)); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_x11.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_x11.h deleted file mode 100644 index f689dd6f5..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_capturer_x11.h +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_X11_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_X11_H_ - -#include -#include - -#include -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/linux/shared_x_display.h" -#include "modules/desktop_capture/linux/window_finder_x11.h" -#include "modules/desktop_capture/linux/x_atom_cache.h" -#include "modules/desktop_capture/linux/x_server_pixel_buffer.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -class WindowCapturerX11 : public DesktopCapturer, - public SharedXDisplay::XEventHandler { - public: - explicit WindowCapturerX11(const DesktopCaptureOptions& options); - ~WindowCapturerX11() override; - - static std::unique_ptr CreateRawWindowCapturer( - const DesktopCaptureOptions& options); - - // DesktopCapturer interface. - void Start(Callback* callback) override; - void CaptureFrame() override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - // SharedXDisplay::XEventHandler interface. - bool HandleXEvent(const XEvent& event) override; - - private: - Display* display() { return x_display_->display(); } - - // Returns window title for the specified X |window|. 
- bool GetWindowTitle(::Window window, std::string* title); - - Callback* callback_ = nullptr; - - rtc::scoped_refptr x_display_; - - bool has_composite_extension_ = false; - - ::Window selected_window_ = 0; - XServerPixelBuffer x_server_pixel_buffer_; - XAtomCache atom_cache_; - WindowFinderX11 window_finder_; - - RTC_DISALLOW_COPY_AND_ASSIGN(WindowCapturerX11); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_X11_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_finder_x11.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_finder_x11.cc deleted file mode 100644 index 16fb3252a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_finder_x11.cc +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/window_finder_x11.h" - -#include - -#include - -#include "modules/desktop_capture/linux/window_list_utils.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -WindowFinderX11::WindowFinderX11(XAtomCache* cache) : cache_(cache) { - RTC_DCHECK(cache_); -} - -WindowFinderX11::~WindowFinderX11() = default; - -WindowId WindowFinderX11::GetWindowUnderPoint(DesktopVector point) { - WindowId id = kNullWindowId; - GetWindowList(cache_, [&id, this, point](::Window window) { - DesktopRect rect; - if (GetWindowRect(this->cache_->display(), window, &rect) && - rect.Contains(point)) { - id = window; - return false; - } - return true; - }); - return id; -} - -// static -std::unique_ptr WindowFinder::Create( - const WindowFinder::Options& options) { - if (options.cache == nullptr) { - return nullptr; - } - - return std::make_unique(options.cache); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_finder_x11.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_finder_x11.h deleted file mode 100644 index d0bba8697..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_finder_x11.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_FINDER_X11_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_FINDER_X11_H_ - -#include "modules/desktop_capture/window_finder.h" - -namespace webrtc { - -class XAtomCache; - -// The implementation of WindowFinder for X11. 
-class WindowFinderX11 final : public WindowFinder { - public: - explicit WindowFinderX11(XAtomCache* cache); - ~WindowFinderX11() override; - - // WindowFinder implementation. - WindowId GetWindowUnderPoint(DesktopVector point) override; - - private: - XAtomCache* const cache_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_FINDER_X11_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_list_utils.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_list_utils.cc deleted file mode 100644 index 06660dde1..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_list_utils.cc +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/linux/window_list_utils.h" - -#include -#include -#include - -#include - -#include "modules/desktop_capture/linux/x_error_trap.h" -#include "modules/desktop_capture/linux/x_window_property.h" -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -namespace { - -class DeferXFree { - public: - explicit DeferXFree(void* data) : data_(data) {} - ~DeferXFree(); - - private: - void* const data_; -}; - -DeferXFree::~DeferXFree() { - if (data_) - XFree(data_); -} - -// Iterates through |window| hierarchy to find first visible window, i.e. one -// that has WM_STATE property set to NormalState. -// See http://tronche.com/gui/x/icccm/sec-4.html#s-4.1.3.1 . 
-::Window GetApplicationWindow(XAtomCache* cache, ::Window window) { - int32_t state = GetWindowState(cache, window); - if (state == NormalState) { - // Window has WM_STATE==NormalState. Return it. - return window; - } else if (state == IconicState) { - // Window is in minimized. Skip it. - return 0; - } - - RTC_DCHECK_EQ(state, WithdrawnState); - // If the window is in WithdrawnState then look at all of its children. - ::Window root, parent; - ::Window* children; - unsigned int num_children; - if (!XQueryTree(cache->display(), window, &root, &parent, &children, - &num_children)) { - RTC_LOG(LS_ERROR) << "Failed to query for child windows although window" - "does not have a valid WM_STATE."; - return 0; - } - ::Window app_window = 0; - for (unsigned int i = 0; i < num_children; ++i) { - app_window = GetApplicationWindow(cache, children[i]); - if (app_window) - break; - } - - if (children) - XFree(children); - return app_window; -} - -// Returns true if the |window| is a desktop element. -bool IsDesktopElement(XAtomCache* cache, ::Window window) { - RTC_DCHECK(cache); - if (window == 0) - return false; - - // First look for _NET_WM_WINDOW_TYPE. The standard - // (http://standards.freedesktop.org/wm-spec/latest/ar01s05.html#id2760306) - // says this hint *should* be present on all windows, and we use the existence - // of _NET_WM_WINDOW_TYPE_NORMAL in the property to indicate a window is not - // a desktop element (that is, only "normal" windows should be shareable). - XWindowProperty window_type(cache->display(), window, - cache->WindowType()); - if (window_type.is_valid() && window_type.size() > 0) { - uint32_t* end = window_type.data() + window_type.size(); - bool is_normal = - (end != std::find(window_type.data(), end, cache->WindowTypeNormal())); - return !is_normal; - } - - // Fall back on using the hint. 
- XClassHint class_hint; - Status status = XGetClassHint(cache->display(), window, &class_hint); - if (status == 0) { - // No hints, assume this is a normal application window. - return false; - } - - DeferXFree free_res_name(class_hint.res_name); - DeferXFree free_res_class(class_hint.res_class); - return strcmp("gnome-panel", class_hint.res_name) == 0 || - strcmp("desktop_window", class_hint.res_name) == 0; -} - -} // namespace - -int32_t GetWindowState(XAtomCache* cache, ::Window window) { - // Get WM_STATE property of the window. - XWindowProperty window_state(cache->display(), window, - cache->WmState()); - - // WM_STATE is considered to be set to WithdrawnState when it missing. - return window_state.is_valid() ? *window_state.data() : WithdrawnState; -} - -bool GetWindowList(XAtomCache* cache, - rtc::FunctionView on_window) { - RTC_DCHECK(cache); - RTC_DCHECK(on_window); - ::Display* const display = cache->display(); - - int failed_screens = 0; - const int num_screens = XScreenCount(display); - for (int screen = 0; screen < num_screens; screen++) { - ::Window root_window = XRootWindow(display, screen); - ::Window parent; - ::Window* children; - unsigned int num_children; - { - XErrorTrap error_trap(display); - if (XQueryTree(display, root_window, &root_window, &parent, &children, - &num_children) == 0 || - error_trap.GetLastErrorAndDisable() != 0) { - failed_screens++; - RTC_LOG(LS_ERROR) << "Failed to query for child windows for screen " - << screen; - continue; - } - } - - DeferXFree free_children(children); - - for (unsigned int i = 0; i < num_children; i++) { - // Iterates in reverse order to return windows from front to back. 
- ::Window app_window = - GetApplicationWindow(cache, children[num_children - 1 - i]); - if (app_window && !IsDesktopElement(cache, app_window)) { - if (!on_window(app_window)) { - return true; - } - } - } - } - - return failed_screens < num_screens; -} - -bool GetWindowRect(::Display* display, - ::Window window, - DesktopRect* rect, - XWindowAttributes* attributes /* = nullptr */) { - XWindowAttributes local_attributes; - int offset_x; - int offset_y; - if (attributes == nullptr) { - attributes = &local_attributes; - } - - { - XErrorTrap error_trap(display); - if (!XGetWindowAttributes(display, window, attributes) || - error_trap.GetLastErrorAndDisable() != 0) { - return false; - } - } - *rect = DesktopRectFromXAttributes(*attributes); - - { - XErrorTrap error_trap(display); - ::Window child; - if (!XTranslateCoordinates(display, window, attributes->root, -rect->left(), - -rect->top(), &offset_x, &offset_y, &child) || - error_trap.GetLastErrorAndDisable() != 0) { - return false; - } - } - rect->Translate(offset_x, offset_y); - return true; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_list_utils.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_list_utils.h deleted file mode 100644 index 243680d34..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/window_list_utils.h +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_LIST_UTILS_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_LIST_UTILS_H_ - -#include -#include -#include - -#include "api/function_view.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/linux/x_atom_cache.h" - -namespace webrtc { - -// Synchronously iterates all on-screen windows in |cache|.display() in -// decreasing z-order and sends them one-by-one to |on_window| function before -// GetWindowList() returns. If |on_window| returns false, this function ignores -// other windows and returns immediately. GetWindowList() returns false if -// native APIs failed. If multiple screens are attached to the |display|, this -// function returns false only when native APIs failed on all screens. Menus, -// panels and minimized windows will be ignored. -bool GetWindowList(XAtomCache* cache, - rtc::FunctionView on_window); - -// Returns WM_STATE property of the |window|. This function returns -// WithdrawnState if the |window| is missing. -int32_t GetWindowState(XAtomCache* cache, ::Window window); - -// Returns the rectangle of the |window| in the coordinates of |display|. This -// function returns false if native APIs failed. If |attributes| is provided, it -// will be filled with the attributes of |window|. The |rect| is in system -// coordinate, i.e. the primary monitor always starts from (0, 0). -bool GetWindowRect(::Display* display, - ::Window window, - DesktopRect* rect, - XWindowAttributes* attributes = nullptr); - -// Creates a DesktopRect from |attributes|. 
-template -DesktopRect DesktopRectFromXAttributes(const T& attributes) { - return DesktopRect::MakeXYWH(attributes.x, attributes.y, attributes.width, - attributes.height); -} - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_LIST_UTILS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_atom_cache.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_atom_cache.cc deleted file mode 100644 index 4ea024938..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_atom_cache.cc +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/x_atom_cache.h" - -#include "rtc_base/checks.h" - -namespace webrtc { - -XAtomCache::XAtomCache(::Display* display) : display_(display) { - RTC_DCHECK(display_); -} - -XAtomCache::~XAtomCache() = default; - -::Display* XAtomCache::display() const { - return display_; -} - -Atom XAtomCache::WmState() { - return CreateIfNotExist(&wm_state_, "WM_STATE"); -} - -Atom XAtomCache::WindowType() { - return CreateIfNotExist(&window_type_, "_NET_WM_WINDOW_TYPE"); -} - -Atom XAtomCache::WindowTypeNormal() { - return CreateIfNotExist(&window_type_normal_, "_NET_WM_WINDOW_TYPE_NORMAL"); -} - -Atom XAtomCache::IccProfile() { - return CreateIfNotExist(&icc_profile_, "_ICC_PROFILE"); -} - -Atom XAtomCache::CreateIfNotExist(Atom* atom, const char* name) { - RTC_DCHECK(atom); - if (*atom == None) { - *atom = XInternAtom(display(), name, True); - } - return *atom; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_atom_cache.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_atom_cache.h deleted file mode 100644 index 0249c1597..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_atom_cache.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_X_ATOM_CACHE_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_X_ATOM_CACHE_H_ - -#include -#include - -namespace webrtc { - -// A cache of Atom. Each Atom object is created on demand. 
-class XAtomCache final { - public: - explicit XAtomCache(::Display* display); - ~XAtomCache(); - - ::Display* display() const; - - Atom WmState(); - Atom WindowType(); - Atom WindowTypeNormal(); - Atom IccProfile(); - - private: - // If |*atom| is None, this function uses XInternAtom() to retrieve an Atom. - Atom CreateIfNotExist(Atom* atom, const char* name); - - ::Display* const display_; - Atom wm_state_ = None; - Atom window_type_ = None; - Atom window_type_normal_ = None; - Atom icc_profile_ = None; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_X_ATOM_CACHE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_error_trap.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_error_trap.cc deleted file mode 100644 index 53c907fc4..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_error_trap.cc +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/linux/x_error_trap.h" - -#include -#include - -#if defined(TOOLKIT_GTK) -#include -#endif // !defined(TOOLKIT_GTK) - -namespace webrtc { - -namespace { - -#if !defined(TOOLKIT_GTK) - -// TODO(sergeyu): This code is not thread safe. Fix it. Bug 2202. 
-static bool g_xserver_error_trap_enabled = false; -static int g_last_xserver_error_code = 0; - -int XServerErrorHandler(Display* display, XErrorEvent* error_event) { - assert(g_xserver_error_trap_enabled); - g_last_xserver_error_code = error_event->error_code; - return 0; -} - -#endif // !defined(TOOLKIT_GTK) - -} // namespace - -XErrorTrap::XErrorTrap(Display* display) - : original_error_handler_(NULL), enabled_(true) { -#if defined(TOOLKIT_GTK) - gdk_error_trap_push(); -#else // !defined(TOOLKIT_GTK) - assert(!g_xserver_error_trap_enabled); - original_error_handler_ = XSetErrorHandler(&XServerErrorHandler); - g_xserver_error_trap_enabled = true; - g_last_xserver_error_code = 0; -#endif // !defined(TOOLKIT_GTK) -} - -int XErrorTrap::GetLastErrorAndDisable() { - enabled_ = false; -#if defined(TOOLKIT_GTK) - return gdk_error_trap_push(); -#else // !defined(TOOLKIT_GTK) - assert(g_xserver_error_trap_enabled); - XSetErrorHandler(original_error_handler_); - g_xserver_error_trap_enabled = false; - return g_last_xserver_error_code; -#endif // !defined(TOOLKIT_GTK) -} - -XErrorTrap::~XErrorTrap() { - if (enabled_) - GetLastErrorAndDisable(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_error_trap.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_error_trap.h deleted file mode 100644 index 1fb0fdcc9..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_error_trap.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_X_ERROR_TRAP_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_X_ERROR_TRAP_H_ - -#include - -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -// Helper class that registers X Window error handler. Caller can use -// GetLastErrorAndDisable() to get the last error that was caught, if any. -class XErrorTrap { - public: - explicit XErrorTrap(Display* display); - ~XErrorTrap(); - - // Returns last error and removes unregisters the error handler. - int GetLastErrorAndDisable(); - - private: - XErrorHandler original_error_handler_; - bool enabled_; - - RTC_DISALLOW_COPY_AND_ASSIGN(XErrorTrap); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_X_ERROR_TRAP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_server_pixel_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_server_pixel_buffer.cc deleted file mode 100644 index d3b568d98..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_server_pixel_buffer.cc +++ /dev/null @@ -1,379 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/x_server_pixel_buffer.h" - -#include -#include -#include -#include -#include - -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/linux/window_list_utils.h" -#include "modules/desktop_capture/linux/x_error_trap.h" -#include "modules/desktop_capture/linux/x_window_property.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -namespace { - -// Returns the number of bits |mask| has to be shifted left so its last -// (most-significant) bit set becomes the most-significant bit of the word. -// When |mask| is 0 the function returns 31. -uint32_t MaskToShift(uint32_t mask) { - int shift = 0; - if ((mask & 0xffff0000u) == 0) { - mask <<= 16; - shift += 16; - } - if ((mask & 0xff000000u) == 0) { - mask <<= 8; - shift += 8; - } - if ((mask & 0xf0000000u) == 0) { - mask <<= 4; - shift += 4; - } - if ((mask & 0xc0000000u) == 0) { - mask <<= 2; - shift += 2; - } - if ((mask & 0x80000000u) == 0) - shift += 1; - - return shift; -} - -// Returns true if |image| is in RGB format. -bool IsXImageRGBFormat(XImage* image) { - return image->bits_per_pixel == 32 && image->red_mask == 0xff0000 && - image->green_mask == 0xff00 && image->blue_mask == 0xff; -} - -// We expose two forms of blitting to handle variations in the pixel format. -// In FastBlit(), the operation is effectively a memcpy. 
-void FastBlit(XImage* x_image, - uint8_t* src_pos, - const DesktopRect& rect, - DesktopFrame* frame) { - RTC_DCHECK_LE(frame->top_left().x(), rect.left()); - RTC_DCHECK_LE(frame->top_left().y(), rect.top()); - - int src_stride = x_image->bytes_per_line; - int dst_x = rect.left() - frame->top_left().x(); - int dst_y = rect.top() - frame->top_left().y(); - - uint8_t* dst_pos = frame->data() + frame->stride() * dst_y; - dst_pos += dst_x * DesktopFrame::kBytesPerPixel; - - int height = rect.height(); - int row_bytes = rect.width() * DesktopFrame::kBytesPerPixel; - for (int y = 0; y < height; ++y) { - memcpy(dst_pos, src_pos, row_bytes); - src_pos += src_stride; - dst_pos += frame->stride(); - } -} - -void SlowBlit(XImage* x_image, - uint8_t* src_pos, - const DesktopRect& rect, - DesktopFrame* frame) { - RTC_DCHECK_LE(frame->top_left().x(), rect.left()); - RTC_DCHECK_LE(frame->top_left().y(), rect.top()); - - int src_stride = x_image->bytes_per_line; - int dst_x = rect.left() - frame->top_left().x(); - int dst_y = rect.top() - frame->top_left().y(); - int width = rect.width(), height = rect.height(); - - uint32_t red_mask = x_image->red_mask; - uint32_t green_mask = x_image->red_mask; - uint32_t blue_mask = x_image->blue_mask; - - uint32_t red_shift = MaskToShift(red_mask); - uint32_t green_shift = MaskToShift(green_mask); - uint32_t blue_shift = MaskToShift(blue_mask); - - int bits_per_pixel = x_image->bits_per_pixel; - - uint8_t* dst_pos = frame->data() + frame->stride() * dst_y; - dst_pos += dst_x * DesktopFrame::kBytesPerPixel; - // TODO(hclam): Optimize, perhaps using MMX code or by converting to - // YUV directly. - // TODO(sergeyu): This code doesn't handle XImage byte order properly and - // won't work with 24bpp images. Fix it. 
- for (int y = 0; y < height; y++) { - uint32_t* dst_pos_32 = reinterpret_cast(dst_pos); - uint32_t* src_pos_32 = reinterpret_cast(src_pos); - uint16_t* src_pos_16 = reinterpret_cast(src_pos); - for (int x = 0; x < width; x++) { - // Dereference through an appropriately-aligned pointer. - uint32_t pixel; - if (bits_per_pixel == 32) { - pixel = src_pos_32[x]; - } else if (bits_per_pixel == 16) { - pixel = src_pos_16[x]; - } else { - pixel = src_pos[x]; - } - uint32_t r = (pixel & red_mask) << red_shift; - uint32_t g = (pixel & green_mask) << green_shift; - uint32_t b = (pixel & blue_mask) << blue_shift; - // Write as 32-bit RGB. - dst_pos_32[x] = - ((r >> 8) & 0xff0000) | ((g >> 16) & 0xff00) | ((b >> 24) & 0xff); - } - dst_pos += frame->stride(); - src_pos += src_stride; - } -} - -} // namespace - -XServerPixelBuffer::XServerPixelBuffer() {} - -XServerPixelBuffer::~XServerPixelBuffer() { - Release(); -} - -void XServerPixelBuffer::Release() { - if (x_image_) { - XDestroyImage(x_image_); - x_image_ = nullptr; - } - if (x_shm_image_) { - XDestroyImage(x_shm_image_); - x_shm_image_ = nullptr; - } - if (shm_pixmap_) { - XFreePixmap(display_, shm_pixmap_); - shm_pixmap_ = 0; - } - if (shm_gc_) { - XFreeGC(display_, shm_gc_); - shm_gc_ = nullptr; - } - - ReleaseSharedMemorySegment(); - - window_ = 0; -} - -void XServerPixelBuffer::ReleaseSharedMemorySegment() { - if (!shm_segment_info_) - return; - if (shm_segment_info_->shmaddr != nullptr) - shmdt(shm_segment_info_->shmaddr); - if (shm_segment_info_->shmid != -1) - shmctl(shm_segment_info_->shmid, IPC_RMID, 0); - delete shm_segment_info_; - shm_segment_info_ = nullptr; -} - -bool XServerPixelBuffer::Init(XAtomCache* cache, Window window) { - Release(); - display_ = cache->display(); - - XWindowAttributes attributes; - if (!GetWindowRect(display_, window, &window_rect_, &attributes)) { - return false; - } - - if (cache->IccProfile() != None) { - // |window| is the root window when doing screen capture. 
- XWindowProperty icc_profile_property(cache->display(), window, - cache->IccProfile()); - if (icc_profile_property.is_valid() && icc_profile_property.size() > 0) { - icc_profile_ = std::vector( - icc_profile_property.data(), - icc_profile_property.data() + icc_profile_property.size()); - } else { - RTC_LOG(LS_WARNING) << "Failed to get icc profile"; - } - } - - window_ = window; - InitShm(attributes); - - return true; -} - -void XServerPixelBuffer::InitShm(const XWindowAttributes& attributes) { - Visual* default_visual = attributes.visual; - int default_depth = attributes.depth; - - int major, minor; - Bool have_pixmaps; - if (!XShmQueryVersion(display_, &major, &minor, &have_pixmaps)) { - // Shared memory not supported. CaptureRect will use the XImage API instead. - return; - } - - bool using_shm = false; - shm_segment_info_ = new XShmSegmentInfo; - shm_segment_info_->shmid = -1; - shm_segment_info_->shmaddr = nullptr; - shm_segment_info_->readOnly = False; - x_shm_image_ = XShmCreateImage(display_, default_visual, default_depth, - ZPixmap, 0, shm_segment_info_, - window_rect_.width(), window_rect_.height()); - if (x_shm_image_) { - shm_segment_info_->shmid = - shmget(IPC_PRIVATE, x_shm_image_->bytes_per_line * x_shm_image_->height, - IPC_CREAT | 0600); - if (shm_segment_info_->shmid != -1) { - void* shmat_result = shmat(shm_segment_info_->shmid, 0, 0); - if (shmat_result != reinterpret_cast(-1)) { - shm_segment_info_->shmaddr = reinterpret_cast(shmat_result); - x_shm_image_->data = shm_segment_info_->shmaddr; - - XErrorTrap error_trap(display_); - using_shm = XShmAttach(display_, shm_segment_info_); - XSync(display_, False); - if (error_trap.GetLastErrorAndDisable() != 0) - using_shm = false; - if (using_shm) { - RTC_LOG(LS_VERBOSE) - << "Using X shared memory segment " << shm_segment_info_->shmid; - } - } - } else { - RTC_LOG(LS_WARNING) << "Failed to get shared memory segment. 
" - "Performance may be degraded."; - } - } - - if (!using_shm) { - RTC_LOG(LS_WARNING) - << "Not using shared memory. Performance may be degraded."; - ReleaseSharedMemorySegment(); - return; - } - - if (have_pixmaps) - have_pixmaps = InitPixmaps(default_depth); - - shmctl(shm_segment_info_->shmid, IPC_RMID, 0); - shm_segment_info_->shmid = -1; - - RTC_LOG(LS_VERBOSE) << "Using X shared memory extension v" << major << "." - << minor << " with" << (have_pixmaps ? "" : "out") - << " pixmaps."; -} - -bool XServerPixelBuffer::InitPixmaps(int depth) { - if (XShmPixmapFormat(display_) != ZPixmap) - return false; - - { - XErrorTrap error_trap(display_); - shm_pixmap_ = XShmCreatePixmap( - display_, window_, shm_segment_info_->shmaddr, shm_segment_info_, - window_rect_.width(), window_rect_.height(), depth); - XSync(display_, False); - if (error_trap.GetLastErrorAndDisable() != 0) { - // |shm_pixmap_| is not not valid because the request was not processed - // by the X Server, so zero it. - shm_pixmap_ = 0; - return false; - } - } - - { - XErrorTrap error_trap(display_); - XGCValues shm_gc_values; - shm_gc_values.subwindow_mode = IncludeInferiors; - shm_gc_values.graphics_exposures = False; - shm_gc_ = XCreateGC(display_, window_, - GCSubwindowMode | GCGraphicsExposures, &shm_gc_values); - XSync(display_, False); - if (error_trap.GetLastErrorAndDisable() != 0) { - XFreePixmap(display_, shm_pixmap_); - shm_pixmap_ = 0; - shm_gc_ = 0; // See shm_pixmap_ comment above. - return false; - } - } - - return true; -} - -bool XServerPixelBuffer::IsWindowValid() const { - XWindowAttributes attributes; - { - XErrorTrap error_trap(display_); - if (!XGetWindowAttributes(display_, window_, &attributes) || - error_trap.GetLastErrorAndDisable() != 0) { - return false; - } - } - return true; -} - -void XServerPixelBuffer::Synchronize() { - if (shm_segment_info_ && !shm_pixmap_) { - // XShmGetImage can fail if the display is being reconfigured. 
- XErrorTrap error_trap(display_); - // XShmGetImage fails if the window is partially out of screen. - xshm_get_image_succeeded_ = - XShmGetImage(display_, window_, x_shm_image_, 0, 0, AllPlanes); - } -} - -bool XServerPixelBuffer::CaptureRect(const DesktopRect& rect, - DesktopFrame* frame) { - RTC_DCHECK_LE(rect.right(), window_rect_.width()); - RTC_DCHECK_LE(rect.bottom(), window_rect_.height()); - - XImage* image; - uint8_t* data; - - if (shm_segment_info_ && (shm_pixmap_ || xshm_get_image_succeeded_)) { - if (shm_pixmap_) { - XCopyArea(display_, window_, shm_pixmap_, shm_gc_, rect.left(), - rect.top(), rect.width(), rect.height(), rect.left(), - rect.top()); - XSync(display_, False); - } - - image = x_shm_image_; - data = reinterpret_cast(image->data) + - rect.top() * image->bytes_per_line + - rect.left() * image->bits_per_pixel / 8; - - } else { - if (x_image_) - XDestroyImage(x_image_); - x_image_ = XGetImage(display_, window_, rect.left(), rect.top(), - rect.width(), rect.height(), AllPlanes, ZPixmap); - if (!x_image_) - return false; - - image = x_image_; - data = reinterpret_cast(image->data); - } - - if (IsXImageRGBFormat(image)) { - FastBlit(image, data, rect, frame); - } else { - SlowBlit(image, data, rect, frame); - } - - if (!icc_profile_.empty()) - frame->set_icc_profile(icc_profile_); - - return true; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_server_pixel_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_server_pixel_buffer.h deleted file mode 100644 index 06dd1a153..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_server_pixel_buffer.h +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Don't include this file in any .h files because it pulls in some X headers. - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_X_SERVER_PIXEL_BUFFER_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_X_SERVER_PIXEL_BUFFER_H_ - -#include -#include - -#include -#include - -#include "modules/desktop_capture/desktop_geometry.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -class DesktopFrame; -class XAtomCache; - -// A class to allow the X server's pixel buffer to be accessed as efficiently -// as possible. -class XServerPixelBuffer { - public: - XServerPixelBuffer(); - ~XServerPixelBuffer(); - - void Release(); - - // Allocate (or reallocate) the pixel buffer for |window|. Returns false in - // case of an error (e.g. window doesn't exist). - bool Init(XAtomCache* cache, Window window); - - bool is_initialized() { return window_ != 0; } - - // Returns the size of the window the buffer was initialized for. - DesktopSize window_size() { return window_rect_.size(); } - - // Returns the rectangle of the window the buffer was initialized for. - const DesktopRect& window_rect() { return window_rect_; } - - // Returns true if the window can be found. - bool IsWindowValid() const; - - // If shared memory is being used without pixmaps, synchronize this pixel - // buffer with the root window contents (otherwise, this is a no-op). - // This is to avoid doing a full-screen capture for each individual - // rectangle in the capture list, when it only needs to be done once at the - // beginning. - void Synchronize(); - - // Capture the specified rectangle and stores it in the |frame|. In the case - // where the full-screen data is captured by Synchronize(), this simply - // returns the pointer without doing any more work. 
The caller must ensure - // that |rect| is not larger than window_size(). - bool CaptureRect(const DesktopRect& rect, DesktopFrame* frame); - - private: - void ReleaseSharedMemorySegment(); - - void InitShm(const XWindowAttributes& attributes); - bool InitPixmaps(int depth); - - Display* display_ = nullptr; - Window window_ = 0; - DesktopRect window_rect_; - XImage* x_image_ = nullptr; - XShmSegmentInfo* shm_segment_info_ = nullptr; - XImage* x_shm_image_ = nullptr; - Pixmap shm_pixmap_ = 0; - GC shm_gc_ = nullptr; - bool xshm_get_image_succeeded_ = false; - std::vector icc_profile_; - - RTC_DISALLOW_COPY_AND_ASSIGN(XServerPixelBuffer); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_X_SERVER_PIXEL_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_window_property.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_window_property.cc deleted file mode 100644 index ba25deec3..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_window_property.cc +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/linux/x_window_property.h" - -namespace webrtc { - -XWindowPropertyBase::XWindowPropertyBase(Display* display, - Window window, - Atom property, - int expected_size) { - const int kBitsPerByte = 8; - Atom actual_type; - int actual_format; - unsigned long bytes_after; // NOLINT: type required by XGetWindowProperty - int status = XGetWindowProperty(display, window, property, 0L, ~0L, False, - AnyPropertyType, &actual_type, &actual_format, - &size_, &bytes_after, &data_); - if (status != Success) { - data_ = nullptr; - return; - } - if ((expected_size * kBitsPerByte) != actual_format) { - size_ = 0; - return; - } - - is_valid_ = true; -} - -XWindowPropertyBase::~XWindowPropertyBase() { - if (data_) - XFree(data_); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_window_property.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_window_property.h deleted file mode 100644 index ef643b6a8..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/linux/x_window_property.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_LINUX_X_WINDOW_PROPERTY_H_ -#define MODULES_DESKTOP_CAPTURE_LINUX_X_WINDOW_PROPERTY_H_ - -#include -#include - -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -class XWindowPropertyBase { - public: - XWindowPropertyBase(Display* display, - Window window, - Atom property, - int expected_size); - virtual ~XWindowPropertyBase(); - - // True if we got properly value successfully. 
- bool is_valid() const { return is_valid_; } - - // Size and value of the property. - size_t size() const { return size_; } - - protected: - unsigned char* data_ = nullptr; - - private: - bool is_valid_ = false; - unsigned long size_ = 0; // NOLINT: type required by XGetWindowProperty - - RTC_DISALLOW_COPY_AND_ASSIGN(XWindowPropertyBase); -}; - -// Convenience wrapper for XGetWindowProperty() results. -template -class XWindowProperty : public XWindowPropertyBase { - public: - XWindowProperty(Display* display, const Window window, const Atom property) - : XWindowPropertyBase(display, window, property, sizeof(PropertyType)) {} - ~XWindowProperty() override = default; - - const PropertyType* data() const { - return reinterpret_cast(data_); - } - PropertyType* data() { return reinterpret_cast(data_); } - - RTC_DISALLOW_COPY_AND_ASSIGN(XWindowProperty); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_LINUX_X_WINDOW_PROPERTY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.cc deleted file mode 100644 index de77d99e1..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.cc +++ /dev/null @@ -1,23 +0,0 @@ -/* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/mock_desktop_capturer_callback.h" - -namespace webrtc { - -MockDesktopCapturerCallback::MockDesktopCapturerCallback() = default; -MockDesktopCapturerCallback::~MockDesktopCapturerCallback() = default; - -void MockDesktopCapturerCallback::OnCaptureResult( - DesktopCapturer::Result result, - std::unique_ptr frame) { - OnCaptureResultPtr(result, &frame); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h deleted file mode 100644 index 6530dc554..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h +++ /dev/null @@ -1,38 +0,0 @@ -/* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_MOCK_DESKTOP_CAPTURER_CALLBACK_H_ -#define MODULES_DESKTOP_CAPTURE_MOCK_DESKTOP_CAPTURER_CALLBACK_H_ - -#include - -#include "modules/desktop_capture/desktop_capturer.h" -#include "test/gmock.h" - -namespace webrtc { - -class MockDesktopCapturerCallback : public DesktopCapturer::Callback { - public: - MockDesktopCapturerCallback(); - ~MockDesktopCapturerCallback() override; - - MOCK_METHOD(void, - OnCaptureResultPtr, - (DesktopCapturer::Result result, - std::unique_ptr* frame)); - void OnCaptureResult(DesktopCapturer::Result result, - std::unique_ptr frame) final; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(MockDesktopCapturerCallback); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_MOCK_DESKTOP_CAPTURER_CALLBACK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor.cc deleted file mode 100644 index 3b61e10a8..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor.cc +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/mouse_cursor.h" - -#include - -#include "modules/desktop_capture/desktop_frame.h" - -namespace webrtc { - -MouseCursor::MouseCursor() {} - -MouseCursor::MouseCursor(DesktopFrame* image, const DesktopVector& hotspot) - : image_(image), hotspot_(hotspot) { - assert(0 <= hotspot_.x() && hotspot_.x() <= image_->size().width()); - assert(0 <= hotspot_.y() && hotspot_.y() <= image_->size().height()); -} - -MouseCursor::~MouseCursor() {} - -// static -MouseCursor* MouseCursor::CopyOf(const MouseCursor& cursor) { - return cursor.image() - ? new MouseCursor(BasicDesktopFrame::CopyOf(*cursor.image()), - cursor.hotspot()) - : new MouseCursor(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor.h deleted file mode 100644 index 2263610ed..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_ -#define MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_ - -#include - -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -class RTC_EXPORT MouseCursor { - public: - MouseCursor(); - - // Takes ownership of |image|. |hotspot| must be within |image| boundaries. 
- MouseCursor(DesktopFrame* image, const DesktopVector& hotspot); - - ~MouseCursor(); - - static MouseCursor* CopyOf(const MouseCursor& cursor); - - void set_image(DesktopFrame* image) { image_.reset(image); } - const DesktopFrame* image() const { return image_.get(); } - - void set_hotspot(const DesktopVector& hotspot) { hotspot_ = hotspot; } - const DesktopVector& hotspot() const { return hotspot_; } - - private: - std::unique_ptr image_; - DesktopVector hotspot_; - - RTC_DISALLOW_COPY_AND_ASSIGN(MouseCursor); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor.h deleted file mode 100644 index 43b75b3f7..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor.h +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_MONITOR_H_ -#define MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_MONITOR_H_ - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -class DesktopCaptureOptions; -class DesktopFrame; -class MouseCursor; - -// Captures mouse shape and position. -class MouseCursorMonitor { - public: - // Deprecated: CursorState will not be provided. - enum CursorState { - // Cursor on top of the window including window decorations. - INSIDE, - - // Cursor is outside of the window. 
- OUTSIDE, - }; - - enum Mode { - // Capture only shape of the mouse cursor, but not position. - SHAPE_ONLY, - - // Capture both, mouse cursor shape and position. - SHAPE_AND_POSITION, - }; - - // Callback interface used to pass current mouse cursor position and shape. - class Callback { - public: - // Called in response to Capture() when the cursor shape has changed. Must - // take ownership of |cursor|. - virtual void OnMouseCursor(MouseCursor* cursor) = 0; - - // Called in response to Capture(). |position| indicates cursor position - // relative to the |window| specified in the constructor. - // Deprecated: use the following overload instead. - virtual void OnMouseCursorPosition(CursorState state, - const DesktopVector& position) {} - - // Called in response to Capture(). |position| indicates cursor absolute - // position on the system in fullscreen coordinate, i.e. the top-left - // monitor always starts from (0, 0). - // The coordinates of the position is controlled by OS, but it's always - // consistent with DesktopFrame.rect().top_left(). - // TODO(zijiehe): Ensure all implementations return the absolute position. - // TODO(zijiehe): Current this overload works correctly only when capturing - // mouse cursor against fullscreen. - virtual void OnMouseCursorPosition(const DesktopVector& position) {} - - protected: - virtual ~Callback() {} - }; - - virtual ~MouseCursorMonitor() {} - - // Creates a capturer that notifies of mouse cursor events while the cursor is - // over the specified window. - // - // Deprecated: use Create() function. - static MouseCursorMonitor* CreateForWindow( - const DesktopCaptureOptions& options, - WindowId window); - - // Creates a capturer that monitors the mouse cursor shape and position over - // the specified screen. - // - // Deprecated: use Create() function. 
- static RTC_EXPORT MouseCursorMonitor* CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen); - - // Creates a capturer that monitors the mouse cursor shape and position across - // the entire desktop. The capturer ensures that the top-left monitor starts - // from (0, 0). - static std::unique_ptr Create( - const DesktopCaptureOptions& options); - - // Initializes the monitor with the |callback|, which must remain valid until - // capturer is destroyed. - virtual void Init(Callback* callback, Mode mode) = 0; - - // Captures current cursor shape and position (depending on the |mode| passed - // to Init()). Calls Callback::OnMouseCursor() if cursor shape has - // changed since the last call (or when Capture() is called for the first - // time) and then Callback::OnMouseCursorPosition() if mode is set to - // SHAPE_AND_POSITION. - virtual void Capture() = 0; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_MONITOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_linux.cc deleted file mode 100644 index e569f6ef3..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_linux.cc +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/mouse_cursor_monitor.h" - -#if defined(WEBRTC_USE_X11) -#include "modules/desktop_capture/linux/mouse_cursor_monitor_x11.h" -#endif // defined(WEBRTC_USE_X11) - -namespace webrtc { - -// static -MouseCursorMonitor* MouseCursorMonitor::CreateForWindow( - const DesktopCaptureOptions& options, - WindowId window) { -#if defined(WEBRTC_USE_X11) - return MouseCursorMonitorX11::CreateForWindow(options, window); -#else - return nullptr; -#endif // defined(WEBRTC_USE_X11) -} - -// static -MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen) { -#if defined(WEBRTC_USE_X11) - return MouseCursorMonitorX11::CreateForScreen(options, screen); -#else - return nullptr; -#endif // defined(WEBRTC_USE_X11) -} - -// static -std::unique_ptr MouseCursorMonitor::Create( - const DesktopCaptureOptions& options) { -#if defined(WEBRTC_USE_X11) - return MouseCursorMonitorX11::Create(options); -#else - return nullptr; -#endif // defined(WEBRTC_USE_X11) -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm deleted file mode 100644 index 31ad428e0..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/mouse_cursor_monitor.h" - -#include - -#include - -#include -#include -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/mac/desktop_configuration.h" -#include "modules/desktop_capture/mac/desktop_configuration_monitor.h" -#include "modules/desktop_capture/mac/window_list_utils.h" -#include "modules/desktop_capture/mouse_cursor.h" - -namespace webrtc { - -namespace { -CGImageRef CreateScaledCGImage(CGImageRef image, int width, int height) { - // Create context, keeping original image properties. - CGColorSpaceRef colorspace = CGImageGetColorSpace(image); - CGContextRef context = CGBitmapContextCreate(nullptr, - width, - height, - CGImageGetBitsPerComponent(image), - width * DesktopFrame::kBytesPerPixel, - colorspace, - CGImageGetBitmapInfo(image)); - - if (!context) return nil; - - // Draw image to context, resizing it. - CGContextDrawImage(context, CGRectMake(0, 0, width, height), image); - // Extract resulting image from context. 
- CGImageRef imgRef = CGBitmapContextCreateImage(context); - CGContextRelease(context); - - return imgRef; -} -} // namespace - -class MouseCursorMonitorMac : public MouseCursorMonitor { - public: - MouseCursorMonitorMac(const DesktopCaptureOptions& options, - CGWindowID window_id, - ScreenId screen_id); - ~MouseCursorMonitorMac() override; - - void Init(Callback* callback, Mode mode) override; - void Capture() override; - - private: - static void DisplaysReconfiguredCallback(CGDirectDisplayID display, - CGDisplayChangeSummaryFlags flags, - void *user_parameter); - void DisplaysReconfigured(CGDirectDisplayID display, - CGDisplayChangeSummaryFlags flags); - - void CaptureImage(float scale); - - rtc::scoped_refptr configuration_monitor_; - CGWindowID window_id_; - ScreenId screen_id_; - Callback* callback_; - Mode mode_; - __strong NSImage* last_cursor_; -}; - -MouseCursorMonitorMac::MouseCursorMonitorMac(const DesktopCaptureOptions& options, - CGWindowID window_id, - ScreenId screen_id) - : configuration_monitor_(options.configuration_monitor()), - window_id_(window_id), - screen_id_(screen_id), - callback_(NULL), - mode_(SHAPE_AND_POSITION) { - assert(window_id == kCGNullWindowID || screen_id == kInvalidScreenId); -} - -MouseCursorMonitorMac::~MouseCursorMonitorMac() {} - -void MouseCursorMonitorMac::Init(Callback* callback, Mode mode) { - assert(!callback_); - assert(callback); - - callback_ = callback; - mode_ = mode; -} - -void MouseCursorMonitorMac::Capture() { - assert(callback_); - - CGEventRef event = CGEventCreate(NULL); - CGPoint gc_position = CGEventGetLocation(event); - CFRelease(event); - - DesktopVector position(gc_position.x, gc_position.y); - - MacDesktopConfiguration configuration = - configuration_monitor_->desktop_configuration(); - float scale = GetScaleFactorAtPosition(configuration, position); - - CaptureImage(scale); - - if (mode_ != SHAPE_AND_POSITION) - return; - - // Always report cursor position in DIP pixel. 
- callback_->OnMouseCursorPosition( - position.subtract(configuration.bounds.top_left())); -} - -void MouseCursorMonitorMac::CaptureImage(float scale) { - NSCursor* nscursor = [NSCursor currentSystemCursor]; - - NSImage* nsimage = [nscursor image]; - if (nsimage == nil || !nsimage.isValid) { - return; - } - NSSize nssize = [nsimage size]; // DIP size - - // No need to caputre cursor image if it's unchanged since last capture. - if ([[nsimage TIFFRepresentation] isEqual:[last_cursor_ TIFFRepresentation]]) return; - last_cursor_ = nsimage; - - DesktopSize size(round(nssize.width * scale), - round(nssize.height * scale)); // Pixel size - NSPoint nshotspot = [nscursor hotSpot]; - DesktopVector hotspot( - std::max(0, - std::min(size.width(), static_cast(nshotspot.x * scale))), - std::max(0, - std::min(size.height(), static_cast(nshotspot.y * scale)))); - CGImageRef cg_image = - [nsimage CGImageForProposedRect:NULL context:nil hints:nil]; - if (!cg_image) - return; - - // Before 10.12, OSX may report 1X cursor on Retina screen. (See - // crbug.com/632995.) After 10.12, OSX may report 2X cursor on non-Retina - // screen. (See crbug.com/671436.) So scaling the cursor if needed. 
- CGImageRef scaled_cg_image = nil; - if (CGImageGetWidth(cg_image) != static_cast(size.width())) { - scaled_cg_image = CreateScaledCGImage(cg_image, size.width(), size.height()); - if (scaled_cg_image != nil) { - cg_image = scaled_cg_image; - } - } - if (CGImageGetBitsPerPixel(cg_image) != DesktopFrame::kBytesPerPixel * 8 || - CGImageGetWidth(cg_image) != static_cast(size.width()) || - CGImageGetBitsPerComponent(cg_image) != 8) { - if (scaled_cg_image != nil) CGImageRelease(scaled_cg_image); - return; - } - - CGDataProviderRef provider = CGImageGetDataProvider(cg_image); - CFDataRef image_data_ref = CGDataProviderCopyData(provider); - if (image_data_ref == NULL) { - if (scaled_cg_image != nil) CGImageRelease(scaled_cg_image); - return; - } - - const uint8_t* src_data = - reinterpret_cast(CFDataGetBytePtr(image_data_ref)); - - // Create a MouseCursor that describes the cursor and pass it to - // the client. - std::unique_ptr image( - new BasicDesktopFrame(DesktopSize(size.width(), size.height()))); - - int src_stride = CGImageGetBytesPerRow(cg_image); - image->CopyPixelsFrom(src_data, src_stride, DesktopRect::MakeSize(size)); - - CFRelease(image_data_ref); - if (scaled_cg_image != nil) CGImageRelease(scaled_cg_image); - - std::unique_ptr cursor( - new MouseCursor(image.release(), hotspot)); - - callback_->OnMouseCursor(cursor.release()); -} - -MouseCursorMonitor* MouseCursorMonitor::CreateForWindow( - const DesktopCaptureOptions& options, WindowId window) { - return new MouseCursorMonitorMac(options, window, kInvalidScreenId); -} - -MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen) { - return new MouseCursorMonitorMac(options, kCGNullWindowID, screen); -} - -std::unique_ptr MouseCursorMonitor::Create( - const DesktopCaptureOptions& options) { - return std::unique_ptr( - CreateForScreen(options, kFullDesktopScreenId)); -} - -} // namespace webrtc diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc deleted file mode 100644 index ab1bc2fa3..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/mouse_cursor_monitor.h" - -namespace webrtc { - -MouseCursorMonitor* MouseCursorMonitor::CreateForWindow( - const DesktopCaptureOptions& options, - WindowId window) { - return NULL; -} - -MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen) { - return NULL; -} - -std::unique_ptr MouseCursorMonitor::Create( - const DesktopCaptureOptions& options) { - return std::unique_ptr( - CreateForScreen(options, kFullDesktopScreenId)); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc deleted file mode 100644 index bf0d8534e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include - -#include - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/mouse_cursor.h" -#include "modules/desktop_capture/mouse_cursor_monitor.h" -#include "modules/desktop_capture/win/cursor.h" -#include "modules/desktop_capture/win/screen_capture_utils.h" -#include "modules/desktop_capture/win/window_capture_utils.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -namespace { - -bool IsSameCursorShape(const CURSORINFO& left, const CURSORINFO& right) { - // If the cursors are not showing, we do not care the hCursor handle. - return left.flags == right.flags && - (left.flags != CURSOR_SHOWING || left.hCursor == right.hCursor); -} - -} // namespace - -class MouseCursorMonitorWin : public MouseCursorMonitor { - public: - explicit MouseCursorMonitorWin(HWND window); - explicit MouseCursorMonitorWin(ScreenId screen); - ~MouseCursorMonitorWin() override; - - void Init(Callback* callback, Mode mode) override; - void Capture() override; - - private: - // Get the rect of the currently selected screen, relative to the primary - // display's top-left. If the screen is disabled or disconnected, or any error - // happens, an empty rect is returned. - DesktopRect GetScreenRect(); - - HWND window_; - ScreenId screen_; - - Callback* callback_; - Mode mode_; - - HDC desktop_dc_; - - // The last CURSORINFO (converted to MouseCursor) we have sent to the client. 
- CURSORINFO last_cursor_; -}; - -MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window) - : window_(window), - screen_(kInvalidScreenId), - callback_(NULL), - mode_(SHAPE_AND_POSITION), - desktop_dc_(NULL) { - memset(&last_cursor_, 0, sizeof(CURSORINFO)); -} - -MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen) - : window_(NULL), - screen_(screen), - callback_(NULL), - mode_(SHAPE_AND_POSITION), - desktop_dc_(NULL) { - assert(screen >= kFullDesktopScreenId); - memset(&last_cursor_, 0, sizeof(CURSORINFO)); -} - -MouseCursorMonitorWin::~MouseCursorMonitorWin() { - if (desktop_dc_) - ReleaseDC(NULL, desktop_dc_); -} - -void MouseCursorMonitorWin::Init(Callback* callback, Mode mode) { - assert(!callback_); - assert(callback); - - callback_ = callback; - mode_ = mode; - - desktop_dc_ = GetDC(NULL); -} - -void MouseCursorMonitorWin::Capture() { - assert(callback_); - - CURSORINFO cursor_info; - cursor_info.cbSize = sizeof(CURSORINFO); - if (!GetCursorInfo(&cursor_info)) { - RTC_LOG_F(LS_ERROR) << "Unable to get cursor info. Error = " - << GetLastError(); - return; - } - - if (!IsSameCursorShape(cursor_info, last_cursor_)) { - if (cursor_info.flags == CURSOR_SUPPRESSED) { - // The cursor is intentionally hidden now, send an empty bitmap. - last_cursor_ = cursor_info; - callback_->OnMouseCursor(new MouseCursor( - new BasicDesktopFrame(DesktopSize()), DesktopVector())); - } else { - // According to MSDN https://goo.gl/u6gyuC, HCURSOR instances returned by - // functions other than CreateCursor do not need to be actively destroyed. - // And CloseHandle function (https://goo.gl/ja5ycW) does not close a - // cursor, so assume a HCURSOR does not need to be closed. - if (cursor_info.flags == 0) { - // Host machine does not have a hardware mouse attached, we will send a - // default one instead. - // Note, Windows automatically caches cursor resource, so we do not need - // to cache the result of LoadCursor. 
- cursor_info.hCursor = LoadCursor(nullptr, IDC_ARROW); - } - std::unique_ptr cursor( - CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor)); - if (cursor) { - last_cursor_ = cursor_info; - callback_->OnMouseCursor(cursor.release()); - } - } - } - - if (mode_ != SHAPE_AND_POSITION) - return; - - // CURSORINFO::ptScreenPos is in full desktop coordinate. - DesktopVector position(cursor_info.ptScreenPos.x, cursor_info.ptScreenPos.y); - bool inside = cursor_info.flags == CURSOR_SHOWING; - - if (window_) { - DesktopRect original_rect; - DesktopRect cropped_rect; - if (!GetCroppedWindowRect(window_, /*avoid_cropping_border*/ false, - &cropped_rect, &original_rect)) { - position.set(0, 0); - inside = false; - } else { - if (inside) { - HWND windowUnderCursor = WindowFromPoint(cursor_info.ptScreenPos); - inside = windowUnderCursor - ? (window_ == GetAncestor(windowUnderCursor, GA_ROOT)) - : false; - } - position = position.subtract(cropped_rect.top_left()); - } - } else { - assert(screen_ != kInvalidScreenId); - DesktopRect rect = GetScreenRect(); - if (inside) - inside = rect.Contains(position); - position = position.subtract(rect.top_left()); - } - - callback_->OnMouseCursorPosition(position); -} - -DesktopRect MouseCursorMonitorWin::GetScreenRect() { - assert(screen_ != kInvalidScreenId); - if (screen_ == kFullDesktopScreenId) { - return DesktopRect::MakeXYWH(GetSystemMetrics(SM_XVIRTUALSCREEN), - GetSystemMetrics(SM_YVIRTUALSCREEN), - GetSystemMetrics(SM_CXVIRTUALSCREEN), - GetSystemMetrics(SM_CYVIRTUALSCREEN)); - } - DISPLAY_DEVICE device; - device.cb = sizeof(device); - BOOL result = EnumDisplayDevices(NULL, screen_, &device, 0); - if (!result) - return DesktopRect(); - - DEVMODE device_mode; - device_mode.dmSize = sizeof(device_mode); - device_mode.dmDriverExtra = 0; - result = EnumDisplaySettingsEx(device.DeviceName, ENUM_CURRENT_SETTINGS, - &device_mode, 0); - if (!result) - return DesktopRect(); - - return DesktopRect::MakeXYWH( - 
device_mode.dmPosition.x, device_mode.dmPosition.y, - device_mode.dmPelsWidth, device_mode.dmPelsHeight); -} - -MouseCursorMonitor* MouseCursorMonitor::CreateForWindow( - const DesktopCaptureOptions& options, - WindowId window) { - return new MouseCursorMonitorWin(reinterpret_cast(window)); -} - -MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen) { - return new MouseCursorMonitorWin(screen); -} - -std::unique_ptr MouseCursorMonitor::Create( - const DesktopCaptureOptions& options) { - return std::unique_ptr( - CreateForScreen(options, kFullDesktopScreenId)); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/resolution_tracker.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/resolution_tracker.cc deleted file mode 100644 index 9639d627f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/resolution_tracker.cc +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/desktop_capture/resolution_tracker.h" - -namespace webrtc { - -bool ResolutionTracker::SetResolution(DesktopSize size) { - if (!initialized_) { - initialized_ = true; - last_size_ = size; - return false; - } - - if (last_size_.equals(size)) { - return false; - } - - last_size_ = size; - return true; -} - -void ResolutionTracker::Reset() { - initialized_ = false; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/resolution_tracker.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/resolution_tracker.h deleted file mode 100644 index 55f25d133..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/resolution_tracker.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_RESOLUTION_TRACKER_H_ -#define MODULES_DESKTOP_CAPTURE_RESOLUTION_TRACKER_H_ - -#include "modules/desktop_capture/desktop_geometry.h" - -namespace webrtc { - -class ResolutionTracker final { - public: - // Sets the resolution to |size|. Returns true if a previous size was recorded - // and differs from |size|. - bool SetResolution(DesktopSize size); - - // Resets to the initial state. 
- void Reset(); - - private: - DesktopSize last_size_; - bool initialized_ = false; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_RESOLUTION_TRACKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/rgba_color.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/rgba_color.cc deleted file mode 100644 index 362928a47..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/rgba_color.cc +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/rgba_color.h" - -#include "rtc_base/system/arch.h" - -namespace webrtc { - -namespace { - -bool AlphaEquals(uint8_t i, uint8_t j) { - // On Linux and Windows 8 or early version, '0' was returned for alpha channel - // from capturer APIs, on Windows 10, '255' was returned. So a workaround is - // to treat 0 as 255. 
- return i == j || ((i == 0 || i == 255) && (j == 0 || j == 255)); -} - -} // namespace - -RgbaColor::RgbaColor(uint8_t blue, uint8_t green, uint8_t red, uint8_t alpha) { - this->blue = blue; - this->green = green; - this->red = red; - this->alpha = alpha; -} - -RgbaColor::RgbaColor(uint8_t blue, uint8_t green, uint8_t red) - : RgbaColor(blue, green, red, 0xff) {} - -RgbaColor::RgbaColor(const uint8_t* bgra) - : RgbaColor(bgra[0], bgra[1], bgra[2], bgra[3]) {} - -RgbaColor::RgbaColor(uint32_t bgra) - : RgbaColor(reinterpret_cast(&bgra)) {} - -bool RgbaColor::operator==(const RgbaColor& right) const { - return blue == right.blue && green == right.green && red == right.red && - AlphaEquals(alpha, right.alpha); -} - -bool RgbaColor::operator!=(const RgbaColor& right) const { - return !(*this == right); -} - -uint32_t RgbaColor::ToUInt32() const { -#if defined(WEBRTC_ARCH_LITTLE_ENDIAN) - return blue | (green << 8) | (red << 16) | (alpha << 24); -#else - return (blue << 24) | (green << 16) | (red << 8) | alpha; -#endif -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/rgba_color.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/rgba_color.h deleted file mode 100644 index c47c8bc65..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/rgba_color.h +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_RGBA_COLOR_H_ -#define MODULES_DESKTOP_CAPTURE_RGBA_COLOR_H_ - -#include - -#include "modules/desktop_capture/desktop_frame.h" - -namespace webrtc { - -// A four-byte structure to store a color in BGRA format. This structure also -// provides functions to be created from uint8_t array, say, -// DesktopFrame::data(). It always uses BGRA order for internal storage to match -// DesktopFrame::data(). -struct RgbaColor final { - // Creates a color with BGRA channels. - RgbaColor(uint8_t blue, uint8_t green, uint8_t red, uint8_t alpha); - - // Creates a color with BGR channels, and set alpha channel to 255 (opaque). - RgbaColor(uint8_t blue, uint8_t green, uint8_t red); - - // Creates a color from four-byte in BGRA order, i.e. DesktopFrame::data(). - explicit RgbaColor(const uint8_t* bgra); - - // Creates a color from BGRA channels in a uint format. Consumers should make - // sure the memory order of the uint32_t is always BGRA from left to right, no - // matter the system endian. This function creates an equivalent RgbaColor - // instance from the ToUInt32() result of another RgbaColor instance. - explicit RgbaColor(uint32_t bgra); - - // Returns true if |this| and |right| is the same color. - bool operator==(const RgbaColor& right) const; - - // Returns true if |this| and |right| are different colors. 
- bool operator!=(const RgbaColor& right) const; - - uint32_t ToUInt32() const; - - uint8_t blue; - uint8_t green; - uint8_t red; - uint8_t alpha; -}; -static_assert( - DesktopFrame::kBytesPerPixel == sizeof(RgbaColor), - "A pixel in DesktopFrame should be safe to be represented by a RgbaColor"); - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_RGBA_COLOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capture_frame_queue.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capture_frame_queue.h deleted file mode 100644 index 40dc6daa2..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capture_frame_queue.h +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURE_FRAME_QUEUE_H_ -#define MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURE_FRAME_QUEUE_H_ - -#include - -#include "rtc_base/constructor_magic.h" -// TODO(zijiehe): These headers are not used in this file, but to avoid build -// break in remoting/host. We should add headers in each individual files. -#include "modules/desktop_capture/desktop_frame.h" // Remove -#include "modules/desktop_capture/shared_desktop_frame.h" // Remove - -namespace webrtc { - -// Represents a queue of reusable video frames. Provides access to the 'current' -// frame - the frame that the caller is working with at the moment, and to the -// 'previous' frame - the predecessor of the current frame swapped by -// MoveToNextFrame() call, if any. -// -// The caller is expected to (re)allocate frames if current_frame() returns -// NULL. 
The caller can mark all frames in the queue for reallocation (when, -// say, frame dimensions change). The queue records which frames need updating -// which the caller can query. -// -// Frame consumer is expected to never hold more than kQueueLength frames -// created by this function and it should release the earliest one before trying -// to capture a new frame (i.e. before MoveToNextFrame() is called). -template -class ScreenCaptureFrameQueue { - public: - ScreenCaptureFrameQueue() : current_(0) {} - ~ScreenCaptureFrameQueue() = default; - - // Moves to the next frame in the queue, moving the 'current' frame to become - // the 'previous' one. - void MoveToNextFrame() { current_ = (current_ + 1) % kQueueLength; } - - // Replaces the current frame with a new one allocated by the caller. The - // existing frame (if any) is destroyed. Takes ownership of |frame|. - void ReplaceCurrentFrame(std::unique_ptr frame) { - frames_[current_] = std::move(frame); - } - - // Marks all frames obsolete and resets the previous frame pointer. No - // frames are freed though as the caller can still access them. - void Reset() { - for (int i = 0; i < kQueueLength; i++) { - frames_[i].reset(); - } - current_ = 0; - } - - FrameType* current_frame() const { return frames_[current_].get(); } - - FrameType* previous_frame() const { - return frames_[(current_ + kQueueLength - 1) % kQueueLength].get(); - } - - private: - // Index of the current frame. 
- int current_; - - static const int kQueueLength = 2; - std::unique_ptr frames_[kQueueLength]; - - RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCaptureFrameQueue); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURE_FRAME_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_darwin.mm b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_darwin.mm deleted file mode 100644 index d5a7bb052..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_darwin.mm +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/desktop_capture/mac/screen_capturer_mac.h" - -namespace webrtc { - -// static -std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( - const DesktopCaptureOptions& options) { - if (!options.configuration_monitor()) { - return nullptr; - } - - std::unique_ptr capturer(new ScreenCapturerMac( - options.configuration_monitor(), options.detect_updated_region(), options.allow_iosurface())); - if (!capturer.get()->Init()) { - return nullptr; - } - - return capturer; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_helper.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_helper.cc deleted file mode 100644 index 8a23c88be..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_helper.cc +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/screen_capturer_helper.h" - -#include - -namespace webrtc { - -ScreenCapturerHelper::ScreenCapturerHelper() - : invalid_region_lock_(RWLockWrapper::CreateRWLock()), log_grid_size_(0) {} - -ScreenCapturerHelper::~ScreenCapturerHelper() {} - -void ScreenCapturerHelper::ClearInvalidRegion() { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); - invalid_region_.Clear(); -} - -void ScreenCapturerHelper::InvalidateRegion( - const DesktopRegion& invalid_region) { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); - invalid_region_.AddRegion(invalid_region); -} - -void ScreenCapturerHelper::InvalidateScreen(const DesktopSize& size) { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); - invalid_region_.AddRect(DesktopRect::MakeSize(size)); -} - -void ScreenCapturerHelper::TakeInvalidRegion(DesktopRegion* invalid_region) { - invalid_region->Clear(); - - { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); - invalid_region->Swap(&invalid_region_); - } - - if (log_grid_size_ > 0) { - DesktopRegion expanded_region; - ExpandToGrid(*invalid_region, log_grid_size_, &expanded_region); - expanded_region.Swap(invalid_region); - - invalid_region->IntersectWith(DesktopRect::MakeSize(size_most_recent_)); - } -} - -void ScreenCapturerHelper::SetLogGridSize(int log_grid_size) { - log_grid_size_ = log_grid_size; -} - -const DesktopSize& ScreenCapturerHelper::size_most_recent() const { - return size_most_recent_; -} - -void ScreenCapturerHelper::set_size_most_recent(const DesktopSize& size) { - size_most_recent_ = size; -} - -// Returns the largest 
multiple of |n| that is <= |x|. -// |n| must be a power of 2. |nMask| is ~(|n| - 1). -static int DownToMultiple(int x, int nMask) { - return (x & nMask); -} - -// Returns the smallest multiple of |n| that is >= |x|. -// |n| must be a power of 2. |nMask| is ~(|n| - 1). -static int UpToMultiple(int x, int n, int nMask) { - return ((x + n - 1) & nMask); -} - -void ScreenCapturerHelper::ExpandToGrid(const DesktopRegion& region, - int log_grid_size, - DesktopRegion* result) { - assert(log_grid_size >= 1); - int grid_size = 1 << log_grid_size; - int grid_size_mask = ~(grid_size - 1); - - result->Clear(); - for (DesktopRegion::Iterator it(region); !it.IsAtEnd(); it.Advance()) { - int left = DownToMultiple(it.rect().left(), grid_size_mask); - int right = UpToMultiple(it.rect().right(), grid_size, grid_size_mask); - int top = DownToMultiple(it.rect().top(), grid_size_mask); - int bottom = UpToMultiple(it.rect().bottom(), grid_size, grid_size_mask); - result->AddRect(DesktopRect::MakeLTRB(left, top, right, bottom)); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_helper.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_helper.h deleted file mode 100644 index fc4c85b70..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_helper.h +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_HELPER_H_ -#define MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_HELPER_H_ - -#include - -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/desktop_region.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" - -namespace webrtc { - -// ScreenCapturerHelper is intended to be used by an implementation of the -// ScreenCapturer interface. It maintains a thread-safe invalid region, and -// the size of the most recently captured screen, on behalf of the -// ScreenCapturer that owns it. -class ScreenCapturerHelper { - public: - ScreenCapturerHelper(); - ~ScreenCapturerHelper(); - - // Clear out the invalid region. - void ClearInvalidRegion(); - - // Invalidate the specified region. - void InvalidateRegion(const DesktopRegion& invalid_region); - - // Invalidate the entire screen, of a given size. - void InvalidateScreen(const DesktopSize& size); - - // Copies current invalid region to |invalid_region| clears invalid region - // storage for the next frame. - void TakeInvalidRegion(DesktopRegion* invalid_region); - - // Access the size of the most recently captured screen. - const DesktopSize& size_most_recent() const; - void set_size_most_recent(const DesktopSize& size); - - // Lossy compression can result in color values leaking between pixels in one - // block. If part of a block changes, then unchanged parts of that block can - // be changed in the compressed output. So we need to re-render an entire - // block whenever part of the block changes. - // - // If |log_grid_size| is >= 1, then this function makes TakeInvalidRegion() - // produce an invalid region expanded so that its vertices lie on a grid of - // size 2 ^ |log_grid_size|. The expanded region is then clipped to the size - // of the most recently captured screen, as previously set by - // set_size_most_recent(). 
- // If |log_grid_size| is <= 0, then the invalid region is not expanded. - void SetLogGridSize(int log_grid_size); - - // Expands a region so that its vertices all lie on a grid. - // The grid size must be >= 2, so |log_grid_size| must be >= 1. - static void ExpandToGrid(const DesktopRegion& region, - int log_grid_size, - DesktopRegion* result); - - private: - // A region that has been manually invalidated (through InvalidateRegion). - // These will be returned as dirty_region in the capture data during the next - // capture. - DesktopRegion invalid_region_; - - // A lock protecting |invalid_region_| across threads. - std::unique_ptr invalid_region_lock_; - - // The size of the most recently captured screen. - DesktopSize size_most_recent_; - - // The log (base 2) of the size of the grid to which the invalid region is - // expanded. - // If the value is <= 0, then the invalid region is not expanded to a grid. - int log_grid_size_; - - RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCapturerHelper); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_HELPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_linux.cc deleted file mode 100644 index 82dbae481..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_linux.cc +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" - -#if defined(WEBRTC_USE_PIPEWIRE) -#include "modules/desktop_capture/linux/screen_capturer_pipewire.h" -#endif // defined(WEBRTC_USE_PIPEWIRE) - -#if defined(WEBRTC_USE_X11) -#include "modules/desktop_capture/linux/screen_capturer_x11.h" -#endif // defined(WEBRTC_USE_X11) - -namespace webrtc { - -// static -std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( - const DesktopCaptureOptions& options) { -#if defined(WEBRTC_USE_PIPEWIRE) - if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) { - return ScreenCapturerPipeWire::CreateRawScreenCapturer(options); - } -#endif // defined(WEBRTC_USE_PIPEWIRE) - -#if defined(WEBRTC_USE_X11) - return ScreenCapturerX11::CreateRawScreenCapturer(options); -#endif // defined(WEBRTC_USE_X11) - - return nullptr; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_win.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_win.cc deleted file mode 100644 index be6d8a527..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_capturer_win.cc +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include "modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h" -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/fallback_desktop_capturer_wrapper.h" -#include "modules/desktop_capture/rgba_color.h" -#include "modules/desktop_capture/win/screen_capturer_win_directx.h" -#include "modules/desktop_capture/win/screen_capturer_win_gdi.h" -#include "modules/desktop_capture/win/screen_capturer_win_magnifier.h" - -namespace webrtc { - -namespace { - -std::unique_ptr CreateScreenCapturerWinDirectx() { - std::unique_ptr capturer(new ScreenCapturerWinDirectx()); - capturer.reset(new BlankDetectorDesktopCapturerWrapper( - std::move(capturer), RgbaColor(0, 0, 0, 0))); - return capturer; -} - -} // namespace - -// static -std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( - const DesktopCaptureOptions& options) { - std::unique_ptr capturer(new ScreenCapturerWinGdi(options)); - if (options.allow_directx_capturer()) { - // |dxgi_duplicator_controller| should be alive in this scope to ensure it - // won't unload DxgiDuplicatorController. - auto dxgi_duplicator_controller = DxgiDuplicatorController::Instance(); - if (ScreenCapturerWinDirectx::IsSupported()) { - capturer.reset(new FallbackDesktopCapturerWrapper( - CreateScreenCapturerWinDirectx(), std::move(capturer))); - } - } - - if (options.allow_use_magnification_api()) { - // ScreenCapturerWinMagnifier cannot work on Windows XP or earlier, as well - // as 64-bit only Windows, and it may randomly crash on multi-screen - // systems. So we may need to fallback to use original capturer. 
- capturer.reset(new FallbackDesktopCapturerWrapper( - std::unique_ptr(new ScreenCapturerWinMagnifier()), - std::move(capturer))); - } - - return capturer; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer.cc deleted file mode 100644 index 6460f19f6..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer.cc +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/screen_drawer.h" - -namespace webrtc { - -namespace { -std::unique_ptr g_screen_drawer_lock; -} // namespace - -ScreenDrawerLock::ScreenDrawerLock() = default; -ScreenDrawerLock::~ScreenDrawerLock() = default; - -ScreenDrawer::ScreenDrawer() { - g_screen_drawer_lock = ScreenDrawerLock::Create(); -} - -ScreenDrawer::~ScreenDrawer() { - g_screen_drawer_lock.reset(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer.h deleted file mode 100644 index 061900023..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_SCREEN_DRAWER_H_ -#define MODULES_DESKTOP_CAPTURE_SCREEN_DRAWER_H_ - -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/rgba_color.h" - -namespace webrtc { - -// A cross-process lock to ensure only one ScreenDrawer can be used at a certain -// time. -class ScreenDrawerLock { - public: - virtual ~ScreenDrawerLock(); - - static std::unique_ptr Create(); - - protected: - ScreenDrawerLock(); -}; - -// A set of basic platform dependent functions to draw various shapes on the -// screen. -class ScreenDrawer { - public: - // Creates a ScreenDrawer for the current platform, returns nullptr if no - // ScreenDrawer implementation available. - // If the implementation cannot guarantee two ScreenDrawer instances won't - // impact each other, this function may block current thread until another - // ScreenDrawer has been destroyed. - static std::unique_ptr Create(); - - ScreenDrawer(); - virtual ~ScreenDrawer(); - - // Returns the region inside which DrawRectangle() function are expected to - // work, in capturer coordinates (assuming ScreenCapturer::SelectScreen has - // not been called). This region may exclude regions of the screen reserved by - // the OS for things like menu bars or app launchers. The DesktopRect is in - // system coordinate, i.e. the primary monitor always starts from (0, 0). - virtual DesktopRect DrawableRegion() = 0; - - // Draws a rectangle to cover |rect| with |color|. Note, rect.bottom() and - // rect.right() two lines are not included. The part of |rect| which is out of - // DrawableRegion() will be ignored. - virtual void DrawRectangle(DesktopRect rect, RgbaColor color) = 0; - - // Clears all content on the screen by filling the area with black. 
- virtual void Clear() = 0; - - // Blocks current thread until OS finishes previous DrawRectangle() actions. - // ScreenCapturer should be able to capture the changes after this function - // finish. - virtual void WaitForPendingDraws() = 0; - - // Returns true if incomplete shapes previous actions required may be drawn on - // the screen after a WaitForPendingDraws() call. i.e. Though the complete - // shapes will eventually be drawn on the screen, due to some OS limitations, - // these shapes may be partially appeared sometimes. - virtual bool MayDrawIncompleteShapes() = 0; - - // Returns the id of the drawer window. This function returns kNullWindowId if - // the implementation does not draw on a window of the system. - virtual WindowId window_id() const = 0; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_SCREEN_DRAWER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_linux.cc deleted file mode 100644 index afd29ced0..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_linux.cc +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include -#include - -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_geometry.h" -#include "modules/desktop_capture/linux/shared_x_display.h" -#include "modules/desktop_capture/rgba_color.h" -#include "modules/desktop_capture/screen_drawer.h" -#include "modules/desktop_capture/screen_drawer_lock_posix.h" -#include "rtc_base/checks.h" -#include "system_wrappers/include/sleep.h" - -namespace webrtc { - -namespace { - -// A ScreenDrawer implementation for X11. -class ScreenDrawerLinux : public ScreenDrawer { - public: - ScreenDrawerLinux(); - ~ScreenDrawerLinux() override; - - // ScreenDrawer interface. - DesktopRect DrawableRegion() override; - void DrawRectangle(DesktopRect rect, RgbaColor color) override; - void Clear() override; - void WaitForPendingDraws() override; - bool MayDrawIncompleteShapes() override; - WindowId window_id() const override; - - private: - // Bring the window to the front, this can help to avoid the impact from other - // windows or shadow effect. 
- void BringToFront(); - - rtc::scoped_refptr display_; - int screen_num_; - DesktopRect rect_; - Window window_; - GC context_; - Colormap colormap_; -}; - -ScreenDrawerLinux::ScreenDrawerLinux() { - display_ = SharedXDisplay::CreateDefault(); - RTC_CHECK(display_.get()); - screen_num_ = DefaultScreen(display_->display()); - XWindowAttributes root_attributes; - if (!XGetWindowAttributes(display_->display(), - RootWindow(display_->display(), screen_num_), - &root_attributes)) { - RTC_NOTREACHED() << "Failed to get root window size."; - } - window_ = XCreateSimpleWindow( - display_->display(), RootWindow(display_->display(), screen_num_), 0, 0, - root_attributes.width, root_attributes.height, 0, - BlackPixel(display_->display(), screen_num_), - BlackPixel(display_->display(), screen_num_)); - XSelectInput(display_->display(), window_, StructureNotifyMask); - XMapWindow(display_->display(), window_); - while (true) { - XEvent event; - XNextEvent(display_->display(), &event); - if (event.type == MapNotify) { - break; - } - } - XFlush(display_->display()); - Window child; - int x, y; - if (!XTranslateCoordinates(display_->display(), window_, - RootWindow(display_->display(), screen_num_), 0, 0, - &x, &y, &child)) { - RTC_NOTREACHED() << "Failed to get window position."; - } - // Some window manager does not allow a window to cover two or more monitors. - // So if the window is on the first monitor of a two-monitor system, the - // second half won't be able to show up without changing configurations of WM, - // and its DrawableRegion() is not accurate. - rect_ = DesktopRect::MakeLTRB(x, y, root_attributes.width, - root_attributes.height); - context_ = DefaultGC(display_->display(), screen_num_); - colormap_ = DefaultColormap(display_->display(), screen_num_); - BringToFront(); - // Wait for window animations. 
- SleepMs(200); -} - -ScreenDrawerLinux::~ScreenDrawerLinux() { - XUnmapWindow(display_->display(), window_); - XDestroyWindow(display_->display(), window_); -} - -DesktopRect ScreenDrawerLinux::DrawableRegion() { - return rect_; -} - -void ScreenDrawerLinux::DrawRectangle(DesktopRect rect, RgbaColor color) { - rect.Translate(-rect_.left(), -rect_.top()); - XColor xcolor; - // X11 does not support Alpha. - // X11 uses 16 bits for each primary color, so we need to slightly normalize - // a 8 bits channel to 16 bits channel, by setting the low 8 bits as its high - // 8 bits to avoid a mismatch of color returned by capturer. - xcolor.red = (color.red << 8) + color.red; - xcolor.green = (color.green << 8) + color.green; - xcolor.blue = (color.blue << 8) + color.blue; - xcolor.flags = DoRed | DoGreen | DoBlue; - XAllocColor(display_->display(), colormap_, &xcolor); - XSetForeground(display_->display(), context_, xcolor.pixel); - XFillRectangle(display_->display(), window_, context_, rect.left(), - rect.top(), rect.width(), rect.height()); - XFlush(display_->display()); -} - -void ScreenDrawerLinux::Clear() { - DrawRectangle(rect_, RgbaColor(0, 0, 0)); -} - -// TODO(zijiehe): Find the right signal from X11 to indicate the finish of all -// pending paintings. -void ScreenDrawerLinux::WaitForPendingDraws() { - SleepMs(50); -} - -bool ScreenDrawerLinux::MayDrawIncompleteShapes() { - return true; -} - -WindowId ScreenDrawerLinux::window_id() const { - return window_; -} - -void ScreenDrawerLinux::BringToFront() { - Atom state_above = XInternAtom(display_->display(), "_NET_WM_STATE_ABOVE", 1); - Atom window_state = XInternAtom(display_->display(), "_NET_WM_STATE", 1); - if (state_above == None || window_state == None) { - // Fallback to use XRaiseWindow, it's not reliable if two windows are both - // raise itself to the top. 
- XRaiseWindow(display_->display(), window_); - return; - } - - XEvent event; - memset(&event, 0, sizeof(event)); - event.type = ClientMessage; - event.xclient.window = window_; - event.xclient.message_type = window_state; - event.xclient.format = 32; - event.xclient.data.l[0] = 1; // _NET_WM_STATE_ADD - event.xclient.data.l[1] = state_above; - XSendEvent(display_->display(), RootWindow(display_->display(), screen_num_), - False, SubstructureRedirectMask | SubstructureNotifyMask, &event); -} - -} // namespace - -// static -std::unique_ptr ScreenDrawerLock::Create() { - return std::make_unique(); -} - -// static -std::unique_ptr ScreenDrawer::Create() { - if (SharedXDisplay::CreateDefault().get()) { - return std::make_unique(); - } - return nullptr; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_lock_posix.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_lock_posix.cc deleted file mode 100644 index 095189b05..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_lock_posix.cc +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/screen_drawer_lock_posix.h" - -#include -#include - -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -namespace { - -// A uuid as the name of semaphore. 
-static constexpr char kSemaphoreName[] = "GSDL54fe5552804711e6a7253f429a"; - -} // namespace - -ScreenDrawerLockPosix::ScreenDrawerLockPosix() - : ScreenDrawerLockPosix(kSemaphoreName) {} - -ScreenDrawerLockPosix::ScreenDrawerLockPosix(const char* name) { - semaphore_ = sem_open(name, O_CREAT, S_IRWXU | S_IRWXG | S_IRWXO, 1); - if (semaphore_ == SEM_FAILED) { - RTC_LOG_ERRNO(LS_ERROR) << "Failed to create named semaphore with " << name; - RTC_NOTREACHED(); - } - - sem_wait(semaphore_); -} - -ScreenDrawerLockPosix::~ScreenDrawerLockPosix() { - if (semaphore_ == SEM_FAILED) { - return; - } - - sem_post(semaphore_); - sem_close(semaphore_); - // sem_unlink a named semaphore won't wait until other clients to release the - // sem_t. So if a new process starts, it will sem_open a different kernel - // object with the same name and eventually breaks the cross-process lock. -} - -// static -void ScreenDrawerLockPosix::Unlink(const char* name) { - sem_unlink(name); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_lock_posix.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_lock_posix.h deleted file mode 100644 index 1d5adf2c5..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_lock_posix.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_SCREEN_DRAWER_LOCK_POSIX_H_ -#define MODULES_DESKTOP_CAPTURE_SCREEN_DRAWER_LOCK_POSIX_H_ - -#include - -#include "modules/desktop_capture/screen_drawer.h" - -namespace webrtc { - -class ScreenDrawerLockPosix final : public ScreenDrawerLock { - public: - ScreenDrawerLockPosix(); - // Provides a name other than the default one for test only. - explicit ScreenDrawerLockPosix(const char* name); - ~ScreenDrawerLockPosix() override; - - // Unlinks the named semaphore actively. This will remove the sem_t object in - // the system and allow others to create a different sem_t object with the - // same/ name. - static void Unlink(const char* name); - - private: - sem_t* semaphore_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_SCREEN_DRAWER_LOCK_POSIX_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_mac.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_mac.cc deleted file mode 100644 index 17719e443..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_mac.cc +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -// TODO(zijiehe): Implement ScreenDrawerMac - -#include - -#include "modules/desktop_capture/screen_drawer.h" -#include "modules/desktop_capture/screen_drawer_lock_posix.h" - -namespace webrtc { - -// static -std::unique_ptr ScreenDrawerLock::Create() { - return std::make_unique(); -} - -// static -std::unique_ptr ScreenDrawer::Create() { - return nullptr; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_win.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_win.cc deleted file mode 100644 index cacb88957..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/screen_drawer_win.cc +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include - -#include "modules/desktop_capture/screen_drawer.h" -#include "system_wrappers/include/sleep.h" - -namespace webrtc { - -namespace { - -static constexpr TCHAR kMutexName[] = - TEXT("Local\\ScreenDrawerWin-da834f82-8044-11e6-ac81-73dcdd1c1869"); - -class ScreenDrawerLockWin : public ScreenDrawerLock { - public: - ScreenDrawerLockWin(); - ~ScreenDrawerLockWin() override; - - private: - HANDLE mutex_; -}; - -ScreenDrawerLockWin::ScreenDrawerLockWin() { - while (true) { - mutex_ = CreateMutex(NULL, FALSE, kMutexName); - if (GetLastError() != ERROR_ALREADY_EXISTS && mutex_ != NULL) { - break; - } else { - if (mutex_) { - CloseHandle(mutex_); - } - SleepMs(1000); - } - } -} - -ScreenDrawerLockWin::~ScreenDrawerLockWin() { - CloseHandle(mutex_); -} - -DesktopRect GetScreenRect() { - HDC hdc = GetDC(NULL); - DesktopRect rect = DesktopRect::MakeWH(GetDeviceCaps(hdc, HORZRES), - GetDeviceCaps(hdc, VERTRES)); - ReleaseDC(NULL, hdc); - return rect; -} - -HWND CreateDrawerWindow(DesktopRect rect) { - HWND hwnd = CreateWindowA( - "STATIC", "DrawerWindow", WS_POPUPWINDOW | WS_VISIBLE, rect.left(), - rect.top(), rect.width(), rect.height(), NULL, NULL, NULL, NULL); - SetForegroundWindow(hwnd); - return hwnd; -} - -COLORREF ColorToRef(RgbaColor color) { - // Windows device context does not support alpha. - return RGB(color.red, color.green, color.blue); -} - -// A ScreenDrawer implementation for Windows. -class ScreenDrawerWin : public ScreenDrawer { - public: - ScreenDrawerWin(); - ~ScreenDrawerWin() override; - - // ScreenDrawer interface. - DesktopRect DrawableRegion() override; - void DrawRectangle(DesktopRect rect, RgbaColor color) override; - void Clear() override; - void WaitForPendingDraws() override; - bool MayDrawIncompleteShapes() override; - WindowId window_id() const override; - - private: - // Bring the window to the front, this can help to avoid the impact from other - // windows or shadow effects. 
- void BringToFront(); - - // Draw a line with |color|. - void DrawLine(DesktopVector start, DesktopVector end, RgbaColor color); - - // Draw a dot with |color|. - void DrawDot(DesktopVector vect, RgbaColor color); - - const DesktopRect rect_; - HWND window_; - HDC hdc_; -}; - -ScreenDrawerWin::ScreenDrawerWin() - : ScreenDrawer(), - rect_(GetScreenRect()), - window_(CreateDrawerWindow(rect_)), - hdc_(GetWindowDC(window_)) { - // We do not need to handle any messages for the |window_|, so disable Windows - // from processing windows ghosting feature. - DisableProcessWindowsGhosting(); - - // Always use stock pen (DC_PEN) and brush (DC_BRUSH). - SelectObject(hdc_, GetStockObject(DC_PEN)); - SelectObject(hdc_, GetStockObject(DC_BRUSH)); - BringToFront(); -} - -ScreenDrawerWin::~ScreenDrawerWin() { - ReleaseDC(NULL, hdc_); - DestroyWindow(window_); - // Unfortunately there is no EnableProcessWindowsGhosting() API. -} - -DesktopRect ScreenDrawerWin::DrawableRegion() { - return rect_; -} - -void ScreenDrawerWin::DrawRectangle(DesktopRect rect, RgbaColor color) { - if (rect.width() == 1 && rect.height() == 1) { - // Rectangle function cannot draw a 1 pixel rectangle. - DrawDot(rect.top_left(), color); - return; - } - - if (rect.width() == 1 || rect.height() == 1) { - // Rectangle function cannot draw a 1 pixel rectangle. - DrawLine(rect.top_left(), DesktopVector(rect.right(), rect.bottom()), - color); - return; - } - - SetDCBrushColor(hdc_, ColorToRef(color)); - SetDCPenColor(hdc_, ColorToRef(color)); - Rectangle(hdc_, rect.left(), rect.top(), rect.right(), rect.bottom()); -} - -void ScreenDrawerWin::Clear() { - DrawRectangle(rect_, RgbaColor(0, 0, 0)); -} - -// TODO(zijiehe): Find the right signal to indicate the finish of all pending -// paintings. 
-void ScreenDrawerWin::WaitForPendingDraws() { - BringToFront(); - SleepMs(50); -} - -bool ScreenDrawerWin::MayDrawIncompleteShapes() { - return true; -} - -WindowId ScreenDrawerWin::window_id() const { - return reinterpret_cast(window_); -} - -void ScreenDrawerWin::DrawLine(DesktopVector start, - DesktopVector end, - RgbaColor color) { - POINT points[2]; - points[0].x = start.x(); - points[0].y = start.y(); - points[1].x = end.x(); - points[1].y = end.y(); - SetDCPenColor(hdc_, ColorToRef(color)); - Polyline(hdc_, points, 2); -} - -void ScreenDrawerWin::DrawDot(DesktopVector vect, RgbaColor color) { - SetPixel(hdc_, vect.x(), vect.y(), ColorToRef(color)); -} - -void ScreenDrawerWin::BringToFront() { - if (SetWindowPos(window_, HWND_TOPMOST, 0, 0, 0, 0, - SWP_NOMOVE | SWP_NOSIZE) != FALSE) { - return; - } - - long ex_style = GetWindowLong(window_, GWL_EXSTYLE); - ex_style |= WS_EX_TOPMOST; - if (SetWindowLong(window_, GWL_EXSTYLE, ex_style) != 0) { - return; - } - - BringWindowToTop(window_); -} - -} // namespace - -// static -std::unique_ptr ScreenDrawerLock::Create() { - return std::unique_ptr(new ScreenDrawerLockWin()); -} - -// static -std::unique_ptr ScreenDrawer::Create() { - return std::unique_ptr(new ScreenDrawerWin()); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_desktop_frame.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_desktop_frame.cc deleted file mode 100644 index 2ded14547..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_desktop_frame.cc +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/shared_desktop_frame.h" - -#include -#include -#include - -namespace webrtc { - -SharedDesktopFrame::~SharedDesktopFrame() {} - -// static -std::unique_ptr SharedDesktopFrame::Wrap( - std::unique_ptr desktop_frame) { - return std::unique_ptr( - new SharedDesktopFrame(new Core(std::move(desktop_frame)))); -} - -SharedDesktopFrame* SharedDesktopFrame::Wrap(DesktopFrame* desktop_frame) { - return Wrap(std::unique_ptr(desktop_frame)).release(); -} - -DesktopFrame* SharedDesktopFrame::GetUnderlyingFrame() { - return core_->get(); -} - -bool SharedDesktopFrame::ShareFrameWith(const SharedDesktopFrame& other) const { - return core_->get() == other.core_->get(); -} - -std::unique_ptr SharedDesktopFrame::Share() { - std::unique_ptr result(new SharedDesktopFrame(core_)); - result->CopyFrameInfoFrom(*this); - return result; -} - -bool SharedDesktopFrame::IsShared() { - return !core_->HasOneRef(); -} - -SharedDesktopFrame::SharedDesktopFrame(rtc::scoped_refptr core) - : DesktopFrame((*core)->size(), - (*core)->stride(), - (*core)->data(), - (*core)->shared_memory()), - core_(core) { - CopyFrameInfoFrom(*(core_->get())); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_desktop_frame.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_desktop_frame.h deleted file mode 100644 index fd862d7f2..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_desktop_frame.h +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_SHARED_DESKTOP_FRAME_H_ -#define MODULES_DESKTOP_CAPTURE_SHARED_DESKTOP_FRAME_H_ - -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// SharedDesktopFrame is a DesktopFrame that may have multiple instances all -// sharing the same buffer. -class RTC_EXPORT SharedDesktopFrame : public DesktopFrame { - public: - ~SharedDesktopFrame() override; - - static std::unique_ptr Wrap( - std::unique_ptr desktop_frame); - - // Deprecated. - // TODO(sergeyu): remove this method. - static SharedDesktopFrame* Wrap(DesktopFrame* desktop_frame); - - // Deprecated. Clients do not need to know the underlying DesktopFrame - // instance. - // TODO(zijiehe): Remove this method. - // Returns the underlying instance of DesktopFrame. - DesktopFrame* GetUnderlyingFrame(); - - // Returns whether |this| and |other| share the underlying DesktopFrame. - bool ShareFrameWith(const SharedDesktopFrame& other) const; - - // Creates a clone of this object. - std::unique_ptr Share(); - - // Checks if the frame is currently shared. If it returns false it's - // guaranteed that there are no clones of the object. 
- bool IsShared(); - - private: - typedef rtc::RefCountedObject> Core; - - SharedDesktopFrame(rtc::scoped_refptr core); - - const rtc::scoped_refptr core_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SharedDesktopFrame); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_SHARED_DESKTOP_FRAME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_memory.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_memory.cc deleted file mode 100644 index b4ff78b2a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_memory.cc +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/shared_memory.h" - -namespace webrtc { - -#if defined(WEBRTC_WIN) -const SharedMemory::Handle SharedMemory::kInvalidHandle = NULL; -#else -const SharedMemory::Handle SharedMemory::kInvalidHandle = -1; -#endif - -SharedMemory::SharedMemory(void* data, size_t size, Handle handle, int id) - : data_(data), size_(size), handle_(handle), id_(id) {} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_memory.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_memory.h deleted file mode 100644 index 2e2d2f338..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/shared_memory.h +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_SHARED_MEMORY_H_ -#define MODULES_DESKTOP_CAPTURE_SHARED_MEMORY_H_ - -#include - -#if defined(WEBRTC_WIN) -#include -#endif - -#include - -#include "rtc_base/constructor_magic.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// SharedMemory is a base class for shared memory. It stores all required -// parameters of the buffer, but doesn't have any logic to allocate or destroy -// the actual buffer. DesktopCapturer consumers that need to use shared memory -// for video frames must extend this class with creation and destruction logic -// specific for the target platform and then call -// DesktopCapturer::SetSharedMemoryFactory(). -class RTC_EXPORT SharedMemory { - public: -#if defined(WEBRTC_WIN) - typedef HANDLE Handle; - static const Handle kInvalidHandle; -#else - typedef int Handle; - static const Handle kInvalidHandle; -#endif - - void* data() const { return data_; } - size_t size() const { return size_; } - - // Platform-specific handle of the buffer. - Handle handle() const { return handle_; } - - // Integer identifier that can be used used by consumers of DesktopCapturer - // interface to identify shared memory buffers it created. - int id() const { return id_; } - - virtual ~SharedMemory() {} - - protected: - SharedMemory(void* data, size_t size, Handle handle, int id); - - void* const data_; - const size_t size_; - const Handle handle_; - const int id_; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(SharedMemory); -}; - -// Interface used to create SharedMemory instances. 
-class SharedMemoryFactory { - public: - SharedMemoryFactory() {} - virtual ~SharedMemoryFactory() {} - - virtual std::unique_ptr CreateSharedMemory(size_t size) = 0; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(SharedMemoryFactory); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_SHARED_MEMORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_linux.cc deleted file mode 100644 index 41dbf836b..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_linux.cc +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" - -#if defined(WEBRTC_USE_PIPEWIRE) -#include "modules/desktop_capture/linux/window_capturer_pipewire.h" -#endif // defined(WEBRTC_USE_PIPEWIRE) - -#if defined(WEBRTC_USE_X11) -#include "modules/desktop_capture/linux/window_capturer_x11.h" -#endif // defined(WEBRTC_USE_X11) - -namespace webrtc { - -// static -std::unique_ptr DesktopCapturer::CreateRawWindowCapturer( - const DesktopCaptureOptions& options) { -#if defined(WEBRTC_USE_PIPEWIRE) - if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) { - return WindowCapturerPipeWire::CreateRawWindowCapturer(options); - } -#endif // defined(WEBRTC_USE_PIPEWIRE) - -#if defined(WEBRTC_USE_X11) - return WindowCapturerX11::CreateRawWindowCapturer(options); -#endif // defined(WEBRTC_USE_X11) - - return nullptr; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_mac.mm b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_mac.mm deleted file mode 100644 index 96f89eb14..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_mac.mm +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include -#include -#include - -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "modules/desktop_capture/mac/desktop_configuration.h" -#include "modules/desktop_capture/mac/desktop_configuration_monitor.h" -#include "modules/desktop_capture/mac/desktop_frame_cgimage.h" -#include "modules/desktop_capture/mac/window_list_utils.h" -#include "modules/desktop_capture/window_finder_mac.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" - -namespace webrtc { - -namespace { - -// Returns true if the window exists. -bool IsWindowValid(CGWindowID id) { - CFArrayRef window_id_array = - CFArrayCreate(nullptr, reinterpret_cast(&id), 1, nullptr); - CFArrayRef window_array = - CGWindowListCreateDescriptionFromArray(window_id_array); - bool valid = window_array && CFArrayGetCount(window_array); - CFRelease(window_id_array); - CFRelease(window_array); - - return valid; -} - -class WindowCapturerMac : public DesktopCapturer { - public: - explicit WindowCapturerMac( - rtc::scoped_refptr full_screen_window_detector, - rtc::scoped_refptr configuration_monitor); - ~WindowCapturerMac() override; - - // DesktopCapturer interface. - void Start(Callback* callback) override; - void CaptureFrame() override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - private: - Callback* callback_ = nullptr; - - // The window being captured. 
- CGWindowID window_id_ = 0; - - rtc::scoped_refptr full_screen_window_detector_; - - const rtc::scoped_refptr configuration_monitor_; - - WindowFinderMac window_finder_; - - RTC_DISALLOW_COPY_AND_ASSIGN(WindowCapturerMac); -}; - -WindowCapturerMac::WindowCapturerMac( - rtc::scoped_refptr full_screen_window_detector, - rtc::scoped_refptr configuration_monitor) - : full_screen_window_detector_(std::move(full_screen_window_detector)), - configuration_monitor_(std::move(configuration_monitor)), - window_finder_(configuration_monitor_) {} - -WindowCapturerMac::~WindowCapturerMac() {} - -bool WindowCapturerMac::GetSourceList(SourceList* sources) { - return webrtc::GetWindowList(sources, true, true); -} - -bool WindowCapturerMac::SelectSource(SourceId id) { - if (!IsWindowValid(id)) - return false; - window_id_ = id; - return true; -} - -bool WindowCapturerMac::FocusOnSelectedSource() { - if (!window_id_) - return false; - - CGWindowID ids[1]; - ids[0] = window_id_; - CFArrayRef window_id_array = - CFArrayCreate(nullptr, reinterpret_cast(&ids), 1, nullptr); - - CFArrayRef window_array = - CGWindowListCreateDescriptionFromArray(window_id_array); - if (!window_array || 0 == CFArrayGetCount(window_array)) { - // Could not find the window. It might have been closed. - RTC_LOG(LS_INFO) << "Window not found"; - CFRelease(window_id_array); - return false; - } - - CFDictionaryRef window = reinterpret_cast( - CFArrayGetValueAtIndex(window_array, 0)); - CFNumberRef pid_ref = reinterpret_cast( - CFDictionaryGetValue(window, kCGWindowOwnerPID)); - - int pid; - CFNumberGetValue(pid_ref, kCFNumberIntType, &pid); - - // TODO(jiayl): this will bring the process main window to the front. We - // should find a way to bring only the window to the front. 
- bool result = - [[NSRunningApplication runningApplicationWithProcessIdentifier: pid] - activateWithOptions: NSApplicationActivateIgnoringOtherApps]; - - CFRelease(window_id_array); - CFRelease(window_array); - return result; -} - -bool WindowCapturerMac::IsOccluded(const DesktopVector& pos) { - DesktopVector sys_pos = pos; - if (configuration_monitor_) { - auto configuration = configuration_monitor_->desktop_configuration(); - sys_pos = pos.add(configuration.bounds.top_left()); - } - return window_finder_.GetWindowUnderPoint(sys_pos) != window_id_; -} - -void WindowCapturerMac::Start(Callback* callback) { - assert(!callback_); - assert(callback); - - callback_ = callback; -} - -void WindowCapturerMac::CaptureFrame() { - TRACE_EVENT0("webrtc", "WindowCapturerMac::CaptureFrame"); - - if (!IsWindowValid(window_id_)) { - RTC_LOG(LS_ERROR) << "The window is not valid any longer."; - callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr); - return; - } - - CGWindowID on_screen_window = window_id_; - if (full_screen_window_detector_) { - full_screen_window_detector_->UpdateWindowListIfNeeded( - window_id_, [](DesktopCapturer::SourceList* sources) { - return webrtc::GetWindowList(sources, true, false); - }); - - CGWindowID full_screen_window = full_screen_window_detector_->FindFullScreenWindow(window_id_); - - if (full_screen_window != kCGNullWindowID) on_screen_window = full_screen_window; - } - - std::unique_ptr frame = DesktopFrameCGImage::CreateForWindow(on_screen_window); - if (!frame) { - RTC_LOG(LS_WARNING) << "Temporarily failed to capture window."; - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); - return; - } - - frame->mutable_updated_region()->SetRect( - DesktopRect::MakeSize(frame->size())); - frame->set_top_left(GetWindowBounds(on_screen_window).top_left()); - - float scale_factor = GetWindowScaleFactor(window_id_, frame->size()); - frame->set_dpi(DesktopVector(kStandardDPI * scale_factor, kStandardDPI * scale_factor)); - - 
callback_->OnCaptureResult(Result::SUCCESS, std::move(frame)); -} - -} // namespace - -// static -std::unique_ptr DesktopCapturer::CreateRawWindowCapturer( - const DesktopCaptureOptions& options) { - return std::unique_ptr(new WindowCapturerMac( - options.full_screen_window_detector(), options.configuration_monitor())); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_null.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_null.cc deleted file mode 100644 index 66e76a50f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_null.cc +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame.h" -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -namespace { - -class WindowCapturerNull : public DesktopCapturer { - public: - WindowCapturerNull(); - ~WindowCapturerNull() override; - - // DesktopCapturer interface. - void Start(Callback* callback) override; - void CaptureFrame() override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - - private: - Callback* callback_ = nullptr; - - RTC_DISALLOW_COPY_AND_ASSIGN(WindowCapturerNull); -}; - -WindowCapturerNull::WindowCapturerNull() {} -WindowCapturerNull::~WindowCapturerNull() {} - -bool WindowCapturerNull::GetSourceList(SourceList* sources) { - // Not implemented yet. 
- return false; -} - -bool WindowCapturerNull::SelectSource(SourceId id) { - // Not implemented yet. - return false; -} - -void WindowCapturerNull::Start(Callback* callback) { - assert(!callback_); - assert(callback); - - callback_ = callback; -} - -void WindowCapturerNull::CaptureFrame() { - // Not implemented yet. - callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); -} - -} // namespace - -// static -std::unique_ptr DesktopCapturer::CreateRawWindowCapturer( - const DesktopCaptureOptions& options) { - return std::unique_ptr(new WindowCapturerNull()); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_win.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_win.cc deleted file mode 100644 index a63a24df5..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_capturer_win.cc +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/desktop_capture_options.h" -#include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/win/window_capturer_win_gdi.h" -#include "modules/desktop_capture/win/window_capturer_win_wgc.h" - -namespace webrtc { - -// static -std::unique_ptr DesktopCapturer::CreateRawWindowCapturer( - const DesktopCaptureOptions& options) { - // TODO(bugs.webrtc.org/11760): Add a WebRTC field trial (or similar - // mechanism) and Windows version check here that leads to use of the WGC - // capturer once it is fully implemented. 
- if (true) { - return WindowCapturerWinGdi::CreateRawWindowCapturer(options); - } else { - return WindowCapturerWinWgc::CreateRawWindowCapturer(options); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder.cc deleted file mode 100644 index 86127d4c0..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder.cc +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/window_finder.h" - -namespace webrtc { - -WindowFinder::Options::Options() = default; -WindowFinder::Options::~Options() = default; -WindowFinder::Options::Options(const WindowFinder::Options& other) = default; -WindowFinder::Options::Options(WindowFinder::Options&& other) = default; - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder.h deleted file mode 100644 index 0d346640a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder.h +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_H_ -#define MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_H_ - -#include - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/desktop_geometry.h" - -#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) -#include "modules/desktop_capture/mac/desktop_configuration_monitor.h" -#endif - -namespace webrtc { - -#if defined(WEBRTC_USE_X11) -class XAtomCache; -#endif - -// An interface to return the id of the visible window under a certain point. -class WindowFinder { - public: - WindowFinder() = default; - virtual ~WindowFinder() = default; - - // Returns the id of the visible window under |point|. This function returns - // kNullWindowId if no window is under |point| and the platform does not have - // "root window" concept, i.e. the visible area under |point| is the desktop. - // |point| is always in system coordinate, i.e. the primary monitor always - // starts from (0, 0). - virtual WindowId GetWindowUnderPoint(DesktopVector point) = 0; - - struct Options final { - Options(); - ~Options(); - Options(const Options& other); - Options(Options&& other); - -#if defined(WEBRTC_USE_X11) - XAtomCache* cache = nullptr; -#endif -#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - rtc::scoped_refptr configuration_monitor; -#endif - }; - - // Creates a platform-independent WindowFinder implementation. This function - // returns nullptr if |options| does not contain enough information or - // WindowFinder does not support current platform. 
- static std::unique_ptr Create(const Options& options); -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_mac.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_mac.h deleted file mode 100644 index 988dd497d..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_mac.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_MAC_H_ -#define MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_MAC_H_ - -#include "api/scoped_refptr.h" -#include "modules/desktop_capture/window_finder.h" - -namespace webrtc { - -class DesktopConfigurationMonitor; - -// The implementation of WindowFinder for Mac OSX. -class WindowFinderMac final : public WindowFinder { - public: - explicit WindowFinderMac( - rtc::scoped_refptr configuration_monitor); - ~WindowFinderMac() override; - - // WindowFinder implementation. - WindowId GetWindowUnderPoint(DesktopVector point) override; - - private: - const rtc::scoped_refptr configuration_monitor_; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_MAC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_mac.mm b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_mac.mm deleted file mode 100644 index e1d0316c7..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_mac.mm +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/window_finder_mac.h" - -#include - -#include -#include - -#include "modules/desktop_capture/mac/desktop_configuration.h" -#include "modules/desktop_capture/mac/desktop_configuration_monitor.h" -#include "modules/desktop_capture/mac/window_list_utils.h" - -namespace webrtc { - -WindowFinderMac::WindowFinderMac( - rtc::scoped_refptr configuration_monitor) - : configuration_monitor_(std::move(configuration_monitor)) {} -WindowFinderMac::~WindowFinderMac() = default; - -WindowId WindowFinderMac::GetWindowUnderPoint(DesktopVector point) { - WindowId id = kNullWindowId; - GetWindowList( - [&id, point](CFDictionaryRef window) { - DesktopRect bounds; - bounds = GetWindowBounds(window); - if (bounds.Contains(point)) { - id = GetWindowId(window); - return false; - } - return true; - }, - true, - true); - return id; -} - -// static -std::unique_ptr WindowFinder::Create( - const WindowFinder::Options& options) { - return std::make_unique(options.configuration_monitor); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_win.cc b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_win.cc deleted file mode 100644 index a8c3d39e1..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_win.cc +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/desktop_capture/window_finder_win.h" - -#include - -#include - -namespace webrtc { - -WindowFinderWin::WindowFinderWin() = default; -WindowFinderWin::~WindowFinderWin() = default; - -WindowId WindowFinderWin::GetWindowUnderPoint(DesktopVector point) { - HWND window = WindowFromPoint(POINT{point.x(), point.y()}); - if (!window) { - return kNullWindowId; - } - - // The difference between GA_ROOTOWNER and GA_ROOT can be found at - // https://groups.google.com/a/chromium.org/forum/#!topic/chromium-dev/Hirr_DkuZdw. - // In short, we should use GA_ROOT, since we only care about the root window - // but not the owner. - window = GetAncestor(window, GA_ROOT); - if (!window) { - return kNullWindowId; - } - - return reinterpret_cast(window); -} - -// static -std::unique_ptr WindowFinder::Create( - const WindowFinder::Options& options) { - return std::make_unique(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_win.h b/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_win.h deleted file mode 100644 index a04e7e1aa..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/desktop_capture/window_finder_win.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_WIN_H_ -#define MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_WIN_H_ - -#include "modules/desktop_capture/window_finder.h" - -namespace webrtc { - -// The implementation of WindowFinder for Windows. -class WindowFinderWin final : public WindowFinder { - public: - WindowFinderWin(); - ~WindowFinderWin() override; - - // WindowFinder implementation. - WindowId GetWindowUnderPoint(DesktopVector point) override; -}; - -} // namespace webrtc - -#endif // MODULES_DESKTOP_CAPTURE_WINDOW_FINDER_WIN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/include/module_common_types.cc b/TMessagesProj/jni/voip/webrtc/modules/include/module_common_types.cc deleted file mode 100644 index a589312ec..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/include/module_common_types.cc +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/include/module_common_types.h" - -#include - -#include -#include - -#include "rtc_base/numerics/safe_conversions.h" - -namespace webrtc { - -RTPFragmentationHeader::RTPFragmentationHeader() - : fragmentationVectorSize(0), - fragmentationOffset(nullptr), - fragmentationLength(nullptr) {} - -RTPFragmentationHeader::RTPFragmentationHeader(RTPFragmentationHeader&& other) - : RTPFragmentationHeader() { - swap(*this, other); -} - -RTPFragmentationHeader& RTPFragmentationHeader::operator=( - RTPFragmentationHeader&& other) { - swap(*this, other); - return *this; -} - -RTPFragmentationHeader::~RTPFragmentationHeader() { - delete[] fragmentationOffset; - delete[] fragmentationLength; -} - -void swap(RTPFragmentationHeader& a, RTPFragmentationHeader& b) { - using std::swap; - swap(a.fragmentationVectorSize, b.fragmentationVectorSize); - swap(a.fragmentationOffset, b.fragmentationOffset); - swap(a.fragmentationLength, b.fragmentationLength); -} - -void RTPFragmentationHeader::CopyFrom(const RTPFragmentationHeader& src) { - if (this == &src) { - return; - } - - if (src.fragmentationVectorSize != fragmentationVectorSize) { - // new size of vectors - - // delete old - delete[] fragmentationOffset; - fragmentationOffset = nullptr; - delete[] fragmentationLength; - fragmentationLength = nullptr; - - if (src.fragmentationVectorSize > 0) { - // allocate new - if (src.fragmentationOffset) { - fragmentationOffset = new size_t[src.fragmentationVectorSize]; - } - if (src.fragmentationLength) { - fragmentationLength = new size_t[src.fragmentationVectorSize]; - } - } - // set new size - fragmentationVectorSize = src.fragmentationVectorSize; - } - - if (src.fragmentationVectorSize > 0) { - // copy values - if (src.fragmentationOffset) { - memcpy(fragmentationOffset, src.fragmentationOffset, - src.fragmentationVectorSize * sizeof(size_t)); - } - if (src.fragmentationLength) { - memcpy(fragmentationLength, src.fragmentationLength, - src.fragmentationVectorSize * 
sizeof(size_t)); - } - } -} - -void RTPFragmentationHeader::Resize(size_t size) { - const uint16_t size16 = rtc::dchecked_cast(size); - if (fragmentationVectorSize < size16) { - uint16_t old_vector_size = fragmentationVectorSize; - size_t* old_offsets = fragmentationOffset; - fragmentationOffset = new size_t[size16]; - memset(fragmentationOffset + old_vector_size, 0, - sizeof(size_t) * (size16 - old_vector_size)); - size_t* old_lengths = fragmentationLength; - fragmentationLength = new size_t[size16]; - memset(fragmentationLength + old_vector_size, 0, - sizeof(size_t) * (size16 - old_vector_size)); - - // copy old values - if (old_vector_size > 0) { - if (old_offsets != nullptr) { - memcpy(fragmentationOffset, old_offsets, - sizeof(size_t) * old_vector_size); - delete[] old_offsets; - } - if (old_lengths != nullptr) { - memcpy(fragmentationLength, old_lengths, - sizeof(size_t) * old_vector_size); - delete[] old_lengths; - } - } - fragmentationVectorSize = size16; - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/include/module_common_types.h b/TMessagesProj/jni/voip/webrtc/modules/include/module_common_types.h index 3afd7b7d7..7c9ef39cf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/include/module_common_types.h +++ b/TMessagesProj/jni/voip/webrtc/modules/include/module_common_types.h @@ -11,44 +11,12 @@ #ifndef MODULES_INCLUDE_MODULE_COMMON_TYPES_H_ #define MODULES_INCLUDE_MODULE_COMMON_TYPES_H_ -#include #include #include -#include "rtc_base/system/rtc_export.h" - namespace webrtc { -class RTC_EXPORT RTPFragmentationHeader { - public: - RTPFragmentationHeader(); - RTPFragmentationHeader(const RTPFragmentationHeader&) = delete; - RTPFragmentationHeader(RTPFragmentationHeader&& other); - RTPFragmentationHeader& operator=(const RTPFragmentationHeader& other) = - delete; - RTPFragmentationHeader& operator=(RTPFragmentationHeader&& other); - ~RTPFragmentationHeader(); - - friend void swap(RTPFragmentationHeader& a, 
RTPFragmentationHeader& b); - - void CopyFrom(const RTPFragmentationHeader& src); - void VerifyAndAllocateFragmentationHeader(size_t size) { Resize(size); } - - void Resize(size_t size); - size_t Size() const { return fragmentationVectorSize; } - - size_t Offset(size_t index) const { return fragmentationOffset[index]; } - size_t Length(size_t index) const { return fragmentationLength[index]; } - - // TODO(danilchap): Move all members to private section, - // simplify by replacing raw arrays with single std::vector - uint16_t fragmentationVectorSize; // Number of fragmentations - size_t* fragmentationOffset; // Offset of pointer to data for each - // fragmentation - size_t* fragmentationLength; // Data size for each fragmentation -}; - // Interface used by the CallStats class to distribute call statistics. // Callbacks will be triggered as soon as the class has been registered to a // CallStats object using RegisterStatsObserver. diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc index 1949570d8..381a54d50 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc @@ -37,9 +37,8 @@ BitrateProberConfig::BitrateProberConfig( : min_probe_packets_sent("min_probe_packets_sent", 5), min_probe_delta("min_probe_delta", TimeDelta::Millis(1)), min_probe_duration("min_probe_duration", TimeDelta::Millis(15)), - max_probe_delay("max_probe_delay", TimeDelta::Millis(3)), - // TODO(bugs.webrtc.org/11780): Change to default true. 
- abort_delayed_probes("abort_delayed_probes", false) { + max_probe_delay("max_probe_delay", TimeDelta::Millis(10)), + abort_delayed_probes("abort_delayed_probes", true) { ParseFieldTrial( {&min_probe_packets_sent, &min_probe_delta, &min_probe_duration, &max_probe_delay, &abort_delayed_probes}, diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc index 107316d4e..5ffbc903b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc @@ -84,6 +84,7 @@ int GetPriorityForType(RtpPacketMediaType type) { // BWE high. return kFirstPriority + 4; } + RTC_CHECK_NOTREACHED(); } } // namespace @@ -112,7 +113,7 @@ PacingController::PacingController(Clock* clock, IsEnabled(*field_trials_, "WebRTC-Pacer-PadInSilence")), pace_audio_(IsEnabled(*field_trials_, "WebRTC-Pacer-BlockAudio")), small_first_probe_packet_( - IsEnabled(*field_trials_, "WebRTC-Pacer-SmallFirstProbePacket")), + !IsDisabled(*field_trials_, "WebRTC-Pacer-SmallFirstProbePacket")), ignore_transport_overhead_( IsEnabled(*field_trials_, "WebRTC-Pacer-IgnoreTransportOverhead")), padding_target_duration_(GetDynamicPaddingTarget(*field_trials_)), diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc index eb8b11bb6..69ec5457a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc @@ -224,9 +224,13 @@ void TaskQueuePacedSender::MaybeProcessPackets( // If we're probing and there isn't already a wakeup scheduled for the next // process time, always post a task and just round sleep time down to // nearest millisecond. 
- time_to_next_process = - std::max(TimeDelta::Zero(), - (next_process_time - now).RoundDownTo(TimeDelta::Millis(1))); + if (next_process_time.IsMinusInfinity()) { + time_to_next_process = TimeDelta::Zero(); + } else { + time_to_next_process = + std::max(TimeDelta::Zero(), + (next_process_time - now).RoundDownTo(TimeDelta::Millis(1))); + } } else if (next_process_time_.IsMinusInfinity() || next_process_time <= next_process_time_ - hold_back_window_) { // Schedule a new task since there is none currently scheduled diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/inter_arrival.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/inter_arrival.h index 1d84970de..dbc630ff6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/inter_arrival.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/inter_arrival.h @@ -14,8 +14,6 @@ #include #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { // Helper class to compute the inter-arrival time delta and the size delta @@ -35,6 +33,10 @@ class InterArrival { double timestamp_to_ms_coeff, bool enable_burst_grouping); + InterArrival() = delete; + InterArrival(const InterArrival&) = delete; + InterArrival& operator=(const InterArrival&) = delete; + // This function returns true if a delta was computed, or false if the current // group is still incomplete or if only one group has been completed. // |timestamp| is the timestamp. 
@@ -87,8 +89,6 @@ class InterArrival { double timestamp_to_ms_coeff_; bool burst_grouping_; int num_consecutive_reordered_packets_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(InterArrival); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h index 242336334..f42a28f8c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h @@ -27,7 +27,6 @@ #include "modules/remote_bitrate_estimator/overuse_detector.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" @@ -76,6 +75,13 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { public: RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer, Clock* clock); + + RemoteBitrateEstimatorAbsSendTime() = delete; + RemoteBitrateEstimatorAbsSendTime(const RemoteBitrateEstimatorAbsSendTime&) = + delete; + RemoteBitrateEstimatorAbsSendTime& operator=( + const RemoteBitrateEstimatorAbsSendTime&) = delete; + ~RemoteBitrateEstimatorAbsSendTime() override; void IncomingPacket(int64_t arrival_time_ms, @@ -141,8 +147,6 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { mutable Mutex mutex_; Ssrcs ssrcs_ RTC_GUARDED_BY(&mutex_); AimdRateControl remote_rate_ RTC_GUARDED_BY(&mutex_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorAbsSendTime); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h 
b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h index 6da67e5a1..9fd2f9fc0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h @@ -21,7 +21,6 @@ #include "api/transport/field_trial_based_config.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -35,6 +34,13 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { public: RemoteBitrateEstimatorSingleStream(RemoteBitrateObserver* observer, Clock* clock); + + RemoteBitrateEstimatorSingleStream() = delete; + RemoteBitrateEstimatorSingleStream( + const RemoteBitrateEstimatorSingleStream&) = delete; + RemoteBitrateEstimatorSingleStream& operator=( + const RemoteBitrateEstimatorSingleStream&) = delete; + ~RemoteBitrateEstimatorSingleStream() override; void IncomingPacket(int64_t arrival_time_ms, @@ -74,8 +80,6 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { int64_t last_process_time_; int64_t process_interval_ms_ RTC_GUARDED_BY(mutex_); bool uma_recorded_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorSingleStream); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h index 5a30da828..079a7f888 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h @@ -263,10 +263,11 @@ class 
Logging { Context(uint32_t name, int64_t timestamp_ms, bool enabled); Context(const std::string& name, int64_t timestamp_ms, bool enabled); Context(const char* name, int64_t timestamp_ms, bool enabled); - ~Context(); - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Context); + Context() = delete; + Context(const Context&) = delete; + Context& operator=(const Context&) = delete; + ~Context(); }; static Logging* GetInstance(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h index 46c310e27..cbc2d9211 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -65,6 +65,7 @@ enum RTPExtensionType : int { kRtpExtensionTransportSequenceNumber02, kRtpExtensionPlayoutDelay, kRtpExtensionVideoContentType, + kRtpExtensionVideoLayersAllocation, kRtpExtensionVideoTiming, kRtpExtensionRtpStreamId, kRtpExtensionRepairedRtpStreamId, diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index d597b1e28..70b073cd7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -82,13 +82,6 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { MOCK_METHOD(bool, SendingMedia, (), (const, override)); MOCK_METHOD(bool, IsAudioConfigured, (), (const, override)); MOCK_METHOD(void, SetAsPartOfAllocation, (bool), (override)); - MOCK_METHOD(void, - BitrateSent, - (uint32_t * total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nack_rate), - (const, override)); MOCK_METHOD(RtpSendRates, GetSendRates, (), (const, override)); MOCK_METHOD(bool, OnSendingRtpFrame, diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc 
b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc index 6a64d3e23..1da19e476 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc @@ -43,6 +43,7 @@ std::unique_ptr CreateVideoRtpDepacketizer( case kVideoCodecMultiplex: return std::make_unique(); } + RTC_CHECK_NOTREACHED(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index ba091cefb..6cb9d9330 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -27,11 +27,11 @@ constexpr int kSendSideDelayWindowMs = 1000; constexpr int kBitrateStatisticsWindowMs = 1000; constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; -bool IsEnabled(absl::string_view name, - const WebRtcKeyValueConfig* field_trials) { +bool IsDisabled(absl::string_view name, + const WebRtcKeyValueConfig* field_trials) { FieldTrialBasedConfig default_trials; auto& trials = field_trials ? 
*field_trials : default_trials; - return absl::StartsWith(trials.Lookup(name), "Enabled"); + return absl::StartsWith(trials.Lookup(name), "Disabled"); } } // namespace @@ -63,7 +63,7 @@ DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress( : absl::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), send_side_bwe_with_overhead_( - IsEnabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)), + !IsDisabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)), clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc index 5e762335e..54f3555fc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include +#include + #include "rtc_base/checks.h" namespace webrtc { @@ -19,16 +22,16 @@ CompoundPacket::CompoundPacket() = default; CompoundPacket::~CompoundPacket() = default; -void CompoundPacket::Append(RtcpPacket* packet) { +void CompoundPacket::Append(std::unique_ptr packet) { RTC_CHECK(packet); - appended_packets_.push_back(packet); + appended_packets_.push_back(std::move(packet)); } bool CompoundPacket::Create(uint8_t* packet, size_t* index, size_t max_length, PacketReadyCallback callback) const { - for (RtcpPacket* appended : appended_packets_) { + for (const auto& appended : appended_packets_) { if (!appended->Create(packet, index, max_length, callback)) return false; } @@ -37,7 +40,7 @@ bool CompoundPacket::Create(uint8_t* packet, size_t CompoundPacket::BlockLength() const { size_t block_length = 0; - for (RtcpPacket* appended : appended_packets_) { + for (const auto& appended : appended_packets_) 
{ block_length += appended->BlockLength(); } return block_length; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h index f521c7f92..8bee60069 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h @@ -12,6 +12,7 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_ +#include #include #include "modules/rtp_rtcp/source/rtcp_packet.h" @@ -25,7 +26,7 @@ class CompoundPacket : public RtcpPacket { CompoundPacket(); ~CompoundPacket() override; - void Append(RtcpPacket* packet); + void Append(std::unique_ptr packet); // Size of this packet in bytes (i.e. total size of nested packets). size_t BlockLength() const override; @@ -36,7 +37,7 @@ class CompoundPacket : public RtcpPacket { PacketReadyCallback callback) const override; protected: - std::vector appended_packets_; + std::vector> appended_packets_; private: RTC_DISALLOW_COPY_AND_ASSIGN(CompoundPacket); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc index fae635e1b..61e6085bb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc @@ -37,7 +37,6 @@ #include "modules/rtp_rtcp/source/time_util.h" #include "modules/rtp_rtcp/source/tmmbr_help.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/trace_event.h" @@ -55,10 +54,10 @@ class PacketContainer : public rtcp::CompoundPacket { public: PacketContainer(Transport* transport, RtcEventLog* event_log) : 
transport_(transport), event_log_(event_log) {} - ~PacketContainer() override { - for (RtcpPacket* packet : appended_packets_) - delete packet; - } + + PacketContainer() = delete; + PacketContainer(const PacketContainer&) = delete; + PacketContainer& operator=(const PacketContainer&) = delete; size_t SendPackets(size_t max_payload_length) { size_t bytes_sent = 0; @@ -76,8 +75,6 @@ class PacketContainer : public rtcp::CompoundPacket { private: Transport* transport_; RtcEventLog* const event_log_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(PacketContainer); }; // Helper to put several RTCP packets into lower layer datagram RTCP packet. @@ -792,14 +789,14 @@ absl::optional RTCPSender::ComputeCompoundRTCPPacket( if (builder_it->first == kRtcpBye) { packet_bye = std::move(packet); } else { - out_packet->Append(packet.release()); + out_packet->Append(std::move(packet)); } } } // Append the BYE now at the end if (packet_bye) { - out_packet->Append(packet_bye.release()); + out_packet->Append(std::move(packet_bye)); } if (packet_type_counter_observer_ != nullptr) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h index bcdf15eda..22b2bb7b7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h @@ -31,7 +31,6 @@ #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/random.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -65,6 +64,11 @@ class RTCPSender final { }; explicit RTCPSender(const RtpRtcpInterface::Configuration& config); + + RTCPSender() = delete; + RTCPSender(const RTCPSender&) = delete; + RTCPSender& operator=(const RTCPSender&) = delete; + virtual 
~RTCPSender(); RtcpMode Status() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); @@ -308,8 +312,6 @@ class RTCPSender final { const RtcpContext&); // Map from RTCPPacketType to builder. std::map builders_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTCPSender); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format.cc index c7a35ee5a..27ce7a6fe 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format.cc @@ -53,7 +53,7 @@ std::unique_ptr RtpPacketizer::Create( case kVideoCodecH265: { const auto& h265 = absl::get(rtp_video_header.video_type_header); - return absl::make_unique( + return std::make_unique( payload, limits, h265.packetization_mode); } #endif @@ -68,8 +68,9 @@ std::unique_ptr RtpPacketizer::Create( return std::make_unique(payload, limits, vp9); } case kVideoCodecAV1: - return std::make_unique(payload, limits, - rtp_video_header.frame_type); + return std::make_unique( + payload, limits, rtp_video_header.frame_type, + rtp_video_header.is_last_frame_in_picture); default: { return std::make_unique(payload, limits, rtp_video_header); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h index cc950be8d..a1c9665a1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h @@ -107,7 +107,6 @@ class RtpPacketizerH265 : public RtpPacketizer { const PayloadSizeLimits limits_; size_t num_packets_left_; - RTPFragmentationHeader fragmentation_; RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerH265); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc 
b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc index 63562c5b8..c16dcaf6f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc @@ -13,6 +13,7 @@ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -40,6 +41,7 @@ constexpr ExtensionInfo kExtensions[] = { CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), + CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc index 527874d78..b540e4b22 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -371,7 +371,7 @@ constexpr uint8_t PlayoutDelayLimits::kValueSizeBytes; constexpr const char PlayoutDelayLimits::kUri[]; bool PlayoutDelayLimits::Parse(rtc::ArrayView data, - PlayoutDelay* playout_delay) { + VideoPlayoutDelay* playout_delay) { RTC_DCHECK(playout_delay); if (data.size() != 3) return false; @@ -386,7 +386,7 @@ bool PlayoutDelayLimits::Parse(rtc::ArrayView data, } bool PlayoutDelayLimits::Write(rtc::ArrayView data, - const PlayoutDelay& playout_delay) { + const VideoPlayoutDelay& playout_delay) { RTC_DCHECK_EQ(data.size(), 3); RTC_DCHECK_LE(0, playout_delay.min_ms); RTC_DCHECK_LE(playout_delay.min_ms, playout_delay.max_ms); diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h index 8a81280f7..1352611fb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h @@ -148,7 +148,7 @@ class VideoOrientation { class PlayoutDelayLimits { public: - using value_type = PlayoutDelay; + using value_type = VideoPlayoutDelay; static constexpr RTPExtensionType kId = kRtpExtensionPlayoutDelay; static constexpr uint8_t kValueSizeBytes = 3; static constexpr const char kUri[] = @@ -162,10 +162,10 @@ class PlayoutDelayLimits { static constexpr int kMaxMs = 0xfff * kGranularityMs; // 40950. static bool Parse(rtc::ArrayView data, - PlayoutDelay* playout_delay); - static size_t ValueSize(const PlayoutDelay&) { return kValueSizeBytes; } + VideoPlayoutDelay* playout_delay); + static size_t ValueSize(const VideoPlayoutDelay&) { return kValueSizeBytes; } static bool Write(rtc::ArrayView data, - const PlayoutDelay& playout_delay); + const VideoPlayoutDelay& playout_delay); }; class VideoContentTypeExtension { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc index 767c9a068..38d29cc2b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc @@ -196,6 +196,7 @@ void RtpPacket::ZeroMutableExtensions() { case RTPExtensionType::kRtpExtensionRepairedRtpStreamId: case RTPExtensionType::kRtpExtensionRtpStreamId: case RTPExtensionType::kRtpExtensionVideoContentType: + case RTPExtensionType::kRtpExtensionVideoLayersAllocation: case RTPExtensionType::kRtpExtensionVideoRotation: case RTPExtensionType::kRtpExtensionInbandComfortNoise: { // Non-mutable extension. Don't change it. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h index f249116b7..4a2bf91bd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h @@ -19,7 +19,6 @@ #include "api/function_view.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -63,6 +62,11 @@ class RtpPacketHistory { static constexpr int kPacketCullingDelayFactor = 3; RtpPacketHistory(Clock* clock, bool enable_padding_prio); + + RtpPacketHistory() = delete; + RtpPacketHistory(const RtpPacketHistory&) = delete; + RtpPacketHistory& operator=(const RtpPacketHistory&) = delete; + ~RtpPacketHistory(); // Set/get storage mode. Note that setting the state will clear the history, @@ -211,8 +215,6 @@ class RtpPacketHistory { // Objects from |packet_history_| ordered by "most likely to be useful", used // in GetPayloadPaddingPacket(). 
PacketPrioritySet padding_priority_ RTC_GUARDED_BY(lock_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtpPacketHistory); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc index 909b1289e..4408beed3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc @@ -88,10 +88,12 @@ int MaxFragmentSize(int remaining_bytes) { RtpPacketizerAv1::RtpPacketizerAv1(rtc::ArrayView payload, RtpPacketizer::PayloadSizeLimits limits, - VideoFrameType frame_type) + VideoFrameType frame_type, + bool is_last_frame_in_picture) : frame_type_(frame_type), obus_(ParseObus(payload)), - packets_(Packetize(obus_, limits)) {} + packets_(Packetize(obus_, limits)), + is_last_frame_in_picture_(is_last_frame_in_picture) {} std::vector RtpPacketizerAv1::ParseObus( rtc::ArrayView payload) { @@ -414,11 +416,8 @@ bool RtpPacketizerAv1::NextPacket(RtpPacketToSend* packet) { kAggregationHeaderSize + next_packet.packet_size); ++packet_index_; - if (packet_index_ == packets_.size()) { - // TODO(danilchap): To support spatial scalability pass and use information - // if this frame is the last in the temporal unit. 
- packet->SetMarker(true); - } + bool is_last_packet_in_frame = packet_index_ == packets_.size(); + packet->SetMarker(is_last_packet_in_frame && is_last_frame_in_picture_); return true; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.h index 79fa6e02f..520e746ea 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.h @@ -26,7 +26,8 @@ class RtpPacketizerAv1 : public RtpPacketizer { public: RtpPacketizerAv1(rtc::ArrayView payload, PayloadSizeLimits limits, - VideoFrameType frame_type); + VideoFrameType frame_type, + bool is_last_frame_in_picture); ~RtpPacketizerAv1() override = default; size_t NumPackets() const override { return packets_.size() - packet_index_; } @@ -63,6 +64,7 @@ class RtpPacketizerAv1 : public RtpPacketizer { const VideoFrameType frame_type_; const std::vector obus_; const std::vector packets_; + const bool is_last_frame_in_picture_; size_t packet_index_ = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index 92abd9be6..b2268c7d1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -727,19 +727,6 @@ void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) { rtcp_receiver_.SetRemoteSSRC(ssrc); } -void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nack_rate) const { - RtpSendRates send_rates = rtp_sender_->packet_sender.GetSendRates(); - *total_rate = send_rates.Sum().bps(); - if (video_rate) - *video_rate = 0; - if (fec_rate) - *fec_rate = 0; - *nack_rate = send_rates[RtpPacketMediaType::kRetransmission].bps(); -} - RtpSendRates 
ModuleRtpRtcpImpl::GetSendRates() const { return rtp_sender_->packet_sender.GetSendRates(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h index c413efe13..7f7df174f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -264,11 +264,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { uint32_t* NTPfrac, uint32_t* remote_sr) const; - void BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nackRate) const override; - RtpSendRates GetSendRates() const override; void OnReceivedNack( diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc index 31e485483..88ede3d43 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -631,21 +631,6 @@ void ModuleRtpRtcpImpl2::SetRemoteSSRC(const uint32_t ssrc) { rtcp_receiver_.SetRemoteSSRC(ssrc); } -// TODO(nisse): Delete video_rate amd fec_rate arguments. 
-void ModuleRtpRtcpImpl2::BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nack_rate) const { - RTC_DCHECK_RUN_ON(worker_queue_); - RtpSendRates send_rates = rtp_sender_->packet_sender.GetSendRates(); - *total_rate = send_rates.Sum().bps(); - if (video_rate) - *video_rate = 0; - if (fec_rate) - *fec_rate = 0; - *nack_rate = send_rates[RtpPacketMediaType::kRetransmission].bps(); -} - RtpSendRates ModuleRtpRtcpImpl2::GetSendRates() const { RTC_DCHECK_RUN_ON(worker_queue_); return rtp_sender_->packet_sender.GetSendRates(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h index 9611ace91..9eb7e3a6d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -250,11 +250,6 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, uint32_t* NTPfrac, uint32_t* remote_sr) const; - void BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nackRate) const override; - RtpSendRates GetSendRates() const override; void OnReceivedNack( diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h index f763da244..3bd5d4705 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -267,13 +267,6 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // bitrate estimate since the stream participates in the bitrate allocation. virtual void SetAsPartOfAllocation(bool part_of_allocation) = 0; - // TODO(sprang): Remove when all call sites have been moved to - // GetSendRates(). Fetches the current send bitrates in bits/s. 
- virtual void BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nack_rate) const = 0; - // Returns bitrate sent (post-pacing) per packet type. virtual RtpSendRates GetSendRates() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc index 119306805..584fced39 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc @@ -118,6 +118,7 @@ bool IsNonVolatile(RTPExtensionType type) { case kRtpExtensionVideoRotation: case kRtpExtensionPlayoutDelay: case kRtpExtensionVideoContentType: + case kRtpExtensionVideoLayersAllocation: case kRtpExtensionVideoTiming: case kRtpExtensionRepairedRtpStreamId: case kRtpExtensionColorSpace: @@ -127,6 +128,7 @@ bool IsNonVolatile(RTPExtensionType type) { RTC_NOTREACHED(); return false; } + RTC_CHECK_NOTREACHED(); } bool HasBweExtension(const RtpHeaderExtensionMap& extensions_map) { @@ -338,6 +340,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) { return -1; } packet->set_packet_type(RtpPacketMediaType::kRetransmission); + packet->set_fec_protect_packet(false); std::vector> packets; packets.emplace_back(std::move(packet)); paced_sender_->EnqueuePackets(std::move(packets)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h index fd1a8da75..1580259b3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h @@ -29,7 +29,6 @@ #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/deprecation.h" #include "rtc_base/random.h" #include 
"rtc_base/rate_statistics.h" @@ -49,6 +48,10 @@ class RTPSender { RtpPacketHistory* packet_history, RtpPacketSender* packet_sender); + RTPSender() = delete; + RTPSender(const RTPSender&) = delete; + RTPSender& operator=(const RTPSender&) = delete; + ~RTPSender(); void SetSendingMediaStatus(bool enabled) RTC_LOCKS_EXCLUDED(send_mutex_); @@ -230,8 +233,6 @@ class RTPSender { bool supports_bwe_extension_ RTC_GUARDED_BY(send_mutex_); RateLimiter* const retransmission_rate_limiter_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTPSender); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc index d15e7458a..8cf60aaec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc @@ -46,15 +46,22 @@ const char* FrameTypeToString(AudioFrameType frame_type) { case AudioFrameType::kAudioFrameCN: return "audio_cn"; } + RTC_CHECK_NOTREACHED(); } #endif +constexpr char kIncludeCaptureClockOffset[] = + "WebRTC-IncludeCaptureClockOffset"; + } // namespace RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtp_sender) : clock_(clock), rtp_sender_(rtp_sender), - absolute_capture_time_sender_(clock) { + absolute_capture_time_sender_(clock), + include_capture_clock_offset_( + absl::StartsWith(field_trials_.Lookup(kIncludeCaptureClockOffset), + "Enabled")) { RTC_DCHECK(clock_); } @@ -280,7 +287,8 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, // absolute capture time sending. encoder_rtp_timestamp_frequency.value_or(0), Int64MsToUQ32x32(absolute_capture_timestamp_ms + NtpOffsetMs()), - /*estimated_capture_clock_offset=*/absl::nullopt); + /*estimated_capture_clock_offset=*/ + include_capture_clock_offset_ ? 
absl::make_optional(0) : absl::nullopt); if (absolute_capture_time) { // It also checks that extension was registered during SDP negotiation. If // not then setter won't do anything. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h index 3d3ca52c8..57b9dd7ce 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h @@ -17,11 +17,11 @@ #include #include "absl/strings/string_view.h" +#include "api/transport/field_trial_based_config.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" #include "modules/rtp_rtcp/source/dtmf_queue.h" #include "modules/rtp_rtcp/source/rtp_sender.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/one_time_event.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -32,6 +32,11 @@ namespace webrtc { class RTPSenderAudio { public: RTPSenderAudio(Clock* clock, RTPSender* rtp_sender); + + RTPSenderAudio() = delete; + RTPSenderAudio(const RTPSenderAudio&) = delete; + RTPSenderAudio& operator=(const RTPSenderAudio&) = delete; + ~RTPSenderAudio(); int32_t RegisterAudioPayload(absl::string_view payload_name, @@ -106,7 +111,8 @@ class RTPSenderAudio { AbsoluteCaptureTimeSender absolute_capture_time_sender_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTPSenderAudio); + const FieldTrialBasedConfig field_trials_; + const bool include_capture_clock_offset_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc index c89b9e42c..aba23ddc4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ 
b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -30,11 +30,12 @@ constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(kBitrateStatisticsWindowMs); -bool IsEnabled(absl::string_view name, - const WebRtcKeyValueConfig* field_trials) { +bool IsTrialSetTo(const WebRtcKeyValueConfig* field_trials, + absl::string_view name, + absl::string_view value) { FieldTrialBasedConfig default_trials; auto& trials = field_trials ? *field_trials : default_trials; - return absl::StartsWith(trials.Lookup(name), "Enabled"); + return absl::StartsWith(trials.Lookup(name), value); } } // namespace @@ -89,7 +90,9 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, : absl::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), send_side_bwe_with_overhead_( - IsEnabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)), + !IsTrialSetTo(config.field_trials, + "WebRTC-SendSideBwe-WithOverhead", + "Disabled")), clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), @@ -98,10 +101,7 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, is_audio_(config.audio), #endif need_rtp_packet_infos_(config.need_rtp_packet_infos), - fec_generator_( - IsEnabled("WebRTC-DeferredFecGeneration", config.field_trials) - ? config.fec_generator - : nullptr), + fec_generator_(config.fec_generator), transport_feedback_observer_(config.transport_feedback_callback), send_side_delay_observer_(config.send_side_delay_observer), send_packet_observer_(config.send_packet_observer), @@ -172,7 +172,7 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, } if (fec_generator_ && packet->fec_protect_packet()) { - // Deferred fec generation is used, add packet to generator. + // This packet should be protected by FEC, add it to packet generator. 
RTC_DCHECK(fec_generator_); RTC_DCHECK(packet->packet_type() == RtpPacketMediaType::kVideo); absl::optional> diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index 43b31ddff..55ba9b054 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -34,8 +34,10 @@ #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/trace_event.h" @@ -44,6 +46,8 @@ namespace webrtc { namespace { constexpr size_t kRedForFecHeaderLength = 1; constexpr int64_t kMaxUnretransmittableFrameIntervalMs = 33 * 4; +constexpr char kIncludeCaptureClockOffset[] = + "WebRTC-IncludeCaptureClockOffset"; void BuildRedPayload(const RtpPacketToSend& media_packet, RtpPacketToSend* red_packet) { @@ -109,10 +113,35 @@ const char* FrameTypeToString(VideoFrameType frame_type) { } #endif -bool IsNoopDelay(const PlayoutDelay& delay) { +bool IsNoopDelay(const VideoPlayoutDelay& delay) { return delay.min_ms == -1 && delay.max_ms == -1; } +absl::optional LoadVideoPlayoutDelayOverride( + const WebRtcKeyValueConfig* key_value_config) { + RTC_DCHECK(key_value_config); + FieldTrialOptional playout_delay_min_ms("min_ms", absl::nullopt); + FieldTrialOptional playout_delay_max_ms("max_ms", absl::nullopt); + ParseFieldTrial({&playout_delay_max_ms, &playout_delay_min_ms}, + key_value_config->Lookup("WebRTC-ForceSendPlayoutDelay")); + return playout_delay_max_ms && playout_delay_min_ms + ? 
absl::make_optional(*playout_delay_min_ms, + *playout_delay_max_ms) + : absl::nullopt; +} + +// Some packets can be skipped and the stream can still be decoded. Those +// packets are less likely to be retransmitted if they are lost. +bool PacketWillLikelyBeRequestedForRestransmitionIfLost( + const RTPVideoHeader& video_header) { + return IsBaseLayer(video_header) && + !(video_header.generic.has_value() + ? absl::c_linear_search( + video_header.generic->decode_target_indications, + DecodeTargetIndication::kDiscardable) + : false); +} + } // namespace RTPSenderVideo::RTPSenderVideo(const Config& config) @@ -124,13 +153,13 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) : (kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers)), last_rotation_(kVideoRotation_0), transmit_color_space_next_frame_(false), + send_allocation_(false), current_playout_delay_{-1, -1}, playout_delay_pending_(false), + forced_playout_delay_(LoadVideoPlayoutDelayOverride(config.field_trials)), red_payload_type_(config.red_payload_type), - fec_generator_(config.fec_generator), fec_type_(config.fec_type), fec_overhead_bytes_(config.fec_overhead_bytes), - video_bitrate_(1000, RateStatistics::kBpsScale), packetization_overhead_bitrate_(1000, RateStatistics::kBpsScale), frame_encryptor_(config.frame_encryptor), require_frame_encryption_(config.require_frame_encryption), @@ -146,7 +175,10 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) config.frame_transformer, rtp_sender_->SSRC(), config.send_transport_queue) - : nullptr) { + : nullptr), + include_capture_clock_offset_(absl::StartsWith( + config.field_trials->Lookup(kIncludeCaptureClockOffset), + "Enabled")) { if (frame_transformer_delegate_) frame_transformer_delegate_->Init(); } @@ -159,27 +191,11 @@ RTPSenderVideo::~RTPSenderVideo() { void RTPSenderVideo::LogAndSendToNetwork( std::vector> packets, size_t unpacketized_payload_size) { - int64_t now_ms = clock_->TimeInMilliseconds(); -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - 
if (fec_generator_) { - uint32_t fec_rate_kbps = fec_generator_->CurrentFecRate().kbps(); - for (const auto& packet : packets) { - if (packet->packet_type() == - RtpPacketMediaType::kForwardErrorCorrection) { - const uint32_t ssrc = packet->Ssrc(); - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoFecBitrate_kbps", now_ms, - fec_rate_kbps, ssrc); - } - } - } -#endif - { MutexLock lock(&stats_mutex_); size_t packetized_payload_size = 0; for (const auto& packet : packets) { if (*packet->packet_type() == RtpPacketMediaType::kVideo) { - video_bitrate_.Update(packet->size(), now_ms); packetized_payload_size += packet->payload_size(); } } @@ -221,11 +237,15 @@ void RTPSenderVideo::SetVideoStructure( frame_transformer_delegate_->SetVideoStructureUnderLock(video_structure); return; } - // Lock is being held by SetVideoStructure() caller. - SetVideoStructureUnderLock(video_structure); + SetVideoStructureInternal(video_structure); } -void RTPSenderVideo::SetVideoStructureUnderLock( +void RTPSenderVideo::SetVideoStructureAfterTransformation( + const FrameDependencyStructure* video_structure) { + SetVideoStructureInternal(video_structure); +} + +void RTPSenderVideo::SetVideoStructureInternal( const FrameDependencyStructure* video_structure) { RTC_DCHECK_RUNS_SERIALIZED(&send_checker_); if (video_structure == nullptr) { @@ -255,6 +275,28 @@ void RTPSenderVideo::SetVideoStructureUnderLock( video_structure_->structure_id = structure_id; } +void RTPSenderVideo::SetVideoLayersAllocation( + VideoLayersAllocation allocation) { + if (frame_transformer_delegate_) { + frame_transformer_delegate_->SetVideoLayersAllocationUnderLock( + std::move(allocation)); + return; + } + SetVideoLayersAllocationInternal(std::move(allocation)); +} + +void RTPSenderVideo::SetVideoLayersAllocationAfterTransformation( + VideoLayersAllocation allocation) { + SetVideoLayersAllocationInternal(std::move(allocation)); +} + +void RTPSenderVideo::SetVideoLayersAllocationInternal( + VideoLayersAllocation allocation) { + 
RTC_DCHECK_RUNS_SERIALIZED(&send_checker_); + allocation_ = std::move(allocation); + send_allocation_ = true; +} + void RTPSenderVideo::AddRtpHeaderExtensions( const RTPVideoHeader& video_header, const absl::optional& absolute_capture_time, @@ -340,10 +382,15 @@ void RTPSenderVideo::AddRtpHeaderExtensions( descriptor.active_decode_targets_bitmask = active_decode_targets_tracker_.ActiveDecodeTargetsBitmask(); } - // To avoid extra structure copy, temporary share ownership of the - // video_structure with the dependency descriptor. + // VP9 mark all layer frames of the first picture as kVideoFrameKey, + // Structure should be attached to the descriptor to lowest spatial layer + // when inter layer dependency is used, i.e. L structures; or to all + // layers when inter layer dependency is not used, i.e. S structures. + // Distinguish these two cases by checking if there are any dependencies. if (video_header.frame_type == VideoFrameType::kVideoFrameKey && - first_packet) { + video_header.generic->dependencies.empty() && first_packet) { + // To avoid extra structure copy, temporary share ownership of the + // video_structure with the dependency descriptor. 
descriptor.attached_structure = absl::WrapUnique(video_structure_.get()); } @@ -385,6 +432,18 @@ void RTPSenderVideo::AddRtpHeaderExtensions( generic_descriptor); } } + + if (first_packet && send_allocation_) { + if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { + packet->SetExtension( + allocation_.value()); + } else if (PacketWillLikelyBeRequestedForRestransmitionIfLost( + video_header)) { + VideoLayersAllocation allocation = allocation_.value(); + allocation.resolution_and_frame_rate_is_valid = false; + packet->SetExtension(allocation); + } + } } bool RTPSenderVideo::SendVideo( @@ -394,7 +453,8 @@ bool RTPSenderVideo::SendVideo( int64_t capture_time_ms, rtc::ArrayView payload, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms) { + absl::optional expected_retransmission_time_ms, + absl::optional estimated_capture_clock_offset_ms) { #if RTC_TRACE_EVENTS_ENABLED TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type", FrameTypeToString(video_header.frame_type)); @@ -414,10 +474,15 @@ bool RTPSenderVideo::SendVideo( } MaybeUpdateCurrentPlayoutDelay(video_header); - if (video_header.frame_type == VideoFrameType::kVideoFrameKey && - !IsNoopDelay(current_playout_delay_)) { - // Force playout delay on key-frames, if set. - playout_delay_pending_ = true; + if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { + if (!IsNoopDelay(current_playout_delay_)) { + // Force playout delay on key-frames, if set. + playout_delay_pending_ = true; + } + if (allocation_) { + // Send the bitrate allocation on every key frame. + send_allocation_ = true; + } } if (video_structure_ != nullptr && video_header.generic) { @@ -428,9 +493,15 @@ bool RTPSenderVideo::SendVideo( video_header.generic->frame_id, video_header.generic->chain_diffs); } + const uint8_t temporal_id = GetTemporalId(video_header); + // No FEC protection for upper temporal layers, if used. 
+ const bool use_fec = fec_type_.has_value() && + (temporal_id == 0 || temporal_id == kNoTemporalIdx); + // Maximum size of packet including rtp headers. // Extra space left in case packet will be resent using fec or rtx. - int packet_capacity = rtp_sender_->MaxRtpPacketSize() - FecPacketOverhead() - + int packet_capacity = rtp_sender_->MaxRtpPacketSize() - + (use_fec ? FecPacketOverhead() : 0) - (rtp_sender_->RtxStatus() ? kRtxHeaderSize : 0); std::unique_ptr single_packet = @@ -446,7 +517,9 @@ bool RTPSenderVideo::SendVideo( single_packet->Csrcs()), single_packet->Timestamp(), kVideoPayloadTypeFrequency, Int64MsToUQ32x32(single_packet->capture_time_ms() + NtpOffsetMs()), - /*estimated_capture_clock_offset=*/absl::nullopt); + /*estimated_capture_clock_offset=*/ + include_capture_clock_offset_ ? estimated_capture_clock_offset_ms + : absl::nullopt); auto first_packet = std::make_unique(*single_packet); auto middle_packet = std::make_unique(*single_packet); @@ -488,8 +561,8 @@ bool RTPSenderVideo::SendVideo( first_packet->HasExtension() || first_packet->HasExtension(); - // Minimization of the vp8 descriptor may erase temporal_id, so save it. - const uint8_t temporal_id = GetTemporalId(video_header); + // Minimization of the vp8 descriptor may erase temporal_id, so use + // |temporal_id| rather than reference |video_header| beyond this point. if (has_generic_descriptor) { MinimizeDescriptor(&video_header); } @@ -582,18 +655,11 @@ bool RTPSenderVideo::SendVideo( packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds()); } - // No FEC protection for upper temporal layers, if used. - if (fec_type_.has_value() && - (temporal_id == 0 || temporal_id == kNoTemporalIdx)) { - if (fec_generator_) { - fec_generator_->AddPacketAndGenerateFec(*packet); - } else { - // Deferred FEC generation, just mark packet. 
- packet->set_fec_protect_packet(true); - } - } + packet->set_fec_protect_packet(use_fec); if (red_enabled()) { + // TODO(sprang): Consider packetizing directly into packets with the RED + // header already in place, to avoid this copy. std::unique_ptr red_packet(new RtpPacketToSend(*packet)); BuildRedPayload(*packet, red_packet.get()); red_packet->SetPayloadType(*red_payload_type_); @@ -620,19 +686,6 @@ bool RTPSenderVideo::SendVideo( } } - if (fec_generator_) { - // Fetch any FEC packets generated from the media frame and add them to - // the list of packets to send. - auto fec_packets = fec_generator_->GetFecPackets(); - const bool generate_sequence_numbers = !fec_generator_->FecSsrc(); - for (auto& fec_packet : fec_packets) { - if (generate_sequence_numbers) { - rtp_sender_->AssignSequenceNumber(fec_packet.get()); - } - rtp_packets.emplace_back(std::move(fec_packet)); - } - } - LogAndSendToNetwork(std::move(rtp_packets), payload.size()); // Update details about the last sent frame. @@ -647,15 +700,11 @@ bool RTPSenderVideo::SendVideo( } if (video_header.frame_type == VideoFrameType::kVideoFrameKey || - (IsBaseLayer(video_header) && - !(video_header.generic.has_value() - ? absl::c_linear_search( - video_header.generic->decode_target_indications, - DecodeTargetIndication::kDiscardable) - : false))) { - // This frame has guaranteed delivery, no need to populate playout + PacketWillLikelyBeRequestedForRestransmitionIfLost(video_header)) { + // This frame will likely be delivered, no need to populate playout // delay extensions until it changes again. 
playout_delay_pending_ = false; + send_allocation_ = false; } TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp", @@ -681,11 +730,6 @@ bool RTPSenderVideo::SendEncodedImage( expected_retransmission_time_ms); } -uint32_t RTPSenderVideo::VideoBitrateSent() const { - MutexLock lock(&stats_mutex_); - return video_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0); -} - uint32_t RTPSenderVideo::PacketizationOverheadBps() const { MutexLock lock(&stats_mutex_); return packetization_overhead_bitrate_.Rate(clock_->TimeInMilliseconds()) @@ -785,12 +829,13 @@ bool RTPSenderVideo::UpdateConditionalRetransmit( void RTPSenderVideo::MaybeUpdateCurrentPlayoutDelay( const RTPVideoHeader& header) { - if (IsNoopDelay(header.playout_delay)) { + VideoPlayoutDelay requested_delay = + forced_playout_delay_.value_or(header.playout_delay); + + if (IsNoopDelay(requested_delay)) { return; } - PlayoutDelay requested_delay = header.playout_delay; - if (requested_delay.min_ms > PlayoutDelayLimits::kMaxMs || requested_delay.max_ms > PlayoutDelayLimits::kMaxMs) { RTC_DLOG(LS_ERROR) diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h index e8cba5073..3f431dfec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h @@ -24,6 +24,7 @@ #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_codec_type.h" #include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" #include "modules/rtp_rtcp/source/active_decode_targets_helper.h" @@ -70,8 +71,6 @@ class RTPSenderVideo { // expected to outlive the RTPSenderVideo object they are passed to. 
Clock* clock = nullptr; RTPSender* rtp_sender = nullptr; - FlexfecSender* flexfec_sender = nullptr; - VideoFecGenerator* fec_generator = nullptr; // Some FEC data is duplicated here in preparation of moving FEC to // the egress stage. absl::optional fec_type; @@ -91,13 +90,19 @@ class RTPSenderVideo { // expected_retransmission_time_ms.has_value() -> retransmission allowed. // Calls to this method is assumed to be externally serialized. + // |estimated_capture_clock_offset_ms| is an estimated clock offset between + // this sender and the original capturer, for this video packet. See + // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time for more + // details. If the sender and the capture has the same clock, it is supposed + // to be zero valued, which is given as the default. bool SendVideo(int payload_type, absl::optional codec_type, uint32_t rtp_timestamp, int64_t capture_time_ms, rtc::ArrayView payload, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms); + absl::optional expected_retransmission_time_ms, + absl::optional estimated_capture_clock_offset_ms = 0); bool SendEncodedImage( int payload_type, @@ -113,14 +118,27 @@ class RTPSenderVideo { // All calls to SendVideo after this call must use video_header compatible // with the video_structure. void SetVideoStructure(const FrameDependencyStructure* video_structure); - void SetVideoStructureUnderLock( + // Should only be used by a RTPSenderVideoFrameTransformerDelegate and exists + // to ensure correct syncronization. + void SetVideoStructureAfterTransformation( const FrameDependencyStructure* video_structure); - uint32_t VideoBitrateSent() const; + // Sets current active VideoLayersAllocation. The allocation will be sent + // using the rtp video layers allocation extension. The allocation will be + // sent in full on every key frame. 
The allocation will be sent once on a + // none discardable delta frame per call to this method and will not contain + // resolution and frame rate. + void SetVideoLayersAllocation(VideoLayersAllocation allocation); + // Should only be used by a RTPSenderVideoFrameTransformerDelegate and exists + // to ensure correct syncronization. + void SetVideoLayersAllocationAfterTransformation( + VideoLayersAllocation allocation); // Returns the current packetization overhead rate, in bps. Note that this is // the payload overhead, eg the VP8 payload headers, not the RTP headers // or extension/ + // TODO(sprang): Consider moving this to RtpSenderEgress so it's in the same + // place as the other rate stats. uint32_t PacketizationOverheadBps() const; protected: @@ -141,6 +159,10 @@ class RTPSenderVideo { int64_t last_frame_time_ms; }; + void SetVideoStructureInternal( + const FrameDependencyStructure* video_structure); + void SetVideoLayersAllocationInternal(VideoLayersAllocation allocation); + void AddRtpHeaderExtensions( const RTPVideoHeader& video_header, const absl::optional& absolute_capture_time, @@ -177,24 +199,28 @@ class RTPSenderVideo { bool transmit_color_space_next_frame_ RTC_GUARDED_BY(send_checker_); std::unique_ptr video_structure_ RTC_GUARDED_BY(send_checker_); + absl::optional allocation_ + RTC_GUARDED_BY(send_checker_); + // Flag indicating if we should send |allocation_|. + bool send_allocation_ RTC_GUARDED_BY(send_checker_); // Current target playout delay. - PlayoutDelay current_playout_delay_ RTC_GUARDED_BY(send_checker_); - // Flag indicating if we need to propagate |current_playout_delay_| in order + VideoPlayoutDelay current_playout_delay_ RTC_GUARDED_BY(send_checker_); + // Flag indicating if we need to send |current_playout_delay_| in order // to guarantee it gets delivered. bool playout_delay_pending_; + // Set by the field trial WebRTC-ForceSendPlayoutDelay to override the playout + // delay of outgoing video frames. 
+ const absl::optional forced_playout_delay_; // Should never be held when calling out of this class. Mutex mutex_; const absl::optional red_payload_type_; - VideoFecGenerator* const fec_generator_; absl::optional fec_type_; const size_t fec_overhead_bytes_; // Per packet max FEC overhead. mutable Mutex stats_mutex_; - // Bitrate used for video payload and RTP headers. - RateStatistics video_bitrate_ RTC_GUARDED_BY(stats_mutex_); RateStatistics packetization_overhead_bitrate_ RTC_GUARDED_BY(stats_mutex_); std::map frame_stats_by_temporal_layer_ @@ -218,6 +244,8 @@ class RTPSenderVideo { const rtc::scoped_refptr frame_transformer_delegate_; + + const bool include_capture_clock_offset_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 786e46777..074b64086 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -162,7 +162,14 @@ void RTPSenderVideoFrameTransformerDelegate::SetVideoStructureUnderLock( const FrameDependencyStructure* video_structure) { MutexLock lock(&sender_lock_); RTC_CHECK(sender_); - sender_->SetVideoStructureUnderLock(video_structure); + sender_->SetVideoStructureAfterTransformation(video_structure); +} + +void RTPSenderVideoFrameTransformerDelegate::SetVideoLayersAllocationUnderLock( + VideoLayersAllocation allocation) { + MutexLock lock(&sender_lock_); + RTC_CHECK(sender_); + sender_->SetVideoLayersAllocationAfterTransformation(std::move(allocation)); } void RTPSenderVideoFrameTransformerDelegate::Reset() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h 
b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h index a14ce3a81..857386929 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h @@ -16,6 +16,7 @@ #include "api/frame_transformer_interface.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" +#include "api/video/video_layers_allocation.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -51,10 +52,16 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { // Delegates the call to RTPSendVideo::SendVideo on the |encoder_queue_|. void SendVideo(std::unique_ptr frame) const; - // Delegates the call to RTPSendVideo::SendVideo under |sender_lock_|. + // Delegates the call to RTPSendVideo::SetVideoStructureAfterTransformation + // under |sender_lock_|. void SetVideoStructureUnderLock( const FrameDependencyStructure* video_structure); + // Delegates the call to + // RTPSendVideo::SetVideoLayersAllocationAfterTransformation under + // |sender_lock_|. + void SetVideoLayersAllocationUnderLock(VideoLayersAllocation allocation); + // Unregisters and releases the |frame_transformer_| reference, and resets // |sender_| under lock. Called from RTPSenderVideo destructor to prevent the // |sender_| to dangle. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc index c25fd96fa..a3d6d6f7f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc @@ -492,6 +492,10 @@ void RtpHeaderParser::ParseOneByteExtensionHeader( &header->extension.video_timing); break; } + case kRtpExtensionVideoLayersAllocation: + RTC_LOG(WARNING) << "VideoLayersAllocation extension unsupported by " + "rtp header parser."; + break; case kRtpExtensionRtpStreamId: { std::string name(reinterpret_cast(ptr), len + 1); if (IsLegalRsidName(name)) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h index 8789906dc..b5934ff8b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h @@ -23,7 +23,6 @@ #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "common_types.h" // NOLINT(build/include_directory) #include "modules/video_coding/codecs/h264/include/h264_globals.h" #ifndef DISABLE_H265 #include "modules/video_coding/codecs/h265/include/h265_globals.h" @@ -83,10 +82,11 @@ struct RTPVideoHeader { VideoContentType content_type = VideoContentType::UNSPECIFIED; bool is_first_packet_in_frame = false; bool is_last_packet_in_frame = false; + bool is_last_frame_in_picture = true; uint8_t simulcastIdx = 0; VideoCodecType codec = VideoCodecType::kVideoCodecGeneric; - PlayoutDelay playout_delay = {-1, -1}; + VideoPlayoutDelay playout_delay; VideoSendTiming video_timing; absl::optional color_space; RTPVideoTypeHeader video_type_header; diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc new file mode 100644 index 000000000..dbaa36b15 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc @@ -0,0 +1,260 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" + +#include + +#include "api/video/video_layers_allocation.h" +#include "rtc_base/bit_buffer.h" + +namespace webrtc { + +constexpr RTPExtensionType RtpVideoLayersAllocationExtension::kId; +constexpr const char RtpVideoLayersAllocationExtension::kUri[]; + +namespace { + +// Counts the number of bits used in the binary representation of val. +size_t CountBits(uint64_t val) { + size_t bit_count = 0; + while (val != 0) { + bit_count++; + val >>= 1; + } + return bit_count; +} + +// Counts the number of bits used if `val`is encoded using unsigned exponential +// Golomb encoding. +// TODO(bugs.webrtc.org/12000): Move to bit_buffer.cc if Golomb encoding is used +// in the final version. +size_t SizeExponentialGolomb(uint32_t val) { + if (val == std::numeric_limits::max()) { + return 0; + } + uint64_t val_to_encode = static_cast(val) + 1; + return CountBits(val_to_encode) * 2 - 1; +} + +} // namespace + +// TODO(bugs.webrtc.org/12000): Review and revise the content and encoding of +// this extension. This is an experimental first version. 
+ +// 0 1 2 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | NS|RSID|T|X|Res| Bit encoded data... +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// NS: Number of spatial layers/simulcast streams - 1. 2 bits, thus allowing +// passing number of layers/streams up-to 4. +// RSID: RTP stream id this allocation is sent on, numbered from 0. 2 bits. +// T: indicates if all spatial layers have the same amount of temporal layers. +// X: indicates if resolution and frame rate per spatial layer is present. +// Res: 2 bits reserved for future use. +// Bit encoded data: consists of following fields written in order: +// 1) T=1: Nt - 2-bit value of number of temporal layers - 1 +// T=0: NS 2-bit values of numbers of temporal layers - 1 for all spatial +// layers from lower to higher. +// 2) Bitrates: +// One value for each spatial x temporal layer. +// Format: RSID (2-bit) SID(2-bit),folowed by bitrate for all temporal +// layers for the RSID,SID tuple. All bitrates are in kbps. All bitrates are +// total required bitrate to receive the corresponding layer, i.e. in +// simulcast mode they include only corresponding spatial layer, in full-svc +// all lower spatial layers are included. All lower temporal layers are also +// included. All bitrates are written using unsigned Exponential Golomb +// encoding. +// 3) [only if X bit is set]. Encoded width, 16-bit, height, 16-bit, +// max frame rate 8-bit per spatial layer in order from lower to higher. 
+ +bool RtpVideoLayersAllocationExtension::Write( + rtc::ArrayView data, + const VideoLayersAllocation& allocation) { + RTC_DCHECK_LT(allocation.rtp_stream_index, + VideoLayersAllocation::kMaxSpatialIds); + RTC_DCHECK_GE(data.size(), ValueSize(allocation)); + rtc::BitBufferWriter writer(data.data(), data.size()); + + // NS: + if (allocation.active_spatial_layers.empty()) + return false; + writer.WriteBits(allocation.active_spatial_layers.size() - 1, 2); + + // RSID: + writer.WriteBits(allocation.rtp_stream_index, 2); + + // T: + bool num_tls_is_the_same = true; + size_t first_layers_number_of_temporal_layers = + allocation.active_spatial_layers.front() + .target_bitrate_per_temporal_layer.size(); + for (const auto& spatial_layer : allocation.active_spatial_layers) { + if (first_layers_number_of_temporal_layers != + spatial_layer.target_bitrate_per_temporal_layer.size()) { + num_tls_is_the_same = false; + break; + } + } + writer.WriteBits(num_tls_is_the_same ? 1 : 0, 1); + + // X: + writer.WriteBits(allocation.resolution_and_frame_rate_is_valid ? 
1 : 0, 1); + + // RESERVED: + writer.WriteBits(/*val=*/0, /*bit_count=*/2); + + if (num_tls_is_the_same) { + writer.WriteBits(first_layers_number_of_temporal_layers - 1, 2); + } else { + for (const auto& spatial_layer : allocation.active_spatial_layers) { + writer.WriteBits( + spatial_layer.target_bitrate_per_temporal_layer.size() - 1, 2); + } + } + + for (const auto& spatial_layer : allocation.active_spatial_layers) { + writer.WriteBits(spatial_layer.rtp_stream_index, 2); + writer.WriteBits(spatial_layer.spatial_id, 2); + for (const DataRate& bitrate : + spatial_layer.target_bitrate_per_temporal_layer) { + writer.WriteExponentialGolomb(bitrate.kbps()); + } + } + + if (allocation.resolution_and_frame_rate_is_valid) { + for (const auto& spatial_layer : allocation.active_spatial_layers) { + writer.WriteUInt16(spatial_layer.width); + writer.WriteUInt16(spatial_layer.height); + writer.WriteUInt8(spatial_layer.frame_rate_fps); + } + } + return true; +} + +bool RtpVideoLayersAllocationExtension::Parse( + rtc::ArrayView data, + VideoLayersAllocation* allocation) { + if (data.size() == 0) + return false; + rtc::BitBuffer reader(data.data(), data.size()); + if (!allocation) + return false; + allocation->active_spatial_layers.clear(); + + uint32_t val; + // NS: + if (!reader.ReadBits(&val, 2)) + return false; + int active_spatial_layers = val + 1; + + // RSID: + if (!reader.ReadBits(&val, 2)) + return false; + allocation->rtp_stream_index = val; + + // T: + if (!reader.ReadBits(&val, 1)) + return false; + bool num_tls_is_constant = (val == 1); + + // X: + if (!reader.ReadBits(&val, 1)) + return false; + allocation->resolution_and_frame_rate_is_valid = (val == 1); + + // RESERVED: + if (!reader.ReadBits(&val, 2)) + return false; + + int number_of_temporal_layers[VideoLayersAllocation::kMaxSpatialIds]; + if (num_tls_is_constant) { + if (!reader.ReadBits(&val, 2)) + return false; + for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { + 
number_of_temporal_layers[sl_idx] = val + 1; + } + } else { + for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { + if (!reader.ReadBits(&val, 2)) + return false; + number_of_temporal_layers[sl_idx] = val + 1; + if (number_of_temporal_layers[sl_idx] > + VideoLayersAllocation::kMaxTemporalIds) + return false; + } + } + + for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { + allocation->active_spatial_layers.emplace_back(); + auto& spatial_layer = allocation->active_spatial_layers.back(); + auto& temporal_layers = spatial_layer.target_bitrate_per_temporal_layer; + if (!reader.ReadBits(&val, 2)) + return false; + spatial_layer.rtp_stream_index = val; + if (!reader.ReadBits(&val, 2)) + return false; + spatial_layer.spatial_id = val; + for (int tl_idx = 0; tl_idx < number_of_temporal_layers[sl_idx]; ++tl_idx) { + reader.ReadExponentialGolomb(&val); + temporal_layers.push_back(DataRate::KilobitsPerSec(val)); + } + } + + if (allocation->resolution_and_frame_rate_is_valid) { + for (auto& spatial_layer : allocation->active_spatial_layers) { + if (!reader.ReadUInt16(&spatial_layer.width)) + return false; + if (!reader.ReadUInt16(&spatial_layer.height)) + return false; + if (!reader.ReadUInt8(&spatial_layer.frame_rate_fps)) + return false; + } + } + return true; +} + +size_t RtpVideoLayersAllocationExtension::ValueSize( + const VideoLayersAllocation& allocation) { + if (allocation.active_spatial_layers.empty()) { + return 0; + } + size_t size_in_bits = 8; // Fixed first byte.¨ + bool num_tls_is_the_same = true; + size_t first_layers_number_of_temporal_layers = + allocation.active_spatial_layers.front() + .target_bitrate_per_temporal_layer.size(); + for (const auto& spatial_layer : allocation.active_spatial_layers) { + if (first_layers_number_of_temporal_layers != + spatial_layer.target_bitrate_per_temporal_layer.size()) { + num_tls_is_the_same = false; + } + size_in_bits += 4; // RSID, SID tuple. 
+ for (const auto& bitrate : + spatial_layer.target_bitrate_per_temporal_layer) { + size_in_bits += SizeExponentialGolomb(bitrate.kbps()); + } + } + if (num_tls_is_the_same) { + size_in_bits += 2; + } else { + for (const auto& spatial_layer : allocation.active_spatial_layers) { + size_in_bits += + 2 * spatial_layer.target_bitrate_per_temporal_layer.size(); + } + } + if (allocation.resolution_and_frame_rate_is_valid) { + size_in_bits += allocation.active_spatial_layers.size() * 5 * 8; + } + return (size_in_bits + 7) / 8; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h new file mode 100644 index 000000000..ff8ea2a7a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ + +#include "api/video/video_layers_allocation.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" + +namespace webrtc { + +// TODO(bugs.webrtc.org/12000): Note that this extensions is being developed and +// the wire format will likely change. 
+class RtpVideoLayersAllocationExtension { + public: + using value_type = VideoLayersAllocation; + static constexpr RTPExtensionType kId = kRtpExtensionVideoLayersAllocation; + static constexpr const char kUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-layers-allocation00"; + static bool Parse(rtc::ArrayView data, + VideoLayersAllocation* allocation); + static size_t ValueSize(const VideoLayersAllocation& allocation); + static bool Write(rtc::ArrayView data, + const VideoLayersAllocation& allocation); +}; + +} // namespace webrtc +#endif // MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.cc index 91a72326c..846977e68 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.cc @@ -25,34 +25,25 @@ namespace webrtc { namespace videocapturemodule { DeviceInfoImpl::DeviceInfoImpl() - : _apiLock(*RWLockWrapper::CreateRWLock()), - _lastUsedDeviceName(NULL), - _lastUsedDeviceNameLength(0) {} + : _lastUsedDeviceName(NULL), _lastUsedDeviceNameLength(0) {} DeviceInfoImpl::~DeviceInfoImpl(void) { - _apiLock.AcquireLockExclusive(); + MutexLock lock(&_apiLock); free(_lastUsedDeviceName); - _apiLock.ReleaseLockExclusive(); - - delete &_apiLock; } int32_t DeviceInfoImpl::NumberOfCapabilities(const char* deviceUniqueIdUTF8) { if (!deviceUniqueIdUTF8) return -1; - _apiLock.AcquireLockShared(); + MutexLock lock(&_apiLock); // Is it the same device that is asked for again. if (absl::EqualsIgnoreCase( deviceUniqueIdUTF8, absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { - _apiLock.ReleaseLockShared(); return static_cast(_captureCapabilities.size()); } - // Need to get exclusive rights to create the new capability map. 
- _apiLock.ReleaseLockShared(); - WriteLockScoped cs2(_apiLock); int32_t ret = CreateCapabilityMap(deviceUniqueIdUTF8); return ret; @@ -63,20 +54,14 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8, VideoCaptureCapability& capability) { assert(deviceUniqueIdUTF8 != NULL); - ReadLockScoped cs(_apiLock); + MutexLock lock(&_apiLock); if (!absl::EqualsIgnoreCase( deviceUniqueIdUTF8, absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { - _apiLock.ReleaseLockShared(); - _apiLock.AcquireLockExclusive(); if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) { - _apiLock.ReleaseLockExclusive(); - _apiLock.AcquireLockShared(); return -1; } - _apiLock.ReleaseLockExclusive(); - _apiLock.AcquireLockShared(); } // Make sure the number is valid @@ -98,17 +83,13 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( if (!deviceUniqueIdUTF8) return -1; - ReadLockScoped cs(_apiLock); + MutexLock lock(&_apiLock); if (!absl::EqualsIgnoreCase( deviceUniqueIdUTF8, absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { - _apiLock.ReleaseLockShared(); - _apiLock.AcquireLockExclusive(); if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) { return -1; } - _apiLock.ReleaseLockExclusive(); - _apiLock.AcquireLockShared(); } int32_t bestformatIndex = -1; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h index 37a457ce8..4b4738960 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h @@ -18,7 +18,8 @@ #include "api/video/video_rotation.h" #include "modules/video_capture/video_capture.h" #include "modules/video_capture/video_capture_defines.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { namespace videocapturemodule { @@ 
-45,15 +46,16 @@ class DeviceInfoImpl : public VideoCaptureModule::DeviceInfo { * Fills the member variable _captureCapabilities with capabilities for the * given device name. */ - virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) = 0; + virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock) = 0; protected: // Data members typedef std::vector VideoCaptureCapabilities; - VideoCaptureCapabilities _captureCapabilities; - RWLockWrapper& _apiLock; - char* _lastUsedDeviceName; - uint32_t _lastUsedDeviceNameLength; + VideoCaptureCapabilities _captureCapabilities RTC_GUARDED_BY(_apiLock); + Mutex _apiLock; + char* _lastUsedDeviceName RTC_GUARDED_BY(_apiLock); + uint32_t _lastUsedDeviceNameLength RTC_GUARDED_BY(_apiLock); }; } // namespace videocapturemodule } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.h index a320c36fd..304ae7123 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.h @@ -33,13 +33,14 @@ class DeviceInfoLinux : public DeviceInfoImpl { * Fills the membervariable _captureCapabilities with capabilites for the * given device name. 
*/ - int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override; + int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/, const char* /*dialogTitleUTF8*/, void* /*parentWindow*/, uint32_t /*positionX*/, uint32_t /*positionY*/) override; - int32_t FillCapabilities(int fd); + int32_t FillCapabilities(int fd) RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); int32_t Init() override; private: diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc index 1a8a0c477..bedb51937 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc @@ -19,7 +19,7 @@ #include "api/video/i420_buffer.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" -#include "common_video/include/i420_buffer_pool.h" +#include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/logging.h" #include "third_party/libaom/source/libaom/aom/aom_decoder.h" @@ -59,7 +59,7 @@ class LibaomAv1Decoder final : public VideoDecoder { aom_codec_ctx_t context_; bool inited_; // Pool of memory buffers to store decoded image data for application access. - I420BufferPool buffer_pool_; + VideoFrameBufferPool buffer_pool_; DecodedImageCallback* decode_complete_callback_; }; @@ -138,7 +138,7 @@ int32_t LibaomAv1Decoder::Decode(const EncodedImage& encoded_image, // Allocate memory for decoded frame. rtc::scoped_refptr buffer = - buffer_pool_.CreateBuffer(decoded_image->d_w, decoded_image->d_h); + buffer_pool_.CreateI420Buffer(decoded_image->d_w, decoded_image->d_h); if (!buffer.get()) { // Pool has too many pending frames. 
RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned due to lack of" diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index cd9b37442..c1accad55 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -25,10 +25,11 @@ #include "api/video/video_frame.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "third_party/libaom/source/libaom/aom/aom_codec.h" @@ -93,7 +94,7 @@ class LibaomAv1Encoder final : public VideoEncoder { void SetSvcRefFrameConfig( const ScalableVideoController::LayerFrameConfig& layer_frame); - const std::unique_ptr svc_controller_; + std::unique_ptr svc_controller_; bool inited_; absl::optional svc_params_; VideoCodec encoder_settings_; @@ -164,6 +165,21 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, "LibaomAv1Encoder."; return result; } + if (encoder_settings_.numberOfSimulcastStreams > 1) { + RTC_LOG(LS_WARNING) << "Simulcast is not implemented by LibaomAv1Encoder."; + return result; + } + absl::string_view scalability_mode = encoder_settings_.ScalabilityMode(); + // When scalability_mode is not set, keep using svc_controller_ created + // at construction of the encoder. 
+ if (!scalability_mode.empty()) { + svc_controller_ = CreateScalabilityStructure(scalability_mode); + } + if (svc_controller_ == nullptr) { + RTC_LOG(LS_WARNING) << "Failed to set scalability mode " + << scalability_mode; + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } if (!SetSvcParams(svc_controller_->StreamConfig())) { return WEBRTC_VIDEO_CODEC_ERROR; @@ -223,6 +239,12 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, << " on control AV1E_SET_CPUUSED."; return WEBRTC_VIDEO_CODEC_ERROR; } + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_CDEF, 1); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_CDEF."; + return WEBRTC_VIDEO_CODEC_ERROR; + } ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_TPL_MODEL, 0); if (ret != AOM_CODEC_OK) { RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret @@ -440,7 +462,10 @@ int32_t LibaomAv1Encoder::Encode( const uint32_t duration = kRtpTicksPerSecond / static_cast(encoder_settings_.maxFramerate); - for (ScalableVideoController::LayerFrameConfig& layer_frame : layer_frames) { + for (size_t i = 0; i < layer_frames.size(); ++i) { + ScalableVideoController::LayerFrameConfig& layer_frame = layer_frames[i]; + const bool end_of_picture = i == layer_frames.size() - 1; + aom_enc_frame_flags_t flags = layer_frame.IsKeyframe() ? AOM_EFLAG_FORCE_KF : 0; @@ -460,7 +485,6 @@ int32_t LibaomAv1Encoder::Encode( // Get encoded image data. 
EncodedImage encoded_image; - encoded_image._completeFrame = true; aom_codec_iter_t iter = nullptr; int data_pkt_count = 0; while (const aom_codec_cx_pkt_t* pkt = @@ -507,6 +531,7 @@ int32_t LibaomAv1Encoder::Encode( if (encoded_image.size() > 0) { CodecSpecificInfo codec_specific_info; codec_specific_info.codecType = kVideoCodecAV1; + codec_specific_info.end_of_picture = end_of_picture; bool is_keyframe = layer_frame.IsKeyframe(); codec_specific_info.generic_frame_info = svc_controller_->OnEncodeDone(std::move(layer_frame)); @@ -527,7 +552,7 @@ int32_t LibaomAv1Encoder::Encode( } } encoded_image_callback_->OnEncodedImage(encoded_image, - &codec_specific_info, nullptr); + &codec_specific_info); } } @@ -597,6 +622,7 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { info.has_trusted_rate_controller = true; info.is_hardware_accelerated = false; info.scaling_settings = VideoEncoder::ScalingSettings(kMinQindex, kMaxQindex); + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420}; return info; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h index c2f04e669..04a2b65f5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h @@ -14,7 +14,7 @@ #include "absl/base/attributes.h" #include "api/video_codecs/video_encoder.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc deleted file mode 100644 index ae4c87922..000000000 --- 
a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; - -constexpr DecodeTargetIndication kDtis[3][2] = { - {kSwitch, kSwitch}, // KeyFrame - {kNotPresent, kDiscardable}, // DeltaFrame T1 - {kSwitch, kSwitch}, // DeltaFrame T0 -}; - -} // namespace - -ScalabilityStructureL1T2::~ScalabilityStructureL1T2() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL1T2::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 1; - result.num_temporal_layers = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL1T2::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 2; - structure.num_chains = 1; - structure.decode_target_protected_by_chain = {0, 0}; - structure.templates.resize(3); - structure.templates[0].T(0).Dtis("SS").ChainDiffs({0}); - structure.templates[1].T(0).Dtis("SS").ChainDiffs({2}).FrameDiffs({2}); - structure.templates[2].T(1).Dtis("-D").ChainDiffs({1}).FrameDiffs({1}); - return structure; -} - 
-std::vector -ScalabilityStructureL1T2::NextFrameConfig(bool restart) { - if (!active_decode_targets_[0]) { - RTC_LOG(LS_WARNING) << "No bitrate allocated for temporal layer 0, yet " - "frame is requested. No frame will be encoded."; - return {}; - } - if (restart) { - next_pattern_ = kKeyFrame; - } else if (!active_decode_targets_[1]) { - next_pattern_ = kDeltaFrameT0; - } - std::vector result(1); - - switch (next_pattern_) { - case kKeyFrame: - result[0].Id(0).T(0).Keyframe().Update(0); - next_pattern_ = kDeltaFrameT1; - break; - case kDeltaFrameT1: - result[0].Id(1).T(1).Reference(0); - next_pattern_ = kDeltaFrameT0; - break; - case kDeltaFrameT0: - result[0].Id(2).T(0).ReferenceAndUpdate(0); - next_pattern_ = kDeltaFrameT1; - break; - } - return result; -} - -absl::optional ScalabilityStructureL1T2::OnEncodeDone( - LayerFrameConfig config) { - // Encoder may have generated a keyframe even when not asked for it. Treat - // such frame same as requested keyframe, in particular restart the sequence. - if (config.IsKeyframe()) { - config = NextFrameConfig(/*restart=*/true).front(); - } - - absl::optional frame_info; - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = config.Buffers(); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - frame_info->part_of_chain = {config.TemporalId() == 0}; - frame_info->active_decode_targets = active_decode_targets_; - return frame_info; -} - -void ScalabilityStructureL1T2::OnRatesUpdated( - const VideoBitrateAllocation& bitrates) { - if (bitrates.GetBitrate(0, 0) == 0) { - // It is unclear what frame can be produced when base layer is disabled, - // so mark all decode targets as inactive to produce no frames. 
- active_decode_targets_.reset(); - return; - } - active_decode_targets_.set(0, true); - active_decode_targets_.set(1, bitrates.GetBitrate(0, 1) > 0); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.h deleted file mode 100644 index 55a9e8bbb..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_ - -#include -#include - -#include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" - -namespace webrtc { - -class ScalabilityStructureL1T2 : public ScalableVideoController { - public: - ~ScalabilityStructureL1T2() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; - - private: - enum FramePattern { - kKeyFrame, - kDeltaFrameT1, - kDeltaFrameT0, - }; - - FramePattern next_pattern_ = kKeyFrame; - std::bitset<32> active_decode_targets_ = 0b11; -}; - -} // namespace 
webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc deleted file mode 100644 index a04a4262e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/video_coding/codecs/av1/scalability_structure_l1t3.h" - -#include -#include - -#include "absl/base/macros.h" -#include "absl/types/optional.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; - -constexpr DecodeTargetIndication kDtis[3][3] = { - {kSwitch, kSwitch, kSwitch}, // T0 - {kNotPresent, kDiscardable, kSwitch}, // T1 - {kNotPresent, kNotPresent, kDiscardable}, // T2 -}; - -} // namespace - -ScalabilityStructureL1T3::~ScalabilityStructureL1T3() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL1T3::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 1; - result.num_temporal_layers = 3; - return result; -} - -FrameDependencyStructure ScalabilityStructureL1T3::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 
3; - structure.num_chains = 1; - structure.decode_target_protected_by_chain = {0, 0, 0}; - structure.templates.resize(5); - structure.templates[0].T(0).Dtis("SSS").ChainDiffs({0}); - structure.templates[1].T(0).Dtis("SSS").ChainDiffs({4}).FrameDiffs({4}); - structure.templates[2].T(1).Dtis("-DS").ChainDiffs({2}).FrameDiffs({2}); - structure.templates[3].T(2).Dtis("--D").ChainDiffs({1}).FrameDiffs({1}); - structure.templates[4].T(2).Dtis("--D").ChainDiffs({3}).FrameDiffs({1}); - return structure; -} - -std::vector -ScalabilityStructureL1T3::NextFrameConfig(bool restart) { - if (restart) { - next_pattern_ = kKeyFrame; - } - std::vector config(1); - - switch (next_pattern_) { - case kKeyFrame: - config[0].T(0).Keyframe().Update(0); - next_pattern_ = kDeltaFrameT2A; - break; - case kDeltaFrameT2A: - config[0].T(2).Reference(0); - next_pattern_ = kDeltaFrameT1; - break; - case kDeltaFrameT1: - config[0].T(1).Reference(0).Update(1); - next_pattern_ = kDeltaFrameT2B; - break; - case kDeltaFrameT2B: - config[0].T(2).Reference(1); - next_pattern_ = kDeltaFrameT0; - break; - case kDeltaFrameT0: - config[0].T(0).ReferenceAndUpdate(0); - next_pattern_ = kDeltaFrameT2A; - break; - } - return config; -} - -absl::optional ScalabilityStructureL1T3::OnEncodeDone( - LayerFrameConfig config) { - absl::optional frame_info; - if (config.TemporalId() < 0 || - config.TemporalId() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected temporal id " << config.TemporalId(); - return frame_info; - } - frame_info.emplace(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = config.Buffers(); - frame_info->decode_target_indications.assign( - std::begin(kDtis[config.TemporalId()]), - std::end(kDtis[config.TemporalId()])); - frame_info->part_of_chain = {config.TemporalId() == 0}; - return frame_info; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.h 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.h deleted file mode 100644 index 562d0f2a5..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_ - -#include - -#include "absl/types/optional.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" - -namespace webrtc { - -// T2 0 0 0 0 -// | / | / -// T1 / 0 / 0 ... 
-// |_/ |_/ -// T0 0-------0------ -// Time-> 0 1 2 3 4 5 6 7 -class ScalabilityStructureL1T3 : public ScalableVideoController { - public: - ~ScalabilityStructureL1T3() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - private: - enum FramePattern { - kKeyFrame, - kDeltaFrameT2A, - kDeltaFrameT1, - kDeltaFrameT2B, - kDeltaFrameT0, - }; - - FramePattern next_pattern_ = kKeyFrame; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc deleted file mode 100644 index 2070a4c9b..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; -constexpr auto kRequired = DecodeTargetIndication::kRequired; - -constexpr DecodeTargetIndication kDtis[4][2] = { - {kSwitch, kSwitch}, // Key, S0 - {kNotPresent, kSwitch}, // Key, S1 - {kSwitch, kRequired}, // Delta, S0 - {kNotPresent, kRequired}, // Delta, S1 -}; - -} // namespace - -ScalabilityStructureL2T1::~ScalabilityStructureL2T1() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL2T1::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 2; - result.num_temporal_layers = 1; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL2T1::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 2; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 1}; - structure.templates.resize(4); - structure.templates[0].S(0).Dtis("SR").ChainDiffs({2, 1}).FrameDiffs({2}); - structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); - structure.templates[2].S(1).Dtis("-R").ChainDiffs({1, 1}).FrameDiffs({2, 1}); - structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); - return structure; -} - -ScalableVideoController::LayerFrameConfig -ScalabilityStructureL2T1::KeyFrameConfig() const { - return LayerFrameConfig().Id(0).S(0).Keyframe().Update(0); -} - -std::vector -ScalabilityStructureL2T1::NextFrameConfig(bool restart) { - std::vector result(2); - // Buffer0 keeps latest S0 frame, Buffer1 keeps latest S1 frame. 
- if (restart || keyframe_) { - result[0] = KeyFrameConfig(); - result[1].Id(1).S(1).Reference(0).Update(1); - keyframe_ = false; - } else { - result[0].Id(2).S(0).ReferenceAndUpdate(0); - result[1].Id(3).S(1).Reference(0).ReferenceAndUpdate(1); - } - return result; -} - -absl::optional ScalabilityStructureL2T1::OnEncodeDone( - LayerFrameConfig config) { - absl::optional frame_info; - if (config.IsKeyframe()) { - config = KeyFrameConfig(); - } - - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = std::move(config.Buffers()); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - frame_info->part_of_chain = {config.SpatialId() == 0, true}; - return frame_info; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.h deleted file mode 100644 index 0f5360260..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_ - -#include - -#include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" - -namespace webrtc { - -// S1 0--0--0- -// | | | ... -// S0 0--0--0- -class ScalabilityStructureL2T1 : public ScalableVideoController { - public: - ~ScalabilityStructureL2T1() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - private: - LayerFrameConfig KeyFrameConfig() const; - - bool keyframe_ = true; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc deleted file mode 100644 index ab76f1f7e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; - -constexpr DecodeTargetIndication kDtis[3][2] = { - {kSwitch, kSwitch}, // Key, S0 - {kSwitch, kNotPresent}, // Delta, S0 - {kNotPresent, kSwitch}, // Key and Delta, S1 -}; - -} // namespace - -ScalabilityStructureL2T1Key::~ScalabilityStructureL2T1Key() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL2T1Key::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 2; - result.num_temporal_layers = 1; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL2T1Key::DependencyStructure() - const { - FrameDependencyStructure structure; - structure.num_decode_targets = 2; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 1}; - structure.templates.resize(4); - structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); - structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); - structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); - structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); - return structure; -} - -ScalableVideoController::LayerFrameConfig -ScalabilityStructureL2T1Key::KeyFrameConfig() const { - return LayerFrameConfig().Id(0).S(0).Keyframe().Update(0); -} - -std::vector -ScalabilityStructureL2T1Key::NextFrameConfig(bool restart) { - std::vector result(2); - - // Buffer0 keeps latest S0T0 frame, Buffer1 keeps latest S1T0 frame. 
- if (restart || keyframe_) { - result[0] = KeyFrameConfig(); - result[1].Id(2).S(1).Reference(0).Update(1); - keyframe_ = false; - } else { - result[0].Id(1).S(0).ReferenceAndUpdate(0); - result[1].Id(2).S(1).ReferenceAndUpdate(1); - } - return result; -} - -absl::optional ScalabilityStructureL2T1Key::OnEncodeDone( - LayerFrameConfig config) { - absl::optional frame_info; - if (config.IsKeyframe()) { - config = KeyFrameConfig(); - } - - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = std::move(config.Buffers()); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - if (config.IsKeyframe()) { - frame_info->part_of_chain = {true, true}; - } else { - frame_info->part_of_chain = {config.SpatialId() == 0, - config.SpatialId() == 1}; - } - return frame_info; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h deleted file mode 100644 index c1d8c8947..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_ - -#include - -#include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" - -namespace webrtc { - -// S1 0--0--0- -// | ... -// S0 0--0--0- -class ScalabilityStructureL2T1Key : public ScalableVideoController { - public: - ~ScalabilityStructureL2T1Key() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - private: - LayerFrameConfig KeyFrameConfig() const; - - bool keyframe_ = true; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc deleted file mode 100644 index 3da41832a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/scalability_structure_l2t2.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; -constexpr auto kRequired = DecodeTargetIndication::kRequired; - -// decode targets: S0T0, S0T1, S1T0, S1T1 -constexpr DecodeTargetIndication kDtis[6][4] = { - {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0 - {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1 - {kNotPresent, kDiscardable, kNotPresent, kRequired}, // kDeltaT1, S0 - {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDeltaT1, S1 - {kSwitch, kSwitch, kRequired, kRequired}, // kDeltaT0, S0 - {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDeltaT0, S1 -}; - -} // namespace - -ScalabilityStructureL2T2::~ScalabilityStructureL2T2() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL2T2::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 2; - result.num_temporal_layers = 2; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 4; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 0, 1, 1}; - structure.templates.resize(6); - auto& templates = structure.templates; - templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); - templates[1].S(0).T(0).Dtis("SSRR").ChainDiffs({4, 3}).FrameDiffs({4}); - templates[2].S(0).T(1).Dtis("-D-R").ChainDiffs({2, 1}).FrameDiffs({2}); - templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); - 
templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({4, 1}); - templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2, 1}); - return structure; -} - -ScalableVideoController::LayerFrameConfig -ScalabilityStructureL2T2::KeyFrameConfig() const { - return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0); -} - -std::vector -ScalabilityStructureL2T2::NextFrameConfig(bool restart) { - if (restart) { - next_pattern_ = kKey; - } - std::vector result(2); - - // Buffer0 keeps latest S0T0 frame, - // Buffer1 keeps latest S1T0 frame. - // Buffer2 keeps latest S0T1 frame. - switch (next_pattern_) { - case kKey: - result[0] = KeyFrameConfig(); - result[1].Id(1).S(1).T(0).Reference(0).Update(1); - next_pattern_ = kDeltaT1; - break; - case kDeltaT1: - result[0].Id(2).S(0).T(1).Reference(0).Update(2); - result[1].Id(3).S(1).T(1).Reference(2).Reference(1); - next_pattern_ = kDeltaT0; - break; - case kDeltaT0: - result[0].Id(4).S(0).T(0).ReferenceAndUpdate(0); - result[1].Id(5).S(1).T(0).Reference(0).ReferenceAndUpdate(1); - next_pattern_ = kDeltaT1; - break; - } - return result; -} - -absl::optional ScalabilityStructureL2T2::OnEncodeDone( - LayerFrameConfig config) { - if (config.IsKeyframe()) { - config = KeyFrameConfig(); - } - - absl::optional frame_info; - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = config.Buffers(); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - if (config.TemporalId() == 0) { - frame_info->part_of_chain = {config.SpatialId() == 0, true}; - } else { - frame_info->part_of_chain = {false, false}; - } - return frame_info; -} - -} // namespace webrtc diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.h deleted file mode 100644 index dbf5036c1..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_ - -#include - -#include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" - -namespace webrtc { - -// S1T1 0 0 -// /| /| / -// S1T0 0-+-0-+-0 -// | | | | | ... 
-// S0T1 | 0 | 0 | -// |/ |/ |/ -// S0T0 0---0---0-- -// Time-> 0 1 2 3 4 -class ScalabilityStructureL2T2 : public ScalableVideoController { - public: - ~ScalabilityStructureL2T2() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - private: - enum FramePattern { - kKey, - kDeltaT1, - kDeltaT0, - }; - LayerFrameConfig KeyFrameConfig() const; - - FramePattern next_pattern_ = kKey; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc deleted file mode 100644 index a59ef1a97..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; - -// decode targets: S0T0, S0T1, S1T0, S1T1 -constexpr DecodeTargetIndication kDtis[6][4] = { - {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0 - {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1 - {kNotPresent, kDiscardable, kNotPresent, kNotPresent}, // kDeltaT1, S0 - {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDeltaT1, S1 - {kSwitch, kSwitch, kNotPresent, kNotPresent}, // kDeltaT0, S0 - {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDeltaT0, S1 -}; - -} // namespace - -ScalabilityStructureL2T2Key::~ScalabilityStructureL2T2Key() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL2T2Key::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 2; - result.num_temporal_layers = 2; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL2T2Key::DependencyStructure() - const { - FrameDependencyStructure structure; - structure.num_decode_targets = 4; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 0, 1, 1}; - structure.templates.resize(6); - auto& templates = structure.templates; - templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); - templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({4, 3}).FrameDiffs({4}); - templates[2].S(0).T(1).Dtis("-D--").ChainDiffs({2, 1}).FrameDiffs({2}); - templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); - 
templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 4}).FrameDiffs({4}); - templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2}); - return structure; -} - -ScalableVideoController::LayerFrameConfig -ScalabilityStructureL2T2Key::KeyFrameConfig() const { - return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0); -} - -std::vector -ScalabilityStructureL2T2Key::NextFrameConfig(bool restart) { - if (restart) { - next_pattern_ = kKey; - } - std::vector result(2); - - // Buffer0 keeps latest S0T0 frame, - // Buffer1 keeps latest S1T0 frame. - switch (next_pattern_) { - case kKey: - result[0] = KeyFrameConfig(); - result[1].Id(1).S(1).T(0).Reference(0).Update(1); - next_pattern_ = kDeltaT1; - break; - case kDeltaT1: - result[0].Id(2).S(0).T(1).Reference(0); - result[1].Id(3).S(1).T(1).Reference(1); - next_pattern_ = kDeltaT0; - break; - case kDeltaT0: - result[0].Id(4).S(0).T(0).ReferenceAndUpdate(0); - result[1].Id(5).S(1).T(0).ReferenceAndUpdate(1); - next_pattern_ = kDeltaT1; - break; - } - return result; -} - -absl::optional ScalabilityStructureL2T2Key::OnEncodeDone( - LayerFrameConfig config) { - if (config.IsKeyframe()) { - config = KeyFrameConfig(); - } - - absl::optional frame_info; - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = config.Buffers(); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - if (config.IsKeyframe()) { - frame_info->part_of_chain = {true, true}; - } else if (config.TemporalId() == 0) { - frame_info->part_of_chain = {config.SpatialId() == 0, - config.SpatialId() == 1}; - } else { - frame_info->part_of_chain = {false, false}; - } - return frame_info; -} - -} // namespace webrtc diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h deleted file mode 100644 index 9adfcbcd5..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_ - -#include - -#include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" - -namespace webrtc { - -// S1T1 0 0 -// / / / -// S1T0 0---0---0 -// | ... 
-// S0T1 | 0 0 -// |/ / / -// S0T0 0---0---0 -// Time-> 0 1 2 3 4 -class ScalabilityStructureL2T2Key : public ScalableVideoController { - public: - ~ScalabilityStructureL2T2Key() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - private: - enum FramePattern { - kKey, - kDeltaT1, - kDeltaT0, - }; - LayerFrameConfig KeyFrameConfig() const; - - FramePattern next_pattern_ = kKey; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc deleted file mode 100644 index d205b4a1c..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; - -constexpr DecodeTargetIndication kDtis[6][4] = { - {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0T0 - {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1T0 - {kSwitch, kSwitch, kNotPresent, kNotPresent}, // kDelta0, S0T0 - {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDelta0, S1T1 - {kNotPresent, kDiscardable, kNotPresent, kNotPresent}, // kDelta1, S0T1 - {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDelta1, S1T0 -}; - -} // namespace - -ScalabilityStructureL2T2KeyShift::~ScalabilityStructureL2T2KeyShift() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL2T2KeyShift::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 2; - result.num_temporal_layers = 2; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL2T2KeyShift::DependencyStructure() - const { - FrameDependencyStructure structure; - structure.num_decode_targets = 4; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 0, 1, 1}; - structure.templates.resize(7); - auto& templates = structure.templates; - templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); - templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({2, 1}).FrameDiffs({2}); - templates[2].S(0).T(0).Dtis("SS--").ChainDiffs({4, 1}).FrameDiffs({4}); - templates[3].S(0).T(1).Dtis("-D--").ChainDiffs({2, 3}).FrameDiffs({2}); - templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 
1}).FrameDiffs({1}); - templates[5].S(1).T(0).Dtis("--SS").ChainDiffs({3, 4}).FrameDiffs({4}); - templates[6].S(1).T(1).Dtis("---D").ChainDiffs({1, 2}).FrameDiffs({2}); - return structure; -} - -ScalableVideoController::LayerFrameConfig -ScalabilityStructureL2T2KeyShift::KeyFrameConfig() const { - return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0); -} - -std::vector -ScalabilityStructureL2T2KeyShift::NextFrameConfig(bool restart) { - if (restart) { - next_pattern_ = kKey; - } - std::vector result(2); - - // Buffer0 keeps latest S0T0 frame, - // Buffer1 keeps latest S1T0 frame. - switch (next_pattern_) { - case kKey: - result[0] = KeyFrameConfig(); - result[1].Id(1).S(1).T(0).Reference(0).Update(1); - next_pattern_ = kDelta0; - break; - case kDelta0: - result[0].Id(2).S(0).T(0).ReferenceAndUpdate(0); - result[1].Id(3).S(1).T(1).Reference(1); - next_pattern_ = kDelta1; - break; - case kDelta1: - result[0].Id(4).S(0).T(1).Reference(0); - result[1].Id(5).S(1).T(0).ReferenceAndUpdate(1); - next_pattern_ = kDelta0; - break; - } - return result; -} - -absl::optional ScalabilityStructureL2T2KeyShift::OnEncodeDone( - LayerFrameConfig config) { - if (config.IsKeyframe()) { - config = KeyFrameConfig(); - } - - absl::optional frame_info; - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = config.Buffers(); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - if (config.IsKeyframe()) { - frame_info->part_of_chain = {true, true}; - } else if (config.TemporalId() == 0) { - frame_info->part_of_chain = {config.SpatialId() == 0, - config.SpatialId() == 1}; - } else { - frame_info->part_of_chain = {false, false}; - } - return frame_info; -} - -} // namespace 
webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc deleted file mode 100644 index 51eb00352..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/video_coding/codecs/av1/scalability_structure_l3t1.h" - -#include -#include - -#include "absl/base/macros.h" -#include "absl/types/optional.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; -constexpr auto kRequired = DecodeTargetIndication::kRequired; - -constexpr DecodeTargetIndication kDtis[5][3] = { - {kSwitch, kSwitch, kSwitch}, // Key, S0 - {kNotPresent, kSwitch, kSwitch}, // Key, S1 - {kNotPresent, kNotPresent, kSwitch}, // Key and Delta, S2 - {kSwitch, kRequired, kRequired}, // Delta, S0 - {kNotPresent, kSwitch, kRequired}, // Delta, S1 -}; - -} // namespace - -ScalabilityStructureL3T1::~ScalabilityStructureL3T1() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL3T1::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 3; - result.num_temporal_layers = 1; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 4; - result.scaling_factor_num[1] = 1; - result.scaling_factor_den[1] = 
2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 3; - structure.num_chains = 3; - structure.decode_target_protected_by_chain = {0, 1, 2}; - auto& templates = structure.templates; - templates.resize(6); - templates[0].S(0).Dtis("SRR").ChainDiffs({3, 2, 1}).FrameDiffs({3}); - templates[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0}); - templates[2].S(1).Dtis("-SR").ChainDiffs({1, 1, 1}).FrameDiffs({3, 1}); - templates[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); - templates[4].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({3, 1}); - templates[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1}); - return structure; -} - -std::vector -ScalabilityStructureL3T1::NextFrameConfig(bool restart) { - std::vector config(3); - - // Buffer i keeps latest frame for spatial layer i - if (restart || keyframe_) { - config[0].Id(0).S(0).Keyframe().Update(0); - config[1].Id(1).S(1).Update(1).Reference(0); - config[2].Id(2).S(2).Update(2).Reference(1); - keyframe_ = false; - } else { - config[0].Id(3).S(0).ReferenceAndUpdate(0); - config[1].Id(4).S(1).ReferenceAndUpdate(1).Reference(0); - config[2].Id(2).S(2).ReferenceAndUpdate(2).Reference(1); - } - return config; -} - -absl::optional ScalabilityStructureL3T1::OnEncodeDone( - LayerFrameConfig config) { - absl::optional frame_info; - if (config.IsKeyframe() && config.Id() != 0) { - // Encoder generated a key frame without asking to. 
- if (config.SpatialId() > 0) { - RTC_LOG(LS_WARNING) << "Unexpected spatial id " << config.SpatialId() - << " for key frame."; - } - config = LayerFrameConfig().Id(0).S(0).Keyframe().Update(0); - } - - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = config.Buffers(); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - frame_info->part_of_chain = {config.SpatialId() == 0, config.SpatialId() <= 1, - true}; - return frame_info; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc deleted file mode 100644 index 14e261f74..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/scalability_structure_l3t3.h" - -#include -#include - -#include "absl/base/macros.h" -#include "absl/types/optional.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; -constexpr auto kRequired = DecodeTargetIndication::kRequired; - -constexpr DecodeTargetIndication kDtis[12][9] = { - // Key, S0 - {kSwitch, kSwitch, kSwitch, // S0 - kSwitch, kSwitch, kSwitch, // S1 - kSwitch, kSwitch, kSwitch}, // S2 - // Key, S1 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kSwitch, kSwitch, kSwitch, // S1 - kSwitch, kSwitch, kSwitch}, // S2 - // Key, S2 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kNotPresent, kNotPresent, kNotPresent, // S1 - kSwitch, kSwitch, kSwitch}, // S2 - // Delta, S0T2 - {kNotPresent, kNotPresent, kDiscardable, // S0 - kNotPresent, kNotPresent, kRequired, // S1 - kNotPresent, kNotPresent, kRequired}, // S2 - // Delta, S1T2 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kNotPresent, kNotPresent, kDiscardable, // S1 - kNotPresent, kNotPresent, kRequired}, // S2 - // Delta, S2T2 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kNotPresent, kNotPresent, kNotPresent, // S1 - kNotPresent, kNotPresent, kDiscardable}, // S2 - // Delta, S0T1 - {kNotPresent, kDiscardable, kSwitch, // S0 - kNotPresent, kRequired, kRequired, // S1 - kNotPresent, kRequired, kRequired}, // S2 - // Delta, S1T1 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kNotPresent, kDiscardable, kSwitch, // S1 - kNotPresent, kRequired, kRequired}, // S2 - // Delta, S2T1 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kNotPresent, kNotPresent, kNotPresent, // S1 - kNotPresent, kDiscardable, kSwitch}, // S2 - // Delta, S0T0 - {kSwitch, kSwitch, kSwitch, // S0 - kRequired, 
kRequired, kRequired, // S1 - kRequired, kRequired, kRequired}, // S2 - // Delta, S1T0 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kSwitch, kSwitch, kSwitch, // S1 - kRequired, kRequired, kRequired}, // S2 - // Delta, S2T0 - {kNotPresent, kNotPresent, kNotPresent, // S0 - kNotPresent, kNotPresent, kNotPresent, // S1 - kSwitch, kSwitch, kSwitch}, // S2 -}; - -} // namespace - -ScalabilityStructureL3T3::~ScalabilityStructureL3T3() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL3T3::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 3; - result.num_temporal_layers = 3; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 4; - result.scaling_factor_num[1] = 1; - result.scaling_factor_den[1] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 9; - structure.num_chains = 3; - structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; - auto& t = structure.templates; - t.resize(15); - // Templates are shown in the order frames following them appear in the - // stream, but in `structure.templates` array templates are sorted by - // (`spatial_id`, `temporal_id`) since that is a dependency descriptor - // requirement. Indexes are written in hex for nicer alignment. 
- t[0x1].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0}); - t[0x6].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); - t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); - t[0x3].S(0).T(2).Dtis("--D--R--R").ChainDiffs({3, 2, 1}).FrameDiffs({3}); - t[0x8].S(1).T(2).Dtis("-----D--R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1}); - t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1}); - t[0x2].S(0).T(1).Dtis("-DS-RR-RR").ChainDiffs({6, 5, 4}).FrameDiffs({6}); - t[0x7].S(1).T(1).Dtis("----DS-RR").ChainDiffs({7, 6, 5}).FrameDiffs({6, 1}); - t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6, 1}); - t[0x4].S(0).T(2).Dtis("--D--R--R").ChainDiffs({9, 8, 7}).FrameDiffs({3}); - t[0x9].S(1).T(2).Dtis("-----D--R").ChainDiffs({10, 9, 8}).FrameDiffs({3, 1}); - t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3, 1}); - t[0x0].S(0).T(0).Dtis("SSSRRRRRR").ChainDiffs({12, 11, 10}).FrameDiffs({12}); - t[0x5].S(1).T(0).Dtis("---SSSRRR").ChainDiffs({1, 1, 1}).FrameDiffs({12, 1}); - t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({12, 1}); - return structure; -} - -ScalableVideoController::LayerFrameConfig -ScalabilityStructureL3T3::KeyFrameConfig() const { - return LayerFrameConfig().Id(0).S(0).T(0).Keyframe().Update(0); -} - -std::vector -ScalabilityStructureL3T3::NextFrameConfig(bool restart) { - if (restart) { - next_pattern_ = kKeyFrame; - } - std::vector config(3); - - // For this structure name each of 8 buffers after the layer of the frame that - // buffer keeps. 
- static constexpr int kS0T0 = 0; - static constexpr int kS1T0 = 1; - static constexpr int kS2T0 = 2; - static constexpr int kS0T1 = 3; - static constexpr int kS1T1 = 4; - static constexpr int kS2T1 = 5; - static constexpr int kS0T2 = 6; - static constexpr int kS1T2 = 7; - switch (next_pattern_) { - case kKeyFrame: - config[0].Id(0).S(0).T(0).Keyframe().Update(kS0T0); - config[1].Id(1).S(1).T(0).Update(kS1T0).Reference(kS0T0); - config[2].Id(2).S(2).T(0).Update(kS2T0).Reference(kS1T0); - next_pattern_ = kDeltaFrameT2A; - break; - case kDeltaFrameT2A: - config[0].Id(3).S(0).T(2).Reference(kS0T0).Update(kS0T2); - config[1].Id(4).S(1).T(2).Reference(kS1T0).Reference(kS0T2).Update(kS1T2); - config[2].Id(5).S(2).T(2).Reference(kS2T0).Reference(kS1T2); - next_pattern_ = kDeltaFrameT1; - break; - case kDeltaFrameT1: - config[0].Id(6).S(0).T(1).Reference(kS0T0).Update(kS0T1); - config[1].Id(7).S(1).T(1).Reference(kS1T0).Reference(kS0T1).Update(kS1T1); - config[2].Id(8).S(2).T(1).Reference(kS2T0).Reference(kS1T1).Update(kS2T1); - next_pattern_ = kDeltaFrameT2B; - break; - case kDeltaFrameT2B: - config[0].Id(3).S(0).T(2).Reference(kS0T1).Update(kS0T2); - config[1].Id(4).S(1).T(2).Reference(kS1T1).Reference(kS0T2).Update(kS1T2); - config[2].Id(5).S(2).T(2).Reference(kS2T1).Reference(kS1T2); - next_pattern_ = kDeltaFrameT0; - break; - case kDeltaFrameT0: - config[0].Id(9).S(0).T(0).ReferenceAndUpdate(kS0T0); - config[1].Id(10).S(1).T(0).ReferenceAndUpdate(kS1T0).Reference(kS0T0); - config[2].Id(11).S(2).T(0).ReferenceAndUpdate(kS2T0).Reference(kS1T0); - next_pattern_ = kDeltaFrameT2A; - break; - } - return config; -} - -absl::optional ScalabilityStructureL3T3::OnEncodeDone( - LayerFrameConfig config) { - if (config.IsKeyframe() && config.Id() != 0) { - // Encoder generated a key frame without asking to. 
- if (config.SpatialId() > 0) { - RTC_LOG(LS_WARNING) << "Unexpected spatial id " << config.SpatialId() - << " for key frame."; - } - config = LayerFrameConfig() - .Keyframe() - .Id(0) - .S(0) - .T(0) - .Update(0) - .Update(1) - .Update(2) - .Update(3) - .Update(4) - .Update(5) - .Update(6) - .Update(7); - } - - absl::optional frame_info; - if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = config.Buffers(); - frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]), - std::end(kDtis[config.Id()])); - if (config.TemporalId() == 0) { - frame_info->part_of_chain = {config.SpatialId() == 0, - config.SpatialId() <= 1, true}; - } else { - frame_info->part_of_chain = {false, false, false}; - } - return frame_info; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.h deleted file mode 100644 index 363f07e01..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_ - -#include - -#include "absl/types/optional.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" - -namespace webrtc { - -// https://aomediacodec.github.io/av1-rtp-spec/#a63-l3t3-full-svc -class ScalabilityStructureL3T3 : public ScalableVideoController { - public: - ~ScalabilityStructureL3T3() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - private: - enum FramePattern { - kKeyFrame, - kDeltaFrameT2A, - kDeltaFrameT1, - kDeltaFrameT2B, - kDeltaFrameT0, - }; - LayerFrameConfig KeyFrameConfig() const; - - FramePattern next_pattern_ = kKeyFrame; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc deleted file mode 100644 index 182891fa5..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/scalability_structure_s2t1.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; -constexpr auto kSwitch = DecodeTargetIndication::kSwitch; - -constexpr DecodeTargetIndication kDtis[2][2] = { - {kSwitch, kNotPresent}, // S0 - {kNotPresent, kSwitch}, // S1 -}; - -} // namespace - -ScalabilityStructureS2T1::~ScalabilityStructureS2T1() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureS2T1::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 2; - result.num_temporal_layers = 1; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 2; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 1}; - structure.templates.resize(4); - structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); - structure.templates[1].S(0).Dtis("S-").ChainDiffs({0, 0}); - structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); - structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 0}); - return structure; -} - -std::vector -ScalabilityStructureS2T1::NextFrameConfig(bool restart) { - std::vector result(2); - // Buffer0 keeps latest S0T0 frame, Buffer1 keeps latest S1T0 frame. 
- if (restart || keyframe_) { - result[0].S(0).Keyframe().Update(0); - result[1].S(1).Keyframe().Update(1); - keyframe_ = false; - } else { - result[0].S(0).ReferenceAndUpdate(0); - result[1].S(1).ReferenceAndUpdate(1); - } - return result; -} - -absl::optional ScalabilityStructureS2T1::OnEncodeDone( - LayerFrameConfig config) { - absl::optional frame_info; - if (config.SpatialId() < 0 || - config.SpatialId() >= int{ABSL_ARRAYSIZE(kDtis)}) { - RTC_LOG(LS_ERROR) << "Unexpected spatial id " << config.SpatialId(); - return frame_info; - } - frame_info.emplace(); - frame_info->spatial_id = config.SpatialId(); - frame_info->temporal_id = config.TemporalId(); - frame_info->encoder_buffers = std::move(config.Buffers()); - frame_info->decode_target_indications.assign( - std::begin(kDtis[config.SpatialId()]), - std::end(kDtis[config.SpatialId()])); - frame_info->part_of_chain = {config.SpatialId() == 0, - config.SpatialId() == 1}; - return frame_info; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc index 9e32c68f7..9002b8746 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -36,6 +36,7 @@ extern "C" { #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" +#include "third_party/libyuv/include/libyuv/convert.h" namespace webrtc { @@ -103,7 +104,7 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // TODO(nisse): Delete that feature from the video pool, instead add // an explicit call to InitializeData here. 
rtc::scoped_refptr frame_buffer = - decoder->pool_.CreateBuffer(width, height); + decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); int y_size = width * height; int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight(); @@ -150,10 +151,13 @@ void H264DecoderImpl::AVFreeBuffer2(void* opaque, uint8_t* data) { } H264DecoderImpl::H264DecoderImpl() - : pool_(true), + : ffmpeg_buffer_pool_(true), decoded_image_callback_(nullptr), has_reported_init_(false), - has_reported_error_(false) {} + has_reported_error_(false), + preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode") + ? VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} H264DecoderImpl::~H264DecoderImpl() { Release(); @@ -219,7 +223,8 @@ int32_t H264DecoderImpl::InitDecode(const VideoCodec* codec_settings, av_frame_.reset(av_frame_alloc()); if (codec_settings && codec_settings->buffer_pool_size) { - if (!pool_.Resize(*codec_settings->buffer_pool_size)) { + if (!ffmpeg_buffer_pool_.Resize(*codec_settings->buffer_pool_size) || + !output_buffer_pool_.Resize(*codec_settings->buffer_pool_size)) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } } @@ -325,12 +330,25 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, i420_buffer->DataV() + i420_buffer->StrideV() * i420_buffer->height() / 2); - auto cropped_buffer = WrapI420Buffer( + rtc::scoped_refptr cropped_buffer = WrapI420Buffer( av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], av_frame_->linesize[kVPlaneIndex], rtc::KeepRefUntilDone(i420_buffer)); + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420(); + auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer( + cropped_i420->width(), cropped_i420->height()); + libyuv::I420ToNV12(cropped_i420->DataY(), 
cropped_i420->StrideY(), + cropped_i420->DataU(), cropped_i420->StrideU(), + cropped_i420->DataV(), cropped_i420->StrideV(), + nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), + i420_buffer->width(), i420_buffer->height()); + cropped_buffer = nv12_buffer; + } + // Pass on color space from input frame if explicitly specified. const ColorSpace& color_space = input_image.ColorSpace() ? *input_image.ColorSpace() diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h index 80892de63..47af12c8c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h @@ -40,7 +40,7 @@ extern "C" { } // extern "C" #include "common_video/h264/h264_bitstream_parser.h" -#include "common_video/include/i420_buffer_pool.h" +#include "common_video/include/video_frame_buffer_pool.h" namespace webrtc { @@ -88,7 +88,10 @@ class H264DecoderImpl : public H264Decoder { void ReportInit(); void ReportError(); - I420BufferPool pool_; + // Used by ffmpeg via |AVGetBuffer2()| to allocate I420 images. + VideoFrameBufferPool ffmpeg_buffer_pool_; + // Used to allocate NV12 images if NV12 output is preferred. + VideoFrameBufferPool output_buffer_pool_; std::unique_ptr av_context_; std::unique_ptr av_frame_; @@ -98,6 +101,9 @@ class H264DecoderImpl : public H264Decoder { bool has_reported_error_; webrtc::H264BitstreamParser h264_bitstream_parser_; + + // Decoder should produce this format if possible. 
+ const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc index af36dd986..ea784c19e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -87,19 +87,15 @@ VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) { } // namespace // Helper method used by H264EncoderImpl::Encode. -// Copies the encoded bytes from |info| to |encoded_image| and updates the -// fragmentation information of |frag_header|. The |encoded_image->_buffer| may -// be deleted and reallocated if a bigger buffer is required. +// Copies the encoded bytes from |info| to |encoded_image|. The +// |encoded_image->_buffer| may be deleted and reallocated if a bigger buffer is +// required. // // After OpenH264 encoding, the encoded bytes are stored in |info| spread out // over a number of layers and "NAL units". Each NAL unit is a fragment starting // with the four-byte start code {0,0,0,1}. All of this data (including the -// start codes) is copied to the |encoded_image->_buffer| and the |frag_header| -// is updated to point to each fragment, with offsets and lengths set as to -// exclude the start codes. -static void RtpFragmentize(EncodedImage* encoded_image, - SFrameBSInfo* info, - RTPFragmentationHeader* frag_header) { +// start codes) is copied to the |encoded_image->_buffer|. +static void RtpFragmentize(EncodedImage* encoded_image, SFrameBSInfo* info) { // Calculate minimum buffer size required to hold encoded data. size_t required_capacity = 0; size_t fragments_count = 0; @@ -114,12 +110,12 @@ static void RtpFragmentize(EncodedImage* encoded_image, } } // TODO(nisse): Use a cache or buffer pool to avoid allocation? 
- encoded_image->SetEncodedData(EncodedImageBuffer::Create(required_capacity)); + auto buffer = EncodedImageBuffer::Create(required_capacity); + encoded_image->SetEncodedData(buffer); // Iterate layers and NAL units, note each NAL unit as a fragment and copy // the data to |encoded_image->_buffer|. const uint8_t start_code[4] = {0, 0, 0, 1}; - frag_header->VerifyAndAllocateFragmentationHeader(fragments_count); size_t frag = 0; encoded_image->set_size(0); for (int layer = 0; layer < info->iLayerNum; ++layer) { @@ -134,15 +130,10 @@ static void RtpFragmentize(EncodedImage* encoded_image, RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 1], start_code[1]); RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 2], start_code[2]); RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 3], start_code[3]); - frag_header->fragmentationOffset[frag] = - encoded_image->size() + layer_len + sizeof(start_code); - frag_header->fragmentationLength[frag] = - layerInfo.pNalLengthInByte[nal] - sizeof(start_code); layer_len += layerInfo.pNalLengthInByte[nal]; } // Copy the entire layer's data (including start codes). - memcpy(encoded_image->data() + encoded_image->size(), layerInfo.pBsBuf, - layer_len); + memcpy(buffer->data() + encoded_image->size(), layerInfo.pBsBuf, layer_len); encoded_image->set_size(encoded_image->size() + layer_len); } } @@ -284,7 +275,6 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width, codec_.simulcastStream[idx].height); encoded_images_[i].SetEncodedData(EncodedImageBuffer::Create(new_capacity)); - encoded_images_[i]._completeFrame = true; encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width; encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height; encoded_images_[i].set_size(0); @@ -485,8 +475,7 @@ int32_t H264EncoderImpl::Encode( // Split encoded image up into fragments. This also updates // |encoded_image_|. 
- RTPFragmentationHeader frag_header; - RtpFragmentize(&encoded_images_[i], &info, &frag_header); + RtpFragmentize(&encoded_images_[i], &info); // Encoder can skip frames to save bandwidth in which case // |encoded_images_[i]._length| == 0. @@ -518,7 +507,7 @@ int32_t H264EncoderImpl::Encode( } } encoded_image_callback_->OnEncodedImage(encoded_images_[i], - &codec_specific, &frag_header); + &codec_specific); } } return WEBRTC_VIDEO_CODEC_OK; @@ -555,6 +544,12 @@ SEncParamExt H264EncoderImpl::CreateEncoderParams(size_t i) const { // |uiIntraPeriod| - multiple of GOP size // |keyFrameInterval| - number of frames encoder_params.uiIntraPeriod = configurations_[i].key_frame_interval; + // Reuse SPS id if possible. This helps to avoid reset of chromium HW decoder + // on each key-frame. + // Note that WebRTC resets encoder on resolution change which makes all + // EParameterSetStrategy modes except INCREASING_ID (default) essentially + // equivalent to CONSTANT_ID. + encoder_params.eSpsPpsIdStrategy = SPS_LISTING; encoder_params.uiMaxNalSize = 0; // Threading model: use auto. // 0: auto (dynamic imp. 
internal encoder) @@ -625,6 +620,7 @@ VideoEncoder::EncoderInfo H264EncoderImpl::GetEncoderInfo() const { info.is_hardware_accelerated = false; info.has_internal_source = false; info.supports_simulcast = true; + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420}; return info; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h index 7fb87eae9..4eb4ad38c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h @@ -68,7 +68,7 @@ class H264EncoderImpl : public H264Encoder { EncodedImageCallback* callback) override; void SetRates(const RateControlParameters& parameters) override; - // The result of encoding - an EncodedImage and RTPFragmentationHeader - are + // The result of encoding - an EncodedImage and CodecSpecificInfo - are // passed to the encode complete callback. int32_t Encode(const VideoFrame& frame, const std::vector* frame_types) override; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h index 92a4c88ce..c43109e46 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h @@ -57,8 +57,7 @@ class MultiplexEncoderAdapter : public VideoEncoder { EncodedImageCallback::Result OnEncodedImage( AlphaCodecStream stream_idx, const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo, - const RTPFragmentationHeader* fragmentation); + const CodecSpecificInfo* codecSpecificInfo); private: // Wrapper class that redirects OnEncodedImage() calls. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc index cd39e72c2..39c14e412 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc @@ -76,23 +76,26 @@ struct MultiplexDecoderAdapter::DecodedImageData { decoded_image_(decoded_image), decode_time_ms_(decode_time_ms), qp_(qp) {} + + DecodedImageData() = delete; + DecodedImageData(const DecodedImageData&) = delete; + DecodedImageData& operator=(const DecodedImageData&) = delete; + const AlphaCodecStream stream_idx_; VideoFrame decoded_image_; const absl::optional decode_time_ms_; const absl::optional qp_; - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DecodedImageData); }; struct MultiplexDecoderAdapter::AugmentingData { AugmentingData(std::unique_ptr augmenting_data, uint16_t data_size) : data_(std::move(augmenting_data)), size_(data_size) {} + AugmentingData() = delete; + AugmentingData(const AugmentingData&) = delete; + AugmentingData& operator=(const AugmentingData&) = delete; + std::unique_ptr data_; const uint16_t size_; - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AugmentingData); }; MultiplexDecoderAdapter::MultiplexDecoderAdapter( diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc index 69fe7b9a1..0fbbc4271 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc @@ -17,7 +17,6 @@ #include "common_video/include/video_frame_buffer.h" #include 
"common_video/libyuv/include/webrtc_libyuv.h" #include "media/base/video_common.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" #include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" @@ -35,12 +34,11 @@ class MultiplexEncoderAdapter::AdapterEncodedImageCallback EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { + const CodecSpecificInfo* codec_specific_info) override { if (!adapter_) return Result(Result::OK); return adapter_->OnEncodedImage(stream_idx_, encoded_image, - codec_specific_info, fragmentation); + codec_specific_info); } private: @@ -109,6 +107,7 @@ int MultiplexEncoderAdapter::InitEncode( encoder_info_ = EncoderInfo(); encoder_info_.implementation_name = "MultiplexEncoderAdapter ("; encoder_info_.requested_resolution_alignment = 1; + encoder_info_.apply_alignment_to_all_simulcast_layers = false; // This needs to be false so that we can do the split in Encode(). 
encoder_info_.supports_native_handle = false; @@ -145,6 +144,10 @@ int MultiplexEncoderAdapter::InitEncode( encoder_info_.requested_resolution_alignment, encoder_impl_info.requested_resolution_alignment); + if (encoder_impl_info.apply_alignment_to_all_simulcast_layers) { + encoder_info_.apply_alignment_to_all_simulcast_layers = true; + } + encoder_info_.has_internal_source = false; encoders_.emplace_back(std::move(encoder)); @@ -292,8 +295,7 @@ VideoEncoder::EncoderInfo MultiplexEncoderAdapter::GetEncoderInfo() const { EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage( AlphaCodecStream stream_idx, const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codecSpecificInfo) { // Save the image MultiplexImageComponent image_component; image_component.component_index = stream_idx; @@ -330,8 +332,7 @@ EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage( CodecSpecificInfo codec_info = *codecSpecificInfo; codec_info.codecType = kVideoCodecMultiplex; - encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info, - fragmentation); + encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info); } stashed_images_.erase(stashed_images_.begin(), stashed_image_next_itr); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc index 1a3df403a..7bf611715 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc @@ -195,6 +195,10 @@ class LibvpxVp8Facade : public LibvpxInterface { vpx_codec_iter_t* iter) const override { return ::vpx_codec_get_cx_data(ctx, iter); } + + const char* codec_error_detail(vpx_codec_ctx_t* ctx) const override { + return ::vpx_codec_error_detail(ctx); + } 
}; } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.h index fe40dedec..3da38ea24 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.h @@ -93,6 +93,8 @@ class LibvpxInterface { vpx_codec_ctx_t* ctx, vpx_codec_iter_t* iter) const = 0; + virtual const char* codec_error_detail(vpx_codec_ctx_t* ctx) const = 0; + // Returns interface wrapping the actual libvpx functions. static std::unique_ptr CreateEncoder(); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc index d86d8767c..af48c9253 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc @@ -132,7 +132,10 @@ LibvpxVp8Decoder::LibvpxVp8Decoder() key_frame_required_(true), deblock_params_(use_postproc_ ? GetPostProcParamsFromFieldTrialGroup() : absl::nullopt), - qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr) {} + qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr), + preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode") + ? VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} LibvpxVp8Decoder::~LibvpxVp8Decoder() { inited_ = true; // in order to do the actual release @@ -237,21 +240,14 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image, if (key_frame_required_) { if (input_image._frameType != VideoFrameType::kVideoFrameKey) return WEBRTC_VIDEO_CODEC_ERROR; - // We have a key frame - is it complete? 
- if (input_image._completeFrame) { - key_frame_required_ = false; - } else { - return WEBRTC_VIDEO_CODEC_ERROR; - } + key_frame_required_ = false; } // Restrict error propagation using key frame requests. // Reset on a key frame refresh. - if (input_image._frameType == VideoFrameType::kVideoFrameKey && - input_image._completeFrame) { + if (input_image._frameType == VideoFrameType::kVideoFrameKey) { propagation_cnt_ = -1; // Start count on first loss. - } else if ((!input_image._completeFrame || missing_frames) && - propagation_cnt_ == -1) { + } else if (missing_frames && propagation_cnt_ == -1) { propagation_cnt_ = 0; } if (propagation_cnt_ >= 0) { @@ -328,8 +324,39 @@ int LibvpxVp8Decoder::ReturnFrame( last_frame_width_ = img->d_w; last_frame_height_ = img->d_h; // Allocate memory for decoded image. - rtc::scoped_refptr buffer = - buffer_pool_.CreateBuffer(img->d_w, img->d_h); + rtc::scoped_refptr buffer; + + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + // Convert instead of making a copy. + // Note: libvpx doesn't support creating NV12 image directly. + // Due to the bitstream structure such a change would just hide the + // conversion operation inside the decode call. 
+ rtc::scoped_refptr nv12_buffer = + buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); + buffer = nv12_buffer; + if (nv12_buffer.get()) { + libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), + img->d_w, img->d_h); + } + } else { + rtc::scoped_refptr i420_buffer = + buffer_pool_.CreateI420Buffer(img->d_w, img->d_h); + buffer = i420_buffer; + if (i420_buffer.get()) { + libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + img->d_w, img->d_h); + } + } + if (!buffer.get()) { // Pool has too many pending frames. 
RTC_HISTOGRAM_BOOLEAN("WebRTC.Video.LibvpxVp8Decoder.TooManyPendingFrames", @@ -337,14 +364,6 @@ int LibvpxVp8Decoder::ReturnFrame( return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } - libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - buffer->MutableDataY(), buffer->StrideY(), - buffer->MutableDataU(), buffer->StrideU(), - buffer->MutableDataV(), buffer->StrideV(), img->d_w, - img->d_h); - VideoFrame decoded_image = VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_timestamp_rtp(timestamp) diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h index 2a0c5f2c5..cf699f183 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h @@ -16,7 +16,7 @@ #include "absl/types/optional.h" #include "api/video/encoded_image.h" #include "api/video_codecs/video_decoder.h" -#include "common_video/include/i420_buffer_pool.h" +#include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_codec_interface.h" #include "vpx/vp8dx.h" @@ -54,7 +54,7 @@ class LibvpxVp8Decoder : public VideoDecoder { const webrtc::ColorSpace* explicit_color_space); const bool use_postproc_; - I420BufferPool buffer_pool_; + VideoFrameBufferPool buffer_pool_; DecodedImageCallback* decode_complete_callback_; bool inited_; vpx_codec_ctx_t* decoder_; @@ -64,6 +64,9 @@ class LibvpxVp8Decoder : public VideoDecoder { bool key_frame_required_; const absl::optional deblock_params_; const std::unique_ptr qp_smoother_; + + // Decoder should produce this format if possible. 
+ const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 2f901ad81..340817658 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -68,9 +68,6 @@ constexpr uint32_t kVp832ByteAlign = 32u; constexpr int kRtpTicksPerSecond = 90000; constexpr int kRtpTicksPerMs = kRtpTicksPerSecond / 1000; -constexpr double kLowRateFactor = 1.0; -constexpr double kHighRateFactor = 2.0; - // VP8 denoiser states. enum denoiserState : uint32_t { kDenoiserOff, @@ -82,15 +79,6 @@ enum denoiserState : uint32_t { kDenoiserOnAdaptive }; -// These settings correspond to the settings in vpx_codec_enc_cfg. -struct Vp8RateSettings { - uint32_t rc_undershoot_pct; - uint32_t rc_overshoot_pct; - uint32_t rc_buf_sz; - uint32_t rc_buf_optimal_sz; - uint32_t rc_dropframe_thresh; -}; - // Greatest common divisior int GCD(int a, int b) { int c = a % b; @@ -102,56 +90,6 @@ int GCD(int a, int b) { return b; } -uint32_t Interpolate(uint32_t low, - uint32_t high, - double bandwidth_headroom_factor) { - RTC_DCHECK_GE(bandwidth_headroom_factor, kLowRateFactor); - RTC_DCHECK_LE(bandwidth_headroom_factor, kHighRateFactor); - - // |factor| is between 0.0 and 1.0. 
- const double factor = bandwidth_headroom_factor - kLowRateFactor; - - return static_cast(((1.0 - factor) * low) + (factor * high) + 0.5); -} - -Vp8RateSettings GetRateSettings(double bandwidth_headroom_factor) { - static const Vp8RateSettings low_settings{1000u, 0u, 100u, 30u, 40u}; - static const Vp8RateSettings high_settings{100u, 15u, 1000u, 600u, 5u}; - - if (bandwidth_headroom_factor <= kLowRateFactor) { - return low_settings; - } else if (bandwidth_headroom_factor >= kHighRateFactor) { - return high_settings; - } - - Vp8RateSettings settings; - settings.rc_undershoot_pct = - Interpolate(low_settings.rc_undershoot_pct, - high_settings.rc_undershoot_pct, bandwidth_headroom_factor); - settings.rc_overshoot_pct = - Interpolate(low_settings.rc_overshoot_pct, high_settings.rc_overshoot_pct, - bandwidth_headroom_factor); - settings.rc_buf_sz = - Interpolate(low_settings.rc_buf_sz, high_settings.rc_buf_sz, - bandwidth_headroom_factor); - settings.rc_buf_optimal_sz = - Interpolate(low_settings.rc_buf_optimal_sz, - high_settings.rc_buf_optimal_sz, bandwidth_headroom_factor); - settings.rc_dropframe_thresh = - Interpolate(low_settings.rc_dropframe_thresh, - high_settings.rc_dropframe_thresh, bandwidth_headroom_factor); - return settings; -} - -void UpdateRateSettings(vpx_codec_enc_cfg_t* config, - const Vp8RateSettings& new_settings) { - config->rc_undershoot_pct = new_settings.rc_undershoot_pct; - config->rc_overshoot_pct = new_settings.rc_overshoot_pct; - config->rc_buf_sz = new_settings.rc_buf_sz; - config->rc_buf_optimal_sz = new_settings.rc_buf_optimal_sz; - config->rc_dropframe_thresh = new_settings.rc_dropframe_thresh; -} - static_assert(Vp8EncoderConfig::TemporalLayerConfig::kMaxPeriodicity == VPX_TS_MAX_PERIODICITY, "Vp8EncoderConfig::kMaxPeriodicity must be kept in sync with the " @@ -291,7 +229,6 @@ vpx_enc_frame_flags_t LibvpxVp8Encoder::EncodeFlags( LibvpxVp8Encoder::LibvpxVp8Encoder(std::unique_ptr interface, VP8Encoder::Settings settings) : 
libvpx_(std::move(interface)), - experimental_cpu_speed_config_arm_(CpuSpeedExperiment::GetConfigs()), rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), requested_resolution_alignment_override_( GetRequestedResolutionAlignmentOverride()), @@ -408,18 +345,12 @@ void LibvpxVp8Encoder::SetRates(const RateControlParameters& parameters) { UpdateVpxConfiguration(stream_idx); - if (rate_control_settings_.Vp8DynamicRateSettings()) { - // Tweak rate control settings based on available network headroom. - UpdateRateSettings( - &vpx_configs_[i], - GetRateSettings(parameters.bandwidth_allocation.bps() / - parameters.bitrate.get_sum_bps())); - } - vpx_codec_err_t err = libvpx_->codec_enc_config_set(&encoders_[i], &vpx_configs_[i]); if (err != VPX_CODEC_OK) { - RTC_LOG(LS_WARNING) << "Error configuring codec, error code: " << err; + RTC_LOG(LS_WARNING) << "Error configuring codec, error code: " << err + << ", details: " + << libvpx_->codec_error_detail(&encoders_[i]); } } } @@ -496,6 +427,10 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } + // Use the previous pixel format to avoid extra image allocations. + vpx_img_fmt_t pixel_format = + raw_images_.empty() ? VPX_IMG_FMT_I420 : raw_images_[0].fmt; + int retVal = Release(); if (retVal < 0) { return retVal; @@ -553,9 +488,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, downsampling_factors_[number_of_streams - 1].num = 1; downsampling_factors_[number_of_streams - 1].den = 1; } - for (int i = 0; i < number_of_streams; ++i) { - encoded_images_[i]._completeFrame = true; - } + // populate encoder configuration with default values if (libvpx_->codec_enc_config_default(vpx_codec_vp8_cx(), &vpx_configs_[0], 0)) { @@ -649,8 +582,8 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, // Creating a wrapper to the image - setting image data to NULL. // Actual pointer will be set in encode. 
Setting align to 1, as it // is meaningless (no memory allocation is done here). - libvpx_->img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, - inst->height, 1, NULL); + libvpx_->img_wrap(&raw_images_[0], pixel_format, inst->width, inst->height, 1, + NULL); // Note the order we use is different from webm, we have lowest resolution // at position 0 and they have highest resolution at position 0. @@ -698,10 +631,9 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, // Setting alignment to 32 - as that ensures at least 16 for all // planes (32 for Y, 16 for U,V). Libvpx sets the requested stride for // the y plane, but only half of it to the u and v planes. - libvpx_->img_alloc(&raw_images_[i], VPX_IMG_FMT_I420, - inst->simulcastStream[stream_idx].width, - inst->simulcastStream[stream_idx].height, - kVp832ByteAlign); + libvpx_->img_alloc( + &raw_images_[i], pixel_format, inst->simulcastStream[stream_idx].width, + inst->simulcastStream[stream_idx].height, kVp832ByteAlign); SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx); vpx_configs_[i].rc_target_bitrate = stream_bitrates[stream_idx]; if (stream_bitrates[stream_idx] > 0) { @@ -728,14 +660,17 @@ int LibvpxVp8Encoder::GetCpuSpeed(int width, int height) { // On mobile platform, use a lower speed setting for lower resolutions for // CPUs with 4 or more cores. RTC_DCHECK_GT(number_of_cores_, 0); + if (experimental_cpu_speed_config_arm_ + .GetValue(width * height, number_of_cores_) + .has_value()) { + return experimental_cpu_speed_config_arm_ + .GetValue(width * height, number_of_cores_) + .value(); + } + if (number_of_cores_ <= 3) return -12; - if (experimental_cpu_speed_config_arm_) { - return CpuSpeedExperiment::GetValue(width * height, - *experimental_cpu_speed_config_arm_); - } - if (width * height <= 352 * 288) return -8; else if (width * height <= 640 * 480) @@ -1010,26 +945,31 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, flags[i] = send_key_frame ? 
VPX_EFLAG_FORCE_KF : EncodeFlags(tl_configs[i]); } - rtc::scoped_refptr input_image = - frame.video_frame_buffer()->ToI420(); + rtc::scoped_refptr input_image = frame.video_frame_buffer(); // Since we are extracting raw pointers from |input_image| to // |raw_images_[0]|, the resolution of these frames must match. RTC_DCHECK_EQ(input_image->width(), raw_images_[0].d_w); RTC_DCHECK_EQ(input_image->height(), raw_images_[0].d_h); - - // Image in vpx_image_t format. - // Input image is const. VP8's raw image is not defined as const. - raw_images_[0].planes[VPX_PLANE_Y] = - const_cast(input_image->DataY()); - raw_images_[0].planes[VPX_PLANE_U] = - const_cast(input_image->DataU()); - raw_images_[0].planes[VPX_PLANE_V] = - const_cast(input_image->DataV()); - - raw_images_[0].stride[VPX_PLANE_Y] = input_image->StrideY(); - raw_images_[0].stride[VPX_PLANE_U] = input_image->StrideU(); - raw_images_[0].stride[VPX_PLANE_V] = input_image->StrideV(); - + switch (input_image->type()) { + case VideoFrameBuffer::Type::kI420: + PrepareI420Image(input_image->GetI420()); + break; + case VideoFrameBuffer::Type::kNV12: + PrepareNV12Image(input_image->GetNV12()); + break; + default: { + rtc::scoped_refptr i420_image = + input_image->ToI420(); + if (!i420_image) { + RTC_LOG(LS_ERROR) << "Failed to convert " + << VideoFrameBufferTypeToString(input_image->type()) + << " image to I420. 
Can't encode frame."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + input_image = i420_image; + PrepareI420Image(i420_image); + } + } struct CleanUpOnExit { explicit CleanUpOnExit(vpx_image_t& raw_image) : raw_image_(raw_image) {} ~CleanUpOnExit() { @@ -1040,22 +980,6 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, vpx_image_t& raw_image_; } clean_up_on_exit(raw_images_[0]); - for (size_t i = 1; i < encoders_.size(); ++i) { - // Scale the image down a number of times by downsampling factor - libyuv::I420Scale( - raw_images_[i - 1].planes[VPX_PLANE_Y], - raw_images_[i - 1].stride[VPX_PLANE_Y], - raw_images_[i - 1].planes[VPX_PLANE_U], - raw_images_[i - 1].stride[VPX_PLANE_U], - raw_images_[i - 1].planes[VPX_PLANE_V], - raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w, - raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], - raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], - raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V], - raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w, - raw_images_[i].d_h, libyuv::kFilterBilinear); - } - if (send_key_frame) { // Adapt the size of the key frame when in screenshare with 1 temporal // layer. 
@@ -1214,7 +1138,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, &qp_128); encoded_images_[encoder_idx].qp_ = qp_128; encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx], - &codec_specific, nullptr); + &codec_specific); const size_t steady_state_size = SteadyStateSize( stream_idx, codec_specific.codecSpecific.VP8.temporalIdx); if (qp_128 > variable_framerate_experiment_.steady_state_qp || @@ -1267,6 +1191,8 @@ VideoEncoder::EncoderInfo LibvpxVp8Encoder::GetEncoderInfo() const { info.scaling_settings.min_pixels_per_frame = rate_control_settings_.LibvpxVp8MinPixels().value(); } + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; if (inited_) { // |encoder_idx| is libvpx index where 0 is highest resolution. @@ -1305,17 +1231,99 @@ int LibvpxVp8Encoder::RegisterEncodeCompleteCallback( return WEBRTC_VIDEO_CODEC_OK; } +void LibvpxVp8Encoder::MaybeUpdatePixelFormat(vpx_img_fmt fmt) { + RTC_DCHECK(!raw_images_.empty()); + if (raw_images_[0].fmt == fmt) { + RTC_DCHECK(std::all_of( + std::next(raw_images_.begin()), raw_images_.end(), + [fmt](const vpx_image_t& raw_img) { return raw_img.fmt == fmt; })) + << "Not all raw images had the right format!"; + return; + } + RTC_LOG(INFO) << "Updating vp8 encoder pixel format to " + << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420"); + for (size_t i = 0; i < raw_images_.size(); ++i) { + vpx_image_t& img = raw_images_[i]; + auto d_w = img.d_w; + auto d_h = img.d_h; + libvpx_->img_free(&img); + // First image is wrapping the input frame, the rest are allocated. + if (i == 0) { + libvpx_->img_wrap(&img, fmt, d_w, d_h, 1, NULL); + } else { + libvpx_->img_alloc(&img, fmt, d_w, d_h, kVp832ByteAlign); + } + } +} + +void LibvpxVp8Encoder::PrepareI420Image(const I420BufferInterface* frame) { + RTC_DCHECK(!raw_images_.empty()); + MaybeUpdatePixelFormat(VPX_IMG_FMT_I420); + // Image in vpx_image_t format. + // Input image is const. 
VP8's raw image is not defined as const. + raw_images_[0].planes[VPX_PLANE_Y] = const_cast(frame->DataY()); + raw_images_[0].planes[VPX_PLANE_U] = const_cast(frame->DataU()); + raw_images_[0].planes[VPX_PLANE_V] = const_cast(frame->DataV()); + + raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY(); + raw_images_[0].stride[VPX_PLANE_U] = frame->StrideU(); + raw_images_[0].stride[VPX_PLANE_V] = frame->StrideV(); + + for (size_t i = 1; i < encoders_.size(); ++i) { + // Scale the image down a number of times by downsampling factor + libyuv::I420Scale( + raw_images_[i - 1].planes[VPX_PLANE_Y], + raw_images_[i - 1].stride[VPX_PLANE_Y], + raw_images_[i - 1].planes[VPX_PLANE_U], + raw_images_[i - 1].stride[VPX_PLANE_U], + raw_images_[i - 1].planes[VPX_PLANE_V], + raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w, + raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], + raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], + raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V], + raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w, + raw_images_[i].d_h, libyuv::kFilterBilinear); + } +} + +void LibvpxVp8Encoder::PrepareNV12Image(const NV12BufferInterface* frame) { + RTC_DCHECK(!raw_images_.empty()); + MaybeUpdatePixelFormat(VPX_IMG_FMT_NV12); + // Image in vpx_image_t format. + // Input image is const. VP8's raw image is not defined as const. 
+ raw_images_[0].planes[VPX_PLANE_Y] = const_cast(frame->DataY()); + raw_images_[0].planes[VPX_PLANE_U] = const_cast(frame->DataUV()); + raw_images_[0].planes[VPX_PLANE_V] = raw_images_[0].planes[VPX_PLANE_U] + 1; + raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY(); + raw_images_[0].stride[VPX_PLANE_U] = frame->StrideUV(); + raw_images_[0].stride[VPX_PLANE_V] = frame->StrideUV(); + + for (size_t i = 1; i < encoders_.size(); ++i) { + // Scale the image down a number of times by downsampling factor + libyuv::NV12Scale( + raw_images_[i - 1].planes[VPX_PLANE_Y], + raw_images_[i - 1].stride[VPX_PLANE_Y], + raw_images_[i - 1].planes[VPX_PLANE_U], + raw_images_[i - 1].stride[VPX_PLANE_U], raw_images_[i - 1].d_w, + raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], + raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], + raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].d_w, + raw_images_[i].d_h, libyuv::kFilterBilinear); + raw_images_[i].planes[VPX_PLANE_V] = raw_images_[i].planes[VPX_PLANE_U] + 1; + } +} + // static LibvpxVp8Encoder::VariableFramerateExperiment LibvpxVp8Encoder::ParseVariableFramerateConfig(std::string group_name) { - FieldTrialFlag enabled = FieldTrialFlag("Enabled"); + FieldTrialFlag disabled = FieldTrialFlag("Disabled"); FieldTrialParameter framerate_limit("min_fps", 5.0); FieldTrialParameter qp("min_qp", 15); FieldTrialParameter undershoot_percentage("undershoot", 30); - ParseFieldTrial({&enabled, &framerate_limit, &qp, &undershoot_percentage}, + ParseFieldTrial({&disabled, &framerate_limit, &qp, &undershoot_percentage}, field_trial::FindFullName(group_name)); VariableFramerateExperiment config; - config.enabled = enabled.Get(); + config.enabled = !disabled.Get(); config.framerate_limit = framerate_limit.Get(); config.steady_state_qp = qp.Get(); config.steady_state_undershoot_percentage = undershoot_percentage.Get(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h index f6cfd0ffe..c08b9b088 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h @@ -93,10 +93,13 @@ class LibvpxVp8Encoder : public VideoEncoder { bool UpdateVpxConfiguration(size_t stream_index); + void MaybeUpdatePixelFormat(vpx_img_fmt fmt); + void PrepareI420Image(const I420BufferInterface* frame); + void PrepareNV12Image(const NV12BufferInterface* frame); + const std::unique_ptr libvpx_; - const absl::optional> - experimental_cpu_speed_config_arm_; + const CpuSpeedExperiment experimental_cpu_speed_config_arm_; const RateControlSettings rate_control_settings_; // EncoderInfo::requested_resolution_alignment override from field trial. diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc index 53a68bd5e..5aebd2c52 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc @@ -29,6 +29,7 @@ TemporalLayersChecker::CreateTemporalLayersChecker(Vp8TemporalLayersType type, // Conference mode temporal layering for screen content in base stream. 
return std::make_unique(num_temporal_layers); } + RTC_CHECK_NOTREACHED(); } TemporalLayersChecker::TemporalLayersChecker(int num_temporal_layers) diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h index 9bd8b0e31..f6b562e18 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h @@ -14,7 +14,7 @@ #include -#include "common_types.h" // NOLINT(build/include) +#include "api/video_codecs/spatial_layer.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.h index a4e0c28cc..fa53a155a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.h @@ -11,52 +11,7 @@ #ifndef MODULES_VIDEO_CODING_CODECS_VP9_SVC_RATE_ALLOCATOR_H_ #define MODULES_VIDEO_CODING_CODECS_VP9_SVC_RATE_ALLOCATOR_H_ -#include -#include - -#include "absl/container/inlined_vector.h" -#include "api/video/video_bitrate_allocation.h" -#include "api/video/video_bitrate_allocator.h" -#include "api/video/video_codec_constants.h" -#include "api/video_codecs/video_codec.h" -#include "rtc_base/experiments/stable_target_rate_experiment.h" - -namespace webrtc { - -class SvcRateAllocator : public VideoBitrateAllocator { - public: - explicit SvcRateAllocator(const VideoCodec& codec); - - VideoBitrateAllocation Allocate( - VideoBitrateAllocationParameters parameters) override; - - static DataRate GetMaxBitrate(const VideoCodec& codec); - static DataRate GetPaddingBitrate(const VideoCodec& codec); - static absl::InlinedVector GetLayerStartBitrates( - const VideoCodec& codec); - - private: - VideoBitrateAllocation 
GetAllocationNormalVideo( - DataRate total_bitrate, - size_t first_active_layer, - size_t num_spatial_layers) const; - - VideoBitrateAllocation GetAllocationScreenSharing( - DataRate total_bitrate, - size_t first_active_layer, - size_t num_spatial_layers) const; - - // Returns the number of layers that are active and have enough bitrate to - // actually be enabled. - size_t FindNumEnabledLayers(DataRate target_rate) const; - - const VideoCodec codec_; - const StableTargetRateExperiment experiment_settings_; - const absl::InlinedVector - cumulative_layer_start_bitrates_; - size_t last_active_layer_count_; -}; - -} // namespace webrtc +// TODO(danilchap): Update dependent includes and remove this forwarding header. +#include "modules/video_coding/svc/svc_rate_allocator.h" #endif // MODULES_VIDEO_CODING_CODECS_VP9_SVC_RATE_ALLOCATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc index d29c19dc8..c2b1f501f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc @@ -19,20 +19,27 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/match.h" +#include "api/transport/field_trial_based_config.h" #include "api/video/color_space.h" #include "api/video/i010_buffer.h" #include "common_video/include/video_frame_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "rtc_base/checks.h" +#include 
"rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" +#include "third_party/libyuv/include/libyuv/convert.h" #include "vpx/vp8cx.h" #include "vpx/vp8dx.h" #include "vpx/vpx_decoder.h" @@ -49,9 +56,6 @@ uint8_t kUpdBufIdx[4] = {0, 0, 1, 0}; // Maximum allowed PID difference for differnet per-layer frame-rate case. const int kMaxAllowedPidDiff = 30; -constexpr double kLowRateFactor = 1.0; -constexpr double kHighRateFactor = 2.0; - // TODO(ilink): Tune these thresholds further. // Selected using ConverenceMotion_1280_720_50.yuv clip. // No toggling observed on any link capacity from 100-2000kbps. @@ -61,15 +65,6 @@ constexpr double kHighRateFactor = 2.0; constexpr int kLowVp9QpThreshold = 149; constexpr int kHighVp9QpThreshold = 205; -// These settings correspond to the settings in vpx_codec_enc_cfg. -struct Vp9RateSettings { - uint32_t rc_undershoot_pct; - uint32_t rc_overshoot_pct; - uint32_t rc_buf_sz; - uint32_t rc_buf_optimal_sz; - uint32_t rc_dropframe_thresh; -}; - // Only positive speeds, range for real-time coding currently is: 5 - 8. // Lower means slower/better quality, higher means fastest/lower quality. int GetCpuSpeed(int width, int height) { @@ -161,54 +156,110 @@ std::pair GetActiveLayers( return {0, 0}; } -uint32_t Interpolate(uint32_t low, - uint32_t high, - double bandwidth_headroom_factor) { - RTC_DCHECK_GE(bandwidth_headroom_factor, kLowRateFactor); - RTC_DCHECK_LE(bandwidth_headroom_factor, kHighRateFactor); - - // |factor| is between 0.0 and 1.0. 
- const double factor = bandwidth_headroom_factor - kLowRateFactor; - - return static_cast(((1.0 - factor) * low) + (factor * high) + 0.5); -} - -Vp9RateSettings GetRateSettings(double bandwidth_headroom_factor) { - static const Vp9RateSettings low_settings{100u, 0u, 100u, 33u, 40u}; - static const Vp9RateSettings high_settings{50u, 50u, 1000u, 700u, 5u}; - - if (bandwidth_headroom_factor <= kLowRateFactor) { - return low_settings; - } else if (bandwidth_headroom_factor >= kHighRateFactor) { - return high_settings; +std::unique_ptr CreateVp9ScalabilityStructure( + const VideoCodec& codec) { + int num_spatial_layers = codec.VP9().numberOfSpatialLayers; + int num_temporal_layers = + std::max(1, int{codec.VP9().numberOfTemporalLayers}); + if (num_spatial_layers == 1 && num_temporal_layers == 1) { + return std::make_unique(); } - Vp9RateSettings settings; - settings.rc_undershoot_pct = - Interpolate(low_settings.rc_undershoot_pct, - high_settings.rc_undershoot_pct, bandwidth_headroom_factor); - settings.rc_overshoot_pct = - Interpolate(low_settings.rc_overshoot_pct, high_settings.rc_overshoot_pct, - bandwidth_headroom_factor); - settings.rc_buf_sz = - Interpolate(low_settings.rc_buf_sz, high_settings.rc_buf_sz, - bandwidth_headroom_factor); - settings.rc_buf_optimal_sz = - Interpolate(low_settings.rc_buf_optimal_sz, - high_settings.rc_buf_optimal_sz, bandwidth_headroom_factor); - settings.rc_dropframe_thresh = - Interpolate(low_settings.rc_dropframe_thresh, - high_settings.rc_dropframe_thresh, bandwidth_headroom_factor); - return settings; + char name[20]; + rtc::SimpleStringBuilder ss(name); + if (codec.mode == VideoCodecMode::kScreensharing) { + // TODO(bugs.webrtc.org/11999): Compose names of the structures when they + // are implemented. 
+ return nullptr; + } else if (codec.VP9().interLayerPred == InterLayerPredMode::kOn || + num_spatial_layers == 1) { + ss << "L" << num_spatial_layers << "T" << num_temporal_layers; + } else if (codec.VP9().interLayerPred == InterLayerPredMode::kOnKeyPic) { + ss << "L" << num_spatial_layers << "T" << num_temporal_layers << "_KEY"; + } else { + RTC_DCHECK_EQ(codec.VP9().interLayerPred, InterLayerPredMode::kOff); + ss << "S" << num_spatial_layers << "T" << num_temporal_layers; + } + + // Check spatial ratio. + if (num_spatial_layers > 1 && codec.spatialLayers[0].targetBitrate > 0) { + if (codec.width != codec.spatialLayers[num_spatial_layers - 1].width || + codec.height != codec.spatialLayers[num_spatial_layers - 1].height) { + RTC_LOG(LS_WARNING) + << "Top layer resolution expected to match overall resolution"; + return nullptr; + } + // Check if the ratio is one of the supported. + int numerator; + int denominator; + if (codec.spatialLayers[1].width == 2 * codec.spatialLayers[0].width) { + numerator = 1; + denominator = 2; + // no suffix for 1:2 ratio. + } else if (2 * codec.spatialLayers[1].width == + 3 * codec.spatialLayers[0].width) { + numerator = 2; + denominator = 3; + ss << "h"; + } else { + RTC_LOG(LS_WARNING) << "Unsupported scalability ratio " + << codec.spatialLayers[0].width << ":" + << codec.spatialLayers[1].width; + return nullptr; + } + // Validate ratio is consistent for all spatial layer transitions. 
+ for (int sid = 1; sid < num_spatial_layers; ++sid) { + if (codec.spatialLayers[sid].width * numerator != + codec.spatialLayers[sid - 1].width * denominator || + codec.spatialLayers[sid].height * numerator != + codec.spatialLayers[sid - 1].height * denominator) { + RTC_LOG(LS_WARNING) << "Inconsistent scalability ratio " << numerator + << ":" << denominator; + return nullptr; + } + } + } + + auto scalability_structure_controller = CreateScalabilityStructure(name); + if (scalability_structure_controller == nullptr) { + RTC_LOG(LS_WARNING) << "Unsupported scalability structure " << name; + } else { + RTC_LOG(LS_INFO) << "Created scalability structure " << name; + } + return scalability_structure_controller; } -void UpdateRateSettings(vpx_codec_enc_cfg_t* config, - const Vp9RateSettings& new_settings) { - config->rc_undershoot_pct = new_settings.rc_undershoot_pct; - config->rc_overshoot_pct = new_settings.rc_overshoot_pct; - config->rc_buf_sz = new_settings.rc_buf_sz; - config->rc_buf_optimal_sz = new_settings.rc_buf_optimal_sz; - config->rc_dropframe_thresh = new_settings.rc_dropframe_thresh; +vpx_svc_ref_frame_config_t Vp9References( + rtc::ArrayView layers) { + vpx_svc_ref_frame_config_t ref_config = {}; + for (const ScalableVideoController::LayerFrameConfig& layer_frame : layers) { + const auto& buffers = layer_frame.Buffers(); + RTC_DCHECK_LE(buffers.size(), 3); + int sid = layer_frame.SpatialId(); + if (!buffers.empty()) { + ref_config.lst_fb_idx[sid] = buffers[0].id; + ref_config.reference_last[sid] = buffers[0].referenced; + if (buffers[0].updated) { + ref_config.update_buffer_slot[sid] |= (1 << buffers[0].id); + } + } + if (buffers.size() > 1) { + ref_config.gld_fb_idx[sid] = buffers[1].id; + ref_config.reference_golden[sid] = buffers[1].referenced; + if (buffers[1].updated) { + ref_config.update_buffer_slot[sid] |= (1 << buffers[1].id); + } + } + if (buffers.size() > 2) { + ref_config.alt_fb_idx[sid] = buffers[2].id; + ref_config.reference_alt_ref[sid] = 
buffers[2].referenced; + if (buffers[2].updated) { + ref_config.update_buffer_slot[sid] |= (1 << buffers[2].id); + } + } + } + // TODO(bugs.webrtc.org/11999): Fill ref_config.duration + return ref_config; } } // namespace @@ -220,6 +271,10 @@ void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, } VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec) + : VP9EncoderImpl(codec, FieldTrialBasedConfig()) {} + +VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec, + const WebRtcKeyValueConfig& trials) : encoded_image_(), encoded_complete_callback_(nullptr), profile_( @@ -238,27 +293,32 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec) num_spatial_layers_(0), num_active_spatial_layers_(0), first_active_layer_(0), - layer_deactivation_requires_key_frame_( - field_trial::IsEnabled("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation")), + layer_deactivation_requires_key_frame_(absl::StartsWith( + trials.Lookup("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation"), + "Enabled")), is_svc_(false), inter_layer_pred_(InterLayerPredMode::kOn), external_ref_control_(false), // Set in InitEncode because of tests. 
- trusted_rate_controller_(RateControlSettings::ParseFromFieldTrials() - .LibvpxVp9TrustedRateController()), - dynamic_rate_settings_( - RateControlSettings::ParseFromFieldTrials().Vp9DynamicRateSettings()), + trusted_rate_controller_( + RateControlSettings::ParseFromKeyValueConfig(&trials) + .LibvpxVp9TrustedRateController()), layer_buffering_(false), full_superframe_drop_(true), first_frame_in_picture_(true), ss_info_needed_(false), force_all_active_layers_(false), + use_svc_controller_( + absl::StartsWith(trials.Lookup("WebRTC-Vp9DependencyDescriptor"), + "Enabled")), is_flexible_mode_(false), - variable_framerate_experiment_(ParseVariableFramerateConfig( - "WebRTC-VP9VariableFramerateScreenshare")), + variable_framerate_experiment_(ParseVariableFramerateConfig(trials)), variable_framerate_controller_( variable_framerate_experiment_.framerate_limit), - quality_scaler_experiment_( - ParseQualityScalerConfig("WebRTC-VP9QualityScaler")), + quality_scaler_experiment_(ParseQualityScalerConfig(trials)), + external_ref_ctrl_( + !absl::StartsWith(trials.Lookup("WebRTC-Vp9ExternalRefCtrl"), + "Disabled")), + per_layer_speed_(ParsePerLayerSpeed(trials)), num_steady_state_frames_(0), config_changed_(true) { codec_ = {}; @@ -269,8 +329,7 @@ VP9EncoderImpl::~VP9EncoderImpl() { Release(); } -void VP9EncoderImpl::SetFecControllerOverride( - FecControllerOverride* fec_controller_override) { +void VP9EncoderImpl::SetFecControllerOverride(FecControllerOverride*) { // Ignored. 
} @@ -396,6 +455,8 @@ bool VP9EncoderImpl::SetSvcRates( first_active_layer_ = 0; bool seen_active_layer = false; bool expect_no_more_active_layers = false; + int highest_active_width = 0; + int highest_active_height = 0; for (int i = 0; i < num_spatial_layers_; ++i) { if (config_->ss_target_bitrate[i] > 0) { RTC_DCHECK(!expect_no_more_active_layers) << "Only middle layer is " @@ -405,6 +466,12 @@ bool VP9EncoderImpl::SetSvcRates( } num_active_spatial_layers_ = i + 1; seen_active_layer = true; + highest_active_width = + (svc_params_.scaling_factor_num[i] * config_->g_w) / + svc_params_.scaling_factor_den[i]; + highest_active_height = + (svc_params_.scaling_factor_num[i] * config_->g_h) / + svc_params_.scaling_factor_den[i]; } else { expect_no_more_active_layers = seen_active_layer; } @@ -419,7 +486,20 @@ bool VP9EncoderImpl::SetSvcRates( force_all_active_layers_ = true; } + if (svc_controller_) { + VideoBitrateAllocation allocation; + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + allocation.SetBitrate( + sid, tid, + config_->layer_target_bitrate[sid * num_temporal_layers_ + tid] * + 1000); + } + } + svc_controller_->OnRatesUpdated(allocation); + } current_bitrate_allocation_ = bitrate_allocation; + cpu_speed_ = GetCpuSpeed(highest_active_width, highest_active_height); config_changed_ = true; return true; } @@ -441,13 +521,6 @@ void VP9EncoderImpl::SetRates(const RateControlParameters& parameters) { codec_.maxFramerate = static_cast(parameters.framerate_fps + 0.5); - if (dynamic_rate_settings_) { - // Tweak rate control settings based on available network headroom. 
- UpdateRateSettings( - config_, GetRateSettings(parameters.bandwidth_allocation.bps() / - parameters.bitrate.get_sum_bps())); - } - bool res = SetSvcRates(parameters.bitrate); RTC_DCHECK(res) << "Failed to set new bitrate allocation"; config_changed_ = true; @@ -480,6 +553,9 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } + absl::optional previous_img_fmt = + raw_ ? absl::make_optional(raw_->fmt) : absl::nullopt; + int ret_val = Release(); if (ret_val < 0) { return ret_val; @@ -505,12 +581,14 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, num_temporal_layers_ = 1; } + if (use_svc_controller_) { + svc_controller_ = CreateVp9ScalabilityStructure(*inst); + } framerate_controller_ = std::vector( num_spatial_layers_, FramerateController(codec_.maxFramerate)); is_svc_ = (num_spatial_layers_ > 1 || num_temporal_layers_ > 1); - encoded_image_._completeFrame = true; // Populate encoder configuration with default values. if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { return WEBRTC_VIDEO_CODEC_ERROR; @@ -520,7 +598,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, unsigned int bits_for_storage = 8; switch (profile_) { case VP9Profile::kProfile0: - img_fmt = VPX_IMG_FMT_I420; + img_fmt = previous_img_fmt.value_or(VPX_IMG_FMT_I420); bits_for_storage = 8; config_->g_bit_depth = VPX_BITS_8; config_->g_profile = 0; @@ -602,11 +680,10 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, // External reference control is required for different frame rate on spatial // layers because libvpx generates rtp incompatible references in this case. 
- external_ref_control_ = - !field_trial::IsDisabled("WebRTC-Vp9ExternalRefCtrl") || - (num_spatial_layers_ > 1 && - codec_.mode == VideoCodecMode::kScreensharing) || - inter_layer_pred_ == InterLayerPredMode::kOn; + external_ref_control_ = external_ref_ctrl_ || + (num_spatial_layers_ > 1 && + codec_.mode == VideoCodecMode::kScreensharing) || + inter_layer_pred_ == InterLayerPredMode::kOn; if (num_temporal_layers_ == 1) { gof_.SetGofInfoVP9(kTemporalStructureMode1); @@ -684,7 +761,13 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { svc_params_.min_quantizers[i] = config_->rc_min_quantizer; } config_->ss_number_layers = num_spatial_layers_; - if (ExplicitlyConfiguredSpatialLayers()) { + if (svc_controller_) { + auto stream_config = svc_controller_->StreamConfig(); + for (int i = 0; i < stream_config.num_spatial_layers; ++i) { + svc_params_.scaling_factor_num[i] = stream_config.scaling_factor_num[i]; + svc_params_.scaling_factor_den[i] = stream_config.scaling_factor_den[i]; + } + } else if (ExplicitlyConfiguredSpatialLayers()) { for (int i = 0; i < num_spatial_layers_; ++i) { const auto& layer = codec_.spatialLayers[i]; RTC_CHECK_GT(layer.width, 0); @@ -743,6 +826,22 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { RTC_LOG(LS_ERROR) << "Init error: " << vpx_codec_err_to_string(rv); return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } + + if (per_layer_speed_.enabled) { + for (int i = 0; i < num_spatial_layers_; ++i) { + if (codec_.spatialLayers[i].active) { + continue; + } + + if (per_layer_speed_.layers[i] != -1) { + svc_params_.speed_per_layer[i] = per_layer_speed_.layers[i]; + } else { + svc_params_.speed_per_layer[i] = GetCpuSpeed( + codec_.spatialLayers[i].width, codec_.spatialLayers[i].height); + } + } + } + vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_); vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT, rc_max_intra_target_); @@ -882,6 +981,13 @@ int VP9EncoderImpl::Encode(const VideoFrame& 
input_image, force_key_frame_ = true; } + if (svc_controller_) { + layer_frames_ = svc_controller_->NextFrameConfig(force_key_frame_); + if (layer_frames_.empty()) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + } + vpx_svc_layer_id_t layer_id = {0}; if (!force_key_frame_) { const size_t gof_idx = (pics_since_key_ + 1) % gof_.num_frames_in_gof; @@ -953,6 +1059,15 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, layer_id.spatial_layer_id = first_active_layer_; } + if (svc_controller_) { + layer_id.spatial_layer_id = layer_frames_.front().SpatialId(); + layer_id.temporal_layer_id = layer_frames_.front().TemporalId(); + for (const auto& layer : layer_frames_) { + layer_id.temporal_layer_id_per_spatial[layer.SpatialId()] = + layer.TemporalId(); + } + } + vpx_codec_control(encoder_, VP9E_SET_SVC_LAYER_ID, &layer_id); if (num_spatial_layers_ > 1) { @@ -965,6 +1080,7 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, if (vpx_codec_enc_config_set(encoder_, config_)) { return WEBRTC_VIDEO_CODEC_ERROR; } + vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_); config_changed_ = false; } @@ -978,20 +1094,37 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, input_image_ = &input_image; // Keep reference to buffer until encode completes. - rtc::scoped_refptr i420_buffer; + rtc::scoped_refptr video_frame_buffer; const I010BufferInterface* i010_buffer; rtc::scoped_refptr i010_copy; switch (profile_) { case VP9Profile::kProfile0: { - i420_buffer = input_image.video_frame_buffer()->ToI420(); - // Image in vpx_image_t format. - // Input image is const. VPX's raw image is not defined as const. 
- raw_->planes[VPX_PLANE_Y] = const_cast(i420_buffer->DataY()); - raw_->planes[VPX_PLANE_U] = const_cast(i420_buffer->DataU()); - raw_->planes[VPX_PLANE_V] = const_cast(i420_buffer->DataV()); - raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY(); - raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU(); - raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV(); + if (input_image.video_frame_buffer()->type() == + VideoFrameBuffer::Type::kNV12) { + const NV12BufferInterface* nv12_buffer = + input_image.video_frame_buffer()->GetNV12(); + video_frame_buffer = nv12_buffer; + MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12); + raw_->planes[VPX_PLANE_Y] = const_cast(nv12_buffer->DataY()); + raw_->planes[VPX_PLANE_U] = const_cast(nv12_buffer->DataUV()); + raw_->planes[VPX_PLANE_V] = raw_->planes[VPX_PLANE_U] + 1; + raw_->stride[VPX_PLANE_Y] = nv12_buffer->StrideY(); + raw_->stride[VPX_PLANE_U] = nv12_buffer->StrideUV(); + raw_->stride[VPX_PLANE_V] = nv12_buffer->StrideUV(); + } else { + rtc::scoped_refptr i420_buffer = + input_image.video_frame_buffer()->ToI420(); + video_frame_buffer = i420_buffer; + MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420); + // Image in vpx_image_t format. + // Input image is const. VPX's raw image is not defined as const. 
+ raw_->planes[VPX_PLANE_Y] = const_cast(i420_buffer->DataY()); + raw_->planes[VPX_PLANE_U] = const_cast(i420_buffer->DataU()); + raw_->planes[VPX_PLANE_V] = const_cast(i420_buffer->DataV()); + raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY(); + raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU(); + raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV(); + } break; } case VP9Profile::kProfile1: { @@ -1030,7 +1163,10 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, flags = VPX_EFLAG_FORCE_KF; } - if (external_ref_control_) { + if (svc_controller_) { + vpx_svc_ref_frame_config_t ref_config = Vp9References(layer_frames_); + vpx_codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, &ref_config); + } else if (external_ref_control_) { vpx_svc_ref_frame_config_t ref_config = SetReferences(force_key_frame_, layer_id.spatial_layer_id); @@ -1196,6 +1332,31 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, } first_frame_in_picture_ = false; + + // Populate codec-agnostic section in the codec specific structure. 
+ if (svc_controller_) { + auto it = absl::c_find_if( + layer_frames_, + [&](const ScalableVideoController::LayerFrameConfig& config) { + return config.SpatialId() == spatial_idx->value_or(0); + }); + RTC_CHECK(it != layer_frames_.end()) + << "Failed to find spatial id " << spatial_idx->value_or(0); + codec_specific->generic_frame_info = svc_controller_->OnEncodeDone(*it); + if (is_key_frame) { + codec_specific->template_structure = + svc_controller_->DependencyStructure(); + auto& resolutions = codec_specific->template_structure->resolutions; + resolutions.resize(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + resolutions[sid] = RenderResolution( + /*width=*/codec_.width * svc_params_.scaling_factor_num[sid] / + svc_params_.scaling_factor_den[sid], + /*height=*/codec_.height * svc_params_.scaling_factor_num[sid] / + svc_params_.scaling_factor_den[sid]); + } + } + } } void VP9EncoderImpl::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, @@ -1484,7 +1645,6 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { encoded_image_._frameType = VideoFrameType::kVideoFrameKey; force_key_frame_ = false; } - RTC_DCHECK_LE(encoded_image_.size(), encoded_image_.capacity()); codec_specific_ = {}; absl::optional spatial_index; @@ -1523,17 +1683,10 @@ void VP9EncoderImpl::DeliverBufferedFrame(bool end_of_picture) { } } - codec_specific_.codecSpecific.VP9.end_of_picture = end_of_picture; + codec_specific_.end_of_picture = end_of_picture; - // No data partitioning in VP9, so 1 partition only. 
- int part_idx = 0; - RTPFragmentationHeader frag_info; - frag_info.VerifyAndAllocateFragmentationHeader(1); - frag_info.fragmentationOffset[part_idx] = 0; - frag_info.fragmentationLength[part_idx] = encoded_image_.size(); - - encoded_complete_callback_->OnEncodedImage(encoded_image_, &codec_specific_, - &frag_info); + encoded_complete_callback_->OnEncodedImage(encoded_image_, + &codec_specific_); if (codec_.mode == VideoCodecMode::kScreensharing) { const uint8_t spatial_idx = encoded_image_.SpatialIndex().value_or(0); @@ -1571,7 +1724,8 @@ VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const { EncoderInfo info; info.supports_native_handle = false; info.implementation_name = "libvpx"; - if (quality_scaler_experiment_.enabled) { + if (quality_scaler_experiment_.enabled && inited_ && + codec_.VP9().automaticResizeOn) { info.scaling_settings = VideoEncoder::ScalingSettings( quality_scaler_experiment_.low_qp, quality_scaler_experiment_.high_qp); } else { @@ -1608,6 +1762,10 @@ VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const { (sl_fps_fraction / decimator))); } } + if (profile_ == VP9Profile::kProfile0) { + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; + } } return info; } @@ -1629,7 +1787,8 @@ size_t VP9EncoderImpl::SteadyStateSize(int sid, int tid) { // static VP9EncoderImpl::VariableFramerateExperiment -VP9EncoderImpl::ParseVariableFramerateConfig(std::string group_name) { +VP9EncoderImpl::ParseVariableFramerateConfig( + const WebRtcKeyValueConfig& trials) { FieldTrialFlag enabled = FieldTrialFlag("Enabled"); FieldTrialParameter framerate_limit("min_fps", 5.0); FieldTrialParameter qp("min_qp", 32); @@ -1638,7 +1797,7 @@ VP9EncoderImpl::ParseVariableFramerateConfig(std::string group_name) { "frames_before_steady_state", 5); ParseFieldTrial({&enabled, &framerate_limit, &qp, &undershoot_percentage, &frames_before_steady_state}, - field_trial::FindFullName(group_name)); + 
trials.Lookup("WebRTC-VP9VariableFramerateScreenshare")); VariableFramerateExperiment config; config.enabled = enabled.Get(); config.framerate_limit = framerate_limit.Get(); @@ -1651,12 +1810,12 @@ VP9EncoderImpl::ParseVariableFramerateConfig(std::string group_name) { // static VP9EncoderImpl::QualityScalerExperiment -VP9EncoderImpl::ParseQualityScalerConfig(std::string group_name) { +VP9EncoderImpl::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { FieldTrialFlag disabled = FieldTrialFlag("Disabled"); FieldTrialParameter low_qp("low_qp", kLowVp9QpThreshold); FieldTrialParameter high_qp("hihg_qp", kHighVp9QpThreshold); ParseFieldTrial({&disabled, &low_qp, &high_qp}, - field_trial::FindFullName(group_name)); + trials.Lookup("WebRTC-VP9QualityScaler")); QualityScalerExperiment config; config.enabled = !disabled.Get(); RTC_LOG(LS_INFO) << "Webrtc quality scaler for vp9 is " @@ -1667,16 +1826,47 @@ VP9EncoderImpl::ParseQualityScalerConfig(std::string group_name) { return config; } -VP9DecoderImpl::VP9DecoderImpl() +// static +VP9EncoderImpl::SpeedSettings VP9EncoderImpl::ParsePerLayerSpeed( + const WebRtcKeyValueConfig& trials) { + FieldTrialFlag enabled("enabled"); + FieldTrialParameter speeds[kMaxSpatialLayers]{ + {"s0", -1}, {"s1", -1}, {"s2", -1}, {"s3", -1}, {"s4", -1}}; + ParseFieldTrial( + {&enabled, &speeds[0], &speeds[1], &speeds[2], &speeds[3], &speeds[4]}, + trials.Lookup("WebRTC-VP9-PerLayerSpeed")); + return SpeedSettings{enabled.Get(), + {speeds[0].Get(), speeds[1].Get(), speeds[2].Get(), + speeds[3].Get(), speeds[4].Get()}}; +} + +void VP9EncoderImpl::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { + if (!raw_) { + raw_ = vpx_img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, nullptr); + } else if (raw_->fmt != fmt) { + RTC_LOG(INFO) << "Switching VP9 encoder pixel format to " + << (fmt == VPX_IMG_FMT_NV12 ? 
"NV12" : "I420"); + vpx_img_free(raw_); + raw_ = vpx_img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, nullptr); + } + // else no-op since the image is already in the right format. +} + +VP9DecoderImpl::VP9DecoderImpl() : VP9DecoderImpl(FieldTrialBasedConfig()) {} +VP9DecoderImpl::VP9DecoderImpl(const WebRtcKeyValueConfig& trials) : decode_complete_callback_(nullptr), inited_(false), decoder_(nullptr), - key_frame_required_(true) {} + key_frame_required_(true), + preferred_output_format_( + absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled") + ? VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} VP9DecoderImpl::~VP9DecoderImpl() { inited_ = true; // in order to do the actual release Release(); - int num_buffers_in_use = frame_buffer_pool_.GetNumBuffersInUse(); + int num_buffers_in_use = libvpx_buffer_pool_.GetNumBuffersInUse(); if (num_buffers_in_use > 0) { // The frame buffers are reference counted and frames are exposed after // decoding. There may be valid usage cases where previous frames are still @@ -1737,7 +1927,7 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { return WEBRTC_VIDEO_CODEC_MEMORY; } - if (!frame_buffer_pool_.InitializeVpxUsePool(decoder_)) { + if (!libvpx_buffer_pool_.InitializeVpxUsePool(decoder_)) { return WEBRTC_VIDEO_CODEC_MEMORY; } @@ -1745,7 +1935,8 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { // Always start with a complete key frame. 
key_frame_required_ = true; if (inst && inst->buffer_pool_size) { - if (!frame_buffer_pool_.Resize(*inst->buffer_pool_size)) { + if (!libvpx_buffer_pool_.Resize(*inst->buffer_pool_size) || + !output_buffer_pool_.Resize(*inst->buffer_pool_size)) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } } @@ -1797,12 +1988,7 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image, if (key_frame_required_) { if (input_image._frameType != VideoFrameType::kVideoFrameKey) return WEBRTC_VIDEO_CODEC_ERROR; - // We have a key frame - is it complete? - if (input_image._completeFrame) { - key_frame_required_ = false; - } else { - return WEBRTC_VIDEO_CODEC_ERROR; - } + key_frame_required_ = false; } vpx_codec_iter_t iter = nullptr; vpx_image_t* img; @@ -1810,8 +1996,9 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image, if (input_image.size() == 0) { buffer = nullptr; // Triggers full frame concealment. } - // During decode libvpx may get and release buffers from |frame_buffer_pool_|. - // In practice libvpx keeps a few (~3-4) buffers alive at a time. + // During decode libvpx may get and release buffers from + // |libvpx_buffer_pool_|. In practice libvpx keeps a few (~3-4) buffers alive + // at a time. if (vpx_codec_decode(decoder_, buffer, static_cast(input_image.size()), 0, VPX_DL_REALTIME)) { @@ -1855,15 +2042,34 @@ int VP9DecoderImpl::ReturnFrame( switch (img->bit_depth) { case 8: if (img->fmt == VPX_IMG_FMT_I420) { - img_wrapped_buffer = WrapI420Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI420Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release |img_buffer|. 
- rtc::KeepRefUntilDone(img_buffer)); + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + rtc::scoped_refptr nv12_buffer = + output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); + if (!nv12_buffer.get()) { + // Buffer pool is full. + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + img_wrapped_buffer = nv12_buffer; + libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + nv12_buffer->MutableDataY(), + nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), + nv12_buffer->StrideUV(), img->d_w, img->d_h); + // No holding onto img_buffer as it's no longer needed and can be + // reused. + } else { + img_wrapped_buffer = WrapI420Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI420Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release |img_buffer|. + rtc::KeepRefUntilDone(img_buffer)); + } } else if (img->fmt == VPX_IMG_FMT_I444) { img_wrapped_buffer = WrapI444Buffer( img->d_w, img->d_h, img->planes[VPX_PLANE_Y], @@ -1924,7 +2130,7 @@ int VP9DecoderImpl::Release() { if (decoder_ != nullptr) { if (inited_) { // When a codec is destroyed libvpx will release any buffers of - // |frame_buffer_pool_| it is currently using. + // |libvpx_buffer_pool_| it is currently using. if (vpx_codec_destroy(decoder_)) { ret_val = WEBRTC_VIDEO_CODEC_MEMORY; } @@ -1935,7 +2141,8 @@ int VP9DecoderImpl::Release() { // Releases buffers from the pool. Any buffers not in use are deleted. Buffers // still referenced externally are deleted once fully released, not returning // to the pool. 
- frame_buffer_pool_.ClearPool(); + libvpx_buffer_pool_.ClearPool(); + output_buffer_pool_.Release(); inited_ = false; return ret_val; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h index fae94c752..14c3ca8cc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h @@ -20,10 +20,13 @@ #include #include "api/fec_controller_override.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_encoder.h" +#include "common_video/include/video_frame_buffer_pool.h" #include "media/base/vp9_profile.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/framerate_controller.h" #include "vpx/vp8cx.h" #include "vpx/vpx_decoder.h" @@ -34,6 +37,8 @@ namespace webrtc { class VP9EncoderImpl : public VP9Encoder { public: explicit VP9EncoderImpl(const cricket::VideoCodec& codec); + VP9EncoderImpl(const cricket::VideoCodec& codec, + const WebRtcKeyValueConfig& trials); ~VP9EncoderImpl() override; @@ -98,6 +103,8 @@ class VP9EncoderImpl : public VP9Encoder { size_t SteadyStateSize(int sid, int tid); + void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt); + EncodedImage encoded_image_; CodecSpecificInfo codec_specific_; EncodedImageCallback* encoded_complete_callback_; @@ -125,7 +132,6 @@ class VP9EncoderImpl : public VP9Encoder { InterLayerPredMode inter_layer_pred_; bool external_ref_control_; const bool trusted_rate_controller_; - const bool dynamic_rate_settings_; bool layer_buffering_; const bool full_superframe_drop_; vpx_svc_frame_drop_t svc_drop_frame_; @@ -133,7 +139,9 @@ class VP9EncoderImpl : public VP9Encoder { VideoBitrateAllocation 
current_bitrate_allocation_; bool ss_info_needed_; bool force_all_active_layers_; + const bool use_svc_controller_; + std::unique_ptr svc_controller_; std::vector framerate_controller_; // Used for flexible mode. @@ -157,6 +165,7 @@ class VP9EncoderImpl : public VP9Encoder { size_t temporal_layer_id = 0; }; std::map ref_buf_; + std::vector layer_frames_; // Variable frame-rate related fields and methods. const struct VariableFramerateExperiment { @@ -173,7 +182,7 @@ class VP9EncoderImpl : public VP9Encoder { int frames_before_steady_state; } variable_framerate_experiment_; static VariableFramerateExperiment ParseVariableFramerateConfig( - std::string group_name); + const WebRtcKeyValueConfig& trials); FramerateController variable_framerate_controller_; const struct QualityScalerExperiment { @@ -182,7 +191,14 @@ class VP9EncoderImpl : public VP9Encoder { bool enabled; } quality_scaler_experiment_; static QualityScalerExperiment ParseQualityScalerConfig( - std::string group_name); + const WebRtcKeyValueConfig& trials); + const bool external_ref_ctrl_; + + const struct SpeedSettings { + bool enabled; + int layers[kMaxSpatialLayers]; + } per_layer_speed_; + static SpeedSettings ParsePerLayerSpeed(const WebRtcKeyValueConfig& trials); int num_steady_state_frames_; // Only set config when this flag is set. @@ -192,6 +208,7 @@ class VP9EncoderImpl : public VP9Encoder { class VP9DecoderImpl : public VP9Decoder { public: VP9DecoderImpl(); + explicit VP9DecoderImpl(const WebRtcKeyValueConfig& trials); virtual ~VP9DecoderImpl(); @@ -214,13 +231,18 @@ class VP9DecoderImpl : public VP9Decoder { const webrtc::ColorSpace* explicit_color_space); // Memory pool used to share buffers between libvpx and webrtc. - Vp9FrameBufferPool frame_buffer_pool_; + Vp9FrameBufferPool libvpx_buffer_pool_; + // Buffer pool used to allocate additionally needed NV12 buffers. 
+ VideoFrameBufferPool output_buffer_pool_; DecodedImageCallback* decode_complete_callback_; bool inited_; vpx_codec_ctx_t* decoder_; bool key_frame_required_; VideoCodec current_codec_; int num_cores_; + + // Decoder should produce this format if possible. + const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc index 32be39bcb..a7a4b8f75 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc @@ -96,7 +96,7 @@ bool VCMDecoderDataBase::DeregisterReceiveCodec(uint8_t payload_type) { dec_map_.erase(it); if (payload_type == current_payload_type_) { // This codec is currently in use. - memset(&receive_codec_, 0, sizeof(VideoCodec)); + receive_codec_ = {}; current_payload_type_ = 0; } return true; @@ -113,7 +113,7 @@ VCMGenericDecoder* VCMDecoderDataBase::GetDecoder( // If decoder exists - delete. if (ptr_decoder_) { ptr_decoder_.reset(); - memset(&receive_codec_, 0, sizeof(VideoCodec)); + receive_codec_ = {}; current_payload_type_ = 0; } ptr_decoder_ = CreateAndInitDecoder(frame, &receive_codec_); @@ -126,7 +126,7 @@ VCMGenericDecoder* VCMDecoderDataBase::GetDecoder( if (ptr_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback) < 0) { ptr_decoder_.reset(); - memset(&receive_codec_, 0, sizeof(VideoCodec)); + receive_codec_ = {}; current_payload_type_ = 0; return nullptr; } @@ -178,7 +178,7 @@ std::unique_ptr VCMDecoderDataBase::CreateAndInitDecoder( RTC_LOG(LS_ERROR) << "Failed to initialize decoder. 
Error code: " << err; return nullptr; } - memcpy(new_codec, decoder_item->settings.get(), sizeof(VideoCodec)); + *new_codec = *decoder_item->settings.get(); return ptr_decoder; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc index 3c26b8a72..4638771b2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc @@ -43,7 +43,6 @@ void VCMEncodedFrame::Reset() { _frameType = VideoFrameType::kVideoFrameDelta; _encodedWidth = 0; _encodedHeight = 0; - _completeFrame = false; _missingFrame = false; set_size(0); _codecSpecificInfo.codecType = kVideoCodecGeneric; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h index 261aae77a..a77d42eec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h @@ -34,7 +34,9 @@ class RTC_EXPORT VCMEncodedFrame : protected EncodedImage { _renderTimeMs = renderTimeMs; } - void SetPlayoutDelay(PlayoutDelay playout_delay) { + VideoPlayoutDelay PlayoutDelay() const { return playout_delay_; } + + void SetPlayoutDelay(VideoPlayoutDelay playout_delay) { playout_delay_ = playout_delay; } @@ -90,10 +92,6 @@ class RTC_EXPORT VCMEncodedFrame : protected EncodedImage { */ EncodedImage::Timing video_timing() const { return timing_; } EncodedImage::Timing* video_timing_mutable() { return &timing_; } - /** - * True if this frame is complete, false otherwise - */ - bool Complete() const { return _completeFrame; } /** * True if there's a frame missing before this frame */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer.cc index 755acb294..0f64ab144 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer.cc @@ -70,11 +70,6 @@ void VCMFrameBuffer::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) { gof_info.temporal_up_switch[idx]; } -bool VCMFrameBuffer::IsSessionComplete() const { - TRACE_EVENT0("webrtc", "VCMFrameBuffer::IsSessionComplete"); - return _sessionInfo.complete(); -} - // Insert packet VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet, int64_t timeInMs, @@ -98,15 +93,16 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet, } } + size_t oldSize = encoded_image_buffer_ ? encoded_image_buffer_->size() : 0; uint32_t requiredSizeBytes = size() + packet.sizeBytes + (packet.insertStartCode ? kH264StartCodeLengthBytes : 0); - if (requiredSizeBytes > capacity()) { + if (requiredSizeBytes > oldSize) { const uint8_t* prevBuffer = data(); const uint32_t increments = requiredSizeBytes / kBufferIncStepSizeBytes + (requiredSizeBytes % kBufferIncStepSizeBytes > 0); - const uint32_t newSize = capacity() + increments * kBufferIncStepSizeBytes; + const uint32_t newSize = oldSize + increments * kBufferIncStepSizeBytes; if (newSize > kMaxJBFrameSizeBytes) { RTC_LOG(LS_ERROR) << "Failed to insert packet due to frame being too " "big."; @@ -133,7 +129,9 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet, if (packet.sizeBytes > 0) CopyCodecSpecific(&packet.video_header); - int retVal = _sessionInfo.InsertPacket(packet, data(), frame_data); + int retVal = _sessionInfo.InsertPacket( + packet, encoded_image_buffer_ ? encoded_image_buffer_->data() : nullptr, + frame_data); if (retVal == -1) { return kSizeError; } else if (retVal == -2) { @@ -262,7 +260,6 @@ void VCMFrameBuffer::PrepareForDecode(bool continuous) { // Transfer frame information to EncodedFrame and create any codec // specific information. 
_frameType = _sessionInfo.FrameType(); - _completeFrame = _sessionInfo.complete(); _missingFrame = !continuous; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc index fd65d7e25..afce78766 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc @@ -348,6 +348,11 @@ void FrameBuffer::Clear() { ClearFramesAndHistory(); } +int FrameBuffer::Size() { + MutexLock lock(&mutex_); + return frames_.size(); +} + void FrameBuffer::UpdateRtt(int64_t rtt_ms) { MutexLock lock(&mutex_); jitter_estimator_.UpdateRtt(rtt_ms); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h index c88ae891c..2ed21c4f7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h @@ -23,7 +23,6 @@ #include "modules/video_coding/inter_frame_delay.h" #include "modules/video_coding/jitter_estimator.h" #include "modules/video_coding/utility/decoded_frames_history.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/experiments/rtt_mult_experiment.h" #include "rtc_base/numerics/sequence_number_util.h" @@ -50,6 +49,10 @@ class FrameBuffer { VCMTiming* timing, VCMReceiveStatisticsCallback* stats_callback); + FrameBuffer() = delete; + FrameBuffer(const FrameBuffer&) = delete; + FrameBuffer& operator=(const FrameBuffer&) = delete; + virtual ~FrameBuffer(); // Insert a frame into the frame buffer. Returns the picture id @@ -81,6 +84,8 @@ class FrameBuffer { // Clears the FrameBuffer, removing all the buffered frames. void Clear(); + int Size(); + private: struct FrameInfo { FrameInfo(); @@ -188,8 +193,6 @@ class FrameBuffer { // rtt_mult experiment settings. 
const absl::optional rtt_mult_settings_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameBuffer); }; } // namespace video_coding diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc index bdb43f799..25fd23234 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc @@ -38,7 +38,8 @@ RtpFrameObject::RtpFrameObject( const absl::optional& color_space, RtpPacketInfos packet_infos, rtc::scoped_refptr image_buffer) - : first_seq_num_(first_seq_num), + : image_buffer_(image_buffer), + first_seq_num_(first_seq_num), last_seq_num_(last_seq_num), last_packet_received_time_(last_packet_received_time), times_nacked_(times_nacked) { @@ -50,7 +51,6 @@ RtpFrameObject::RtpFrameObject( // TODO(philipel): Remove when encoded image is replaced by EncodedFrame. // VCMEncodedFrame members CopyCodecSpecific(&rtp_video_header_); - _completeFrame = true; _payloadType = payload_type; SetTimestamp(rtp_timestamp); ntp_time_ms_ = ntp_time_ms; @@ -60,7 +60,7 @@ RtpFrameObject::RtpFrameObject( // as of the first packet's. SetPlayoutDelay(rtp_video_header_.playout_delay); - SetEncodedData(std::move(image_buffer)); + SetEncodedData(image_buffer_); _encodedWidth = rtp_video_header_.width; _encodedHeight = rtp_video_header_.height; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h index 831b444df..d812b8fd2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h @@ -48,7 +48,11 @@ class RtpFrameObject : public EncodedFrame { bool delayed_by_retransmission() const override; const RTPVideoHeader& GetRtpVideoHeader() const; + uint8_t* mutable_data() { return image_buffer_->data(); } + private: + // Reference for mutable access. 
+ rtc::scoped_refptr image_buffer_; RTPVideoHeader rtp_video_header_; VideoCodecType codec_type_; uint16_t first_seq_num_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc index 50ecd8da8..79057926f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc @@ -13,6 +13,7 @@ #include #include +#include #include "api/video/video_timing.h" #include "modules/video_coding/include/video_error_codes.h" @@ -31,12 +32,18 @@ VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming* timing, : _clock(clock), _timing(timing), _timestampMap(kDecoderFrameMemoryLength), - _extra_decode_time("t", absl::nullopt) { + _extra_decode_time("t", absl::nullopt), + low_latency_renderer_enabled_("enabled", true), + low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", + true) { ntp_offset_ = _clock->CurrentNtpInMilliseconds() - _clock->TimeInMilliseconds(); ParseFieldTrial({&_extra_decode_time}, field_trial::FindFullName("WebRTC-SlowDownDecoder")); + ParseFieldTrial({&low_latency_renderer_enabled_, + &low_latency_renderer_include_predecode_buffer_}, + field_trial::FindFullName("WebRTC-LowLatencyRenderer")); } VCMDecodedFrameCallback::~VCMDecodedFrameCallback() {} @@ -85,9 +92,11 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). 
VCMFrameInformation* frameInfo; + int timestamp_map_size = 0; { MutexLock lock(&lock_); frameInfo = _timestampMap.Pop(decodedImage.timestamp()); + timestamp_map_size = _timestampMap.Size(); } if (frameInfo == NULL) { @@ -101,13 +110,30 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, decodedImage.set_packet_infos(frameInfo->packet_infos); decodedImage.set_rotation(frameInfo->rotation); - const Timestamp now = _clock->CurrentTime(); - RTC_DCHECK(frameInfo->decodeStart); - if (!decode_time_ms) { - decode_time_ms = (now - *frameInfo->decodeStart).ms(); + if (low_latency_renderer_enabled_ && frameInfo->playout_delay.min_ms == 0 && + frameInfo->playout_delay.max_ms > 0) { + absl::optional max_composition_delay_in_frames = + _timing->MaxCompositionDelayInFrames(); + if (max_composition_delay_in_frames) { + // Subtract frames that are in flight. + if (low_latency_renderer_include_predecode_buffer_) { + *max_composition_delay_in_frames -= timestamp_map_size; + *max_composition_delay_in_frames = + std::max(0, *max_composition_delay_in_frames); + } + decodedImage.set_max_composition_delay_in_frames( + max_composition_delay_in_frames); + } } - _timing->StopDecodeTimer(*decode_time_ms, now.ms()); - decodedImage.set_processing_time({*frameInfo->decodeStart, now}); + + RTC_DCHECK(frameInfo->decodeStart); + const Timestamp now = _clock->CurrentTime(); + const TimeDelta decode_time = decode_time_ms + ? TimeDelta::Millis(*decode_time_ms) + : now - *frameInfo->decodeStart; + _timing->StopDecodeTimer(decode_time.ms(), now.ms()); + decodedImage.set_processing_time( + {*frameInfo->decodeStart, *frameInfo->decodeStart + decode_time}); // Report timing information. 
TimingFrameInfo timing_frame_info; @@ -161,7 +187,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, decodedImage.set_timestamp_us(frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec); - _receiveCallback->FrameToRender(decodedImage, qp, *decode_time_ms, + _receiveCallback->FrameToRender(decodedImage, qp, decode_time.ms(), frameInfo->content_type); } @@ -223,6 +249,7 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { _frameInfos[_nextFrameInfoIdx].decodeStart = now; _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs(); _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation(); + _frameInfos[_nextFrameInfoIdx].playout_delay = frame.PlayoutDelay(); _frameInfos[_nextFrameInfoIdx].timing = frame.video_timing(); _frameInfos[_nextFrameInfoIdx].ntp_time_ms = frame.EncodedImage().ntp_time_ms_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h index b89d3f436..8481fdc15 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h @@ -35,6 +35,7 @@ struct VCMFrameInformation { void* userData; VideoRotation rotation; VideoContentType content_type; + PlayoutDelay playout_delay; EncodedImage::Timing timing; int64_t ntp_time_ms; RtpPacketInfos packet_infos; @@ -75,6 +76,16 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { int64_t ntp_offset_; // Set by the field trial WebRTC-SlowDownDecoder to simulate a slow decoder. FieldTrialOptional _extra_decode_time; + + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter |enabled| + // determines if the low-latency renderer algorithm should be used for the + // case min playout delay=0 and max playout delay>0. + FieldTrialParameter low_latency_renderer_enabled_; + // Set by the field trial WebRTC-LowLatencyRenderer. 
The parameter + // |include_predecode_buffer| determines if the predecode buffer should be + // taken into account when calculating maximum number of frames in composition + // queue. + FieldTrialParameter low_latency_renderer_include_predecode_buffer_; }; class VCMGenericDecoder { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h index fbf6cefbd..c7834a272 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h @@ -25,6 +25,7 @@ #endif #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/include/video_error_codes.h" +#include "rtc_base/deprecation.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -82,7 +83,7 @@ struct CodecSpecificInfoVP9 { uint8_t num_ref_pics; uint8_t p_diff[kMaxVp9RefPics]; - bool end_of_picture; + RTC_DEPRECATED bool end_of_picture; }; static_assert(std::is_pod::value, ""); @@ -123,6 +124,7 @@ struct RTC_EXPORT CodecSpecificInfo { VideoCodecType codecType; CodecSpecificInfoUnion codecSpecific; + bool end_of_picture = true; absl::optional generic_frame_info; absl::optional template_structure; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h index ff9b7d6a6..641e7121e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h @@ -41,9 +41,7 @@ enum { }; enum VCMVideoProtection { - kProtectionNone, kProtectionNack, - kProtectionFEC, kProtectionNackFEC, }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc index 9d2d3a2d1..772098a73 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc @@ -298,8 +298,7 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) { last_decoded_state_.SetState(frame); DropPacketsFromNackList(last_decoded_state_.sequence_num()); - if ((*frame).IsSessionComplete()) - UpdateAveragePacketsPerFrame(frame->NumPackets()); + UpdateAveragePacketsPerFrame(frame->NumPackets()); return frame; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h index 160d8c7c3..e34f7040b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h @@ -82,9 +82,9 @@ class PacketBuffer { PacketBuffer(Clock* clock, size_t start_buffer_size, size_t max_buffer_size); ~PacketBuffer(); - InsertResult InsertPacket(std::unique_ptr packet) ABSL_MUST_USE_RESULT + ABSL_MUST_USE_RESULT InsertResult InsertPacket(std::unique_ptr packet) RTC_LOCKS_EXCLUDED(mutex_); - InsertResult InsertPadding(uint16_t seq_num) ABSL_MUST_USE_RESULT + ABSL_MUST_USE_RESULT InsertResult InsertPadding(uint16_t seq_num) RTC_LOCKS_EXCLUDED(mutex_); void ClearTo(uint16_t seq_num) RTC_LOCKS_EXCLUDED(mutex_); void Clear() RTC_LOCKS_EXCLUDED(mutex_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc index 2db4e211b..6b942fbe5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc @@ -161,18 +161,6 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, frame->SetRenderTime(render_time_ms); TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", 
frame->Timestamp(), "SetRenderTS", "render_time", frame->RenderTimeMs()); - if (!frame->Complete()) { - // Update stats for incomplete frames. - bool retransmitted = false; - const int64_t last_packet_time_ms = - jitter_buffer_.LastPacketTime(frame, &retransmitted); - if (last_packet_time_ms >= 0 && !retransmitted) { - // We don't want to include timestamps which have suffered from - // retransmission here, since we compensate with extra retransmission - // delay within the jitter estimate. - timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms); - } - } return frame; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/create_scalability_structure.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc similarity index 64% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/create_scalability_structure.cc rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc index 17375547c..4b4a23ed2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/create_scalability_structure.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc @@ -7,24 +7,23 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/codecs/av1/create_scalability_structure.h" +#include "modules/video_coding/svc/create_scalability_structure.h" #include #include "absl/strings/string_view.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l1t3.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l2t1h.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l2t2.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l3t1.h" -#include "modules/video_coding/codecs/av1/scalability_structure_l3t3.h" -#include "modules/video_coding/codecs/av1/scalability_structure_s2t1.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h" +#include "modules/video_coding/svc/scalability_structure_key_svc.h" +#include "modules/video_coding/svc/scalability_structure_l1t2.h" +#include "modules/video_coding/svc/scalability_structure_l1t3.h" +#include "modules/video_coding/svc/scalability_structure_l2t1.h" +#include "modules/video_coding/svc/scalability_structure_l2t1h.h" +#include "modules/video_coding/svc/scalability_structure_l2t2.h" +#include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" +#include "modules/video_coding/svc/scalability_structure_l3t1.h" +#include "modules/video_coding/svc/scalability_structure_l3t3.h" +#include "modules/video_coding/svc/scalability_structure_s2t1.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include 
"rtc_base/checks.h" namespace webrtc { @@ -54,6 +53,7 @@ constexpr NamedStructureFactory kFactories[] = { {"L2T2_KEY_SHIFT", Create}, {"L3T1", Create}, {"L3T3", Create}, + {"L3T3_KEY", Create}, {"S2T1", Create}, }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/create_scalability_structure.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.h similarity index 72% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/create_scalability_structure.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.h index fe4a283ae..9a14221fd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/create_scalability_structure.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.h @@ -7,14 +7,14 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_CREATE_SCALABILITY_STRUCTURE_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_CREATE_SCALABILITY_STRUCTURE_H_ +#ifndef MODULES_VIDEO_CODING_SVC_CREATE_SCALABILITY_STRUCTURE_H_ +#define MODULES_VIDEO_CODING_SVC_CREATE_SCALABILITY_STRUCTURE_H_ #include #include #include "absl/strings/string_view.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { @@ -26,4 +26,4 @@ std::unique_ptr CreateScalabilityStructure( } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_AV1_CREATE_SCALABILITY_STRUCTURE_H_ +#endif // MODULES_VIDEO_CODING_SVC_CREATE_SCALABILITY_STRUCTURE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc new file mode 100644 index 000000000..c489b6050 --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc @@ -0,0 +1,285 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +enum : int { kKey, kDelta }; +} // namespace + +constexpr int ScalabilityStructureFullSvc::kMaxNumSpatialLayers; +constexpr int ScalabilityStructureFullSvc::kMaxNumTemporalLayers; +constexpr absl::string_view ScalabilityStructureFullSvc::kFramePatternNames[]; + +ScalabilityStructureFullSvc::ScalabilityStructureFullSvc( + int num_spatial_layers, + int num_temporal_layers) + : num_spatial_layers_(num_spatial_layers), + num_temporal_layers_(num_temporal_layers), + active_decode_targets_( + (uint32_t{1} << (num_spatial_layers * num_temporal_layers)) - 1) { + RTC_DCHECK_LE(num_spatial_layers, kMaxNumSpatialLayers); + RTC_DCHECK_LE(num_temporal_layers, kMaxNumTemporalLayers); +} + +ScalabilityStructureFullSvc::~ScalabilityStructureFullSvc() = default; + +ScalabilityStructureFullSvc::StreamLayersConfig +ScalabilityStructureFullSvc::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = num_spatial_layers_; + result.num_temporal_layers = num_temporal_layers_; + result.scaling_factor_num[num_spatial_layers_ - 1] = 1; + result.scaling_factor_den[num_spatial_layers_ - 1] = 1; + for (int sid = num_spatial_layers_ - 1; sid > 
0; --sid) { + result.scaling_factor_num[sid - 1] = 1; + result.scaling_factor_den[sid - 1] = 2 * result.scaling_factor_den[sid]; + } + return result; +} + +bool ScalabilityStructureFullSvc::TemporalLayerIsActive(int tid) const { + if (tid >= num_temporal_layers_) { + return false; + } + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (DecodeTargetIsActive(sid, tid)) { + return true; + } + } + return false; +} + +DecodeTargetIndication ScalabilityStructureFullSvc::Dti( + int sid, + int tid, + const LayerFrameConfig& config) { + if (sid < config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (sid == config.SpatialId()) { + if (tid == 0) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return DecodeTargetIndication::kSwitch; + } + if (tid == config.TemporalId()) { + return DecodeTargetIndication::kDiscardable; + } + if (tid > config.TemporalId()) { + RTC_DCHECK_GT(tid, config.TemporalId()); + return DecodeTargetIndication::kSwitch; + } + } + RTC_DCHECK_GT(sid, config.SpatialId()); + RTC_DCHECK_GE(tid, config.TemporalId()); + if (config.IsKeyframe() || config.Id() == kKey) { + return DecodeTargetIndication::kSwitch; + } + return DecodeTargetIndication::kRequired; +} + +ScalabilityStructureFullSvc::FramePattern +ScalabilityStructureFullSvc::NextPattern() const { + switch (last_pattern_) { + case kNone: + case kDeltaT2B: + return kDeltaT0; + case kDeltaT2A: + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + case kDeltaT1: + if (TemporalLayerIsActive(2)) { + return kDeltaT2B; + } + return kDeltaT0; + case kDeltaT0: + if (TemporalLayerIsActive(2)) { + return kDeltaT2A; + } + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + } +} + +std::vector +ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { + std::vector configs; + if (active_decode_targets_.none()) { + last_pattern_ = kNone; + return configs; + } + configs.reserve(num_spatial_layers_); + + if 
(last_pattern_ == kNone || restart) { + can_reference_t0_frame_for_spatial_id_.reset(); + last_pattern_ = kNone; + } + FramePattern current_pattern = NextPattern(); + + absl::optional spatial_dependency_buffer_id; + switch (current_pattern) { + case kDeltaT0: + // Disallow temporal references cross T0 on higher temporal layers. + can_reference_t1_frame_for_spatial_id_.reset(); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/0)) { + // Next frame from the spatial layer `sid` shouldn't depend on + // potentially old previous frame from the spatial layer `sid`. + can_reference_t0_frame_for_spatial_id_.reset(sid); + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(last_pattern_ == kNone ? kKey : kDelta).S(sid).T(0); + + if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } else if (last_pattern_ == kNone) { + config.Keyframe(); + } + + if (can_reference_t0_frame_for_spatial_id_[sid]) { + config.ReferenceAndUpdate(BufferIndex(sid, /*tid=*/0)); + } else { + // TODO(bugs.webrtc.org/11999): Propagate chain restart on delta frame + // to ChainDiffCalculator + config.Update(BufferIndex(sid, /*tid=*/0)); + } + + can_reference_t0_frame_for_spatial_id_.set(sid); + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/0); + } + break; + case kDeltaT1: + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/1) || + !can_reference_t0_frame_for_spatial_id_[sid]) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(1); + // Temporal reference. + config.Reference(BufferIndex(sid, /*tid=*/0)); + // Spatial reference unless this is the lowest active spatial layer. 
+ if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } + // No frame reference top layer frame, so no need save it into a buffer. + if (num_temporal_layers_ > 2 || sid < num_spatial_layers_ - 1) { + config.Update(BufferIndex(sid, /*tid=*/1)); + can_reference_t1_frame_for_spatial_id_.set(sid); + } + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/1); + } + break; + case kDeltaT2A: + case kDeltaT2B: + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/2) || + !can_reference_t0_frame_for_spatial_id_[sid]) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(2); + // Temporal reference. + if (current_pattern == kDeltaT2B && + can_reference_t1_frame_for_spatial_id_[sid]) { + config.Reference(BufferIndex(sid, /*tid=*/1)); + } else { + config.Reference(BufferIndex(sid, /*tid=*/0)); + } + // Spatial reference unless this is the lowest active spatial layer. + if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } + // No frame reference top layer frame, so no need save it into a buffer. + if (sid < num_spatial_layers_ - 1) { + config.Update(BufferIndex(sid, /*tid=*/2)); + } + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/2); + } + break; + case kNone: + RTC_NOTREACHED(); + break; + } + + if (configs.empty() && !restart) { + RTC_LOG(LS_WARNING) << "Failed to generate configuration for L" + << num_spatial_layers_ << "T" << num_temporal_layers_ + << " with active decode targets " + << active_decode_targets_.to_string('-').substr( + active_decode_targets_.size() - + num_spatial_layers_ * num_temporal_layers_) + << " and transition from " + << kFramePatternNames[last_pattern_] << " to " + << kFramePatternNames[current_pattern] + << ". 
Resetting."; + return NextFrameConfig(/*restart=*/true); + } + + last_pattern_ = current_pattern; + return configs; +} + +GenericFrameInfo ScalabilityStructureFullSvc::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + frame_info.decode_target_indications.reserve(num_spatial_layers_ * + num_temporal_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + frame_info.decode_target_indications.push_back(Dti(sid, tid, config)); + } + } + if (config.TemporalId() == 0) { + frame_info.part_of_chain.resize(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + frame_info.part_of_chain[sid] = config.SpatialId() <= sid; + } + } else { + frame_info.part_of_chain.assign(num_spatial_layers_, false); + } + frame_info.active_decode_targets = active_decode_targets_; + return frame_info; +} + +void ScalabilityStructureFullSvc::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + // Enable/disable spatial layers independetely. + bool active = true; + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + // To enable temporal layer, require bitrates for lower temporal layers. + active = active && bitrates.GetBitrate(sid, tid) > 0; + SetDecodeTargetIsActive(sid, tid, active); + } + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h new file mode 100644 index 000000000..d490d6e4a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_FULL_SVC_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_FULL_SVC_H_ + +#include +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +class ScalabilityStructureFullSvc : public ScalableVideoController { + public: + ScalabilityStructureFullSvc(int num_spatial_layers, int num_temporal_layers); + ~ScalabilityStructureFullSvc() override; + + StreamLayersConfig StreamConfig() const override; + + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; + + private: + enum FramePattern { + kNone, + kDeltaT2A, + kDeltaT1, + kDeltaT2B, + kDeltaT0, + }; + static constexpr absl::string_view kFramePatternNames[] = { + "None", "DeltaT2A", "DeltaT1", "DeltaT2B", "DeltaT0"}; + static constexpr int kMaxNumSpatialLayers = 3; + static constexpr int kMaxNumTemporalLayers = 3; + + // Index of the buffer to store last frame for layer (`sid`, `tid`) + int BufferIndex(int sid, int tid) const { + return tid * num_spatial_layers_ + sid; + } + bool DecodeTargetIsActive(int sid, int tid) const { + return active_decode_targets_[sid * num_temporal_layers_ + tid]; + } + void SetDecodeTargetIsActive(int sid, int tid, bool value) { + active_decode_targets_.set(sid * num_temporal_layers_ + tid, value); + } + FramePattern NextPattern() const; + bool TemporalLayerIsActive(int tid) const; + 
static DecodeTargetIndication Dti(int sid, + int tid, + const LayerFrameConfig& frame); + + const int num_spatial_layers_; + const int num_temporal_layers_; + + FramePattern last_pattern_ = kNone; + std::bitset can_reference_t0_frame_for_spatial_id_ = 0; + std::bitset can_reference_t1_frame_for_spatial_id_ = 0; + std::bitset<32> active_decode_targets_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_FULL_SVC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc new file mode 100644 index 000000000..cfc89a379 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc @@ -0,0 +1,336 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_key_svc.h" + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +// Values to use as LayerFrameConfig::Id +enum : int { kKey, kDelta }; + +DecodeTargetIndication +Dti(int sid, int tid, const ScalableVideoController::LayerFrameConfig& config) { + if (config.IsKeyframe() || config.Id() == kKey) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return sid < config.SpatialId() ? DecodeTargetIndication::kNotPresent + : DecodeTargetIndication::kSwitch; + } + + if (sid != config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (tid == config.TemporalId() && tid > 0) { + return DecodeTargetIndication::kDiscardable; + } + return DecodeTargetIndication::kSwitch; +} + +} // namespace + +constexpr int ScalabilityStructureKeySvc::kMaxNumSpatialLayers; +constexpr int ScalabilityStructureKeySvc::kMaxNumTemporalLayers; + +ScalabilityStructureKeySvc::ScalabilityStructureKeySvc(int num_spatial_layers, + int num_temporal_layers) + : num_spatial_layers_(num_spatial_layers), + num_temporal_layers_(num_temporal_layers), + active_decode_targets_( + (uint32_t{1} << (num_spatial_layers * num_temporal_layers)) - 1) { + // There is no point to use this structure without spatial scalability. 
+ RTC_DCHECK_GT(num_spatial_layers, 1); + RTC_DCHECK_LE(num_spatial_layers, kMaxNumSpatialLayers); + RTC_DCHECK_LE(num_temporal_layers, kMaxNumTemporalLayers); +} + +ScalabilityStructureKeySvc::~ScalabilityStructureKeySvc() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureKeySvc::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = num_spatial_layers_; + result.num_temporal_layers = num_temporal_layers_; + result.scaling_factor_num[num_spatial_layers_ - 1] = 1; + result.scaling_factor_den[num_spatial_layers_ - 1] = 1; + for (int sid = num_spatial_layers_ - 1; sid > 0; --sid) { + result.scaling_factor_num[sid - 1] = 1; + result.scaling_factor_den[sid - 1] = 2 * result.scaling_factor_den[sid]; + } + return result; +} + +bool ScalabilityStructureKeySvc::TemporalLayerIsActive(int tid) const { + if (tid >= num_temporal_layers_) { + return false; + } + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (DecodeTargetIsActive(sid, tid)) { + return true; + } + } + return false; +} + +std::vector +ScalabilityStructureKeySvc::KeyframeConfig() { + std::vector configs; + configs.reserve(num_spatial_layers_); + absl::optional spatial_dependency_buffer_id; + spatial_id_is_enabled_.reset(); + // Disallow temporal references cross T0 on higher temporal layers. 
+ can_reference_t1_frame_for_spatial_id_.reset(); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/0)) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kKey).S(sid).T(0); + + if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } else { + config.Keyframe(); + } + config.Update(BufferIndex(sid, /*tid=*/0)); + + spatial_id_is_enabled_.set(sid); + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/0); + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::T0Config() { + std::vector configs; + configs.reserve(num_spatial_layers_); + // Disallow temporal references cross T0 on higher temporal layers. + can_reference_t1_frame_for_spatial_id_.reset(); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/0)) { + spatial_id_is_enabled_.reset(sid); + continue; + } + configs.emplace_back(); + configs.back().Id(kDelta).S(sid).T(0).ReferenceAndUpdate( + BufferIndex(sid, /*tid=*/0)); + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::T1Config() { + std::vector configs; + configs.reserve(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/1)) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(1).Reference(BufferIndex(sid, /*tid=*/0)); + if (num_temporal_layers_ > 2) { + config.Update(BufferIndex(sid, /*tid=*/1)); + can_reference_t1_frame_for_spatial_id_.set(sid); + } + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::T2Config() { + std::vector configs; + configs.reserve(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/2)) { + continue; + } + configs.emplace_back(); + 
ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(2); + if (can_reference_t1_frame_for_spatial_id_[sid]) { + config.Reference(BufferIndex(sid, /*tid=*/1)); + } else { + config.Reference(BufferIndex(sid, /*tid=*/0)); + } + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::NextFrameConfig(bool restart) { + if (active_decode_targets_.none()) { + last_pattern_ = kNone; + return {}; + } + + if (restart) { + last_pattern_ = kNone; + } + + switch (last_pattern_) { + case kNone: + last_pattern_ = kDeltaT0; + return KeyframeConfig(); + case kDeltaT2B: + last_pattern_ = kDeltaT0; + return T0Config(); + case kDeltaT2A: + if (TemporalLayerIsActive(1)) { + last_pattern_ = kDeltaT1; + return T1Config(); + } + last_pattern_ = kDeltaT0; + return T0Config(); + case kDeltaT1: + if (TemporalLayerIsActive(2)) { + last_pattern_ = kDeltaT2B; + return T2Config(); + } + last_pattern_ = kDeltaT0; + return T0Config(); + case kDeltaT0: + if (TemporalLayerIsActive(2)) { + last_pattern_ = kDeltaT2A; + return T2Config(); + } else if (TemporalLayerIsActive(1)) { + last_pattern_ = kDeltaT1; + return T1Config(); + } + last_pattern_ = kDeltaT0; + return T0Config(); + } + RTC_NOTREACHED(); + return {}; +} + +GenericFrameInfo ScalabilityStructureKeySvc::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + frame_info.decode_target_indications.reserve(num_spatial_layers_ * + num_temporal_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + frame_info.decode_target_indications.push_back(Dti(sid, tid, config)); + } + } + frame_info.part_of_chain.assign(num_spatial_layers_, false); + if (config.IsKeyframe() || config.Id() == kKey) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + for (int sid = 
config.SpatialId(); sid < num_spatial_layers_; ++sid) { + frame_info.part_of_chain[sid] = true; + } + } else if (config.TemporalId() == 0) { + frame_info.part_of_chain[config.SpatialId()] = true; + } + frame_info.active_decode_targets = active_decode_targets_; + return frame_info; +} + +void ScalabilityStructureKeySvc::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + // Enable/disable spatial layers independetely. + bool active = bitrates.GetBitrate(sid, /*tid=*/0) > 0; + SetDecodeTargetIsActive(sid, /*tid=*/0, active); + if (!spatial_id_is_enabled_[sid] && active) { + // Key frame is required to reenable any spatial layer. + last_pattern_ = kNone; + } + + for (int tid = 1; tid < num_temporal_layers_; ++tid) { + // To enable temporal layer, require bitrates for lower temporal layers. + active = active && bitrates.GetBitrate(sid, tid) > 0; + SetDecodeTargetIsActive(sid, tid, active); + } + } +} + +ScalabilityStructureL2T1Key::~ScalabilityStructureL2T1Key() = default; + +FrameDependencyStructure ScalabilityStructureL2T1Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); + structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); + return structure; +} + +ScalabilityStructureL2T2Key::~ScalabilityStructureL2T2Key() = default; + +FrameDependencyStructure ScalabilityStructureL2T2Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + 
structure.templates.resize(6); + auto& templates = structure.templates; + templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({4, 3}).FrameDiffs({4}); + templates[2].S(0).T(1).Dtis("-D--").ChainDiffs({2, 1}).FrameDiffs({2}); + templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 4}).FrameDiffs({4}); + templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2}); + return structure; +} + +ScalabilityStructureL3T3Key::~ScalabilityStructureL3T3Key() = default; + +FrameDependencyStructure ScalabilityStructureL3T3Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 9; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; + auto& t = structure.templates; + t.resize(15); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. Indexes are written in hex for nicer alignment. 
+ t[0x0].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0}); + t[0x5].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[0x3].S(0).T(2).Dtis("--D------").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[0x8].S(1).T(2).Dtis("-----D---").ChainDiffs({4, 3, 2}).FrameDiffs({3}); + t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3}); + t[0x2].S(0).T(1).Dtis("-DS------").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[0x7].S(1).T(1).Dtis("----DS---").ChainDiffs({7, 6, 5}).FrameDiffs({6}); + t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6}); + t[0x4].S(0).T(2).Dtis("--D------").ChainDiffs({9, 8, 7}).FrameDiffs({3}); + t[0x9].S(1).T(2).Dtis("-----D---").ChainDiffs({10, 9, 8}).FrameDiffs({3}); + t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3}); + t[0x1].S(0).T(0).Dtis("SSS------").ChainDiffs({12, 11, 10}).FrameDiffs({12}); + t[0x6].S(1).T(0).Dtis("---SSS---").ChainDiffs({1, 12, 11}).FrameDiffs({12}); + t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 12}).FrameDiffs({12}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h new file mode 100644 index 000000000..1d3277b5c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_KEY_SVC_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_KEY_SVC_H_ + +#include +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +class ScalabilityStructureKeySvc : public ScalableVideoController { + public: + ScalabilityStructureKeySvc(int num_spatial_layers, int num_temporal_layers); + ~ScalabilityStructureKeySvc() override; + + StreamLayersConfig StreamConfig() const override; + + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; + + private: + enum FramePattern { + kNone, + kDeltaT0, + kDeltaT2A, + kDeltaT1, + kDeltaT2B, + }; + static constexpr int kMaxNumSpatialLayers = 3; + static constexpr int kMaxNumTemporalLayers = 3; + + // Index of the buffer to store last frame for layer (`sid`, `tid`) + int BufferIndex(int sid, int tid) const { + return tid * num_spatial_layers_ + sid; + } + bool DecodeTargetIsActive(int sid, int tid) const { + return active_decode_targets_[sid * num_temporal_layers_ + tid]; + } + void SetDecodeTargetIsActive(int sid, int tid, bool value) { + active_decode_targets_.set(sid * num_temporal_layers_ + tid, value); + } + bool TemporalLayerIsActive(int tid) const; + std::vector KeyframeConfig(); + std::vector T0Config(); + std::vector T1Config(); + std::vector T2Config(); + + const int num_spatial_layers_; + const int num_temporal_layers_; + + FramePattern last_pattern_ = kNone; + std::bitset spatial_id_is_enabled_; + std::bitset can_reference_t1_frame_for_spatial_id_; + std::bitset<32> active_decode_targets_; +}; + +// S1 0--0--0- +// | ... 
+// S0 0--0--0- +class ScalabilityStructureL2T1Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL2T1Key() : ScalabilityStructureKeySvc(2, 1) {} + ~ScalabilityStructureL2T1Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// S1T1 0 0 +// / / / +// S1T0 0---0---0 +// | ... +// S0T1 | 0 0 +// |/ / / +// S0T0 0---0---0 +// Time-> 0 1 2 3 4 +class ScalabilityStructureL2T2Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL2T2Key() : ScalabilityStructureKeySvc(2, 2) {} + ~ScalabilityStructureL2T2Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +class ScalabilityStructureL3T3Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL3T3Key() : ScalabilityStructureKeySvc(3, 3) {} + ~ScalabilityStructureL3T3Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_KEY_SVC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc new file mode 100644 index 000000000..f639e2da6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l1t2.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL1T2::~ScalabilityStructureL1T2() = default; + +FrameDependencyStructure ScalabilityStructureL1T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 1; + structure.decode_target_protected_by_chain = {0, 0}; + structure.templates.resize(3); + structure.templates[0].T(0).Dtis("SS").ChainDiffs({0}); + structure.templates[1].T(0).Dtis("SS").ChainDiffs({2}).FrameDiffs({2}); + structure.templates[2].T(1).Dtis("-D").ChainDiffs({1}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.h new file mode 100644 index 000000000..d2f81aa11 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +class ScalabilityStructureL1T2 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL1T2() : ScalabilityStructureFullSvc(1, 2) {} + ~ScalabilityStructureL1T2() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc new file mode 100644 index 000000000..17073344c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l1t3.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL1T3::~ScalabilityStructureL1T3() = default; + +FrameDependencyStructure ScalabilityStructureL1T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 1; + structure.decode_target_protected_by_chain = {0, 0, 0}; + structure.templates.resize(5); + structure.templates[0].T(0).Dtis("SSS").ChainDiffs({0}); + structure.templates[1].T(0).Dtis("SSS").ChainDiffs({4}).FrameDiffs({4}); + structure.templates[2].T(1).Dtis("-DS").ChainDiffs({2}).FrameDiffs({2}); + structure.templates[3].T(2).Dtis("--D").ChainDiffs({1}).FrameDiffs({1}); + structure.templates[4].T(2).Dtis("--D").ChainDiffs({3}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.h new file mode 100644 index 000000000..00e48ccc4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// T2 0 0 0 0 +// | / | / +// T1 / 0 / 0 ... +// |_/ |_/ +// T0 0-------0------ +// Time-> 0 1 2 3 4 5 6 7 +class ScalabilityStructureL1T3 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL1T3() : ScalabilityStructureFullSvc(1, 3) {} + ~ScalabilityStructureL1T3() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc new file mode 100644 index 000000000..efd751665 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l2t1.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL2T1::~ScalabilityStructureL2T1() = default; + +FrameDependencyStructure ScalabilityStructureL2T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("SR").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({2, 1}); + structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.h new file mode 100644 index 000000000..96a0da56d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// S1 0--0--0- +// | | | ... 
+// S0 0--0--0- +class ScalabilityStructureL2T1 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL2T1() : ScalabilityStructureFullSvc(2, 1) {} + ~ScalabilityStructureL2T1() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1h.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc similarity index 93% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1h.cc rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc index 7b05c92cf..c4682764a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1h.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc @@ -7,7 +7,7 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/video_coding/codecs/av1/scalability_structure_l2t1h.h" +#include "modules/video_coding/svc/scalability_structure_l2t1h.h" #include #include diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1h.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.h similarity index 63% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1h.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.h index ec419d9c3..7200a1084 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1h.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.h @@ -7,11 +7,11 @@ * in the file PATENTS. 
All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1H_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1H_H_ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ -#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" +#include "modules/video_coding/svc/scalability_structure_l2t1.h" +#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { @@ -24,4 +24,4 @@ class ScalabilityStructureL2T1h : public ScalabilityStructureL2T1 { } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1H_H_ +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc new file mode 100644 index 000000000..a381ad080 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l2t2.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL2T2::~ScalabilityStructureL2T2() = default; + +FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + structure.templates.resize(6); + auto& templates = structure.templates; + templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SSRR").ChainDiffs({4, 3}).FrameDiffs({4}); + templates[2].S(0).T(1).Dtis("-D-R").ChainDiffs({2, 1}).FrameDiffs({2}); + templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({4, 1}); + templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2, 1}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.h new file mode 100644 index 000000000..781ea7e60 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// S1T1 0 0 +// /| /| / +// S1T0 0-+-0-+-0 +// | | | | | ... +// S0T1 | 0 | 0 | +// |/ |/ |/ +// S0T0 0---0---0-- +// Time-> 0 1 2 3 4 +class ScalabilityStructureL2T2 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL2T2() : ScalabilityStructureFullSvc(2, 2) {} + ~ScalabilityStructureL2T2() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc new file mode 100644 index 000000000..c53ff8f07 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" + +#include +#include + +#include "absl/base/macros.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { + +DecodeTargetIndication +Dti(int sid, int tid, const ScalableVideoController::LayerFrameConfig& config) { + if (config.IsKeyframe()) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return sid < config.SpatialId() ? DecodeTargetIndication::kNotPresent + : DecodeTargetIndication::kSwitch; + } + + if (sid != config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (tid == config.TemporalId() && tid > 0) { + return DecodeTargetIndication::kDiscardable; + } + return DecodeTargetIndication::kSwitch; +} + +} // namespace + +constexpr int ScalabilityStructureL2T2KeyShift::kNumSpatialLayers; +constexpr int ScalabilityStructureL2T2KeyShift::kNumTemporalLayers; + +ScalabilityStructureL2T2KeyShift::~ScalabilityStructureL2T2KeyShift() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureL2T2KeyShift::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = 2; + result.num_temporal_layers = 2; + result.scaling_factor_num[0] = 1; + result.scaling_factor_den[0] = 2; + return result; +} + +FrameDependencyStructure ScalabilityStructureL2T2KeyShift::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + structure.templates.resize(7); + auto& templates = structure.templates; + templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({2, 1}).FrameDiffs({2}); + templates[2].S(0).T(0).Dtis("SS--").ChainDiffs({4, 1}).FrameDiffs({4}); + templates[3].S(0).T(1).Dtis("-D--").ChainDiffs({2, 3}).FrameDiffs({2}); + 
templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[5].S(1).T(0).Dtis("--SS").ChainDiffs({3, 4}).FrameDiffs({4}); + templates[6].S(1).T(1).Dtis("---D").ChainDiffs({1, 2}).FrameDiffs({2}); + return structure; +} + +std::vector +ScalabilityStructureL2T2KeyShift::NextFrameConfig(bool restart) { + std::vector configs; + configs.reserve(2); + if (restart) { + next_pattern_ = kKey; + } + + // Buffer0 keeps latest S0T0 frame, + // Buffer1 keeps latest S1T0 frame. + switch (next_pattern_) { + case kKey: + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(0).T(0).Update(0).Keyframe(); + } + if (DecodeTargetIsActive(/*sid=*/1, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(1).T(0).Update(1); + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.back().Reference(0); + } else { + configs.back().Keyframe(); + } + } + next_pattern_ = kDelta0; + break; + case kDelta0: + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(0).T(0).ReferenceAndUpdate(0); + } + if (DecodeTargetIsActive(/*sid=*/1, /*tid=*/1)) { + configs.emplace_back(); + configs.back().S(1).T(1).Reference(1); + } + if (configs.empty() && DecodeTargetIsActive(/*sid=*/1, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(1).T(0).ReferenceAndUpdate(1); + } + next_pattern_ = kDelta1; + break; + case kDelta1: + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/1)) { + configs.emplace_back(); + configs.back().S(0).T(1).Reference(0); + } + if (DecodeTargetIsActive(/*sid=*/1, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(1).T(0).ReferenceAndUpdate(1); + } + if (configs.empty() && DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(0).T(0).ReferenceAndUpdate(0); + } + next_pattern_ = kDelta0; + break; + } + + RTC_DCHECK(!configs.empty() || active_decode_targets_.none()); + return configs; +} + +GenericFrameInfo 
ScalabilityStructureL2T2KeyShift::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + for (int sid = 0; sid < kNumSpatialLayers; ++sid) { + for (int tid = 0; tid < kNumTemporalLayers; ++tid) { + frame_info.decode_target_indications.push_back(Dti(sid, tid, config)); + } + } + if (config.IsKeyframe()) { + frame_info.part_of_chain = {true, true}; + } else if (config.TemporalId() == 0) { + frame_info.part_of_chain = {config.SpatialId() == 0, + config.SpatialId() == 1}; + } else { + frame_info.part_of_chain = {false, false}; + } + return frame_info; +} + +void ScalabilityStructureL2T2KeyShift::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + for (int sid = 0; sid < kNumSpatialLayers; ++sid) { + // Enable/disable spatial layers independetely. + bool active = bitrates.GetBitrate(sid, /*tid=*/0) > 0; + if (!DecodeTargetIsActive(sid, /*tid=*/0) && active) { + // Key frame is required to reenable any spatial layer. 
+ next_pattern_ = kKey; + } + + SetDecodeTargetIsActive(sid, /*tid=*/0, active); + SetDecodeTargetIsActive(sid, /*tid=*/1, + active && bitrates.GetBitrate(sid, /*tid=*/1) > 0); + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h similarity index 57% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h index 1b18bd7c1..26d1afcb2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h @@ -7,14 +7,15 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ #include #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { @@ -34,8 +35,8 @@ class ScalabilityStructureL2T2KeyShift : public ScalableVideoController { FrameDependencyStructure DependencyStructure() const override; std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; private: enum FramePattern { @@ -43,11 +44,21 @@ class ScalabilityStructureL2T2KeyShift : public ScalableVideoController { kDelta0, kDelta1, }; - LayerFrameConfig KeyFrameConfig() const; + + static constexpr int kNumSpatialLayers = 2; + static constexpr int kNumTemporalLayers = 2; + + bool DecodeTargetIsActive(int sid, int tid) const { + return active_decode_targets_[sid * kNumTemporalLayers + tid]; + } + void SetDecodeTargetIsActive(int sid, int tid, bool value) { + active_decode_targets_.set(sid * kNumTemporalLayers + tid, value); + } FramePattern next_pattern_ = kKey; + std::bitset<32> active_decode_targets_ = 0b1111; }; } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc new file mode 100644 index 000000000..d7a532446 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_l3t1.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL3T1::~ScalabilityStructureL3T1() = default; + +FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 1, 2}; + auto& templates = structure.templates; + templates.resize(6); + templates[0].S(0).Dtis("SRR").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + templates[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0}); + templates[2].S(1).Dtis("-SR").ChainDiffs({1, 1, 1}).FrameDiffs({3, 1}); + templates[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + templates[4].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({3, 1}); + templates[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.h similarity index 50% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.h rename to 
TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.h index 404860d08..dea40e96b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.h @@ -7,15 +7,11 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ -#include - -#include "absl/types/optional.h" #include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" namespace webrtc { @@ -25,21 +21,14 @@ namespace webrtc { // | | | // S0 0-0-0- // Time-> 0 1 2 -class ScalabilityStructureL3T1 : public ScalableVideoController { +class ScalabilityStructureL3T1 : public ScalabilityStructureFullSvc { public: + ScalabilityStructureL3T1() : ScalabilityStructureFullSvc(3, 1) {} ~ScalabilityStructureL3T1() override; - StreamLayersConfig StreamConfig() const override; FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; - - private: - bool keyframe_ = true; }; } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_ +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc new file mode 100644 index 000000000..932056b0d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_l3t3.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL3T3::~ScalabilityStructureL3T3() = default; + +FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 9; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; + auto& t = structure.templates; + t.resize(15); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. Indexes are written in hex for nicer alignment. 
+ t[0x1].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0}); + t[0x6].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[0x3].S(0).T(2).Dtis("--D--R--R").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[0x8].S(1).T(2).Dtis("-----D--R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1}); + t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1}); + t[0x2].S(0).T(1).Dtis("-DS-RR-RR").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[0x7].S(1).T(1).Dtis("----DS-RR").ChainDiffs({7, 6, 5}).FrameDiffs({6, 1}); + t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6, 1}); + t[0x4].S(0).T(2).Dtis("--D--R--R").ChainDiffs({9, 8, 7}).FrameDiffs({3}); + t[0x9].S(1).T(2).Dtis("-----D--R").ChainDiffs({10, 9, 8}).FrameDiffs({3, 1}); + t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3, 1}); + t[0x0].S(0).T(0).Dtis("SSSRRRRRR").ChainDiffs({12, 11, 10}).FrameDiffs({12}); + t[0x5].S(1).T(0).Dtis("---SSSRRR").ChainDiffs({1, 1, 1}).FrameDiffs({12, 1}); + t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({12, 1}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.h new file mode 100644 index 000000000..3f42726cc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// https://aomediacodec.github.io/av1-rtp-spec/#a63-l3t3-full-svc +class ScalabilityStructureL3T3 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL3T3() : ScalabilityStructureFullSvc(3, 3) {} + ~ScalabilityStructureL3T3() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc new file mode 100644 index 000000000..618deb4b3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_s2t1.h" + +#include +#include + +#include "absl/base/macros.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +constexpr int ScalabilityStructureS2T1::kNumSpatialLayers; + +ScalabilityStructureS2T1::~ScalabilityStructureS2T1() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureS2T1::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = kNumSpatialLayers; + result.num_temporal_layers = 1; + result.scaling_factor_num[0] = 1; + result.scaling_factor_den[0] = 2; + return result; +} + +FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = kNumSpatialLayers; + structure.num_chains = kNumSpatialLayers; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("S-").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); + structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 0}); + return structure; +} + +std::vector +ScalabilityStructureS2T1::NextFrameConfig(bool restart) { + if (restart) { + can_reference_frame_for_spatial_id_.reset(); + } + std::vector configs; + configs.reserve(kNumSpatialLayers); + for (int sid = 0; sid < kNumSpatialLayers; ++sid) { + if (!active_decode_targets_[sid]) { + can_reference_frame_for_spatial_id_.reset(sid); + continue; + } + configs.emplace_back(); + LayerFrameConfig& config = configs.back().S(sid); + if (can_reference_frame_for_spatial_id_[sid]) { + config.ReferenceAndUpdate(sid); + } else { + config.Keyframe().Update(sid); + can_reference_frame_for_spatial_id_.set(sid); + } + } + + return configs; +} + +GenericFrameInfo 
ScalabilityStructureS2T1::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + frame_info.decode_target_indications = { + config.SpatialId() == 0 ? DecodeTargetIndication::kSwitch + : DecodeTargetIndication::kNotPresent, + config.SpatialId() == 1 ? DecodeTargetIndication::kSwitch + : DecodeTargetIndication::kNotPresent, + }; + frame_info.part_of_chain = {config.SpatialId() == 0, config.SpatialId() == 1}; + frame_info.active_decode_targets = active_decode_targets_; + return frame_info; +} + +void ScalabilityStructureS2T1::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + active_decode_targets_.set(0, bitrates.GetBitrate(/*sid=*/0, /*tid=*/0) > 0); + active_decode_targets_.set(1, bitrates.GetBitrate(/*sid=*/1, /*tid=*/0) > 0); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.h similarity index 61% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.h index 06a99775c..0f27e480f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.h @@ -7,14 +7,15 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ #include #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { @@ -29,13 +30,16 @@ class ScalabilityStructureS2T1 : public ScalableVideoController { FrameDependencyStructure DependencyStructure() const override; std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; private: - bool keyframe_ = true; + static constexpr int kNumSpatialLayers = 2; + + std::bitset can_reference_frame_for_spatial_id_; + std::bitset<32> active_decode_targets_ = 0b11; }; } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_ +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.cc new file mode 100644 index 000000000..2b0393f9c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.cc @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_test_helpers.h" + +#include + +#include +#include + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_frame_type.h" +#include "modules/video_coding/chain_diff_calculator.h" +#include "modules/video_coding/frame_dependencies_calculator.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "test/gtest.h" + +namespace webrtc { + +VideoBitrateAllocation EnableTemporalLayers(int s0, int s1, int s2) { + VideoBitrateAllocation bitrate; + for (int tid = 0; tid < s0; ++tid) { + bitrate.SetBitrate(0, tid, 1'000'000); + } + for (int tid = 0; tid < s1; ++tid) { + bitrate.SetBitrate(1, tid, 1'000'000); + } + for (int tid = 0; tid < s2; ++tid) { + bitrate.SetBitrate(2, tid, 1'000'000); + } + return bitrate; +} + +void ScalabilityStructureWrapper::GenerateFrames( + int num_temporal_units, + std::vector& frames) { + for (int i = 0; i < num_temporal_units; ++i) { + for (auto& layer_frame : + structure_controller_.NextFrameConfig(/*restart=*/false)) { + int64_t frame_id = ++frame_id_; + bool is_keyframe = layer_frame.IsKeyframe(); + + GenericFrameInfo frame_info = + structure_controller_.OnEncodeDone(layer_frame); + if (is_keyframe) { + chain_diff_calculator_.Reset(frame_info.part_of_chain); + } + frame_info.chain_diffs = + chain_diff_calculator_.From(frame_id, frame_info.part_of_chain); + for (int64_t base_frame_id : frame_deps_calculator_.FromBuffersUsage( + is_keyframe ? 
VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta, + frame_id, frame_info.encoder_buffers)) { + frame_info.frame_diffs.push_back(frame_id - base_frame_id); + } + + frames.push_back(std::move(frame_info)); + } + } +} + +bool ScalabilityStructureWrapper::FrameReferencesAreValid( + rtc::ArrayView frames) const { + bool valid = true; + // VP9 and AV1 supports up to 8 buffers. Expect no more buffers are not used. + std::bitset<8> buffer_contains_frame; + for (size_t i = 0; i < frames.size(); ++i) { + const GenericFrameInfo& frame = frames[i]; + for (const CodecBufferUsage& buffer_usage : frame.encoder_buffers) { + if (buffer_usage.id < 0 || buffer_usage.id >= 8) { + ADD_FAILURE() << "Invalid buffer id " << buffer_usage.id + << " for frame#" << i + << ". Up to 8 buffers are supported."; + valid = false; + continue; + } + if (buffer_usage.referenced && !buffer_contains_frame[buffer_usage.id]) { + ADD_FAILURE() << "buffer " << buffer_usage.id << " for frame#" << i + << " was reference before updated."; + valid = false; + } + if (buffer_usage.updated) { + buffer_contains_frame.set(buffer_usage.id); + } + } + for (int fdiff : frame.frame_diffs) { + if (fdiff <= 0 || static_cast(fdiff) > i) { + ADD_FAILURE() << "Invalid frame diff " << fdiff << " for frame#" << i; + valid = false; + } + } + } + return valid; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.h new file mode 100644 index 000000000..d183be476 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_TEST_HELPERS_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_TEST_HELPERS_H_ + +#include + +#include + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/chain_diff_calculator.h" +#include "modules/video_coding/frame_dependencies_calculator.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +// Creates bitrate allocation with non-zero bitrate for given number of temporal +// layers for each spatial layer. +VideoBitrateAllocation EnableTemporalLayers(int s0, int s1 = 0, int s2 = 0); + +class ScalabilityStructureWrapper { + public: + explicit ScalabilityStructureWrapper(ScalableVideoController& structure) + : structure_controller_(structure) {} + + std::vector GenerateFrames(int num_temporal_units) { + std::vector frames; + GenerateFrames(num_temporal_units, frames); + return frames; + } + void GenerateFrames(int num_temporal_units, + std::vector& frames); + + // Returns false and ADD_FAILUREs for frames with invalid references. + // In particular validates no frame frame reference to frame before frames[0]. + // In error messages frames are indexed starting with 0. 
+ bool FrameReferencesAreValid( + rtc::ArrayView frames) const; + + private: + ScalableVideoController& structure_controller_; + FrameDependenciesCalculator frame_deps_calculator_; + ChainDiffCalculator chain_diff_calculator_; + int64_t frame_id_ = 0; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_TEST_HELPERS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller.h similarity index 91% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller.h index 016782079..d2d848686 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller.h @@ -7,13 +7,12 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_H_ #include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_bitrate_allocation.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" @@ -85,9 +84,7 @@ class ScalableVideoController { // Notifies Controller with updated bitrates per layer. In particular notifies // when certain layers should be disabled. // Controller shouldn't produce LayerFrameConfig for disabled layers. - // TODO(bugs.webrtc.org/11404): Make pure virtual when implemented by all - // structures. 
- virtual void OnRatesUpdated(const VideoBitrateAllocation& bitrates) {} + virtual void OnRatesUpdated(const VideoBitrateAllocation& bitrates) = 0; // When `restart` is true, first `LayerFrameConfig` should have `is_keyframe` // set to true. @@ -95,8 +92,7 @@ class ScalableVideoController { virtual std::vector NextFrameConfig(bool restart) = 0; // Returns configuration to pass to EncoderCallback. - virtual absl::optional OnEncodeDone( - LayerFrameConfig config) = 0; + virtual GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) = 0; }; // Below are implementation details. @@ -138,4 +134,4 @@ ScalableVideoController::LayerFrameConfig::ReferenceAndUpdate(int buffer_id) { } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_ +#endif // MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc similarity index 79% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc index 0d211fb91..6d8e6e8fc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc @@ -7,7 +7,7 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include #include @@ -50,17 +50,17 @@ ScalableVideoControllerNoLayering::NextFrameConfig(bool restart) { return result; } -absl::optional -ScalableVideoControllerNoLayering::OnEncodeDone(LayerFrameConfig config) { +GenericFrameInfo ScalableVideoControllerNoLayering::OnEncodeDone( + const LayerFrameConfig& config) { RTC_DCHECK_EQ(config.Id(), 0); - absl::optional frame_info(absl::in_place); - frame_info->encoder_buffers = config.Buffers(); + GenericFrameInfo frame_info; + frame_info.encoder_buffers = config.Buffers(); if (config.IsKeyframe()) { - for (auto& buffer : frame_info->encoder_buffers) { + for (auto& buffer : frame_info.encoder_buffers) { buffer.referenced = false; } } - frame_info->decode_target_indications = {DecodeTargetIndication::kSwitch}; + frame_info.decode_target_indications = {DecodeTargetIndication::kSwitch}; return frame_info; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.h similarity index 66% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.h index ad730989a..e253ffe84 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.h @@ -7,14 +7,15 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ #include #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/codecs/av1/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { @@ -26,8 +27,8 @@ class ScalableVideoControllerNoLayering : public ScalableVideoController { FrameDependencyStructure DependencyStructure() const override; std::vector NextFrameConfig(bool restart) override; - absl::optional OnEncodeDone( - LayerFrameConfig config) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override {} private: bool start_ = true; @@ -35,4 +36,4 @@ class ScalableVideoControllerNoLayering : public ScalableVideoController { } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ +#endif // MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc similarity index 82% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.cc rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc index 25bca63c0..a51bdb05d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_rate_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc @@ -8,7 +8,7 @@ * be found in 
the AUTHORS file in the root of the source tree. */ -#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" +#include "modules/video_coding/svc/svc_rate_allocator.h" #include #include @@ -17,40 +17,38 @@ #include #include "absl/container/inlined_vector.h" +#include "modules/video_coding/svc/create_scalability_structure.h" #include "rtc_base/checks.h" namespace webrtc { namespace { -const float kSpatialLayeringRateScalingFactor = 0.55f; -const float kTemporalLayeringRateScalingFactor = 0.55f; +constexpr float kSpatialLayeringRateScalingFactor = 0.55f; +constexpr float kTemporalLayeringRateScalingFactor = 0.55f; -// Returns numberOfSpatialLayers if no layers are active. -size_t GetFirstActiveLayer(const VideoCodec& codec) { - RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); - RTC_DCHECK_GT(codec.VP9().numberOfSpatialLayers, 0u); - size_t layer = 0; - for (; layer < codec.VP9().numberOfSpatialLayers; ++layer) { - if (codec.spatialLayers[layer].active) { +struct ActiveSpatialLayers { + size_t first = 0; + size_t num = 0; +}; + +ActiveSpatialLayers GetActiveSpatialLayers(const VideoCodec& codec, + size_t num_spatial_layers) { + ActiveSpatialLayers active; + for (active.first = 0; active.first < num_spatial_layers; ++active.first) { + if (codec.spatialLayers[active.first].active) { break; } } - return layer; -} -static size_t GetNumActiveSpatialLayers(const VideoCodec& codec) { - RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); - RTC_DCHECK_GT(codec.VP9().numberOfSpatialLayers, 0u); - - const size_t first_active_layer = GetFirstActiveLayer(codec); - size_t last_active_layer = first_active_layer; - for (; last_active_layer < codec.VP9().numberOfSpatialLayers; - ++last_active_layer) { + size_t last_active_layer = active.first; + for (; last_active_layer < num_spatial_layers; ++last_active_layer) { if (!codec.spatialLayers[last_active_layer].active) { break; } } - return last_active_layer - first_active_layer; + active.num = last_active_layer - active.first; + + 
return active; } std::vector AdjustAndVerify( @@ -173,16 +171,39 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec, } // namespace +SvcRateAllocator::NumLayers SvcRateAllocator::GetNumLayers( + const VideoCodec& codec) { + NumLayers layers; + if (!codec.ScalabilityMode().empty()) { + if (auto structure = CreateScalabilityStructure(codec.ScalabilityMode())) { + ScalableVideoController::StreamLayersConfig config = + structure->StreamConfig(); + layers.spatial = config.num_spatial_layers; + layers.temporal = config.num_temporal_layers; + return layers; + } + } + if (codec.codecType == kVideoCodecVP9) { + layers.spatial = codec.VP9().numberOfSpatialLayers; + layers.temporal = codec.VP9().numberOfTemporalLayers; + return layers; + } + layers.spatial = 1; + layers.temporal = 1; + return layers; +} + SvcRateAllocator::SvcRateAllocator(const VideoCodec& codec) : codec_(codec), + num_layers_(GetNumLayers(codec)), experiment_settings_(StableTargetRateExperiment::ParseFromFieldTrials()), cumulative_layer_start_bitrates_(GetLayerStartBitrates(codec)), last_active_layer_count_(0) { - RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); - RTC_DCHECK_GT(codec.VP9().numberOfSpatialLayers, 0u); - RTC_DCHECK_GT(codec.VP9().numberOfTemporalLayers, 0u); - for (size_t layer_idx = 0; layer_idx < codec.VP9().numberOfSpatialLayers; - ++layer_idx) { + RTC_DCHECK_GT(num_layers_.spatial, 0); + RTC_DCHECK_LE(num_layers_.spatial, kMaxSpatialLayers); + RTC_DCHECK_GT(num_layers_.temporal, 0); + RTC_DCHECK_LE(num_layers_.temporal, 3); + for (size_t layer_idx = 0; layer_idx < num_layers_.spatial; ++layer_idx) { // Verify min <= target <= max. 
if (codec.spatialLayers[layer_idx].active) { RTC_DCHECK_GT(codec.spatialLayers[layer_idx].maxBitrate, 0); @@ -205,16 +226,16 @@ VideoBitrateAllocation SvcRateAllocator::Allocate( } if (codec_.spatialLayers[0].targetBitrate == 0) { - // Delegate rate distribution to VP9 encoder wrapper if bitrate thresholds + // Delegate rate distribution to encoder wrapper if bitrate thresholds // are not set. VideoBitrateAllocation bitrate_allocation; bitrate_allocation.SetBitrate(0, 0, total_bitrate.bps()); return bitrate_allocation; } - const size_t first_active_layer = GetFirstActiveLayer(codec_); - const size_t num_active_layers = GetNumActiveSpatialLayers(codec_); - size_t num_spatial_layers = num_active_layers; + const ActiveSpatialLayers active_layers = + GetActiveSpatialLayers(codec_, num_layers_.spatial); + size_t num_spatial_layers = active_layers.num; if (num_spatial_layers == 0) { return VideoBitrateAllocation(); // All layers are deactivated. @@ -249,13 +270,13 @@ VideoBitrateAllocation SvcRateAllocator::Allocate( VideoBitrateAllocation allocation; if (codec_.mode == VideoCodecMode::kRealtimeVideo) { - allocation = GetAllocationNormalVideo(total_bitrate, first_active_layer, + allocation = GetAllocationNormalVideo(total_bitrate, active_layers.first, num_spatial_layers); } else { - allocation = GetAllocationScreenSharing(total_bitrate, first_active_layer, + allocation = GetAllocationScreenSharing(total_bitrate, active_layers.first, num_spatial_layers); } - allocation.set_bw_limited(num_spatial_layers < num_active_layers); + allocation.set_bw_limited(num_spatial_layers < active_layers.num); return allocation; } @@ -279,25 +300,24 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationNormalVideo( VideoBitrateAllocation bitrate_allocation; - const size_t num_temporal_layers = codec_.VP9().numberOfTemporalLayers; for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { std::vector temporal_layer_rates = - SplitBitrate(num_temporal_layers, 
spatial_layer_rates[sl_idx], + SplitBitrate(num_layers_.temporal, spatial_layer_rates[sl_idx], kTemporalLayeringRateScalingFactor); // Distribute rate across temporal layers. Allocate more bits to lower // layers since they are used for prediction of higher layers and their // references are far apart. - if (num_temporal_layers == 1) { + if (num_layers_.temporal == 1) { bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, temporal_layer_rates[0].bps()); - } else if (num_temporal_layers == 2) { + } else if (num_layers_.temporal == 2) { bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, temporal_layer_rates[1].bps()); bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 1, temporal_layer_rates[0].bps()); } else { - RTC_CHECK_EQ(num_temporal_layers, 3); + RTC_CHECK_EQ(num_layers_.temporal, 3); // In case of three temporal layers the high layer has two frames and the // middle layer has one frame within GOP (in between two consecutive low // layer frames). Thus high layer requires more bits (comparing pure @@ -383,13 +403,14 @@ size_t SvcRateAllocator::FindNumEnabledLayers(DataRate target_rate) const { } DataRate SvcRateAllocator::GetMaxBitrate(const VideoCodec& codec) { - const size_t first_active_layer = GetFirstActiveLayer(codec); - const size_t num_spatial_layers = GetNumActiveSpatialLayers(codec); + const NumLayers num_layers = GetNumLayers(codec); + const ActiveSpatialLayers active_layers = + GetActiveSpatialLayers(codec, num_layers.spatial); DataRate max_bitrate = DataRate::Zero(); - for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { + for (size_t sl_idx = 0; sl_idx < active_layers.num; ++sl_idx) { max_bitrate += DataRate::KilobitsPerSec( - codec.spatialLayers[first_active_layer + sl_idx].maxBitrate); + codec.spatialLayers[active_layers.first + sl_idx].maxBitrate); } if (codec.maxBitrate != 0) { @@ -412,12 +433,13 @@ DataRate SvcRateAllocator::GetPaddingBitrate(const VideoCodec& codec) { absl::InlinedVector 
SvcRateAllocator::GetLayerStartBitrates(const VideoCodec& codec) { absl::InlinedVector start_bitrates; - const size_t first_active_layer = GetFirstActiveLayer(codec); - const size_t num_layers = GetNumActiveSpatialLayers(codec); + const NumLayers num_layers = GetNumLayers(codec); + const ActiveSpatialLayers active_layers = + GetActiveSpatialLayers(codec, num_layers.spatial); DataRate last_rate = DataRate::Zero(); - for (size_t i = 1; i <= num_layers; ++i) { + for (size_t i = 1; i <= active_layers.num; ++i) { DataRate layer_toggling_rate = - FindLayerTogglingThreshold(codec, first_active_layer, i); + FindLayerTogglingThreshold(codec, active_layers.first, i); start_bitrates.push_back(layer_toggling_rate); RTC_DCHECK_LE(last_rate, layer_toggling_rate); last_rate = layer_toggling_rate; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.h new file mode 100644 index 000000000..bd75fca28 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_SVC_SVC_RATE_ALLOCATOR_H_ +#define MODULES_VIDEO_CODING_SVC_SVC_RATE_ALLOCATOR_H_ + +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/video_codec.h" +#include "rtc_base/experiments/stable_target_rate_experiment.h" + +namespace webrtc { + +class SvcRateAllocator : public VideoBitrateAllocator { + public: + explicit SvcRateAllocator(const VideoCodec& codec); + + VideoBitrateAllocation Allocate( + VideoBitrateAllocationParameters parameters) override; + + static DataRate GetMaxBitrate(const VideoCodec& codec); + static DataRate GetPaddingBitrate(const VideoCodec& codec); + static absl::InlinedVector GetLayerStartBitrates( + const VideoCodec& codec); + + private: + struct NumLayers { + size_t spatial = 1; + size_t temporal = 1; + }; + + static NumLayers GetNumLayers(const VideoCodec& codec); + VideoBitrateAllocation GetAllocationNormalVideo( + DataRate total_bitrate, + size_t first_active_layer, + size_t num_spatial_layers) const; + + VideoBitrateAllocation GetAllocationScreenSharing( + DataRate total_bitrate, + size_t first_active_layer, + size_t num_spatial_layers) const; + + // Returns the number of layers that are active and have enough bitrate to + // actually be enabled. 
+ size_t FindNumEnabledLayers(DataRate target_rate) const; + + const VideoCodec codec_; + const NumLayers num_layers_; + const StableTargetRateExperiment experiment_settings_; + const absl::InlinedVector + cumulative_layer_start_bitrates_; + size_t last_active_layer_count_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SVC_RATE_ALLOCATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc index d93293704..d79075ff2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc @@ -60,4 +60,13 @@ VCMFrameInformation* VCMTimestampMap::Pop(uint32_t timestamp) { bool VCMTimestampMap::IsEmpty() const { return (next_add_idx_ == next_pop_idx_); } + +size_t VCMTimestampMap::Size() const { + // The maximum number of elements in the list is |capacity_| - 1. The list is + // empty if the add and pop indices are equal. + return next_add_idx_ >= next_pop_idx_ + ? 
next_add_idx_ - next_pop_idx_ + : next_add_idx_ + capacity_ - next_pop_idx_; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h index c85666c9a..cfa12573e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h @@ -24,6 +24,7 @@ class VCMTimestampMap { void Add(uint32_t timestamp, VCMFrameInformation* data); VCMFrameInformation* Pop(uint32_t timestamp); + size_t Size() const; private: struct TimestampDataTuple { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc index f046edf49..eddac4f5d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc @@ -14,16 +14,18 @@ #include +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/time/timestamp_extrapolator.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" namespace webrtc { -VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing) +VCMTiming::VCMTiming(Clock* clock) : clock_(clock), - master_(false), - ts_extrapolator_(), - codec_timer_(new VCMCodecTimer()), + ts_extrapolator_(std::make_unique( + clock_->TimeInMilliseconds())), + codec_timer_(std::make_unique()), render_delay_ms_(kDefaultRenderDelayMs), min_playout_delay_ms_(0), max_playout_delay_ms_(10000), @@ -31,25 +33,16 @@ VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing) current_delay_ms_(0), prev_frame_timestamp_(0), timing_frame_info_(), - num_decoded_frames_(0) { - if (master_timing == NULL) { - master_ = true; - ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds()); - } else { - ts_extrapolator_ = master_timing->ts_extrapolator_; - } -} - -VCMTiming::~VCMTiming() { - if 
(master_) { - delete ts_extrapolator_; - } + num_decoded_frames_(0), + low_latency_renderer_enabled_("enabled", true) { + ParseFieldTrial({&low_latency_renderer_enabled_}, + field_trial::FindFullName("WebRTC-LowLatencyRenderer")); } void VCMTiming::Reset() { MutexLock lock(&mutex_); ts_extrapolator_->Reset(clock_->TimeInMilliseconds()); - codec_timer_.reset(new VCMCodecTimer()); + codec_timer_ = std::make_unique(); render_delay_ms_ = kDefaultRenderDelayMs; min_playout_delay_ms_ = 0; jitter_delay_ms_ = 0; @@ -177,10 +170,16 @@ int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const { - if (min_playout_delay_ms_ == 0 && max_playout_delay_ms_ == 0) { - // Render as soon as possible. + constexpr int kLowLatencyRendererMaxPlayoutDelayMs = 500; + if (min_playout_delay_ms_ == 0 && + (max_playout_delay_ms_ == 0 || + (low_latency_renderer_enabled_ && + max_playout_delay_ms_ <= kLowLatencyRendererMaxPlayoutDelayMs))) { + // Render as soon as possible or with low-latency renderer algorithm. return 0; } + // Note that TimestampExtrapolator::ExtrapolateLocalTime is not a const + // method; it mutates the object's wraparound state. 
int64_t estimated_complete_time_ms = ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp); if (estimated_complete_time_ms == -1) { @@ -246,4 +245,15 @@ absl::optional VCMTiming::GetTimingFrameInfo() { return timing_frame_info_; } +void VCMTiming::SetMaxCompositionDelayInFrames( + absl::optional max_composition_delay_in_frames) { + MutexLock lock(&mutex_); + max_composition_delay_in_frames_ = max_composition_delay_in_frames; +} + +absl::optional VCMTiming::MaxCompositionDelayInFrames() const { + MutexLock lock(&mutex_); + return max_composition_delay_in_frames_; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.h index 75b8e7d99..736b5e9ae 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.h @@ -16,8 +16,10 @@ #include "absl/types/optional.h" #include "api/video/video_timing.h" #include "modules/video_coding/codec_timer.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" +#include "rtc_base/time/timestamp_extrapolator.h" namespace webrtc { @@ -26,10 +28,8 @@ class TimestampExtrapolator; class VCMTiming { public: - // The primary timing component should be passed - // if this is the dual timing component. - explicit VCMTiming(Clock* clock, VCMTiming* master_timing = NULL); - virtual ~VCMTiming(); + explicit VCMTiming(Clock* clock); + virtual ~VCMTiming() = default; // Resets the timing to the initial state. 
void Reset(); @@ -100,6 +100,10 @@ class VCMTiming { void SetTimingFrameInfo(const TimingFrameInfo& info); absl::optional GetTimingFrameInfo(); + void SetMaxCompositionDelayInFrames( + absl::optional max_composition_delay_in_frames); + absl::optional MaxCompositionDelayInFrames() const; + enum { kDefaultRenderDelayMs = 10 }; enum { kDelayMaxChangeMsPerS = 100 }; @@ -112,9 +116,10 @@ class VCMTiming { private: mutable Mutex mutex_; Clock* const clock_; - bool master_ RTC_GUARDED_BY(mutex_); - TimestampExtrapolator* ts_extrapolator_ RTC_GUARDED_BY(mutex_); - std::unique_ptr codec_timer_ RTC_GUARDED_BY(mutex_); + const std::unique_ptr ts_extrapolator_ + RTC_PT_GUARDED_BY(mutex_); + std::unique_ptr codec_timer_ RTC_GUARDED_BY(mutex_) + RTC_PT_GUARDED_BY(mutex_); int render_delay_ms_ RTC_GUARDED_BY(mutex_); // Best-effort playout delay range for frames from capture to render. // The receiver tries to keep the delay between |min_playout_delay_ms_| @@ -128,6 +133,12 @@ class VCMTiming { uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(mutex_); absl::optional timing_frame_info_ RTC_GUARDED_BY(mutex_); size_t num_decoded_frames_ RTC_GUARDED_BY(mutex_); + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter enabled + // determines if the low-latency renderer algorithm should be used for the + // case min playout delay=0 and max playout delay>0. 
+ FieldTrialParameter low_latency_renderer_enabled_ + RTC_GUARDED_BY(mutex_); + absl::optional max_composition_delay_in_frames_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc index 9667bb7ce..e3c249947 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc @@ -27,6 +27,7 @@ constexpr int kCodecTypeBytesCount = 4; constexpr uint8_t kFileHeaderStart[kCodecTypeBytesCount] = {'D', 'K', 'I', 'F'}; constexpr uint8_t kVp8Header[kCodecTypeBytesCount] = {'V', 'P', '8', '0'}; constexpr uint8_t kVp9Header[kCodecTypeBytesCount] = {'V', 'P', '9', '0'}; +constexpr uint8_t kAv1Header[kCodecTypeBytesCount] = {'A', 'V', '0', '1'}; constexpr uint8_t kH264Header[kCodecTypeBytesCount] = {'H', '2', '6', '4'}; } // namespace @@ -170,7 +171,6 @@ absl::optional IvfFileReader::NextFrame() { if (is_first_frame) { image._frameType = VideoFrameType::kVideoFrameKey; } - image._completeFrame = true; return image; } @@ -191,6 +191,9 @@ absl::optional IvfFileReader::ParseCodecType(uint8_t* buffer, if (memcmp(&buffer[start_pos], kVp9Header, kCodecTypeBytesCount) == 0) { return VideoCodecType::kVideoCodecVP9; } + if (memcmp(&buffer[start_pos], kAv1Header, kCodecTypeBytesCount) == 0) { + return VideoCodecType::kVideoCodecAV1; + } if (memcmp(&buffer[start_pos], kH264Header, kCodecTypeBytesCount) == 0) { return VideoCodecType::kVideoCodecH264; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc index 46b8e87ba..496da894a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc @@ 
-75,6 +75,12 @@ bool IvfFileWriter::WriteHeader() { ivf_header[10] = '9'; ivf_header[11] = '0'; break; + case kVideoCodecAV1: + ivf_header[8] = 'A'; + ivf_header[9] = 'V'; + ivf_header[10] = '0'; + ivf_header[11] = '1'; + break; case kVideoCodecH264: ivf_header[8] = 'H'; ivf_header[9] = '2'; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc index 71bf93429..2859ac2e2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc @@ -189,13 +189,17 @@ QualityScaler::QualityScaler(QualityScalerQpUsageHandlerInterface* handler, // Protected ctor, should not be called directly. QualityScaler::QualityScaler(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds, - int64_t sampling_period_ms) + int64_t default_sampling_period_ms) : handler_(handler), thresholds_(thresholds), - sampling_period_ms_(sampling_period_ms), + sampling_period_ms_(QualityScalerSettings::ParseFromFieldTrials() + .SamplingPeriodMs() + .value_or(default_sampling_period_ms)), fast_rampup_(true), // Arbitrarily choose size based on 30 fps for 5 seconds. 
- average_qp_(5 * 30), + average_qp_(QualityScalerSettings::ParseFromFieldTrials() + .AverageQpWindow() + .value_or(5 * 30)), framedrop_percent_media_opt_(5 * 30), framedrop_percent_all_(5 * 30), experiment_enabled_(QualityScalingExperiment::Enabled()), diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc index 13de8755d..39e39abca 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc @@ -151,7 +151,7 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( size_t top_active_layer = active_layer; // Allocate up to the target bitrate for each active simulcast layer. for (; active_layer < codec_.numberOfSimulcastStreams; ++active_layer) { - const SimulcastStream& stream = + const SpatialLayer& stream = codec_.simulcastStream[layer_index[active_layer]]; if (!stream.active) { stream_enabled_[layer_index[active_layer]] = false; @@ -194,7 +194,7 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( // TODO(sprang): Allocate up to max bitrate for all layers once we have a // better idea of possible performance implications. if (left_in_total_allocation > DataRate::Zero()) { - const SimulcastStream& stream = codec_.simulcastStream[top_active_layer]; + const SpatialLayer& stream = codec_.simulcastStream[top_active_layer]; DataRate initial_layer_rate = DataRate::BitsPerSec( allocated_bitrates->GetSpatialLayerSum(top_active_layer)); DataRate additional_allocation = std::min( @@ -229,7 +229,8 @@ void SimulcastRateAllocator::DistributeAllocationToTemporalLayers( uint32_t max_bitrate_kbps; // Legacy temporal-layered only screenshare, or simulcast screenshare // with legacy mode for simulcast stream 0. 
- if (legacy_conference_mode_ && simulcast_id == 0) { + if (codec_.mode == VideoCodecMode::kScreensharing && + legacy_conference_mode_ && simulcast_id == 0) { // TODO(holmer): This is a "temporary" hack for screensharing, where we // interpret the startBitrate as the encoder target bitrate. This is // to allow for a different max bitrate, so if the codec can't meet @@ -249,7 +250,8 @@ void SimulcastRateAllocator::DistributeAllocationToTemporalLayers( if (num_temporal_streams == 1) { tl_allocation.push_back(target_bitrate_kbps); } else { - if (legacy_conference_mode_ && simulcast_id == 0) { + if (codec_.mode == VideoCodecMode::kScreensharing && + legacy_conference_mode_ && simulcast_id == 0) { tl_allocation = ScreenshareTemporalLayerAllocation( target_bitrate_kbps, max_bitrate_kbps, simulcast_id); } else { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc new file mode 100644 index 000000000..a9af64344 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc @@ -0,0 +1,915 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/utility/simulcast_test_fixture_impl.h" + +#include +#include +#include +#include + +#include "api/video/encoded_image.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "rtc_base/checks.h" +#include "test/gtest.h" + +using ::testing::_; +using ::testing::AllOf; +using ::testing::Field; +using ::testing::Return; + +namespace webrtc { +namespace test { + +namespace { + +const int kDefaultWidth = 1280; +const int kDefaultHeight = 720; +const int kNumberOfSimulcastStreams = 3; +const int kColorY = 66; +const int kColorU = 22; +const int kColorV = 33; +const int kMaxBitrates[kNumberOfSimulcastStreams] = {150, 600, 1200}; +const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600}; +const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000}; +const float kMaxFramerates[kNumberOfSimulcastStreams] = {30, 30, 30}; +const int kDefaultTemporalLayerProfile[3] = {3, 3, 3}; +const int kNoTemporalLayerProfile[3] = {0, 0, 0}; + +const VideoEncoder::Capabilities kCapabilities(false); +const VideoEncoder::Settings kSettings(kCapabilities, 1, 1200); + +template +void SetExpectedValues3(T value0, T value1, T value2, T* expected_values) { + expected_values[0] = value0; + expected_values[1] = value1; + expected_values[2] = value2; +} + +enum PlaneType { + kYPlane = 0, + kUPlane = 1, + kVPlane = 2, + kNumOfPlanes = 3, +}; + +} // namespace + +class SimulcastTestFixtureImpl::TestEncodedImageCallback + : public EncodedImageCallback { + public: + TestEncodedImageCallback() { + memset(temporal_layer_, -1, sizeof(temporal_layer_)); + memset(layer_sync_, false, sizeof(layer_sync_)); + } + + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) 
override { + bool is_vp8 = (codec_specific_info->codecType == kVideoCodecVP8); + bool is_h264 = (codec_specific_info->codecType == kVideoCodecH264); + // Only store the base layer. + if (encoded_image.SpatialIndex().value_or(0) == 0) { + if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) { + encoded_key_frame_.SetEncodedData(EncodedImageBuffer::Create( + encoded_image.data(), encoded_image.size())); + encoded_key_frame_._frameType = VideoFrameType::kVideoFrameKey; + } else { + encoded_frame_.SetEncodedData(EncodedImageBuffer::Create( + encoded_image.data(), encoded_image.size())); + } + } + if (is_vp8) { + layer_sync_[encoded_image.SpatialIndex().value_or(0)] = + codec_specific_info->codecSpecific.VP8.layerSync; + temporal_layer_[encoded_image.SpatialIndex().value_or(0)] = + codec_specific_info->codecSpecific.VP8.temporalIdx; + } else if (is_h264) { + layer_sync_[encoded_image.SpatialIndex().value_or(0)] = + codec_specific_info->codecSpecific.H264.base_layer_sync; + temporal_layer_[encoded_image.SpatialIndex().value_or(0)] = + codec_specific_info->codecSpecific.H264.temporal_idx; + } + return Result(Result::OK, encoded_image.Timestamp()); + } + // This method only makes sense for VP8. 
+ void GetLastEncodedFrameInfo(int* temporal_layer, + bool* layer_sync, + int stream) { + *temporal_layer = temporal_layer_[stream]; + *layer_sync = layer_sync_[stream]; + } + void GetLastEncodedKeyFrame(EncodedImage* encoded_key_frame) { + *encoded_key_frame = encoded_key_frame_; + } + void GetLastEncodedFrame(EncodedImage* encoded_frame) { + *encoded_frame = encoded_frame_; + } + + private: + EncodedImage encoded_key_frame_; + EncodedImage encoded_frame_; + int temporal_layer_[kNumberOfSimulcastStreams]; + bool layer_sync_[kNumberOfSimulcastStreams]; +}; + +class SimulcastTestFixtureImpl::TestDecodedImageCallback + : public DecodedImageCallback { + public: + TestDecodedImageCallback() : decoded_frames_(0) {} + int32_t Decoded(VideoFrame& decoded_image) override { + rtc::scoped_refptr i420_buffer = + decoded_image.video_frame_buffer()->ToI420(); + for (int i = 0; i < decoded_image.width(); ++i) { + EXPECT_NEAR(kColorY, i420_buffer->DataY()[i], 1); + } + + // TODO(mikhal): Verify the difference between U,V and the original. + for (int i = 0; i < i420_buffer->ChromaWidth(); ++i) { + EXPECT_NEAR(kColorU, i420_buffer->DataU()[i], 4); + EXPECT_NEAR(kColorV, i420_buffer->DataV()[i], 4); + } + decoded_frames_++; + return 0; + } + int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { + RTC_NOTREACHED(); + return -1; + } + void Decoded(VideoFrame& decoded_image, + absl::optional decode_time_ms, + absl::optional qp) override { + Decoded(decoded_image); + } + int DecodedFrames() { return decoded_frames_; } + + private: + int decoded_frames_; +}; + +namespace { + +void SetPlane(uint8_t* data, uint8_t value, int width, int height, int stride) { + for (int i = 0; i < height; i++, data += stride) { + // Setting allocated area to zero - setting only image size to + // requested values - will make it easier to distinguish between image + // size and frame size (accounting for stride). 
+ memset(data, value, width); + memset(data + width, 0, stride - width); + } +} + +// Fills in an I420Buffer from |plane_colors|. +void CreateImage(const rtc::scoped_refptr& buffer, + int plane_colors[kNumOfPlanes]) { + SetPlane(buffer->MutableDataY(), plane_colors[0], buffer->width(), + buffer->height(), buffer->StrideY()); + + SetPlane(buffer->MutableDataU(), plane_colors[1], buffer->ChromaWidth(), + buffer->ChromaHeight(), buffer->StrideU()); + + SetPlane(buffer->MutableDataV(), plane_colors[2], buffer->ChromaWidth(), + buffer->ChromaHeight(), buffer->StrideV()); +} + +void ConfigureStream(int width, + int height, + int max_bitrate, + int min_bitrate, + int target_bitrate, + float max_framerate, + SpatialLayer* stream, + int num_temporal_layers) { + assert(stream); + stream->width = width; + stream->height = height; + stream->maxBitrate = max_bitrate; + stream->minBitrate = min_bitrate; + stream->targetBitrate = target_bitrate; + stream->maxFramerate = max_framerate; + if (num_temporal_layers >= 0) { + stream->numberOfTemporalLayers = num_temporal_layers; + } + stream->qpMax = 45; + stream->active = true; +} + +} // namespace + +void SimulcastTestFixtureImpl::DefaultSettings( + VideoCodec* settings, + const int* temporal_layer_profile, + VideoCodecType codec_type, + bool reverse_layer_order) { + RTC_CHECK(settings); + *settings = {}; + settings->codecType = codec_type; + settings->startBitrate = 300; + settings->minBitrate = 30; + settings->maxBitrate = 0; + settings->maxFramerate = 30; + settings->width = kDefaultWidth; + settings->height = kDefaultHeight; + settings->numberOfSimulcastStreams = kNumberOfSimulcastStreams; + settings->active = true; + ASSERT_EQ(3, kNumberOfSimulcastStreams); + int layer_order[3] = {0, 1, 2}; + if (reverse_layer_order) { + layer_order[0] = 2; + layer_order[2] = 0; + } + settings->timing_frame_thresholds = {kDefaultTimingFramesDelayMs, + kDefaultOutlierFrameSizePercent}; + ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, 
kMaxBitrates[0], + kMinBitrates[0], kTargetBitrates[0], kMaxFramerates[0], + &settings->simulcastStream[layer_order[0]], + temporal_layer_profile[0]); + ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, kMaxBitrates[1], + kMinBitrates[1], kTargetBitrates[1], kMaxFramerates[1], + &settings->simulcastStream[layer_order[1]], + temporal_layer_profile[1]); + ConfigureStream(kDefaultWidth, kDefaultHeight, kMaxBitrates[2], + kMinBitrates[2], kTargetBitrates[2], kMaxFramerates[2], + &settings->simulcastStream[layer_order[2]], + temporal_layer_profile[2]); + if (codec_type == kVideoCodecVP8) { + settings->VP8()->denoisingOn = true; + settings->VP8()->automaticResizeOn = false; + settings->VP8()->frameDroppingOn = true; + settings->VP8()->keyFrameInterval = 3000; + } else { + settings->H264()->frameDroppingOn = true; + settings->H264()->keyFrameInterval = 3000; + } +} + +SimulcastTestFixtureImpl::SimulcastTestFixtureImpl( + std::unique_ptr encoder_factory, + std::unique_ptr decoder_factory, + SdpVideoFormat video_format) + : codec_type_(PayloadStringToCodecType(video_format.name)) { + encoder_ = encoder_factory->CreateVideoEncoder(video_format); + decoder_ = decoder_factory->CreateVideoDecoder(video_format); + SetUpCodec((codec_type_ == kVideoCodecVP8 || codec_type_ == kVideoCodecH264) + ? 
kDefaultTemporalLayerProfile + : kNoTemporalLayerProfile); +} + +SimulcastTestFixtureImpl::~SimulcastTestFixtureImpl() { + encoder_->Release(); + decoder_->Release(); +} + +void SimulcastTestFixtureImpl::SetUpCodec(const int* temporal_layer_profile) { + encoder_->RegisterEncodeCompleteCallback(&encoder_callback_); + decoder_->RegisterDecodeCompleteCallback(&decoder_callback_); + DefaultSettings(&settings_, temporal_layer_profile, codec_type_); + SetUpRateAllocator(); + EXPECT_EQ(0, encoder_->InitEncode(&settings_, kSettings)); + EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1)); + input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight); + input_buffer_->InitializeData(); + input_frame_ = std::make_unique( + webrtc::VideoFrame::Builder() + .set_video_frame_buffer(input_buffer_) + .set_rotation(webrtc::kVideoRotation_0) + .set_timestamp_us(0) + .build()); +} + +void SimulcastTestFixtureImpl::SetUpRateAllocator() { + rate_allocator_.reset(new SimulcastRateAllocator(settings_)); +} + +void SimulcastTestFixtureImpl::SetRates(uint32_t bitrate_kbps, uint32_t fps) { + encoder_->SetRates(VideoEncoder::RateControlParameters( + rate_allocator_->Allocate( + VideoBitrateAllocationParameters(bitrate_kbps * 1000, fps)), + static_cast(fps))); +} + +void SimulcastTestFixtureImpl::RunActiveStreamsTest( + const std::vector active_streams) { + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + UpdateActiveStreams(active_streams); + // Set sufficient bitrate for all streams so we can test active without + // bitrate being an issue. 
+ SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); + + ExpectStreams(VideoFrameType::kVideoFrameKey, active_streams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, active_streams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::UpdateActiveStreams( + const std::vector active_streams) { + ASSERT_EQ(static_cast(active_streams.size()), kNumberOfSimulcastStreams); + for (size_t i = 0; i < active_streams.size(); ++i) { + settings_.simulcastStream[i].active = active_streams[i]; + } + // Re initialize the allocator and encoder with the new settings. + // TODO(bugs.webrtc.org/8807): Currently, we do a full "hard" + // reconfiguration of the allocator and encoder. When the video bitrate + // allocator has support for updating active streams without a + // reinitialization, we can just call that here instead. 
+ SetUpRateAllocator(); + EXPECT_EQ(0, encoder_->InitEncode(&settings_, kSettings)); +} + +void SimulcastTestFixtureImpl::ExpectStreams( + VideoFrameType frame_type, + const std::vector expected_streams_active) { + ASSERT_EQ(static_cast(expected_streams_active.size()), + kNumberOfSimulcastStreams); + if (expected_streams_active[0]) { + EXPECT_CALL( + encoder_callback_, + OnEncodedImage( + AllOf(Field(&EncodedImage::_frameType, frame_type), + Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4), + Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)), + _)) + .Times(1) + .WillRepeatedly(Return( + EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); + } + if (expected_streams_active[1]) { + EXPECT_CALL( + encoder_callback_, + OnEncodedImage( + AllOf(Field(&EncodedImage::_frameType, frame_type), + Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2), + Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)), + _)) + .Times(1) + .WillRepeatedly(Return( + EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); + } + if (expected_streams_active[2]) { + EXPECT_CALL(encoder_callback_, + OnEncodedImage( + AllOf(Field(&EncodedImage::_frameType, frame_type), + Field(&EncodedImage::_encodedWidth, kDefaultWidth), + Field(&EncodedImage::_encodedHeight, kDefaultHeight)), + _)) + .Times(1) + .WillRepeatedly(Return( + EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); + } +} + +void SimulcastTestFixtureImpl::ExpectStreams(VideoFrameType frame_type, + int expected_video_streams) { + ASSERT_GE(expected_video_streams, 0); + ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams); + std::vector expected_streams_active(kNumberOfSimulcastStreams, false); + for (int i = 0; i < expected_video_streams; ++i) { + expected_streams_active[i] = true; + } + ExpectStreams(frame_type, expected_streams_active); +} + +void SimulcastTestFixtureImpl::VerifyTemporalIdxAndSyncForAllSpatialLayers( + TestEncodedImageCallback* encoder_callback, + 
const int* expected_temporal_idx, + const bool* expected_layer_sync, + int num_spatial_layers) { + int temporal_layer = -1; + bool layer_sync = false; + for (int i = 0; i < num_spatial_layers; i++) { + encoder_callback->GetLastEncodedFrameInfo(&temporal_layer, &layer_sync, i); + EXPECT_EQ(expected_temporal_idx[i], temporal_layer); + EXPECT_EQ(expected_layer_sync[i], layer_sync); + } +} + +// We currently expect all active streams to generate a key frame even though +// a key frame was only requested for some of them. +void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() { + SetRates(kMaxBitrates[2], 30); // To get all three streams. + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + frame_types[0] = VideoFrameType::kVideoFrameKey; + ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[1] = VideoFrameType::kVideoFrameKey; + ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[2] = VideoFrameType::kVideoFrameKey; + ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, 
encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestPaddingAllStreams() { + // We should always encode the base layer. + SetRates(kMinBitrates[0] - 1, 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, 1); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestPaddingTwoStreams() { + // We have just enough to get only the first stream and padding for two. + SetRates(kMinBitrates[0], 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, 1); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestPaddingTwoStreamsOneMaxedOut() { + // We are just below limit of sending second stream, so we should get + // the first stream maxed out (at |maxBitrate|), and padding for two. 
+ SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, 1); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestPaddingOneStream() { + // We have just enough to send two streams, so padding for one stream. + SetRates(kTargetBitrates[0] + kMinBitrates[1], 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, 2); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestPaddingOneStreamTwoMaxedOut() { + // We are just below limit of sending third stream, so we should get + // first stream's rate maxed out at |targetBitrate|, second at |maxBitrate|. + SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, 2); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestSendAllStreams() { + // We have just enough to send all streams. 
+ SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, 3); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, 3); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestDisablingStreams() { + // We should get three media streams. + SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, 3); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, 3); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + // We should only get two streams and padding for one. + SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); + ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + // We should only get the first stream and padding for two. + SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); + ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + // We don't have enough bitrate for the thumbnail stream, but we should get + // it anyway with current configuration. 
+ SetRates(kTargetBitrates[0] - 1, 30); + ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + // We should only get two streams and padding for one. + SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); + // We get a key frame because a new stream is being enabled. + ExpectStreams(VideoFrameType::kVideoFrameKey, 2); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + // We should get all three streams. + SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); + // We get a key frame because a new stream is being enabled. + ExpectStreams(VideoFrameType::kVideoFrameKey, 3); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestActiveStreams() { + // All streams on. + RunActiveStreamsTest({true, true, true}); + // All streams off. + RunActiveStreamsTest({false, false, false}); + // Low stream off. + RunActiveStreamsTest({false, true, true}); + // Middle stream off. + RunActiveStreamsTest({true, false, true}); + // High stream off. + RunActiveStreamsTest({true, true, false}); + // Only low stream turned on. + RunActiveStreamsTest({true, false, false}); + // Only middle stream turned on. + RunActiveStreamsTest({false, true, false}); + // Only high stream turned on. + RunActiveStreamsTest({false, false, true}); +} + +void SimulcastTestFixtureImpl::SwitchingToOneStream(int width, int height) { + const int* temporal_layer_profile = nullptr; + // Disable all streams except the last and set the bitrate of the last to + // 100 kbps. This verifies the way GTP switches to screenshare mode. 
+ if (codec_type_ == kVideoCodecVP8) { + settings_.VP8()->numberOfTemporalLayers = 1; + temporal_layer_profile = kDefaultTemporalLayerProfile; + } else { + temporal_layer_profile = kNoTemporalLayerProfile; + } + settings_.maxBitrate = 100; + settings_.startBitrate = 100; + settings_.width = width; + settings_.height = height; + for (int i = 0; i < settings_.numberOfSimulcastStreams - 1; ++i) { + settings_.simulcastStream[i].maxBitrate = 0; + settings_.simulcastStream[i].width = settings_.width; + settings_.simulcastStream[i].height = settings_.height; + settings_.simulcastStream[i].numberOfTemporalLayers = 1; + } + // Setting input image to new resolution. + input_buffer_ = I420Buffer::Create(settings_.width, settings_.height); + input_buffer_->InitializeData(); + + input_frame_ = std::make_unique( + webrtc::VideoFrame::Builder() + .set_video_frame_buffer(input_buffer_) + .set_rotation(webrtc::kVideoRotation_0) + .set_timestamp_us(0) + .build()); + + // The for loop above did not set the bitrate of the highest layer. + settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].maxBitrate = + 0; + // The highest layer has to correspond to the non-simulcast resolution. + settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width = + settings_.width; + settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height = + settings_.height; + SetUpRateAllocator(); + EXPECT_EQ(0, encoder_->InitEncode(&settings_, kSettings)); + + // Encode one frame and verify. 
+ SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30); + std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + EXPECT_CALL( + encoder_callback_, + OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, + VideoFrameType::kVideoFrameKey), + Field(&EncodedImage::_encodedWidth, width), + Field(&EncodedImage::_encodedHeight, height)), + _)) + .Times(1) + .WillRepeatedly(Return( + EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + // Switch back. + DefaultSettings(&settings_, temporal_layer_profile, codec_type_); + // Start at the lowest bitrate for enabling base stream. + settings_.startBitrate = kMinBitrates[0]; + SetUpRateAllocator(); + EXPECT_EQ(0, encoder_->InitEncode(&settings_, kSettings)); + SetRates(settings_.startBitrate, 30); + ExpectStreams(VideoFrameType::kVideoFrameKey, 1); + // Resize |input_frame_| to the new resolution. + input_buffer_ = I420Buffer::Create(settings_.width, settings_.height); + input_buffer_->InitializeData(); + input_frame_ = std::make_unique( + webrtc::VideoFrame::Builder() + .set_video_frame_buffer(input_buffer_) + .set_rotation(webrtc::kVideoRotation_0) + .set_timestamp_us(0) + .build()); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + +void SimulcastTestFixtureImpl::TestSwitchingToOneStream() { + SwitchingToOneStream(1024, 768); +} + +void SimulcastTestFixtureImpl::TestSwitchingToOneOddStream() { + SwitchingToOneStream(1023, 769); +} + +void SimulcastTestFixtureImpl::TestSwitchingToOneSmallStream() { + SwitchingToOneStream(4, 4); +} + +// Test the layer pattern and sync flag for various spatial-temporal patterns. +// 3-3-3 pattern: 3 temporal layers for all spatial streams, so same +// temporal_layer id and layer_sync is expected for all streams. 
+void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { + bool is_h264 = codec_type_ == kVideoCodecH264; + TestEncodedImageCallback encoder_callback; + encoder_->RegisterEncodeCompleteCallback(&encoder_callback); + SetRates(kMaxBitrates[2], 30); // To get all three streams. + + int expected_temporal_idx[3] = {-1, -1, -1}; + bool expected_layer_sync[3] = {false, false, false}; + + // First frame: #0. + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(0, 0, 0, expected_temporal_idx); + SetExpectedValues3(!is_h264, !is_h264, !is_h264, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #1. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(2, 2, 2, expected_temporal_idx); + SetExpectedValues3(true, true, true, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #2. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(1, 1, 1, expected_temporal_idx); + SetExpectedValues3(true, true, true, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #3. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(2, 2, 2, expected_temporal_idx); + SetExpectedValues3(false, false, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #4. 
+ input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(0, 0, 0, expected_temporal_idx); + SetExpectedValues3(false, false, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #5. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(2, 2, 2, expected_temporal_idx); + SetExpectedValues3(is_h264, is_h264, is_h264, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); +} + +// Test the layer pattern and sync flag for various spatial-temporal patterns. +// 3-2-1 pattern: 3 temporal layers for lowest resolution, 2 for middle, and +// 1 temporal layer for highest resolution. +// For this profile, we expect the temporal index pattern to be: +// 1st stream: 0, 2, 1, 2, .... +// 2nd stream: 0, 1, 0, 1, ... +// 3rd stream: -1, -1, -1, -1, .... +// Regarding the 3rd stream, note that a stream/encoder with 1 temporal layer +// should always have temporal layer idx set to kNoTemporalIdx = -1. +// Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255. +// TODO(marpan): Although this seems safe for now, we should fix this. +void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { + EXPECT_EQ(codec_type_, kVideoCodecVP8); + int temporal_layer_profile[3] = {3, 2, 1}; + SetUpCodec(temporal_layer_profile); + TestEncodedImageCallback encoder_callback; + encoder_->RegisterEncodeCompleteCallback(&encoder_callback); + SetRates(kMaxBitrates[2], 30); // To get all three streams. + + int expected_temporal_idx[3] = {-1, -1, -1}; + bool expected_layer_sync[3] = {false, false, false}; + + // First frame: #0. 
+ EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(0, 0, 255, expected_temporal_idx); + SetExpectedValues3(true, true, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #1. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(2, 1, 255, expected_temporal_idx); + SetExpectedValues3(true, true, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #2. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(1, 0, 255, expected_temporal_idx); + SetExpectedValues3(true, false, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #3. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(2, 1, 255, expected_temporal_idx); + SetExpectedValues3(false, false, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #4. + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(0, 0, 255, expected_temporal_idx); + SetExpectedValues3(false, false, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); + + // Next frame: #5. 
+ input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + SetExpectedValues3(2, 1, 255, expected_temporal_idx); + SetExpectedValues3(false, true, false, expected_layer_sync); + VerifyTemporalIdxAndSyncForAllSpatialLayers( + &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); +} + +void SimulcastTestFixtureImpl::TestStrideEncodeDecode() { + TestEncodedImageCallback encoder_callback; + TestDecodedImageCallback decoder_callback; + encoder_->RegisterEncodeCompleteCallback(&encoder_callback); + decoder_->RegisterDecodeCompleteCallback(&decoder_callback); + + SetRates(kMaxBitrates[2], 30); // To get all three streams. + // Setting two (possibly) problematic use cases for stride: + // 1. stride > width 2. stride_y != stride_uv/2 + int stride_y = kDefaultWidth + 20; + int stride_uv = ((kDefaultWidth + 1) / 2) + 5; + input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y, + stride_uv, stride_uv); + input_frame_ = std::make_unique( + webrtc::VideoFrame::Builder() + .set_video_frame_buffer(input_buffer_) + .set_rotation(webrtc::kVideoRotation_0) + .set_timestamp_us(0) + .build()); + + // Set color. + int plane_offset[kNumOfPlanes]; + plane_offset[kYPlane] = kColorY; + plane_offset[kUPlane] = kColorU; + plane_offset[kVPlane] = kColorV; + CreateImage(input_buffer_, plane_offset); + + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + + // Change color. + plane_offset[kYPlane] += 1; + plane_offset[kUPlane] += 1; + plane_offset[kVPlane] += 1; + CreateImage(input_buffer_, plane_offset); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + + EncodedImage encoded_frame; + // Only encoding one frame - so will be a key frame. 
+ encoder_callback.GetLastEncodedKeyFrame(&encoded_frame); + EXPECT_EQ(0, decoder_->Decode(encoded_frame, false, 0)); + encoder_callback.GetLastEncodedFrame(&encoded_frame); + decoder_->Decode(encoded_frame, false, 0); + EXPECT_EQ(2, decoder_callback.DecodedFrames()); +} + +void SimulcastTestFixtureImpl::TestDecodeWidthHeightSet() { + MockEncodedImageCallback encoder_callback; + MockDecodedImageCallback decoder_callback; + + EncodedImage encoded_frame[3]; + SetRates(kMaxBitrates[2], 30); // To get all three streams. + encoder_->RegisterEncodeCompleteCallback(&encoder_callback); + decoder_->RegisterDecodeCompleteCallback(&decoder_callback); + + EXPECT_CALL(encoder_callback, OnEncodedImage(_, _)) + .Times(3) + .WillRepeatedly( + ::testing::Invoke([&](const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) { + EXPECT_EQ(encoded_image._frameType, VideoFrameType::kVideoFrameKey); + + size_t index = encoded_image.SpatialIndex().value_or(0); + encoded_frame[index].SetEncodedData(EncodedImageBuffer::Create( + encoded_image.data(), encoded_image.size())); + encoded_frame[index]._frameType = encoded_image._frameType; + return EncodedImageCallback::Result( + EncodedImageCallback::Result::OK, 0); + })); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); + + EXPECT_CALL(decoder_callback, Decoded(_, _, _)) + .WillOnce(::testing::Invoke([](VideoFrame& decodedImage, + absl::optional decode_time_ms, + absl::optional qp) { + EXPECT_EQ(decodedImage.width(), kDefaultWidth / 4); + EXPECT_EQ(decodedImage.height(), kDefaultHeight / 4); + })); + EXPECT_EQ(0, decoder_->Decode(encoded_frame[0], false, 0)); + + EXPECT_CALL(decoder_callback, Decoded(_, _, _)) + .WillOnce(::testing::Invoke([](VideoFrame& decodedImage, + absl::optional decode_time_ms, + absl::optional qp) { + EXPECT_EQ(decodedImage.width(), kDefaultWidth / 2); + EXPECT_EQ(decodedImage.height(), kDefaultHeight / 2); + })); + EXPECT_EQ(0, decoder_->Decode(encoded_frame[1], false, 0)); + + 
EXPECT_CALL(decoder_callback, Decoded(_, _, _)) + .WillOnce(::testing::Invoke([](VideoFrame& decodedImage, + absl::optional decode_time_ms, + absl::optional qp) { + EXPECT_EQ(decodedImage.width(), kDefaultWidth); + EXPECT_EQ(decodedImage.height(), kDefaultHeight); + })); + EXPECT_EQ(0, decoder_->Decode(encoded_frame[2], false, 0)); +} + +void SimulcastTestFixtureImpl:: + TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation() { + VideoEncoder::EncoderInfo encoder_info = encoder_->GetEncoderInfo(); + EXPECT_EQ(encoder_info.fps_allocation[0].size(), + static_cast(kDefaultTemporalLayerProfile[0])); + EXPECT_EQ(encoder_info.fps_allocation[1].size(), + static_cast(kDefaultTemporalLayerProfile[1])); + EXPECT_EQ(encoder_info.fps_allocation[2].size(), + static_cast(kDefaultTemporalLayerProfile[2])); +} +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h new file mode 100644 index 000000000..a3d3fc66a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ +#define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ + +#include +#include + +#include "api/test/mock_video_decoder.h" +#include "api/test/mock_video_encoder.h" +#include "api/test/simulcast_test_fixture.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "modules/video_coding/utility/simulcast_rate_allocator.h" + +namespace webrtc { +namespace test { + +class SimulcastTestFixtureImpl final : public SimulcastTestFixture { + public: + SimulcastTestFixtureImpl(std::unique_ptr encoder_factory, + std::unique_ptr decoder_factory, + SdpVideoFormat video_format); + ~SimulcastTestFixtureImpl() final; + + // Implements SimulcastTestFixture. + void TestKeyFrameRequestsOnAllStreams() override; + void TestPaddingAllStreams() override; + void TestPaddingTwoStreams() override; + void TestPaddingTwoStreamsOneMaxedOut() override; + void TestPaddingOneStream() override; + void TestPaddingOneStreamTwoMaxedOut() override; + void TestSendAllStreams() override; + void TestDisablingStreams() override; + void TestActiveStreams() override; + void TestSwitchingToOneStream() override; + void TestSwitchingToOneOddStream() override; + void TestSwitchingToOneSmallStream() override; + void TestSpatioTemporalLayers333PatternEncoder() override; + void TestSpatioTemporalLayers321PatternEncoder() override; + void TestStrideEncodeDecode() override; + void TestDecodeWidthHeightSet() override; + void TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation() override; + + static void DefaultSettings(VideoCodec* settings, + const int* temporal_layer_profile, + VideoCodecType codec_type, + bool reverse_layer_order = false); + + private: + class TestEncodedImageCallback; + class TestDecodedImageCallback; + + void SetUpCodec(const int* temporal_layer_profile); + void 
SetUpRateAllocator(); + void SetRates(uint32_t bitrate_kbps, uint32_t fps); + void RunActiveStreamsTest(const std::vector active_streams); + void UpdateActiveStreams(const std::vector active_streams); + void ExpectStreams(VideoFrameType frame_type, + const std::vector expected_streams_active); + void ExpectStreams(VideoFrameType frame_type, int expected_video_streams); + void VerifyTemporalIdxAndSyncForAllSpatialLayers( + TestEncodedImageCallback* encoder_callback, + const int* expected_temporal_idx, + const bool* expected_layer_sync, + int num_spatial_layers); + void SwitchingToOneStream(int width, int height); + + std::unique_ptr encoder_; + MockEncodedImageCallback encoder_callback_; + std::unique_ptr decoder_; + MockDecodedImageCallback decoder_callback_; + VideoCodec settings_; + rtc::scoped_refptr input_buffer_; + std::unique_ptr input_frame_; + std::unique_ptr rate_allocator_; + VideoCodecType codec_type_; +}; + +} // namespace test +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc index 2859dd022..4777fe51c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc @@ -94,8 +94,9 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( int max_framerate = 0; + absl::optional scalability_mode = streams[0].scalability_mode; for (size_t i = 0; i < streams.size(); ++i) { - SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; + SpatialLayer* sim_stream = &video_codec.simulcastStream[i]; RTC_DCHECK_GT(streams[i].width, 0); RTC_DCHECK_GT(streams[i].height, 0); RTC_DCHECK_GT(streams[i].max_framerate, 0); @@ -126,6 +127,15 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( video_codec.qpMax = 
std::max(video_codec.qpMax, static_cast(streams[i].max_qp)); max_framerate = std::max(max_framerate, streams[i].max_framerate); + + if (streams[0].scalability_mode != streams[i].scalability_mode) { + RTC_LOG(LS_WARNING) << "Inconsistent scalability modes configured."; + scalability_mode.reset(); + } + } + + if (scalability_mode.has_value()) { + video_codec.SetScalabilityMode(*scalability_mode); } if (video_codec.maxBitrate == 0) { @@ -210,7 +220,7 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( for (size_t spatial_idx = first_active_layer; spatial_idx < config.simulcast_layers.size() && - spatial_idx < spatial_layers.size(); + spatial_idx < spatial_layers.size() + first_active_layer; ++spatial_idx) { spatial_layers[spatial_idx - first_active_layer].active = config.simulcast_layers[spatial_idx].active; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc index 01fb378af..c2c8f8aa1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc @@ -16,7 +16,6 @@ #include "api/rtp_headers.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" -#include "modules/include/module_common_types.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/encoded_frame.h" @@ -209,9 +208,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) { clock_->TimeInMilliseconds()); if (first_frame_received_()) { - RTC_LOG(LS_INFO) << "Received first " - << (frame->Complete() ? 
"complete" : "incomplete") - << " decodable video frame"; + RTC_LOG(LS_INFO) << "Received first complete decodable video frame"; } const int32_t ret = Decode(*frame); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.cc index d6b5094a5..0e1570114 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.cc @@ -41,7 +41,7 @@ std::unique_ptr DenoiserFilter::Create( filter.reset(new DenoiserFilterSSE2()); #else // x86 CPU detection required. - if (WebRtc_GetCPUInfo(kSSE2)) { + if (GetCPUInfo(kSSE2)) { filter.reset(new DenoiserFilterSSE2()); } else { filter.reset(new DenoiserFilterC()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc index 40568a5ec..3a1812514 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc @@ -235,7 +235,7 @@ rtc::scoped_refptr VideoDenoiser::DenoiseFrame( const uint8_t* y_src = frame->DataY(); int stride_y_src = frame->StrideY(); rtc::scoped_refptr dst = - buffer_pool_.CreateBuffer(width_, height_); + buffer_pool_.CreateI420Buffer(width_, height_); uint8_t* y_dst = dst->MutableDataY(); int stride_y_dst = dst->StrideY(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.h index 37d624bb2..eb98c5bc5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.h @@ -15,7 +15,7 @@ #include "api/scoped_refptr.h" #include "api/video/video_frame_buffer.h" -#include 
"common_video/include/i420_buffer_pool.h" +#include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_processing/util/denoiser_filter.h" #include "modules/video_processing/util/noise_estimation.h" #include "modules/video_processing/util/skin_detection.h" @@ -77,7 +77,7 @@ class VideoDenoiser { std::unique_ptr y_density_; // Save the return values by MbDenoise for each block. std::unique_ptr mb_filter_decision_; - I420BufferPool buffer_pool_; + VideoFrameBufferPool buffer_pool_; rtc::scoped_refptr prev_buffer_; }; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc index 0863865a0..fe6042102 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc @@ -187,13 +187,13 @@ void ConnectionRequest::Prepare(StunMessage* request) { uint32_t network_info = connection_->port()->Network()->id(); network_info = (network_info << 16) | connection_->port()->network_cost(); request->AddAttribute(std::make_unique( - STUN_ATTR_NETWORK_INFO, network_info)); + STUN_ATTR_GOOG_NETWORK_INFO, network_info)); if (webrtc::field_trial::IsEnabled( "WebRTC-PiggybackIceCheckAcknowledgement") && connection_->last_ping_id_received()) { request->AddAttribute(std::make_unique( - STUN_ATTR_LAST_ICE_CHECK_RECEIVED, + STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED, connection_->last_ping_id_received().value())); } @@ -616,7 +616,7 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { // Note: If packets are re-ordered, we may get incorrect network cost // temporarily, but it should get the correct value shortly after that. 
const StunUInt32Attribute* network_attr = - msg->GetUInt32(STUN_ATTR_NETWORK_INFO); + msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); if (network_attr) { uint32_t network_info = network_attr->value(); uint16_t network_cost = static_cast(network_info); @@ -868,7 +868,7 @@ void Connection::HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg) { RTC_DCHECK(msg->type() == STUN_BINDING_REQUEST || msg->type() == GOOG_PING_REQUEST); const StunByteStringAttribute* last_ice_check_received_attr = - msg->GetByteString(STUN_ATTR_LAST_ICE_CHECK_RECEIVED); + msg->GetByteString(STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED); if (last_ice_check_received_attr) { const std::string request_id = last_ice_check_received_attr->GetString(); auto iter = absl::c_find_if( diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h index 4b71a7da5..88e930c21 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h @@ -65,13 +65,13 @@ class ConnectionRequest : public StunRequest { int resend_delay() override; private: - Connection* connection_; + Connection* const connection_; }; // Represents a communication link between a port on the local client and a // port on the remote client. 
class Connection : public CandidatePairInterface, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: struct SentPing { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc index 1b7a66000..52fe5c65a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc @@ -73,6 +73,8 @@ rtc::StreamResult StreamInterfaceChannel::Read(void* buffer, size_t buffer_len, size_t* read, int* error) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (state_ == rtc::SS_CLOSED) return rtc::SR_EOS; if (state_ == rtc::SS_OPENING) @@ -89,6 +91,7 @@ rtc::StreamResult StreamInterfaceChannel::Write(const void* data, size_t data_len, size_t* written, int* error) { + RTC_DCHECK_RUN_ON(&sequence_checker_); // Always succeeds, since this is an unreliable transport anyway. // TODO(zhihuang): Should this block if ice_transport_'s temporarily // unwritable? 
@@ -102,6 +105,7 @@ rtc::StreamResult StreamInterfaceChannel::Write(const void* data, } bool StreamInterfaceChannel::OnPacketReceived(const char* data, size_t size) { + RTC_DCHECK_RUN_ON(&sequence_checker_); if (packets_.size() > 0) { RTC_LOG(LS_WARNING) << "Packet already in queue."; } @@ -118,10 +122,12 @@ bool StreamInterfaceChannel::OnPacketReceived(const char* data, size_t size) { } rtc::StreamState StreamInterfaceChannel::GetState() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); return state_; } void StreamInterfaceChannel::Close() { + RTC_DCHECK_RUN_ON(&sequence_checker_); packets_.Clear(); state_ = rtc::SS_CLOSED; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h index 89156a15d..430c91233 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h @@ -24,6 +24,7 @@ #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_checker.h" namespace rtc { @@ -54,9 +55,10 @@ class StreamInterfaceChannel : public rtc::StreamInterface { int* error) override; private: - IceTransportInternal* ice_transport_; // owned by DtlsTransport - rtc::StreamState state_; - rtc::BufferQueue packets_; + webrtc::SequenceChecker sequence_checker_; + IceTransportInternal* const ice_transport_; // owned by DtlsTransport + rtc::StreamState state_ RTC_GUARDED_BY(sequence_checker_); + rtc::BufferQueue packets_ RTC_GUARDED_BY(sequence_checker_); RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterfaceChannel); }; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc index 6f0df0415..9bf0b23db 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc +++ 
b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc @@ -696,6 +696,9 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { // Make sure that nomination reaching ICE controlled asap. "send_ping_on_switch_ice_controlling", &field_trials_.send_ping_on_switch_ice_controlling, + // Make sure that nomination reaching ICE controlled asap. + "send_ping_on_selected_ice_controlling", + &field_trials_.send_ping_on_selected_ice_controlling, // Reply to nomination ASAP. "send_ping_on_nomination_ice_controlled", &field_trials_.send_ping_on_nomination_ice_controlled, @@ -1013,7 +1016,7 @@ void P2PTransportChannel::OnUnknownAddress(PortInterface* port, uint16_t network_id = 0; uint16_t network_cost = 0; const StunUInt32Attribute* network_attr = - stun_msg->GetUInt32(STUN_ATTR_NETWORK_INFO); + stun_msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); if (network_attr) { uint32_t network_info = network_attr->value(); network_id = static_cast(network_info >> 16); @@ -1768,9 +1771,10 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, RTC_LOG(LS_INFO) << ToString() << ": No selected connection"; } - if (field_trials_.send_ping_on_switch_ice_controlling && - ice_role_ == ICEROLE_CONTROLLING && old_selected_connection != nullptr && - conn != nullptr) { + if (conn != nullptr && ice_role_ == ICEROLE_CONTROLLING && + ((field_trials_.send_ping_on_switch_ice_controlling && + old_selected_connection != nullptr) || + field_trials_.send_ping_on_selected_ice_controlling)) { PingConnection(conn); MarkConnectionPinged(conn); } @@ -1784,6 +1788,15 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, pair_change.selected_candidate_pair = *GetSelectedCandidatePair(); pair_change.last_data_received_ms = selected_connection_->last_data_received(); + + if (old_selected_connection) { + pair_change.estimated_disconnected_time_ms = + ComputeEstimatedDisconnectedTimeMs(rtc::TimeMillis(), + old_selected_connection); + } else { + 
pair_change.estimated_disconnected_time_ms = 0; + } + SignalCandidatePairChanged(pair_change); } @@ -1792,6 +1805,16 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, ice_controller_->SetSelectedConnection(selected_connection_); } +int64_t P2PTransportChannel::ComputeEstimatedDisconnectedTimeMs( + int64_t now_ms, + Connection* old_connection) { + // TODO(jonaso): nicer keeps estimate of how frequently data _should_ be + // received, this could be used to give better estimate (if needed). + int64_t last_data_or_old_ping = + std::max(old_connection->last_received(), last_data_received_ms_); + return (now_ms - last_data_or_old_ping); +} + // Warning: UpdateState should eventually be called whenever a connection // is added, deleted, or the write state of any connection changes so that the // transport controller will get the up-to-date channel state. However it @@ -2110,6 +2133,9 @@ void P2PTransportChannel::OnReadPacket(Connection* connection, if (connection == selected_connection_) { // Let the client know of an incoming packet + RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); + last_data_received_ms_ = + std::max(last_data_received_ms_, connection->last_data_received()); SignalReadPacket(this, data, len, packet_time_us, 0); return; } @@ -2118,6 +2144,10 @@ void P2PTransportChannel::OnReadPacket(Connection* connection, if (!FindConnection(connection)) return; + RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); + last_data_received_ms_ = + std::max(last_data_received_ms_, connection->last_data_received()); + // Let the client know of an incoming packet SignalReadPacket(this, data, len, packet_time_us, 0); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h index 4f891beb1..69a32e462 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h +++ 
b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h @@ -358,6 +358,9 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { return const_cast(conn); } + int64_t ComputeEstimatedDisconnectedTimeMs(int64_t now, + Connection* old_connection); + std::string transport_name_ RTC_GUARDED_BY(network_thread_); int component_ RTC_GUARDED_BY(network_thread_); PortAllocator* allocator_ RTC_GUARDED_BY(network_thread_); @@ -440,6 +443,10 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // Number of times the selected_connection_ has been modified. uint32_t selected_candidate_pair_changes_ = 0; + // When was last data received on a existing connection, + // from connection->last_data_received() that uses rtc::TimeMillis(). + int64_t last_data_received_ms_ = 0; + IceFieldTrials field_trials_; RTC_DISALLOW_COPY_AND_ASSIGN(P2PTransportChannel); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h index f30366fd1..00e1151ba 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h @@ -44,8 +44,14 @@ struct IceFieldTrials { int rtt_estimate_halftime_ms = 500; // Sending a PING directly after a switch on ICE_CONTROLLING-side. + // TODO(jonaso) : Deprecate this in favor of + // |send_ping_on_selected_ice_controlling|. bool send_ping_on_switch_ice_controlling = false; + // Sending a PING directly after selecting a connection + // (i.e either a switch or the inital selection). + bool send_ping_on_selected_ice_controlling = false; + // Sending a PING directly after a nomination on ICE_CONTROLLED-side. 
bool send_ping_on_nomination_ice_controlled = false; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h index 893e80b20..1e20d1346 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h @@ -150,6 +150,8 @@ struct CandidatePairChangeEvent { CandidatePair selected_candidate_pair; int64_t last_data_received_ms; std::string reason; + // How long do we estimate that we've been disconnected. + int64_t estimated_disconnected_time_ms; }; typedef std::set ServerAddresses; @@ -158,7 +160,7 @@ typedef std::set ServerAddresses; // connections to similar mechanisms of the other client. Subclasses of this // one add support for specific mechanisms like local UDP ports. class Port : public PortInterface, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: // INIT: The state when a port is just created. diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.cc index 56e6b9b6a..13e7a2214 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include @@ -402,9 +403,7 @@ uint32_t PseudoTcp::GetBytesInFlight() const { } uint32_t PseudoTcp::GetBytesBufferedNotSent() const { - size_t buffered_bytes = 0; - m_sbuf.GetBuffered(&buffered_bytes); - return static_cast(m_snd_una + buffered_bytes - m_snd_nxt); + return static_cast(m_snd_una + m_sbuf.GetBuffered() - m_snd_nxt); } uint32_t PseudoTcp::GetRoundTripTimeEstimateMs() const { @@ -422,15 +421,11 @@ int PseudoTcp::Recv(char* buffer, size_t len) { } size_t read = 0; - rtc::StreamResult result = m_rbuf.Read(buffer, len, &read, NULL); - - // If there's no data in |m_rbuf|. 
- if (result == rtc::SR_BLOCK) { + if (!m_rbuf.Read(buffer, len, &read)) { m_bReadEnable = true; m_error = EWOULDBLOCK; return SOCKET_ERROR; } - RTC_DCHECK(result == rtc::SR_SUCCESS); size_t available_space = 0; m_rbuf.GetWriteRemaining(&available_space); @@ -497,14 +492,13 @@ uint32_t PseudoTcp::queue(const char* data, uint32_t len, bool bCtrl) { (m_slist.back().xmit == 0)) { m_slist.back().len += len; } else { - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); - SSegment sseg(static_cast(m_snd_una + snd_buffered), len, bCtrl); + SSegment sseg(static_cast(m_snd_una + m_sbuf.GetBuffered()), len, + bCtrl); m_slist.push_back(sseg); } size_t written = 0; - m_sbuf.Write(data, len, &written, NULL); + m_sbuf.Write(data, len, &written); return static_cast(written); } @@ -532,9 +526,9 @@ IPseudoTcpNotify::WriteResult PseudoTcp::packet(uint32_t seq, if (len) { size_t bytes_read = 0; - rtc::StreamResult result = + bool result = m_sbuf.ReadOffset(buffer.get() + HEADER_SIZE, len, offset, &bytes_read); - RTC_DCHECK(result == rtc::SR_SUCCESS); + RTC_DCHECK(result); RTC_DCHECK(static_cast(bytes_read) == len); } @@ -601,11 +595,9 @@ bool PseudoTcp::clock_check(uint32_t now, long& nTimeout) { if (m_shutdown == SD_FORCEFUL) return false; - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); if ((m_shutdown == SD_GRACEFUL) && ((m_state != TCP_ESTABLISHED) || - ((snd_buffered == 0) && (m_t_ack == 0)))) { + ((m_sbuf.GetBuffered() == 0) && (m_t_ack == 0)))) { return false; } @@ -830,10 +822,8 @@ bool PseudoTcp::process(Segment& seg) { // The goal it to make sure we always have at least enough data to fill the // window. We'd like to notify the app when we are halfway to that point. 
const uint32_t kIdealRefillSize = (m_sbuf_len + m_rbuf_len) / 2; - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); if (m_bWriteEnable && - static_cast(snd_buffered) < kIdealRefillSize) { + static_cast(m_sbuf.GetBuffered()) < kIdealRefillSize) { m_bWriteEnable = false; if (m_notify) { m_notify->OnTcpWriteable(this); @@ -912,8 +902,7 @@ bool PseudoTcp::process(Segment& seg) { // there's not already data ready to read, but this should always be // true in the problematic scenario, since control frames are always // sent first in the stream. - size_t rcv_buffered; - if (m_rbuf.GetBuffered(&rcv_buffered) && rcv_buffered == 0) { + if (m_rbuf.GetBuffered() == 0) { m_rbuf.ConsumeWriteBuffer(seg.len); m_rbuf.ConsumeReadData(seg.len); // After shifting the position in the buffer, we may have @@ -924,15 +913,11 @@ bool PseudoTcp::process(Segment& seg) { } else { uint32_t nOffset = seg.seq - m_rcv_nxt; - rtc::StreamResult result = - m_rbuf.WriteOffset(seg.data, seg.len, nOffset, NULL); - if (result == rtc::SR_BLOCK) { + if (!m_rbuf.WriteOffset(seg.data, seg.len, nOffset, NULL)) { // Ignore incoming packets outside of the receive window. return false; } - RTC_DCHECK(result == rtc::SR_SUCCESS); - if (seg.seq == m_rcv_nxt) { m_rbuf.ConsumeWriteBuffer(seg.len); m_rcv_nxt += seg.len; @@ -1078,8 +1063,7 @@ void PseudoTcp::attemptSend(SendFlags sflags) { uint32_t nInFlight = m_snd_nxt - m_snd_una; uint32_t nUseable = (nInFlight < nWindow) ? 
(nWindow - nInFlight) : 0; - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); + size_t snd_buffered = m_sbuf.GetBuffered(); uint32_t nAvailable = std::min(static_cast(snd_buffered) - nInFlight, m_mss); @@ -1300,4 +1284,149 @@ void PseudoTcp::resizeReceiveBuffer(uint32_t new_size) { m_rcv_wnd = static_cast(available_space); } +PseudoTcp::LockedFifoBuffer::LockedFifoBuffer(size_t size) + : buffer_(new char[size]), + buffer_length_(size), + data_length_(0), + read_position_(0) {} + +PseudoTcp::LockedFifoBuffer::~LockedFifoBuffer() {} + +size_t PseudoTcp::LockedFifoBuffer::GetBuffered() const { + webrtc::MutexLock lock(&mutex_); + return data_length_; +} + +bool PseudoTcp::LockedFifoBuffer::SetCapacity(size_t size) { + webrtc::MutexLock lock(&mutex_); + if (data_length_ > size) + return false; + + if (size != buffer_length_) { + char* buffer = new char[size]; + const size_t copy = data_length_; + const size_t tail_copy = std::min(copy, buffer_length_ - read_position_); + memcpy(buffer, &buffer_[read_position_], tail_copy); + memcpy(buffer + tail_copy, &buffer_[0], copy - tail_copy); + buffer_.reset(buffer); + read_position_ = 0; + buffer_length_ = size; + } + + return true; +} + +bool PseudoTcp::LockedFifoBuffer::ReadOffset(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read) { + webrtc::MutexLock lock(&mutex_); + return ReadOffsetLocked(buffer, bytes, offset, bytes_read); +} + +bool PseudoTcp::LockedFifoBuffer::WriteOffset(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written) { + webrtc::MutexLock lock(&mutex_); + return WriteOffsetLocked(buffer, bytes, offset, bytes_written); +} + +bool PseudoTcp::LockedFifoBuffer::Read(void* buffer, + size_t bytes, + size_t* bytes_read) { + webrtc::MutexLock lock(&mutex_); + size_t copy = 0; + if (!ReadOffsetLocked(buffer, bytes, 0, ©)) + return false; + + // If read was successful then adjust the read position and number of + // bytes buffered. 
+ read_position_ = (read_position_ + copy) % buffer_length_; + data_length_ -= copy; + if (bytes_read) + *bytes_read = copy; + + return true; +} + +bool PseudoTcp::LockedFifoBuffer::Write(const void* buffer, + size_t bytes, + size_t* bytes_written) { + webrtc::MutexLock lock(&mutex_); + size_t copy = 0; + if (!WriteOffsetLocked(buffer, bytes, 0, ©)) + return false; + + // If write was successful then adjust the number of readable bytes. + data_length_ += copy; + if (bytes_written) { + *bytes_written = copy; + } + + return true; +} + +void PseudoTcp::LockedFifoBuffer::ConsumeReadData(size_t size) { + webrtc::MutexLock lock(&mutex_); + RTC_DCHECK(size <= data_length_); + read_position_ = (read_position_ + size) % buffer_length_; + data_length_ -= size; +} + +void PseudoTcp::LockedFifoBuffer::ConsumeWriteBuffer(size_t size) { + webrtc::MutexLock lock(&mutex_); + RTC_DCHECK(size <= buffer_length_ - data_length_); + data_length_ += size; +} + +bool PseudoTcp::LockedFifoBuffer::GetWriteRemaining(size_t* size) const { + webrtc::MutexLock lock(&mutex_); + *size = buffer_length_ - data_length_; + return true; +} + +bool PseudoTcp::LockedFifoBuffer::ReadOffsetLocked(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read) { + if (offset >= data_length_) + return false; + + const size_t available = data_length_ - offset; + const size_t read_position = (read_position_ + offset) % buffer_length_; + const size_t copy = std::min(bytes, available); + const size_t tail_copy = std::min(copy, buffer_length_ - read_position); + char* const p = static_cast(buffer); + memcpy(p, &buffer_[read_position], tail_copy); + memcpy(p + tail_copy, &buffer_[0], copy - tail_copy); + + if (bytes_read) + *bytes_read = copy; + + return true; +} + +bool PseudoTcp::LockedFifoBuffer::WriteOffsetLocked(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written) { + if (data_length_ + offset >= buffer_length_) + return false; + + const size_t available = buffer_length_ - 
data_length_ - offset; + const size_t write_position = + (read_position_ + data_length_ + offset) % buffer_length_; + const size_t copy = std::min(bytes, available); + const size_t tail_copy = std::min(copy, buffer_length_ - write_position); + const char* const p = static_cast(buffer); + memcpy(&buffer_[write_position], p, tail_copy); + memcpy(&buffer_[0], p + tail_copy, copy - tail_copy); + + if (bytes_written) + *bytes_written = copy; + + return true; +} + } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.h b/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.h index cb6d97449..74ffee631 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/pseudo_tcp.h @@ -15,8 +15,9 @@ #include #include +#include -#include "rtc_base/memory/fifo_buffer.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" namespace cricket { @@ -196,6 +197,50 @@ class RTC_EXPORT PseudoTcp { // window scale factor |m_swnd_scale| accordingly. 
void resizeReceiveBuffer(uint32_t new_size); + class LockedFifoBuffer final { + public: + explicit LockedFifoBuffer(size_t size); + ~LockedFifoBuffer(); + + size_t GetBuffered() const; + bool SetCapacity(size_t size); + bool ReadOffset(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read); + bool WriteOffset(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written); + bool Read(void* buffer, size_t bytes, size_t* bytes_read); + bool Write(const void* buffer, size_t bytes, size_t* bytes_written); + void ConsumeReadData(size_t size); + void ConsumeWriteBuffer(size_t size); + bool GetWriteRemaining(size_t* size) const; + + private: + bool ReadOffsetLocked(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + bool WriteOffsetLocked(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + // the allocated buffer + std::unique_ptr buffer_ RTC_GUARDED_BY(mutex_); + // size of the allocated buffer + size_t buffer_length_ RTC_GUARDED_BY(mutex_); + // amount of readable data in the buffer + size_t data_length_ RTC_GUARDED_BY(mutex_); + // offset to the readable data + size_t read_position_ RTC_GUARDED_BY(mutex_); + mutable webrtc::Mutex mutex_; + }; + IPseudoTcpNotify* m_notify; enum Shutdown { SD_NONE, SD_GRACEFUL, SD_FORCEFUL } m_shutdown; int m_error; @@ -211,13 +256,13 @@ class RTC_EXPORT PseudoTcp { RList m_rlist; uint32_t m_rbuf_len, m_rcv_nxt, m_rcv_wnd, m_lastrecv; uint8_t m_rwnd_scale; // Window scale factor. - rtc::FifoBuffer m_rbuf; + LockedFifoBuffer m_rbuf; // Outgoing data SList m_slist; uint32_t m_sbuf_len, m_snd_nxt, m_snd_wnd, m_lastsend, m_snd_una; uint8_t m_swnd_scale; // Window scale factor. 
- rtc::FifoBuffer m_sbuf; + LockedFifoBuffer m_sbuf; // Maximum segment size, estimated protocol level, largest segment sent uint32_t m_mss, m_msslevel, m_largest, m_mtu_advise; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h index d45376ea5..39f928eaf 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h @@ -76,7 +76,7 @@ class StunRequestManager { private: typedef std::map RequestMap; - rtc::Thread* thread_; + rtc::Thread* const thread_; RequestMap requests_; std::string origin_; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h index 8247dbc77..a9ec43419 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h @@ -33,6 +33,8 @@ class TurnCustomizer; namespace cricket { +const int kMaxTurnUsernameLength = 509; // RFC 8489 section 14.3 + extern const int STUN_ATTR_TURN_LOGGING_ID; extern const char TURN_PORT_TYPE[]; class TurnAllocateRequest; @@ -61,6 +63,10 @@ class TurnPort : public Port { int server_priority, const std::string& origin, webrtc::TurnCustomizer* customizer) { + // Do basic parameter validation. + if (credentials.username.size() > kMaxTurnUsernameLength) { + return nullptr; + } // Using `new` to access a non-public constructor. return absl::WrapUnique(new TurnPort( thread, factory, network, socket, username, password, server_address, @@ -102,6 +108,10 @@ class TurnPort : public Port { const std::vector& tls_elliptic_curves, webrtc::TurnCustomizer* customizer, rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr) { + // Do basic parameter validation. + if (credentials.username.size() > kMaxTurnUsernameLength) { + return nullptr; + } // Using `new` to access a non-public constructor. 
return absl::WrapUnique( new TurnPort(thread, factory, network, min_port, max_port, username, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc index 3a4784ac5..17a49e403 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc @@ -59,7 +59,7 @@ enum { // Encapsulates a TURN permission. // The object is created when a create permission request is received by an // allocation, and self-deletes when its lifetime timer expires. -class TurnServerAllocation::Permission : public rtc::MessageHandler { +class TurnServerAllocation::Permission : public rtc::MessageHandlerAutoCleanup { public: Permission(rtc::Thread* thread, const rtc::IPAddress& peer); ~Permission() override; @@ -79,7 +79,7 @@ class TurnServerAllocation::Permission : public rtc::MessageHandler { // Encapsulates a TURN channel binding. // The object is created when a channel bind request is received by an // allocation, and self-deletes when its lifetime timer expires. -class TurnServerAllocation::Channel : public rtc::MessageHandler { +class TurnServerAllocation::Channel : public rtc::MessageHandlerAutoCleanup { public: Channel(rtc::Thread* thread, int id, const rtc::SocketAddress& peer); ~Channel() override; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h index 0f4fefea8..ca856448b 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h @@ -66,7 +66,7 @@ class TurnServerConnection { // handles TURN messages (via HandleTurnMessage) and channel data messages // (via HandleChannelData) for this allocation when received by the server. // The object self-deletes and informs the server if its lifetime timer expires. 
-class TurnServerAllocation : public rtc::MessageHandler, +class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: TurnServerAllocation(TurnServer* server_, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h index b27016a1d..26b181807 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h @@ -106,8 +106,9 @@ enum class SessionState { // process will be started. }; -class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession, - public rtc::MessageHandler { +class RTC_EXPORT BasicPortAllocatorSession + : public PortAllocatorSession, + public rtc::MessageHandlerAutoCleanup { public: BasicPortAllocatorSession(BasicPortAllocator* allocator, const std::string& content_name, @@ -323,7 +324,7 @@ class TurnPort; // Performs the allocation of ports, in a sequenced (timed) manner, for a given // network and IP address. 
-class AllocationSequence : public rtc::MessageHandler, +class AllocationSequence : public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: enum State { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc index de4b9e6a0..fd3420c01 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc @@ -28,6 +28,8 @@ std::unique_ptr TurnPortFactory::Create( args.username, args.password, *args.server_address, args.config->credentials, args.config->priority, args.origin, args.turn_customizer); + if (!port) + return nullptr; port->SetTlsCertPolicy(args.config->tls_cert_policy); port->SetTurnLoggingId(args.config->turn_logging_id); return std::move(port); @@ -42,6 +44,8 @@ std::unique_ptr TurnPortFactory::Create(const CreateRelayPortArgs& args, args.config->credentials, args.config->priority, args.origin, args.config->tls_alpn_protocols, args.config->tls_elliptic_curves, args.turn_customizer, args.config->tls_cert_verifier); + if (!port) + return nullptr; port->SetTlsCertPolicy(args.config->tls_cert_policy); port->SetTurnLoggingId(args.config->turn_logging_id); return std::move(port); diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc index 54912a5d7..8ff685d8e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc @@ -42,8 +42,9 @@ AudioRtpReceiver::AudioRtpReceiver( : worker_thread_(worker_thread), id_(receiver_id), source_(new rtc::RefCountedObject(worker_thread)), - track_(AudioTrackProxy::Create(rtc::Thread::Current(), - AudioTrack::Create(receiver_id, source_))), + track_(AudioTrackProxyWithInternal::Create( + rtc::Thread::Current(), + AudioTrack::Create(receiver_id, source_))), cached_track_enabled_(track_->enabled()), 
attachment_id_(GenerateUniqueId()), delay_(JitterBufferDelayProxy::Create( @@ -146,6 +147,11 @@ void AudioRtpReceiver::Stop() { stopped_ = true; } +void AudioRtpReceiver::StopAndEndTrack() { + Stop(); + track_->internal()->set_ended(); +} + void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK(media_channel_); if (!stopped_ && ssrc_ == ssrc) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h index 88b16ee68..f4b821068 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h @@ -19,10 +19,12 @@ #include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/media_stream_interface.h" +#include "api/media_stream_track_proxy.h" #include "api/media_types.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "media/base/media_channel.h" +#include "pc/audio_track.h" #include "pc/jitter_buffer_delay_interface.h" #include "pc/remote_audio_source.h" #include "pc/rtp_receiver.h" @@ -84,6 +86,7 @@ class AudioRtpReceiver : public ObserverInterface, // RtpReceiverInternal implementation. 
void Stop() override; + void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override { return ssrc_.value_or(0); } @@ -116,7 +119,7 @@ class AudioRtpReceiver : public ObserverInterface, rtc::Thread* const worker_thread_; const std::string id_; const rtc::scoped_refptr source_; - const rtc::scoped_refptr track_; + const rtc::scoped_refptr> track_; cricket::VoiceMediaChannel* media_channel_ = nullptr; absl::optional ssrc_; std::vector> streams_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc index ff680652c..4f4c6b475 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc @@ -39,7 +39,6 @@ AudioTrack::~AudioTrack() { } std::string AudioTrack::kind() const { - RTC_DCHECK(thread_checker_.IsCurrent()); return kAudioKind; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h index f89bbcdd1..8cff79e8b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h @@ -14,9 +14,8 @@ #include #include "api/media_stream_interface.h" +#include "api/media_stream_track.h" #include "api/scoped_refptr.h" -#include "pc/media_stream_track.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/thread_checker.h" namespace webrtc { @@ -27,6 +26,11 @@ class AudioTrack : public MediaStreamTrack, // Protected ctor to force use of factory method. AudioTrack(const std::string& label, const rtc::scoped_refptr& source); + + AudioTrack() = delete; + AudioTrack(const AudioTrack&) = delete; + AudioTrack& operator=(const AudioTrack&) = delete; + ~AudioTrack() override; public: @@ -34,10 +38,10 @@ class AudioTrack : public MediaStreamTrack, const std::string& id, const rtc::scoped_refptr& source); - private: // MediaStreamTrack implementation. 
std::string kind() const override; + private: // AudioTrackInterface implementation. AudioSourceInterface* GetSource() const override; @@ -50,7 +54,6 @@ class AudioTrack : public MediaStreamTrack, private: const rtc::scoped_refptr audio_source_; rtc::ThreadChecker thread_checker_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioTrack); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.cc b/TMessagesProj/jni/voip/webrtc/pc/channel.cc index eeba19b2c..02ee9d249 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.cc @@ -30,6 +30,7 @@ #include "rtc_base/logging.h" #include "rtc_base/network_route.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/trace_event.h" namespace cricket { @@ -206,7 +207,7 @@ void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { } void BaseChannel::Deinit() { - RTC_DCHECK(worker_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread()); media_channel_->SetInterface(/*iface=*/nullptr); // Packets arrive on the network thread, processing packets calls virtual // functions, so need to stop this process in Deinit that is called in @@ -289,6 +290,13 @@ bool BaseChannel::SetRemoteContent(const MediaContentDescription* content, Bind(&BaseChannel::SetRemoteContent_w, this, content, type, error_desc)); } +bool BaseChannel::SetPayloadTypeDemuxingEnabled(bool enabled) { + TRACE_EVENT0("webrtc", "BaseChannel::SetPayloadTypeDemuxingEnabled"); + return InvokeOnWorker( + RTC_FROM_HERE, + Bind(&BaseChannel::SetPayloadTypeDemuxingEnabled_w, this, enabled)); +} + bool BaseChannel::IsReadyToReceiveMedia_w() const { // Receive data if we are enabled and have local content, return enabled() && @@ -330,7 +338,7 @@ int BaseChannel::SetOption(SocketType type, int BaseChannel::SetOption_n(SocketType type, rtc::Socket::Option opt, int value) { - RTC_DCHECK(network_thread_->IsCurrent()); + 
RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(rtp_transport_); switch (type) { case ST_RTP: @@ -346,7 +354,7 @@ int BaseChannel::SetOption_n(SocketType type, } void BaseChannel::OnWritableState(bool writable) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); if (writable) { ChannelWritable_n(); } else { @@ -358,7 +366,7 @@ void BaseChannel::OnNetworkRouteChanged( absl::optional network_route) { RTC_LOG(LS_INFO) << "Network route for " << ToString() << " was changed."; - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); rtc::NetworkRoute new_route; if (network_route) { new_route = *(network_route); @@ -372,6 +380,18 @@ void BaseChannel::OnNetworkRouteChanged( }); } +sigslot::signal1& BaseChannel::SignalFirstPacketReceived() { + RTC_DCHECK_RUN_ON(signaling_thread_); + return SignalFirstPacketReceived_; +} + +sigslot::signal1& BaseChannel::SignalSentPacket() { + // TODO(bugs.webrtc.org/11994): Uncomment this check once callers have been + // fixed to access this variable from the correct thread. 
+ // RTC_DCHECK_RUN_ON(worker_thread_); + return SignalSentPacket_; +} + void BaseChannel::OnTransportReadyToSend(bool ready) { invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [=] { media_channel_->OnReadyToSend(ready); }); @@ -479,7 +499,7 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { invoker_.AsyncInvoke( RTC_FROM_HERE, worker_thread_, [this, packet_buffer, packet_time_us] { - RTC_DCHECK(worker_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread()); media_channel_->OnPacketReceived(packet_buffer, packet_time_us); }); } @@ -537,7 +557,7 @@ void BaseChannel::UpdateWritableState_n() { } void BaseChannel::ChannelWritable_n() { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); if (writable_) { return; } @@ -551,7 +571,7 @@ void BaseChannel::ChannelWritable_n() { } void BaseChannel::ChannelNotWritable_n() { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); if (!writable_) return; @@ -575,6 +595,37 @@ void BaseChannel::ResetUnsignaledRecvStream_w() { media_channel()->ResetUnsignaledRecvStream(); } +bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { + RTC_DCHECK_RUN_ON(worker_thread()); + if (enabled == payload_type_demuxing_enabled_) { + return true; + } + payload_type_demuxing_enabled_ = enabled; + if (!enabled) { + // TODO(crbug.com/11477): This will remove *all* unsignaled streams (those + // without an explicitly signaled SSRC), which may include streams that + // were matched to this channel by MID or RID. Ideally we'd remove only the + // streams that were matched based on payload type alone, but currently + // there is no straightforward way to identify those streams. 
+ media_channel()->ResetUnsignaledRecvStream(); + demuxer_criteria_.payload_types.clear(); + if (!RegisterRtpDemuxerSink()) { + RTC_LOG(LS_ERROR) << "Failed to disable payload type demuxing for " + << ToString(); + return false; + } + } else if (!payload_types_.empty()) { + demuxer_criteria_.payload_types.insert(payload_types_.begin(), + payload_types_.end()); + if (!RegisterRtpDemuxerSink()) { + RTC_LOG(LS_ERROR) << "Failed to enable payload type demuxing for " + << ToString(); + return false; + } + } + return true; +} + bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, SdpType type, std::string* error_desc) { @@ -716,6 +767,7 @@ bool BaseChannel::UpdateRemoteStreams_w( // Re-register the sink to update the receiving ssrcs. if (!RegisterRtpDemuxerSink()) { RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); + ret = false; } remote_streams_ = streams; return ret; @@ -741,7 +793,7 @@ void BaseChannel::OnMessage(rtc::Message* pmsg) { switch (pmsg->message_id) { case MSG_SEND_RTP_PACKET: case MSG_SEND_RTCP_PACKET: { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); SendPacketMessageData* data = static_cast(pmsg->pdata); bool rtcp = pmsg->message_id == MSG_SEND_RTCP_PACKET; @@ -750,24 +802,31 @@ void BaseChannel::OnMessage(rtc::Message* pmsg) { break; } case MSG_FIRSTPACKETRECEIVED: { + RTC_DCHECK_RUN_ON(signaling_thread_); SignalFirstPacketReceived_(this); break; } } } -void BaseChannel::AddHandledPayloadType(int payload_type) { - demuxer_criteria_.payload_types.insert(static_cast(payload_type)); +void BaseChannel::MaybeAddHandledPayloadType(int payload_type) { + if (payload_type_demuxing_enabled_) { + demuxer_criteria_.payload_types.insert(static_cast(payload_type)); + } + // Even if payload type demuxing is currently disabled, we need to remember + // the payload types in case it's re-enabled later. 
+ payload_types_.insert(static_cast(payload_type)); } void BaseChannel::ClearHandledPayloadTypes() { demuxer_criteria_.payload_types.clear(); + payload_types_.clear(); } void BaseChannel::FlushRtcpMessages_n() { // Flush all remaining RTCP messages. This should only be called in // destructor. - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); rtc::MessageList rtcp_messages; network_thread_->Clear(this, MSG_SEND_RTCP_PACKET, &rtcp_messages); for (const auto& message : rtcp_messages) { @@ -777,11 +836,11 @@ void BaseChannel::FlushRtcpMessages_n() { } void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [this, sent_packet] { - RTC_DCHECK(worker_thread_->IsCurrent()); - SignalSentPacket(sent_packet); + RTC_DCHECK_RUN_ON(worker_thread()); + SignalSentPacket()(sent_packet); }); } @@ -810,7 +869,7 @@ VoiceChannel::~VoiceChannel() { } void BaseChannel::UpdateMediaSendRecvState() { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [this] { UpdateMediaSendRecvState_w(); }); } @@ -869,7 +928,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, if (webrtc::RtpTransceiverDirectionHasRecv(audio->direction())) { for (const AudioCodec& codec : audio->codecs()) { - AddHandledPayloadType(codec.id); + MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. if (!RegisterRtpDemuxerSink()) { @@ -1062,7 +1121,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, if (webrtc::RtpTransceiverDirectionHasRecv(video->direction())) { for (const VideoCodec& codec : video->codecs()) { - AddHandledPayloadType(codec.id); + MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. 
if (!RegisterRtpDemuxerSink()) { @@ -1287,7 +1346,7 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content, return false; } for (const DataCodec& codec : data->codecs()) { - AddHandledPayloadType(codec.id); + MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. if (!RegisterRtpDemuxerSink()) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.h b/TMessagesProj/jni/voip/webrtc/pc/channel.h index 44374b176..51cc40fc5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.h @@ -39,7 +39,9 @@ #include "rtc_base/async_invoker.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/network.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/unique_id_generator.h" namespace webrtc { @@ -69,7 +71,7 @@ struct CryptoParams; // NetworkInterface. class BaseChannel : public ChannelInterface, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<>, public MediaChannel::NetworkInterface, public webrtc::RtpPacketSinkInterface { @@ -124,6 +126,15 @@ class BaseChannel : public ChannelInterface, bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, std::string* error_desc) override; + // Controls whether this channel will receive packets on the basis of + // matching payload type alone. This is needed for legacy endpoints that + // don't signal SSRCs or use MID/RID, but doesn't make sense if there is + // more than channel of specific media type, As that creates an ambiguity. + // + // This method will also remove any existing streams that were bound to this + // channel on the basis of payload type, since one of these streams might + // actually belong to a new channel. 
See: crbug.com/webrtc/11477 + bool SetPayloadTypeDemuxingEnabled(bool enabled) override; bool Enable(bool enable) override; @@ -134,22 +145,11 @@ class BaseChannel : public ChannelInterface, return remote_streams_; } - sigslot::signal2 SignalDtlsSrtpSetupFailure; - void SignalDtlsSrtpSetupFailure_n(bool rtcp); - void SignalDtlsSrtpSetupFailure_s(bool rtcp); - // Used for latency measurements. - sigslot::signal1& SignalFirstPacketReceived() override { - return SignalFirstPacketReceived_; - } + sigslot::signal1& SignalFirstPacketReceived() override; // Forward SignalSentPacket to worker thread. - sigslot::signal1 SignalSentPacket; - - // Emitted whenever rtcp-mux is fully negotiated and the rtcp-transport can - // be destroyed. - // Fired on the network thread. - sigslot::signal1 SignalRtcpMuxFullyActive; + sigslot::signal1& SignalSentPacket(); // From RtpTransport - public for testing only void OnTransportReadyToSend(bool ready); @@ -224,6 +224,7 @@ class BaseChannel : public ChannelInterface, bool AddRecvStream_w(const StreamParams& sp); bool RemoveRecvStream_w(uint32_t ssrc); void ResetUnsignaledRecvStream_w(); + bool SetPayloadTypeDemuxingEnabled_w(bool enabled); bool AddSendStream_w(const StreamParams& sp); bool RemoveSendStream_w(uint32_t ssrc); @@ -261,9 +262,11 @@ class BaseChannel : public ChannelInterface, return worker_thread_->Invoke(posted_from, functor); } - void AddHandledPayloadType(int payload_type); + // Add |payload_type| to |demuxer_criteria_| if payload type demuxing is + // enabled. 
+ void MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread()); - void ClearHandledPayloadTypes(); + void ClearHandledPayloadTypes() RTC_RUN_ON(worker_thread()); void UpdateRtpHeaderExtensionMap( const RtpHeaderExtensions& header_extensions); @@ -285,7 +288,10 @@ class BaseChannel : public ChannelInterface, rtc::Thread* const network_thread_; rtc::Thread* const signaling_thread_; rtc::AsyncInvoker invoker_; - sigslot::signal1 SignalFirstPacketReceived_; + sigslot::signal1 SignalFirstPacketReceived_ + RTC_GUARDED_BY(signaling_thread_); + sigslot::signal1 SignalSentPacket_ + RTC_GUARDED_BY(worker_thread_); const std::string content_name_; @@ -308,6 +314,7 @@ class BaseChannel : public ChannelInterface, // well, but it can be changed only when signaling thread does a synchronous // call to the worker thread, so it should be safe. bool enabled_ = false; + bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true; std::vector local_streams_; std::vector remote_streams_; webrtc::RtpTransceiverDirection local_content_direction_ = @@ -315,6 +322,8 @@ class BaseChannel : public ChannelInterface, webrtc::RtpTransceiverDirection remote_content_direction_ = webrtc::RtpTransceiverDirection::kInactive; + // Cached list of payload types, used if payload type demuxing is re-enabled. + std::set payload_types_ RTC_GUARDED_BY(worker_thread()); webrtc::RtpDemuxerCriteria demuxer_criteria_; // This generator is used to generate SSRCs for local streams. 
// This is needed in cases where SSRCs are not negotiated or set explicitly diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h index cd29ed4f8..68b648630 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h @@ -52,6 +52,7 @@ class ChannelInterface { virtual bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, std::string* error_desc) = 0; + virtual bool SetPayloadTypeDemuxingEnabled(bool enabled) = 0; // Access to the local and remote streams that were set on the channel. virtual const std::vector& local_streams() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc index 84d74678b..9d5adcad4 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc @@ -193,6 +193,9 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( const webrtc::CryptoOptions& crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator, const AudioOptions& options) { + // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in + // PeerConnection and add the expectation that we're already on the right + // thread. if (!worker_thread_->IsCurrent()) { return worker_thread_->Invoke(RTC_FROM_HERE, [&] { return CreateVoiceChannel(call, media_config, rtp_transport, @@ -262,6 +265,9 @@ VideoChannel* ChannelManager::CreateVideoChannel( rtc::UniqueRandomIdGenerator* ssrc_generator, const VideoOptions& options, webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { + // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in + // PeerConnection and add the expectation that we're already on the right + // thread. 
if (!worker_thread_->IsCurrent()) { return worker_thread_->Invoke(RTC_FROM_HERE, [&] { return CreateVideoChannel(call, media_config, rtp_transport, diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h index 8d5fc0aa5..ba2c26009 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h @@ -96,16 +96,15 @@ class ChannelManager final { // call the appropriate Destroy*Channel method when done. // Creates a voice channel, to be associated with the specified session. - VoiceChannel* CreateVoiceChannel( - webrtc::Call* call, - const cricket::MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const AudioOptions& options); + VoiceChannel* CreateVoiceChannel(webrtc::Call* call, + const cricket::MediaConfig& media_config, + webrtc::RtpTransportInternal* rtp_transport, + rtc::Thread* signaling_thread, + const std::string& content_name, + bool srtp_required, + const webrtc::CryptoOptions& crypto_options, + rtc::UniqueRandomIdGenerator* ssrc_generator, + const AudioOptions& options); // Destroys a voice channel created by CreateVoiceChannel. void DestroyVoiceChannel(VoiceChannel* voice_channel); diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc new file mode 100644 index 000000000..727fbd654 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc @@ -0,0 +1,147 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/connection_context.h" + +#include +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "media/base/rtp_data_engine.h" +#include "rtc_base/helpers.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +namespace { + +rtc::Thread* MaybeStartThread(rtc::Thread* old_thread, + const std::string& thread_name, + bool with_socket_server, + std::unique_ptr& thread_holder) { + if (old_thread) { + return old_thread; + } + if (with_socket_server) { + thread_holder = rtc::Thread::CreateWithSocketServer(); + } else { + thread_holder = rtc::Thread::Create(); + } + thread_holder->SetName(thread_name, nullptr); + thread_holder->Start(); + return thread_holder.get(); +} + +rtc::Thread* MaybeWrapThread(rtc::Thread* signaling_thread, + bool& wraps_current_thread) { + wraps_current_thread = false; + if (signaling_thread) { + return signaling_thread; + } + auto this_thread = rtc::Thread::Current(); + if (!this_thread) { + // If this thread isn't already wrapped by an rtc::Thread, create a + // wrapper and own it in this class. 
+ this_thread = rtc::ThreadManager::Instance()->WrapCurrentThread(); + wraps_current_thread = true; + } + return this_thread; +} + +std::unique_ptr MaybeCreateSctpFactory( + std::unique_ptr factory, + rtc::Thread* network_thread) { + if (factory) { + return factory; + } +#ifdef HAVE_SCTP + return std::make_unique(network_thread); +#else + return nullptr; +#endif +} + +} // namespace + +// Static +rtc::scoped_refptr ConnectionContext::Create( + PeerConnectionFactoryDependencies* dependencies) { + auto context = new rtc::RefCountedObject(dependencies); + if (!context->channel_manager_->Init()) { + return nullptr; + } + return context; +} + +ConnectionContext::ConnectionContext( + PeerConnectionFactoryDependencies* dependencies) + : network_thread_(MaybeStartThread(dependencies->network_thread, + "pc_network_thread", + true, + owned_network_thread_)), + worker_thread_(MaybeStartThread(dependencies->worker_thread, + "pc_worker_thread", + false, + owned_worker_thread_)), + signaling_thread_(MaybeWrapThread(dependencies->signaling_thread, + wraps_current_thread_)), + network_monitor_factory_( + std::move(dependencies->network_monitor_factory)), + call_factory_(std::move(dependencies->call_factory)), + media_engine_(std::move(dependencies->media_engine)), + sctp_factory_( + MaybeCreateSctpFactory(std::move(dependencies->sctp_factory), + network_thread())), + trials_(dependencies->trials + ? std::move(dependencies->trials) + : std::make_unique()) { + signaling_thread_->AllowInvokesToThread(worker_thread_); + signaling_thread_->AllowInvokesToThread(network_thread_); + worker_thread_->AllowInvokesToThread(network_thread_); + network_thread_->DisallowAllInvokes(); + + RTC_DCHECK_RUN_ON(signaling_thread_); + rtc::InitRandom(rtc::Time32()); + + // If network_monitor_factory_ is non-null, it will be used to create a + // network monitor while on the network thread. 
+ default_network_manager_ = std::make_unique( + network_monitor_factory_.get()); + + default_socket_factory_ = + std::make_unique(network_thread()); + + channel_manager_ = std::make_unique( + std::move(media_engine_), std::make_unique(), + worker_thread(), network_thread()); + + channel_manager_->SetVideoRtxEnabled(true); +} + +ConnectionContext::~ConnectionContext() { + RTC_DCHECK_RUN_ON(signaling_thread_); + channel_manager_.reset(nullptr); + + // Make sure |worker_thread()| and |signaling_thread()| outlive + // |default_socket_factory_| and |default_network_manager_|. + default_socket_factory_ = nullptr; + default_network_manager_ = nullptr; + + if (wraps_current_thread_) + rtc::ThreadManager::Instance()->UnwrapCurrentThread(); +} + +cricket::ChannelManager* ConnectionContext::channel_manager() const { + return channel_manager_.get(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.h b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h new file mode 100644 index 000000000..02d08a191 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h @@ -0,0 +1,134 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef PC_CONNECTION_CONTEXT_H_ +#define PC_CONNECTION_CONTEXT_H_ + +#include +#include + +#include "api/call/call_factory_interface.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/sctp_transport_factory_interface.h" +#include "api/transport/webrtc_key_value_config.h" +#include "media/base/media_engine.h" +#include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "pc/channel_manager.h" +#include "rtc_base/checks.h" +#include "rtc_base/network.h" +#include "rtc_base/network_monitor_factory.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace rtc { +class BasicNetworkManager; +class BasicPacketSocketFactory; +} // namespace rtc + +namespace webrtc { + +class RtcEventLog; + +// This class contains resources needed by PeerConnection and associated +// objects. A reference to this object is passed to each PeerConnection. The +// methods on this object are assumed not to change the state in any way that +// interferes with the operation of other PeerConnections. +// +// This class must be created and destroyed on the signaling thread. +class ConnectionContext : public rtc::RefCountInterface { + public: + // Creates a ConnectionContext. May return null if initialization fails. + // The Dependencies class allows simple management of all new dependencies + // being added to the ConnectionContext. + static rtc::scoped_refptr Create( + PeerConnectionFactoryDependencies* dependencies); + + // This class is not copyable or movable. 
+ ConnectionContext(const ConnectionContext&) = delete; + ConnectionContext& operator=(const ConnectionContext&) = delete; + + // Functions called from PeerConnection and friends + SctpTransportFactoryInterface* sctp_transport_factory() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return sctp_factory_.get(); + } + + cricket::ChannelManager* channel_manager() const; + + rtc::Thread* signaling_thread() { return signaling_thread_; } + const rtc::Thread* signaling_thread() const { return signaling_thread_; } + rtc::Thread* worker_thread() { return worker_thread_; } + const rtc::Thread* worker_thread() const { return worker_thread_; } + rtc::Thread* network_thread() { return network_thread_; } + const rtc::Thread* network_thread() const { return network_thread_; } + + const WebRtcKeyValueConfig& trials() const { return *trials_.get(); } + + // Accessors only used from the PeerConnectionFactory class + rtc::BasicNetworkManager* default_network_manager() { + RTC_DCHECK_RUN_ON(signaling_thread_); + return default_network_manager_.get(); + } + rtc::BasicPacketSocketFactory* default_socket_factory() { + RTC_DCHECK_RUN_ON(signaling_thread_); + return default_socket_factory_.get(); + } + CallFactoryInterface* call_factory() { + RTC_DCHECK_RUN_ON(worker_thread_); + return call_factory_.get(); + } + + protected: + explicit ConnectionContext(PeerConnectionFactoryDependencies* dependencies); + + virtual ~ConnectionContext(); + + private: + // The following three variables are used to communicate between the + // constructor and the destructor, and are never exposed externally. + bool wraps_current_thread_; + // Note: Since owned_network_thread_ and owned_worker_thread_ are used + // in the initialization of network_thread_ and worker_thread_, they + // must be declared before them, so that they are initialized first. 
+ std::unique_ptr owned_network_thread_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr owned_worker_thread_ + RTC_GUARDED_BY(signaling_thread_); + rtc::Thread* const network_thread_; + rtc::Thread* const worker_thread_; + rtc::Thread* const signaling_thread_; + // channel_manager is accessed both on signaling thread and worker thread. + std::unique_ptr channel_manager_; + std::unique_ptr const network_monitor_factory_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr default_network_manager_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr const call_factory_ + RTC_GUARDED_BY(worker_thread_); + + std::unique_ptr default_socket_factory_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr media_engine_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr const sctp_factory_ + RTC_GUARDED_BY(signaling_thread_); + // Accessed both on signaling thread and worker thread. + std::unique_ptr const trials_; +}; + +} // namespace webrtc + +#endif // PC_CONNECTION_CONTEXT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc index 04a4bb624..9fabe13cc 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc @@ -129,7 +129,7 @@ void DataChannelController::OnDataReceived( cricket::ReceiveDataParams params; params.sid = channel_id; params.type = ToCricketDataMessageType(type); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this, params, buffer] { RTC_DCHECK_RUN_ON(signaling_thread()); // TODO(bugs.webrtc.org/11547): The data being received should be @@ -148,7 +148,7 @@ void DataChannelController::OnDataReceived( void DataChannelController::OnChannelClosing(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( 
RTC_FROM_HERE, signaling_thread(), [this, channel_id] { RTC_DCHECK_RUN_ON(signaling_thread()); SignalDataChannelTransportChannelClosing_s(channel_id); @@ -157,7 +157,7 @@ void DataChannelController::OnChannelClosing(int channel_id) { void DataChannelController::OnChannelClosed(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this, channel_id] { RTC_DCHECK_RUN_ON(signaling_thread()); SignalDataChannelTransportChannelClosed_s(channel_id); @@ -166,7 +166,7 @@ void DataChannelController::OnChannelClosed(int channel_id) { void DataChannelController::OnReadyToSend() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this] { RTC_DCHECK_RUN_ON(signaling_thread()); data_channel_transport_ready_to_send_ = true; @@ -177,7 +177,7 @@ void DataChannelController::OnReadyToSend() { void DataChannelController::OnTransportClosed() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this] { RTC_DCHECK_RUN_ON(signaling_thread()); OnTransportChannelClosed(); @@ -186,7 +186,6 @@ void DataChannelController::OnTransportClosed() { void DataChannelController::SetupDataChannelTransport_n() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_ = std::make_unique(); // There's a new data channel transport. This needs to be signaled to the // |sctp_data_channels_| so that they can reopen and reconnect. 
This is @@ -196,7 +195,6 @@ void DataChannelController::SetupDataChannelTransport_n() { void DataChannelController::TeardownDataChannelTransport_n() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_ = nullptr; if (data_channel_transport()) { data_channel_transport()->SetDataSink(nullptr); } @@ -592,7 +590,7 @@ bool DataChannelController::DataChannelSendData( void DataChannelController::NotifyDataChannelsOfTransportCreated() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this] { RTC_DCHECK_RUN_ON(signaling_thread()); for (const auto& channel : sctp_data_channels_) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h index 3daee1138..675928882 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h @@ -221,8 +221,9 @@ class DataChannelController : public RtpDataChannelProviderInterface, sigslot::signal1 SignalSctpDataChannelCreated_ RTC_GUARDED_BY(signaling_thread()); - // Used to invoke data channel transport signals on the signaling thread. - std::unique_ptr data_channel_transport_invoker_ + // Used from the network thread to invoke data channel transport signals on + // the signaling thread. + rtc::AsyncInvoker data_channel_transport_invoker_ RTC_GUARDED_BY(network_thread()); // Owning PeerConnection. 
diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h index c63a3ca5d..f50928fc8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h @@ -63,6 +63,16 @@ class DtlsSrtpTransport : public SrtpTransport { active_reset_srtp_params_ = active_reset_srtp_params; } + virtual void OnErrorDemuxingPacket(uint32_t ssrc) override { + if (SignalOnErrorDemuxingPacket_) { + SignalOnErrorDemuxingPacket_(ssrc); + } + } + + void SetOnErrorDemuxingPacket(std::function f) { + SignalOnErrorDemuxingPacket_ = std::move(f); + } + private: bool IsDtlsActive(); bool IsDtlsConnected(); @@ -96,6 +106,8 @@ class DtlsSrtpTransport : public SrtpTransport { absl::optional> recv_extension_ids_; bool active_reset_srtp_params_ = false; + + std::function SignalOnErrorDemuxingPacket_ = nullptr; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc index f0882de3b..550ede790 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc @@ -31,6 +31,7 @@ DtlsTransportState TranslateState(cricket::DtlsTransportState internal_state) { case cricket::DTLS_TRANSPORT_FAILED: return DtlsTransportState::kFailed; } + RTC_CHECK_NOTREACHED(); } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h index 69b69e41d..c1529de6b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h @@ -12,7 +12,6 @@ #define PC_ICE_TRANSPORT_H_ #include "api/ice_transport_interface.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/thread.h" #include "rtc_base/thread_checker.h" @@ -29,6 +28,10 @@ class IceTransportWithPointer : public IceTransportInterface { RTC_DCHECK(internal_); } + 
IceTransportWithPointer() = delete; + IceTransportWithPointer(const IceTransportWithPointer&) = delete; + IceTransportWithPointer& operator=(const IceTransportWithPointer&) = delete; + cricket::IceTransportInternal* internal() override; // This call will ensure that the pointer passed at construction is // no longer in use by this object. Later calls to internal() will return @@ -39,7 +42,6 @@ class IceTransportWithPointer : public IceTransportInterface { ~IceTransportWithPointer() override; private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(IceTransportWithPointer); const rtc::Thread* creator_thread_; cricket::IceTransportInternal* internal_ RTC_GUARDED_BY(creator_thread_); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc index 71dd8acc2..0b0532002 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc @@ -535,6 +535,9 @@ JsepTransportController::CreateDtlsSrtpTransport( config_.active_reset_srtp_params); dtls_srtp_transport->SignalDtlsStateChange.connect( this, &JsepTransportController::UpdateAggregateStates_n); + dtls_srtp_transport->SetOnErrorDemuxingPacket([this](uint32_t ssrc) { + this->JsepTransportController::ErrorDemuxingPacket_n(ssrc); + }); return dtls_srtp_transport; } @@ -1256,10 +1259,11 @@ void JsepTransportController::UpdateAggregateStates_n() { } if (ice_connection_state_ != new_connection_state) { ice_connection_state_ = new_connection_state; - invoker_.AsyncInvoke(RTC_FROM_HERE, signaling_thread_, - [this, new_connection_state] { - SignalIceConnectionState(new_connection_state); - }); + + invoker_.AsyncInvoke( + RTC_FROM_HERE, signaling_thread_, [this, new_connection_state] { + SignalIceConnectionState.Send(new_connection_state); + }); } // Compute the current RTCIceConnectionState as described in @@ -1381,7 +1385,10 @@ void 
JsepTransportController::UpdateAggregateStates_n() { }); } - if (all_done_gathering) { + // Compute the gathering state. + if (dtls_transports.empty()) { + new_gathering_state = cricket::kIceGatheringNew; + } else if (all_done_gathering) { new_gathering_state = cricket::kIceGatheringComplete; } else if (any_gathering) { new_gathering_state = cricket::kIceGatheringGathering; @@ -1395,6 +1402,10 @@ void JsepTransportController::UpdateAggregateStates_n() { } } +void JsepTransportController::ErrorDemuxingPacket_n(uint32_t ssrc) { + SignalErrorDemuxingPacket.emit(ssrc); +} + void JsepTransportController::OnRtcpPacketReceived_n( rtc::CopyOnWriteBuffer* packet, int64_t packet_time_us) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h index d95b47596..025a7a1fc 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h @@ -35,6 +35,7 @@ #include "rtc_base/async_invoker.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/callback_list.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -101,7 +102,7 @@ class JsepTransportController : public sigslot::has_slots<> { RtcEventLog* event_log = nullptr; // Factory for SCTP transports. - cricket::SctpTransportInternalFactory* sctp_factory = nullptr; + SctpTransportFactoryInterface* sctp_factory = nullptr; }; // The ICE related events are signaled on the |signaling_thread|. 
@@ -197,10 +198,11 @@ class JsepTransportController : public sigslot::has_slots<> { // Else if all completed => completed, // Else if all connected => connected, // Else => connecting - sigslot::signal1 SignalIceConnectionState; + CallbackList SignalIceConnectionState; sigslot::signal1 SignalConnectionState; + sigslot::signal1 SignalStandardizedIceConnectionState; @@ -224,6 +226,8 @@ class JsepTransportController : public sigslot::has_slots<> { sigslot::signal1 SignalDtlsHandshakeError; + sigslot::signal1 SignalErrorDemuxingPacket; + private: RTCError ApplyDescription_n(bool local, SdpType type, @@ -347,6 +351,7 @@ class JsepTransportController : public sigslot::has_slots<> { void OnTransportCandidatePairChanged_n( const cricket::CandidatePairChangeEvent& event); void UpdateAggregateStates_n(); + void ErrorDemuxingPacket_n(uint32_t ssrc); void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer* packet, int64_t packet_time_us); diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc index 0cff84d79..6d8a9a489 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc @@ -36,6 +36,7 @@ #include "rtc_base/logging.h" #include "rtc_base/third_party/base64/base64.h" #include "rtc_base/unique_id_generator.h" +#include "system_wrappers/include/field_trial.h" namespace { @@ -336,6 +337,12 @@ static StreamParams CreateStreamParamsForNewSenderWithSsrcs( "a single media streams. 
This session has multiple " "media streams however, so no FlexFEC SSRC will be generated."; } + if (include_flexfec_stream && + !webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03")) { + include_flexfec_stream = false; + RTC_LOG(LS_WARNING) + << "WebRTC-FlexFEC trial is not enabled, not sending FlexFEC"; + } result.GenerateSsrcs(sender.num_sim_layers, include_rtx_streams, include_flexfec_stream, ssrc_generator); @@ -1559,6 +1566,13 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( return nullptr; } break; + case MEDIA_TYPE_UNSUPPORTED: + if (!AddUnsupportedContentForOffer( + media_description_options, session_options, current_content, + current_description, offer.get(), &ice_credentials)) { + return nullptr; + } + break; default: RTC_NOTREACHED(); } @@ -1713,6 +1727,14 @@ MediaSessionDescriptionFactory::CreateAnswer( return nullptr; } break; + case MEDIA_TYPE_UNSUPPORTED: + if (!AddUnsupportedContentForAnswer( + media_description_options, session_options, offer_content, + offer, current_content, current_description, + bundle_transport.get(), answer.get(), &ice_credentials)) { + return nullptr; + } + break; default: RTC_NOTREACHED(); } @@ -1803,6 +1825,7 @@ const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForOffer( case RtpTransceiverDirection::kRecvOnly: return audio_recv_codecs_; } + RTC_CHECK_NOTREACHED(); } const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForAnswer( @@ -1821,6 +1844,7 @@ const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForAnswer( case RtpTransceiverDirection::kRecvOnly: return audio_recv_codecs_; } + RTC_CHECK_NOTREACHED(); } const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForOffer( @@ -1836,6 +1860,7 @@ const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForOffer( case RtpTransceiverDirection::kRecvOnly: return video_recv_codecs_; } + RTC_CHECK_NOTREACHED(); } const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForAnswer( @@ -1854,6 +1879,7 
@@ const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForAnswer( case RtpTransceiverDirection::kRecvOnly: return video_recv_codecs_; } + RTC_CHECK_NOTREACHED(); } void MergeCodecsFromDescription( @@ -2362,7 +2388,7 @@ bool MediaSessionDescriptionFactory::AddRtpDataContentForOffer( return false; } - data->set_bandwidth(kDataMaxBandwidth); + data->set_bandwidth(kRtpDataMaxBandwidth); SetMediaProtocol(secure_transport, data.get()); desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp, media_description_options.stopped, std::move(data)); @@ -2403,6 +2429,31 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer( } } +bool MediaSessionDescriptionFactory::AddUnsupportedContentForOffer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* current_content, + const SessionDescription* current_description, + SessionDescription* desc, + IceCredentialsIterator* ice_credentials) const { + RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_UNSUPPORTED)); + + const UnsupportedContentDescription* current_unsupported_description = + current_content->media_description()->as_unsupported(); + auto unsupported = std::make_unique( + current_unsupported_description->media_type()); + unsupported->set_protocol(current_content->media_description()->protocol()); + desc->AddContent(media_description_options.mid, MediaProtocolType::kOther, + /*rejected=*/true, std::move(unsupported)); + + if (!AddTransportOffer(media_description_options.mid, + media_description_options.transport_options, + current_description, desc, ice_credentials)) { + return false; + } + return true; +} + // |audio_codecs| = set of all possible codecs that can be used, with correct // payload type mappings // @@ -2733,8 +2784,8 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( return false; } - if (!rejected) { - data_answer->set_bandwidth(kDataMaxBandwidth); + if (!rejected && 
session_options.data_channel_type == DCT_RTP) { + data_answer->set_bandwidth(kRtpDataMaxBandwidth); } else { // RFC 3264 // The answer MUST contain the same number of m-lines as the offer. @@ -2745,6 +2796,42 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( return true; } +bool MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* offer_content, + const SessionDescription* offer_description, + const ContentInfo* current_content, + const SessionDescription* current_description, + const TransportInfo* bundle_transport, + SessionDescription* answer, + IceCredentialsIterator* ice_credentials) const { + std::unique_ptr unsupported_transport = + CreateTransportAnswer(media_description_options.mid, offer_description, + media_description_options.transport_options, + current_description, bundle_transport != nullptr, + ice_credentials); + if (!unsupported_transport) { + return false; + } + RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_UNSUPPORTED)); + + const UnsupportedContentDescription* offer_unsupported_description = + offer_content->media_description()->as_unsupported(); + std::unique_ptr unsupported_answer = + std::make_unique( + offer_unsupported_description->media_type()); + unsupported_answer->set_protocol(offer_unsupported_description->protocol()); + + if (!AddTransportAnswer(media_description_options.mid, + *(unsupported_transport.get()), answer)) { + return false; + } + answer->AddContent(media_description_options.mid, offer_content->type, + /*rejected=*/true, std::move(unsupported_answer)); + return true; +} + void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() { audio_sendrecv_codecs_.clear(); all_audio_codecs_.clear(); @@ -2822,6 +2909,10 @@ bool IsDataContent(const ContentInfo* content) { return IsMediaContentOfType(content, MEDIA_TYPE_DATA); } +bool 
IsUnsupportedContent(const ContentInfo* content) { + return IsMediaContentOfType(content, MEDIA_TYPE_UNSUPPORTED); +} + const ContentInfo* GetFirstMediaContent(const ContentInfos& contents, MediaType media_type) { for (const ContentInfo& content : contents) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.h b/TMessagesProj/jni/voip/webrtc/pc/media_session.h index f305a6214..58a31a2ab 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.h @@ -283,6 +283,14 @@ class MediaSessionDescriptionFactory { SessionDescription* desc, IceCredentialsIterator* ice_credentials) const; + bool AddUnsupportedContentForOffer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* current_content, + const SessionDescription* current_description, + SessionDescription* desc, + IceCredentialsIterator* ice_credentials) const; + bool AddAudioContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, @@ -324,6 +332,17 @@ class MediaSessionDescriptionFactory { SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; + bool AddUnsupportedContentForAnswer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* offer_content, + const SessionDescription* offer_description, + const ContentInfo* current_content, + const SessionDescription* current_description, + const TransportInfo* bundle_transport, + SessionDescription* answer, + IceCredentialsIterator* ice_credentials) const; + void ComputeAudioCodecsIntersectionAndUnion(); void ComputeVideoCodecsIntersectionAndUnion(); @@ -356,6 +375,7 @@ bool IsMediaContent(const ContentInfo* content); bool IsAudioContent(const ContentInfo* content); bool IsVideoContent(const ContentInfo* content); bool IsDataContent(const ContentInfo* content); +bool 
IsUnsupportedContent(const ContentInfo* content); const ContentInfo* GetFirstMediaContent(const ContentInfos& contents, MediaType media_type); const ContentInfo* GetFirstAudioContent(const ContentInfos& contents); diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream.h b/TMessagesProj/jni/voip/webrtc/pc/media_stream.h index 34299f46e..6f16bea1d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream.h @@ -48,7 +48,7 @@ class MediaStream : public Notifier { template bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track); - std::string id_; + const std::string id_; AudioTrackVector audio_tracks_; VideoTrackVector video_tracks_; }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc index 7cee2c743..dd8f7d0ff 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc @@ -10,55 +10,54 @@ #include "pc/peer_connection.h" +#include +#include #include -#include #include -#include #include #include -#include #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "api/jsep_ice_candidate.h" -#include "api/jsep_session_description.h" -#include "api/media_stream_proxy.h" -#include "api/media_stream_track_proxy.h" -#include "api/rtc_error.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/rtc_event_log_output_file.h" #include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/task_queue/queued_task.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/uma_metrics.h" -#include "api/video/builtin_video_bitrate_allocator_factory.h" -#include "call/call.h" -#include "logging/rtc_event_log/ice_logger.h" +#include "api/video/video_codec_constants.h" +#include "call/audio_state.h" +#include "call/packet_receiver.h" +#include "media/base/media_channel.h" +#include "media/base/media_config.h" 
#include "media/base/rid_description.h" -#include "media/sctp/sctp_transport.h" -#include "pc/audio_rtp_receiver.h" -#include "pc/audio_track.h" -#include "pc/channel.h" -#include "pc/channel_manager.h" -#include "pc/dtmf_sender.h" -#include "pc/media_stream.h" -#include "pc/media_stream_observer.h" -#include "pc/remote_audio_source.h" -#include "pc/rtp_media_utils.h" +#include "media/base/stream_params.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "p2p/base/connection.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/dtls_transport_internal.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/transport_info.h" +#include "pc/ice_server_parsing.h" #include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/sctp_transport.h" -#include "pc/sctp_utils.h" -#include "pc/sdp_utils.h" -#include "pc/stream_collection.h" -#include "pc/video_rtp_receiver.h" -#include "pc/video_track.h" +#include "pc/simulcast_description.h" +#include "pc/webrtc_session_description_factory.h" #include "rtc_base/bind.h" -#include "rtc_base/checks.h" +#include "rtc_base/helpers.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/location.h" #include "rtc_base/logging.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/callback_list.h" +#include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" -#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/unique_id_generator.h" #include "system_wrappers/include/metrics.h" using cricket::ContentInfo; @@ -81,158 +80,14 @@ using cricket::STUN_PORT_TYPE; namespace webrtc { -// Error messages -const char kBundleWithoutRtcpMux[] = - "rtcp-mux must be enabled when BUNDLE " - "is enabled."; -const char 
kInvalidCandidates[] = "Description contains invalid candidates."; -const char kInvalidSdp[] = "Invalid session description."; -const char kMlineMismatchInAnswer[] = - "The order of m-lines in answer doesn't match order in offer. Rejecting " - "answer."; -const char kMlineMismatchInSubsequentOffer[] = - "The order of m-lines in subsequent offer doesn't match order from " - "previous offer/answer."; -const char kSdpWithoutDtlsFingerprint[] = - "Called with SDP without DTLS fingerprint."; -const char kSdpWithoutSdesCrypto[] = "Called with SDP without SDES crypto."; -const char kSdpWithoutIceUfragPwd[] = - "Called with SDP without ice-ufrag and ice-pwd."; -const char kSessionError[] = "Session error code: "; -const char kSessionErrorDesc[] = "Session error description: "; -const char kDtlsSrtpSetupFailureRtp[] = - "Couldn't set up DTLS-SRTP on RTP channel."; -const char kDtlsSrtpSetupFailureRtcp[] = - "Couldn't set up DTLS-SRTP on RTCP channel."; - namespace { // UMA metric names. -const char kSimulcastVersionApplyLocalDescription[] = - "WebRTC.PeerConnection.Simulcast.ApplyLocalDescription"; -const char kSimulcastVersionApplyRemoteDescription[] = - "WebRTC.PeerConnection.Simulcast.ApplyRemoteDescription"; const char kSimulcastNumberOfEncodings[] = "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings"; -const char kSimulcastDisabled[] = "WebRTC.PeerConnection.Simulcast.Disabled"; - -static const char kDefaultStreamId[] = "default"; -static const char kDefaultAudioSenderId[] = "defaulta0"; -static const char kDefaultVideoSenderId[] = "defaultv0"; - -// The length of RTCP CNAMEs. 
-static const int kRtcpCnameLength = 16; - -enum { - MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0, - MSG_SET_SESSIONDESCRIPTION_FAILED, - MSG_CREATE_SESSIONDESCRIPTION_FAILED, - MSG_GETSTATS, - MSG_REPORT_USAGE_PATTERN, -}; static const int REPORT_USAGE_PATTERN_DELAY_MS = 60000; -struct SetSessionDescriptionMsg : public rtc::MessageData { - explicit SetSessionDescriptionMsg( - webrtc::SetSessionDescriptionObserver* observer) - : observer(observer) {} - - rtc::scoped_refptr observer; - RTCError error; -}; - -struct CreateSessionDescriptionMsg : public rtc::MessageData { - explicit CreateSessionDescriptionMsg( - webrtc::CreateSessionDescriptionObserver* observer) - : observer(observer) {} - - rtc::scoped_refptr observer; - RTCError error; -}; - -struct GetStatsMsg : public rtc::MessageData { - GetStatsMsg(webrtc::StatsObserver* observer, - webrtc::MediaStreamTrackInterface* track) - : observer(observer), track(track) {} - rtc::scoped_refptr observer; - rtc::scoped_refptr track; -}; - -// Check if we can send |new_stream| on a PeerConnection. -bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, - webrtc::MediaStreamInterface* new_stream) { - if (!new_stream || !current_streams) { - return false; - } - if (current_streams->find(new_stream->id()) != nullptr) { - RTC_LOG(LS_ERROR) << "MediaStream with ID " << new_stream->id() - << " is already added."; - return false; - } - return true; -} - -// If the direction is "recvonly" or "inactive", treat the description -// as containing no streams. -// See: https://code.google.com/p/webrtc/issues/detail?id=5054 -std::vector GetActiveStreams( - const cricket::MediaContentDescription* desc) { - return RtpTransceiverDirectionHasSend(desc->direction()) - ? 
desc->streams() - : std::vector(); -} - -bool IsValidOfferToReceiveMedia(int value) { - typedef PeerConnectionInterface::RTCOfferAnswerOptions Options; - return (value >= Options::kUndefined) && - (value <= Options::kMaxOfferToReceiveMedia); -} - -// Add options to |[audio/video]_media_description_options| from |senders|. -void AddPlanBRtpSenderOptions( - const std::vector>>& senders, - cricket::MediaDescriptionOptions* audio_media_description_options, - cricket::MediaDescriptionOptions* video_media_description_options, - int num_sim_layers) { - for (const auto& sender : senders) { - if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { - if (audio_media_description_options) { - audio_media_description_options->AddAudioSender( - sender->id(), sender->internal()->stream_ids()); - } - } else { - RTC_DCHECK(sender->media_type() == cricket::MEDIA_TYPE_VIDEO); - if (video_media_description_options) { - video_media_description_options->AddVideoSender( - sender->id(), sender->internal()->stream_ids(), {}, - SimulcastLayerList(), num_sim_layers); - } - } - } -} - -// Add options to |session_options| from |rtp_data_channels|. -void AddRtpDataChannelOptions( - const std::map>& - rtp_data_channels, - cricket::MediaDescriptionOptions* data_media_description_options) { - if (!data_media_description_options) { - return; - } - // Check for data channels. - for (const auto& kv : rtp_data_channels) { - const RtpDataChannel* channel = kv.second; - if (channel->state() == RtpDataChannel::kConnecting || - channel->state() == RtpDataChannel::kOpen) { - // Legacy RTP data channels are signaled with the track/stream ID set to - // the data channel's label. 
- data_media_description_options->AddRtpDataChannel(channel->label(), - channel->label()); - } - } -} uint32_t ConvertIceTransportTypeToCandidateFilter( PeerConnectionInterface::IceTransportsType type) { @@ -251,28 +106,6 @@ uint32_t ConvertIceTransportTypeToCandidateFilter( return cricket::CF_NONE; } -// Map internal signaling state name to spec name: -// https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum -std::string GetSignalingStateString( - PeerConnectionInterface::SignalingState state) { - switch (state) { - case PeerConnectionInterface::kStable: - return "stable"; - case PeerConnectionInterface::kHaveLocalOffer: - return "have-local-offer"; - case PeerConnectionInterface::kHaveLocalPrAnswer: - return "have-local-pranswer"; - case PeerConnectionInterface::kHaveRemoteOffer: - return "have-remote-offer"; - case PeerConnectionInterface::kHaveRemotePrAnswer: - return "have-remote-pranswer"; - case PeerConnectionInterface::kClosed: - return "closed"; - } - RTC_NOTREACHED(); - return ""; -} - IceCandidatePairType GetIceCandidatePairCounter( const cricket::Candidate& local, const cricket::Candidate& remote) { @@ -346,253 +179,6 @@ IceCandidatePairType GetIceCandidatePairCounter( return kIceCandidatePairMax; } -// Logic to decide if an m= section can be recycled. This means that the new -// m= section is not rejected, but the old local or remote m= section is -// rejected. |old_content_one| and |old_content_two| refer to the m= section -// of the old remote and old local descriptions in no particular order. -// We need to check both the old local and remote because either -// could be the most current from the latest negotation. 
-bool IsMediaSectionBeingRecycled(SdpType type, - const ContentInfo& content, - const ContentInfo* old_content_one, - const ContentInfo* old_content_two) { - return type == SdpType::kOffer && !content.rejected && - ((old_content_one && old_content_one->rejected) || - (old_content_two && old_content_two->rejected)); -} - -// Verify that the order of media sections in |new_desc| matches -// |current_desc|. The number of m= sections in |new_desc| should be no -// less than |current_desc|. In the case of checking an answer's -// |new_desc|, the |current_desc| is the last offer that was set as the -// local or remote. In the case of checking an offer's |new_desc| we -// check against the local and remote descriptions stored from the last -// negotiation, because either of these could be the most up to date for -// possible rejected m sections. These are the |current_desc| and -// |secondary_current_desc|. -bool MediaSectionsInSameOrder(const SessionDescription& current_desc, - const SessionDescription* secondary_current_desc, - const SessionDescription& new_desc, - const SdpType type) { - if (current_desc.contents().size() > new_desc.contents().size()) { - return false; - } - - for (size_t i = 0; i < current_desc.contents().size(); ++i) { - const cricket::ContentInfo* secondary_content_info = nullptr; - if (secondary_current_desc && - i < secondary_current_desc->contents().size()) { - secondary_content_info = &secondary_current_desc->contents()[i]; - } - if (IsMediaSectionBeingRecycled(type, new_desc.contents()[i], - ¤t_desc.contents()[i], - secondary_content_info)) { - // For new offer descriptions, if the media section can be recycled, it's - // valid for the MID and media type to change. 
- continue; - } - if (new_desc.contents()[i].name != current_desc.contents()[i].name) { - return false; - } - const MediaContentDescription* new_desc_mdesc = - new_desc.contents()[i].media_description(); - const MediaContentDescription* current_desc_mdesc = - current_desc.contents()[i].media_description(); - if (new_desc_mdesc->type() != current_desc_mdesc->type()) { - return false; - } - } - return true; -} - -bool MediaSectionsHaveSameCount(const SessionDescription& desc1, - const SessionDescription& desc2) { - return desc1.contents().size() == desc2.contents().size(); -} - -void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type, - cricket::MediaType media_type) { - // Array of structs needed to map {KeyExchangeProtocolType, - // cricket::MediaType} to KeyExchangeProtocolMedia without using std::map in - // order to avoid -Wglobal-constructors and -Wexit-time-destructors. - static constexpr struct { - KeyExchangeProtocolType protocol_type; - cricket::MediaType media_type; - KeyExchangeProtocolMedia protocol_media; - } kEnumCounterKeyProtocolMediaMap[] = { - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_AUDIO, - kEnumCounterKeyProtocolMediaTypeDtlsAudio}, - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_VIDEO, - kEnumCounterKeyProtocolMediaTypeDtlsVideo}, - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_DATA, - kEnumCounterKeyProtocolMediaTypeDtlsData}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_AUDIO, - kEnumCounterKeyProtocolMediaTypeSdesAudio}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_VIDEO, - kEnumCounterKeyProtocolMediaTypeSdesVideo}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_DATA, - kEnumCounterKeyProtocolMediaTypeSdesData}, - }; - - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocol", protocol_type, - kEnumCounterKeyProtocolMax); - - for (const auto& i : kEnumCounterKeyProtocolMediaMap) { - if (i.protocol_type == protocol_type && i.media_type == media_type) { - 
RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocolByMedia", - i.protocol_media, - kEnumCounterKeyProtocolMediaTypeMax); - } - } -} - -void NoteAddIceCandidateResult(int result) { - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.AddIceCandidate", result, - kAddIceCandidateMax); -} - -// Checks that each non-rejected content has SDES crypto keys or a DTLS -// fingerprint, unless it's in a BUNDLE group, in which case only the -// BUNDLE-tag section (first media section/description in the BUNDLE group) -// needs a ufrag and pwd. Mismatches, such as replying with a DTLS fingerprint -// to SDES keys, will be caught in JsepTransport negotiation, and backstopped -// by Channel's |srtp_required| check. -RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) { - const cricket::ContentGroup* bundle = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - for (const cricket::ContentInfo& content_info : desc->contents()) { - if (content_info.rejected) { - continue; - } - // Note what media is used with each crypto protocol, for all sections. - NoteKeyProtocolAndMedia(dtls_enabled ? webrtc::kEnumCounterKeyProtocolDtls - : webrtc::kEnumCounterKeyProtocolSdes, - content_info.media_description()->type()); - const std::string& mid = content_info.name; - if (bundle && bundle->HasContentName(mid) && - mid != *(bundle->FirstContentName())) { - // This isn't the first media section in the BUNDLE group, so it's not - // required to have crypto attributes, since only the crypto attributes - // from the first section actually get used. - continue; - } - - // If the content isn't rejected or bundled into another m= section, crypto - // must be present. - const MediaContentDescription* media = content_info.media_description(); - const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); - if (!media || !tinfo) { - // Something is not right. 
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); - } - if (dtls_enabled) { - if (!tinfo->description.identity_fingerprint) { - RTC_LOG(LS_WARNING) - << "Session description must have DTLS fingerprint if " - "DTLS enabled."; - return RTCError(RTCErrorType::INVALID_PARAMETER, - kSdpWithoutDtlsFingerprint); - } - } else { - if (media->cryptos().empty()) { - RTC_LOG(LS_WARNING) - << "Session description must have SDES when DTLS disabled."; - return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutSdesCrypto); - } - } - } - return RTCError::OK(); -} - -// Checks that each non-rejected content has ice-ufrag and ice-pwd set, unless -// it's in a BUNDLE group, in which case only the BUNDLE-tag section (first -// media section/description in the BUNDLE group) needs a ufrag and pwd. -bool VerifyIceUfragPwdPresent(const SessionDescription* desc) { - const cricket::ContentGroup* bundle = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - for (const cricket::ContentInfo& content_info : desc->contents()) { - if (content_info.rejected) { - continue; - } - const std::string& mid = content_info.name; - if (bundle && bundle->HasContentName(mid) && - mid != *(bundle->FirstContentName())) { - // This isn't the first media section in the BUNDLE group, so it's not - // required to have ufrag/password, since only the ufrag/password from - // the first section actually get used. - continue; - } - - // If the content isn't rejected or bundled into another m= section, - // ice-ufrag and ice-pwd must be present. - const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); - if (!tinfo) { - // Something is not right. - RTC_LOG(LS_ERROR) << kInvalidSdp; - return false; - } - if (tinfo->description.ice_ufrag.empty() || - tinfo->description.ice_pwd.empty()) { - RTC_LOG(LS_ERROR) << "Session description must have ice ufrag and pwd."; - return false; - } - } - return true; -} - -// Returns true if |new_desc| requests an ICE restart (i.e., new ufrag/pwd). 
-bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc, - const SessionDescriptionInterface* new_desc, - const std::string& content_name) { - if (!old_desc) { - return false; - } - const SessionDescription* new_sd = new_desc->description(); - const SessionDescription* old_sd = old_desc->description(); - const ContentInfo* cinfo = new_sd->GetContentByName(content_name); - if (!cinfo || cinfo->rejected) { - return false; - } - // If the content isn't rejected, check if ufrag and password has changed. - const cricket::TransportDescription* new_transport_desc = - new_sd->GetTransportDescriptionByName(content_name); - const cricket::TransportDescription* old_transport_desc = - old_sd->GetTransportDescriptionByName(content_name); - if (!new_transport_desc || !old_transport_desc) { - // No transport description exists. This is not an ICE restart. - return false; - } - if (cricket::IceCredentialsChanged( - old_transport_desc->ice_ufrag, old_transport_desc->ice_pwd, - new_transport_desc->ice_ufrag, new_transport_desc->ice_pwd)) { - RTC_LOG(LS_INFO) << "Remote peer requests ICE restart for " << content_name - << "."; - return true; - } - return false; -} - -// Generates a string error message for SetLocalDescription/SetRemoteDescription -// from an RTCError. -std::string GetSetDescriptionErrorMessage(cricket::ContentSource source, - SdpType type, - const RTCError& error) { - rtc::StringBuilder oss; - oss << "Failed to set " << (source == cricket::CS_LOCAL ? 
"local" : "remote") - << " " << SdpTypeToString(type) << " sdp: " << error.message(); - return oss.Release(); -} - -std::string GetStreamIdsString(rtc::ArrayView stream_ids) { - std::string output = "streams=["; - const char* separator = ""; - for (const auto& stream_id : stream_ids) { - output.append(separator).append(stream_id); - separator = ", "; - } - output.append("]"); - return output; -} absl::optional RTCConfigurationToIceConfigOptionalInt( int rtc_configuration_parameter) { @@ -603,88 +189,6 @@ absl::optional RTCConfigurationToIceConfigOptionalInt( return rtc_configuration_parameter; } -void ReportSimulcastApiVersion(const char* name, - const SessionDescription& session) { - bool has_legacy = false; - bool has_spec_compliant = false; - for (const ContentInfo& content : session.contents()) { - if (!content.media_description()) { - continue; - } - has_spec_compliant |= content.media_description()->HasSimulcast(); - for (const StreamParams& sp : content.media_description()->streams()) { - has_legacy |= sp.has_ssrc_group(cricket::kSimSsrcGroupSemantics); - } - } - - if (has_legacy) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionLegacy, - kSimulcastApiVersionMax); - } - if (has_spec_compliant) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionSpecCompliant, - kSimulcastApiVersionMax); - } - if (!has_legacy && !has_spec_compliant) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionNone, - kSimulcastApiVersionMax); - } -} - -const ContentInfo* FindTransceiverMSection( - RtpTransceiverProxyWithInternal* transceiver, - const SessionDescriptionInterface* session_description) { - return transceiver->mid() - ? session_description->description()->GetContentByName( - *transceiver->mid()) - : nullptr; -} - -// Wraps a CreateSessionDescriptionObserver and an OperationsChain operation -// complete callback. When the observer is invoked, the wrapped observer is -// invoked followed by invoking the completion callback. 
-class CreateSessionDescriptionObserverOperationWrapper - : public CreateSessionDescriptionObserver { - public: - CreateSessionDescriptionObserverOperationWrapper( - rtc::scoped_refptr observer, - std::function operation_complete_callback) - : observer_(std::move(observer)), - operation_complete_callback_(std::move(operation_complete_callback)) { - RTC_DCHECK(observer_); - } - ~CreateSessionDescriptionObserverOperationWrapper() override { - RTC_DCHECK(was_called_); - } - - void OnSuccess(SessionDescriptionInterface* desc) override { - RTC_DCHECK(!was_called_); -#ifdef RTC_DCHECK_IS_ON - was_called_ = true; -#endif // RTC_DCHECK_IS_ON - // Completing the operation before invoking the observer allows the observer - // to execute SetLocalDescription() without delay. - operation_complete_callback_(); - observer_->OnSuccess(desc); - } - - void OnFailure(RTCError error) override { - RTC_DCHECK(!was_called_); -#ifdef RTC_DCHECK_IS_ON - was_called_ = true; -#endif // RTC_DCHECK_IS_ON - operation_complete_callback_(); - observer_->OnFailure(std::move(error)); - } - - private: -#ifdef RTC_DCHECK_IS_ON - bool was_called_ = false; -#endif // RTC_DCHECK_IS_ON - rtc::scoped_refptr observer_; - std::function operation_complete_callback_; -}; - // Check if the changes of IceTransportsType motives an ice restart. bool NeedIceRestart(bool surface_ice_candidates_on_ice_transport_type_changed, PeerConnectionInterface::IceTransportsType current, @@ -705,156 +209,62 @@ bool NeedIceRestart(bool surface_ice_candidates_on_ice_transport_type_changed, return (current_filter & modified_filter) != current_filter; } -} // namespace - -// Used by parameterless SetLocalDescription() to create an offer or answer. -// Upon completion of creating the session description, SetLocalDescription() is -// invoked with the result. 
-class PeerConnection::ImplicitCreateSessionDescriptionObserver - : public CreateSessionDescriptionObserver { - public: - ImplicitCreateSessionDescriptionObserver( - rtc::WeakPtr pc, - rtc::scoped_refptr - set_local_description_observer) - : pc_(std::move(pc)), - set_local_description_observer_( - std::move(set_local_description_observer)) {} - ~ImplicitCreateSessionDescriptionObserver() override { - RTC_DCHECK(was_called_); +cricket::IceConfig ParseIceConfig( + const PeerConnectionInterface::RTCConfiguration& config) { + cricket::ContinualGatheringPolicy gathering_policy; + switch (config.continual_gathering_policy) { + case PeerConnectionInterface::GATHER_ONCE: + gathering_policy = cricket::GATHER_ONCE; + break; + case PeerConnectionInterface::GATHER_CONTINUALLY: + gathering_policy = cricket::GATHER_CONTINUALLY; + break; + default: + RTC_NOTREACHED(); + gathering_policy = cricket::GATHER_ONCE; } - void SetOperationCompleteCallback( - std::function operation_complete_callback) { - operation_complete_callback_ = std::move(operation_complete_callback); - } + cricket::IceConfig ice_config; + ice_config.receiving_timeout = RTCConfigurationToIceConfigOptionalInt( + config.ice_connection_receiving_timeout); + ice_config.prioritize_most_likely_candidate_pairs = + config.prioritize_most_likely_ice_candidate_pairs; + ice_config.backup_connection_ping_interval = + RTCConfigurationToIceConfigOptionalInt( + config.ice_backup_candidate_pair_ping_interval); + ice_config.continual_gathering_policy = gathering_policy; + ice_config.presume_writable_when_fully_relayed = + config.presume_writable_when_fully_relayed; + ice_config.surface_ice_candidates_on_ice_transport_type_changed = + config.surface_ice_candidates_on_ice_transport_type_changed; + ice_config.ice_check_interval_strong_connectivity = + config.ice_check_interval_strong_connectivity; + ice_config.ice_check_interval_weak_connectivity = + config.ice_check_interval_weak_connectivity; + ice_config.ice_check_min_interval = 
config.ice_check_min_interval; + ice_config.ice_unwritable_timeout = config.ice_unwritable_timeout; + ice_config.ice_unwritable_min_checks = config.ice_unwritable_min_checks; + ice_config.ice_inactive_timeout = config.ice_inactive_timeout; + ice_config.stun_keepalive_interval = config.stun_candidate_keepalive_interval; + ice_config.network_preference = config.network_preference; + return ice_config; +} - bool was_called() const { return was_called_; } - - void OnSuccess(SessionDescriptionInterface* desc_ptr) override { - RTC_DCHECK(!was_called_); - std::unique_ptr desc(desc_ptr); - was_called_ = true; - - // Abort early if |pc_| is no longer valid. - if (!pc_) { - operation_complete_callback_(); - return; - } - // DoSetLocalDescription() is a synchronous operation that invokes - // |set_local_description_observer_| with the result. - pc_->DoSetLocalDescription(std::move(desc), - std::move(set_local_description_observer_)); - operation_complete_callback_(); - } - - void OnFailure(RTCError error) override { - RTC_DCHECK(!was_called_); - was_called_ = true; - set_local_description_observer_->OnSetLocalDescriptionComplete(RTCError( - error.type(), std::string("SetLocalDescription failed to create " - "session description - ") + - error.message())); - operation_complete_callback_(); - } - - private: - bool was_called_ = false; - rtc::WeakPtr pc_; - rtc::scoped_refptr - set_local_description_observer_; - std::function operation_complete_callback_; -}; - -class PeerConnection::LocalIceCredentialsToReplace { - public: - // Sets the ICE credentials that need restarting to the ICE credentials of - // the current and pending descriptions. 
- void SetIceCredentialsFromLocalDescriptions( - const SessionDescriptionInterface* current_local_description, - const SessionDescriptionInterface* pending_local_description) { - ice_credentials_.clear(); - if (current_local_description) { - AppendIceCredentialsFromSessionDescription(*current_local_description); - } - if (pending_local_description) { - AppendIceCredentialsFromSessionDescription(*pending_local_description); - } - } - - void ClearIceCredentials() { ice_credentials_.clear(); } - - // Returns true if we have ICE credentials that need restarting. - bool HasIceCredentials() const { return !ice_credentials_.empty(); } - - // Returns true if |local_description| shares no ICE credentials with the - // ICE credentials that need restarting. - bool SatisfiesIceRestart( - const SessionDescriptionInterface& local_description) const { - for (const auto& transport_info : - local_description.description()->transport_infos()) { - if (ice_credentials_.find(std::make_pair( - transport_info.description.ice_ufrag, - transport_info.description.ice_pwd)) != ice_credentials_.end()) { - return false; - } - } - return true; - } - - private: - void AppendIceCredentialsFromSessionDescription( - const SessionDescriptionInterface& desc) { - for (const auto& transport_info : desc.description()->transport_infos()) { - ice_credentials_.insert( - std::make_pair(transport_info.description.ice_ufrag, - transport_info.description.ice_pwd)); - } - } - - std::set> ice_credentials_; -}; - -// Wrapper for SetSessionDescriptionObserver that invokes the success or failure -// callback in a posted message handled by the peer connection. This introduces -// a delay that prevents recursive API calls by the observer, but this also -// means that the PeerConnection can be modified before the observer sees the -// result of the operation. This is ill-advised for synchronizing states. 
+// Ensures the configuration doesn't have any parameters with invalid values, +// or values that conflict with other parameters. // -// Implements both the SetLocalDescriptionObserverInterface and the -// SetRemoteDescriptionObserverInterface. -class PeerConnection::SetSessionDescriptionObserverAdapter - : public SetLocalDescriptionObserverInterface, - public SetRemoteDescriptionObserverInterface { - public: - SetSessionDescriptionObserverAdapter( - rtc::WeakPtr pc, - rtc::scoped_refptr inner_observer) - : pc_(std::move(pc)), inner_observer_(std::move(inner_observer)) {} +// Returns RTCError::OK() if there are no issues. +RTCError ValidateConfiguration( + const PeerConnectionInterface::RTCConfiguration& config) { + return cricket::P2PTransportChannel::ValidateIceConfig( + ParseIceConfig(config)); +} - // SetLocalDescriptionObserverInterface implementation. - void OnSetLocalDescriptionComplete(RTCError error) override { - OnSetDescriptionComplete(std::move(error)); - } - // SetRemoteDescriptionObserverInterface implementation. 
- void OnSetRemoteDescriptionComplete(RTCError error) override { - OnSetDescriptionComplete(std::move(error)); - } +bool HasRtcpMuxEnabled(const cricket::ContentInfo* content) { + return content->media_description()->rtcp_mux(); +} - private: - void OnSetDescriptionComplete(RTCError error) { - if (!pc_) - return; - if (error.ok()) { - pc_->PostSetSessionDescriptionSuccess(inner_observer_); - } else { - pc_->PostSetSessionDescriptionFailure(inner_observer_, std::move(error)); - } - } - - rtc::WeakPtr pc_; - rtc::scoped_refptr inner_observer_; -}; +} // namespace bool PeerConnectionInterface::RTCConfiguration::operator==( const PeerConnectionInterface::RTCConfiguration& o) const { @@ -908,6 +318,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( std::string turn_logging_id; bool enable_implicit_rollback; absl::optional allow_codec_switching; + absl::optional report_usage_pattern_delay_ms; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -967,7 +378,8 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( offer_extmap_allow_mixed == o.offer_extmap_allow_mixed && turn_logging_id == o.turn_logging_id && enable_implicit_rollback == o.enable_implicit_rollback && - allow_codec_switching == o.allow_codec_switching; + allow_codec_switching == o.allow_codec_switching && + report_usage_pattern_delay_ms == o.report_usage_pattern_delay_ms; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -975,82 +387,84 @@ bool PeerConnectionInterface::RTCConfiguration::operator!=( return !(*this == o); } -void PeerConnection::TransceiverStableState::set_newly_created() { - RTC_DCHECK(!has_m_section_); - newly_created_ = true; -} - -void PeerConnection::TransceiverStableState::SetMSectionIfUnset( - absl::optional mid, - absl::optional mline_index) { - if (!has_m_section_) { - mid_ = mid; - mline_index_ = mline_index; - has_m_section_ = true; +rtc::scoped_refptr 
PeerConnection::Create( + rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + std::unique_ptr event_log, + std::unique_ptr call, + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies) { + RTCError config_error = cricket::P2PTransportChannel::ValidateIceConfig( + ParseIceConfig(configuration)); + if (!config_error.ok()) { + RTC_LOG(LS_ERROR) << "Invalid configuration: " << config_error.message(); + return nullptr; } -} -void PeerConnection::TransceiverStableState::SetRemoteStreamIdsIfUnset( - const std::vector& ids) { - if (!remote_stream_ids_.has_value()) { - remote_stream_ids_ = ids; + if (!dependencies.allocator) { + RTC_LOG(LS_ERROR) + << "PeerConnection initialized without a PortAllocator? " + "This shouldn't happen if using PeerConnectionFactory."; + return nullptr; } -} -// Generate a RTCP CNAME when a PeerConnection is created. -std::string GenerateRtcpCname() { - std::string cname; - if (!rtc::CreateRandomString(kRtcpCnameLength, &cname)) { - RTC_LOG(LS_ERROR) << "Failed to generate CNAME."; - RTC_NOTREACHED(); + if (!dependencies.observer) { + // TODO(deadbeef): Why do we do this? + RTC_LOG(LS_ERROR) << "PeerConnection initialized without a " + "PeerConnectionObserver"; + return nullptr; } - return cname; + + bool is_unified_plan = + configuration.sdp_semantics == SdpSemantics::kUnifiedPlan; + // The PeerConnection constructor consumes some, but not all, dependencies. 
+ rtc::scoped_refptr pc( + new rtc::RefCountedObject( + context, options, is_unified_plan, std::move(event_log), + std::move(call), dependencies)); + if (!pc->Initialize(configuration, std::move(dependencies))) { + return nullptr; + } + return pc; } -bool ValidateOfferAnswerOptions( - const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options) { - return IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) && - IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video); -} - -// From |rtc_options|, fill parts of |session_options| shared by all generated -// m= sections (in other words, nothing that involves a map/array). -void ExtractSharedMediaSessionOptions( - const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options, - cricket::MediaSessionOptions* session_options) { - session_options->vad_enabled = rtc_options.voice_activity_detection; - session_options->bundle_enabled = rtc_options.use_rtp_mux; - session_options->raw_packetization_for_video = - rtc_options.raw_packetization_for_video; -} - -PeerConnection::PeerConnection(PeerConnectionFactory* factory, - std::unique_ptr event_log, - std::unique_ptr call) - : factory_(factory), +PeerConnection::PeerConnection( + rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + bool is_unified_plan, + std::unique_ptr event_log, + std::unique_ptr call, + PeerConnectionDependencies& dependencies) + : context_(context), + options_(options), + observer_(dependencies.observer), + is_unified_plan_(is_unified_plan), event_log_(std::move(event_log)), event_log_ptr_(event_log_.get()), - operations_chain_(rtc::OperationsChain::Create()), - rtcp_cname_(GenerateRtcpCname()), - local_streams_(StreamCollection::Create()), - remote_streams_(StreamCollection::Create()), + async_resolver_factory_(std::move(dependencies.async_resolver_factory)), + port_allocator_(std::move(dependencies.allocator)), + ice_transport_factory_(std::move(dependencies.ice_transport_factory)), + 
tls_cert_verifier_(std::move(dependencies.tls_cert_verifier)), call_(std::move(call)), call_ptr_(call_.get()), - local_ice_credentials_to_replace_(new LocalIceCredentialsToReplace()), data_channel_controller_(this), - weak_ptr_factory_(this) {} + message_handler_(signaling_thread()) {} PeerConnection::~PeerConnection() { TRACE_EVENT0("webrtc", "PeerConnection::~PeerConnection"); RTC_DCHECK_RUN_ON(signaling_thread()); - weak_ptr_factory_.InvalidateWeakPtrs(); + if (sdp_handler_) { + sdp_handler_->PrepareForShutdown(); + } // Need to stop transceivers before destroying the stats collector because // AudioRtpSender has a reference to the StatsCollector it will update when // stopping. - for (const auto& transceiver : transceivers_) { - transceiver->StopInternal(); + if (rtp_manager()) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + transceiver->StopInternal(); + } } stats_.reset(nullptr); @@ -1059,14 +473,15 @@ PeerConnection::~PeerConnection() { stats_collector_ = nullptr; } - // Don't destroy BaseChannels until after stats has been cleaned up so that - // the last stats request can still read from the channels. - DestroyAllChannels(); + if (sdp_handler_) { + // Don't destroy BaseChannels until after stats has been cleaned up so that + // the last stats request can still read from the channels. + sdp_handler_->DestroyAllChannels(); - RTC_LOG(LS_INFO) << "Session: " << session_id() << " is destroyed."; + RTC_LOG(LS_INFO) << "Session: " << session_id() << " is destroyed."; - webrtc_session_desc_factory_.reset(); - sctp_factory_.reset(); + sdp_handler_->ResetSessionDescFactory(); + } transport_controller_.reset(); // port_allocator_ lives on the network thread and should be destroyed there. @@ -1077,43 +492,11 @@ PeerConnection::~PeerConnection() { // call_ and event_log_ must be destroyed on the worker thread. 
worker_thread()->Invoke(RTC_FROM_HERE, [this] { RTC_DCHECK_RUN_ON(worker_thread()); + call_safety_.reset(); call_.reset(); // The event log must outlive call (and any other object that uses it). event_log_.reset(); }); - - // Process all pending notifications in the message queue. If we don't do - // this, requests will linger and not know they succeeded or failed. - rtc::MessageList list; - signaling_thread()->Clear(this, rtc::MQID_ANY, &list); - for (auto& msg : list) { - if (msg.message_id == MSG_CREATE_SESSIONDESCRIPTION_FAILED) { - // Processing CreateOffer() and CreateAnswer() messages ensures their - // observers are invoked even if the PeerConnection is destroyed early. - OnMessage(&msg); - } else { - // TODO(hbos): Consider processing all pending messages. This would mean - // that SetLocalDescription() and SetRemoteDescription() observers are - // informed of successes and failures; this is currently NOT the case. - delete msg.pdata; - } - } -} - -void PeerConnection::DestroyAllChannels() { - // Destroy video channels first since they may have a pointer to a voice - // channel. - for (const auto& transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - DestroyTransceiverChannel(transceiver); - } - } - for (const auto& transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - DestroyTransceiverChannel(transceiver); - } - } - DestroyDataChannelTransport(); } bool PeerConnection::Initialize( @@ -1122,33 +505,6 @@ bool PeerConnection::Initialize( RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "PeerConnection::Initialize"); - RTCError config_error = ValidateConfiguration(configuration); - if (!config_error.ok()) { - RTC_LOG(LS_ERROR) << "Invalid configuration: " << config_error.message(); - return false; - } - - if (!dependencies.allocator) { - RTC_LOG(LS_ERROR) - << "PeerConnection initialized without a PortAllocator? 
" - "This shouldn't happen if using PeerConnectionFactory."; - return false; - } - - if (!dependencies.observer) { - // TODO(deadbeef): Why do we do this? - RTC_LOG(LS_ERROR) << "PeerConnection initialized without a " - "PeerConnectionObserver"; - return false; - } - - observer_ = dependencies.observer; - async_resolver_factory_ = std::move(dependencies.async_resolver_factory); - port_allocator_ = std::move(dependencies.allocator); - packet_socket_factory_ = std::move(dependencies.packet_socket_factory); - ice_transport_factory_ = std::move(dependencies.ice_transport_factory); - tls_cert_verifier_ = std::move(dependencies.tls_cert_verifier); - cricket::ServerAddresses stun_servers; std::vector turn_servers; @@ -1171,8 +527,7 @@ bool PeerConnection::Initialize( rtc::Bind(&PeerConnection::InitializePortAllocator_n, this, stun_servers, turn_servers, configuration)); - // If initialization was successful, note if STUN or TURN servers - // were supplied. + // Note if STUN or TURN servers were supplied. if (!stun_servers.empty()) { NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); } @@ -1190,8 +545,6 @@ bool PeerConnection::Initialize( RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family, kPeerConnectionAddressFamilyCounter_Max); - const PeerConnectionFactoryInterface::Options& options = factory_->options(); - // RFC 3264: The numeric value of the session id and version in the // o line MUST be representable with a "64 bit signed integer". 
// Due to this constraint session id |session_id_| is max limited to @@ -1200,63 +553,35 @@ bool PeerConnection::Initialize( JsepTransportController::Config config; config.redetermine_role_on_ice_restart = configuration.redetermine_role_on_ice_restart; - config.ssl_max_version = factory_->options().ssl_max_version; - config.disable_encryption = options.disable_encryption; + config.ssl_max_version = options_.ssl_max_version; + config.disable_encryption = options_.disable_encryption; config.bundle_policy = configuration.bundle_policy; config.rtcp_mux_policy = configuration.rtcp_mux_policy; - // TODO(bugs.webrtc.org/9891) - Remove options.crypto_options then remove this - // stub. + // TODO(bugs.webrtc.org/9891) - Remove options_.crypto_options then remove + // this stub. config.crypto_options = configuration.crypto_options.has_value() ? *configuration.crypto_options - : options.crypto_options; + : options_.crypto_options; config.transport_observer = this; - // It's safe to pass |this| and using |rtcp_invoker_| and the |call_| pointer - // since the JsepTransportController instance is owned by this PeerConnection - // instance and is destroyed before both |rtcp_invoker_| and the |call_| - // pointer. - config.rtcp_handler = [this](const rtc::CopyOnWriteBuffer& packet, - int64_t packet_time_us) { - RTC_DCHECK_RUN_ON(network_thread()); - rtcp_invoker_.AsyncInvoke( - RTC_FROM_HERE, worker_thread(), [this, packet, packet_time_us] { - RTC_DCHECK_RUN_ON(worker_thread()); - // |call_| is reset on the worker thread in the PeerConnection - // destructor, so we check that it's still valid before propagating - // the packet. 
- if (call_) { - call_->Receiver()->DeliverPacket(MediaType::ANY, packet, - packet_time_us); - } - }); - }; + config.rtcp_handler = InitializeRtcpCallback(); config.event_log = event_log_ptr_; #if defined(ENABLE_EXTERNAL_AUTH) config.enable_external_auth = true; #endif config.active_reset_srtp_params = configuration.active_reset_srtp_params; - // Obtain a certificate from RTCConfiguration if any were provided (optional). - rtc::scoped_refptr certificate; - if (!configuration.certificates.empty()) { - // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of - // just picking the first one. The decision should be made based on the DTLS - // handshake. The DTLS negotiations need to know about all certificates. - certificate = configuration.certificates[0]; - } - - if (options.disable_encryption) { + if (options_.disable_encryption) { dtls_enabled_ = false; } else { // Enable DTLS by default if we have an identity store or a certificate. - dtls_enabled_ = (dependencies.cert_generator || certificate); + dtls_enabled_ = + (dependencies.cert_generator || !configuration.certificates.empty()); // |configuration| can override the default |dtls_enabled_| value. if (configuration.enable_dtls_srtp) { dtls_enabled_ = *(configuration.enable_dtls_srtp); } } - sctp_factory_ = factory_->CreateSctpTransportInternalFactory(); - if (configuration.enable_rtp_data_channel) { // Enable creation of RTP data channels if the kEnableRtpDataChannels is // set. It takes precendence over the disable_sctp_data_channels @@ -1264,9 +589,9 @@ bool PeerConnection::Initialize( data_channel_controller_.set_data_channel_type(cricket::DCT_RTP); } else { // DTLS has to be enabled to use SCTP. 
- if (!options.disable_sctp_data_channels && dtls_enabled_) { + if (!options_.disable_sctp_data_channels && dtls_enabled_) { data_channel_controller_.set_data_channel_type(cricket::DCT_SCTP); - config.sctp_factory = sctp_factory_.get(); + config.sctp_factory = context_->sctp_transport_factory(); } } @@ -1275,8 +600,6 @@ bool PeerConnection::Initialize( transport_controller_.reset(new JsepTransportController( signaling_thread(), network_thread(), port_allocator_.get(), async_resolver_factory_.get(), config)); - transport_controller_->SignalIceConnectionState.connect( - this, &PeerConnection::OnTransportControllerConnectionState); transport_controller_->SignalStandardizedIceConnectionState.connect( this, &PeerConnection::SetStandardizedIceConnectionState); transport_controller_->SignalConnectionState.connect( @@ -1293,92 +616,63 @@ bool PeerConnection::Initialize( this, &PeerConnection::OnTransportControllerDtlsHandshakeError); transport_controller_->SignalIceCandidatePairChanged.connect( this, &PeerConnection::OnTransportControllerCandidateChanged); + transport_controller_->SignalErrorDemuxingPacket.connect( + this, &PeerConnection::OnErrorDemuxingPacket); - stats_.reset(new StatsCollector(this)); - stats_collector_ = RTCStatsCollector::Create(this); + transport_controller_->SignalIceConnectionState.AddReceiver( + [this](cricket::IceConnectionState s) { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerConnectionState(s); + }); configuration_ = configuration; transport_controller_->SetIceConfig(ParseIceConfig(configuration)); - video_options_.screencast_min_bitrate_kbps = - configuration.screencast_min_bitrate; - audio_options_.combined_audio_video_bwe = - configuration.combined_audio_video_bwe; + stats_ = std::make_unique(this); + stats_collector_ = RTCStatsCollector::Create(this); - audio_options_.audio_jitter_buffer_max_packets = - configuration.audio_jitter_buffer_max_packets; + demuxing_observer_ = new rtc::RefCountedObject(observer_); - 
audio_options_.audio_jitter_buffer_fast_accelerate = - configuration.audio_jitter_buffer_fast_accelerate; + sdp_handler_ = + SdpOfferAnswerHandler::Create(this, configuration, dependencies); - audio_options_.audio_jitter_buffer_min_delay_ms = - configuration.audio_jitter_buffer_min_delay_ms; - - audio_options_.audio_jitter_buffer_enable_rtx_handling = - configuration.audio_jitter_buffer_enable_rtx_handling; - - // Whether the certificate generator/certificate is null or not determines - // what PeerConnectionDescriptionFactory will do, so make sure that we give it - // the right instructions by clearing the variables if needed. - if (!dtls_enabled_) { - dependencies.cert_generator.reset(); - certificate = nullptr; - } else if (certificate) { - // Favor generated certificate over the certificate generator. - dependencies.cert_generator.reset(); - } - - webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory( - signaling_thread(), channel_manager(), this, session_id(), - std::move(dependencies.cert_generator), certificate, &ssrc_generator_)); - webrtc_session_desc_factory_->SignalCertificateReady.connect( - this, &PeerConnection::OnCertificateReady); - - if (options.disable_encryption) { - webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED); - } - - webrtc_session_desc_factory_->set_enable_encrypted_rtp_header_extensions( - GetCryptoOptions().srtp.enable_encrypted_rtp_header_extensions); - webrtc_session_desc_factory_->set_is_unified_plan(IsUnifiedPlan()); + rtp_manager_ = std::make_unique( + IsUnifiedPlan(), signaling_thread(), worker_thread(), channel_manager(), + &usage_pattern_, observer_, stats_.get(), [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + sdp_handler_->UpdateNegotiationNeeded(); + }); // Add default audio/video transceivers for Plan B SDP. 
if (!IsUnifiedPlan()) { - transceivers_.push_back( + rtp_manager()->transceivers()->Add( RtpTransceiverProxyWithInternal::Create( signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO))); - transceivers_.push_back( + rtp_manager()->transceivers()->Add( RtpTransceiverProxyWithInternal::Create( signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO))); } - int delay_ms = - return_histogram_very_quickly_ ? 0 : REPORT_USAGE_PATTERN_DELAY_MS; - signaling_thread()->PostDelayed(RTC_FROM_HERE, delay_ms, this, - MSG_REPORT_USAGE_PATTERN, nullptr); - if (dependencies.video_bitrate_allocator_factory) { - video_bitrate_allocator_factory_ = - std::move(dependencies.video_bitrate_allocator_factory); - } else { - video_bitrate_allocator_factory_ = - CreateBuiltinVideoBitrateAllocatorFactory(); - } + int delay_ms = configuration.report_usage_pattern_delay_ms + ? *configuration.report_usage_pattern_delay_ms + : REPORT_USAGE_PATTERN_DELAY_MS; + message_handler_.RequestUsagePatternReport( + [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ReportUsagePattern(); + }, + delay_ms); + return true; } -RTCError PeerConnection::ValidateConfiguration( - const RTCConfiguration& config) const { - return cricket::P2PTransportChannel::ValidateIceConfig( - ParseIceConfig(config)); -} - rtc::scoped_refptr PeerConnection::local_streams() { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified " "Plan SdpSemantics. Please use GetSenders " "instead."; - return local_streams_; + return sdp_handler_->local_streams(); } rtc::scoped_refptr PeerConnection::remote_streams() { @@ -1386,7 +680,7 @@ rtc::scoped_refptr PeerConnection::remote_streams() { RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified " "Plan SdpSemantics. 
Please use GetReceivers " "instead."; - return remote_streams_; + return sdp_handler_->remote_streams(); } bool PeerConnection::AddStream(MediaStreamInterface* local_stream) { @@ -1394,35 +688,7 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) { RTC_CHECK(!IsUnifiedPlan()) << "AddStream is not available with Unified Plan " "SdpSemantics. Please use AddTrack instead."; TRACE_EVENT0("webrtc", "PeerConnection::AddStream"); - if (IsClosed()) { - return false; - } - if (!CanAddLocalMediaStream(local_streams_, local_stream)) { - return false; - } - - local_streams_->AddStream(local_stream); - MediaStreamObserver* observer = new MediaStreamObserver(local_stream); - observer->SignalAudioTrackAdded.connect(this, - &PeerConnection::OnAudioTrackAdded); - observer->SignalAudioTrackRemoved.connect( - this, &PeerConnection::OnAudioTrackRemoved); - observer->SignalVideoTrackAdded.connect(this, - &PeerConnection::OnVideoTrackAdded); - observer->SignalVideoTrackRemoved.connect( - this, &PeerConnection::OnVideoTrackRemoved); - stream_observers_.push_back(std::unique_ptr(observer)); - - for (const auto& track : local_stream->GetAudioTracks()) { - AddAudioTrack(track.get(), local_stream); - } - for (const auto& track : local_stream->GetVideoTracks()) { - AddVideoTrack(track.get(), local_stream); - } - - stats_->AddStream(local_stream); - UpdateNegotiationNeeded(); - return true; + return sdp_handler_->AddStream(local_stream); } void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { @@ -1431,27 +697,7 @@ void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { "Plan SdpSemantics. 
Please use RemoveTrack " "instead."; TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream"); - if (!IsClosed()) { - for (const auto& track : local_stream->GetAudioTracks()) { - RemoveAudioTrack(track.get(), local_stream); - } - for (const auto& track : local_stream->GetVideoTracks()) { - RemoveVideoTrack(track.get(), local_stream); - } - } - local_streams_->RemoveStream(local_stream); - stream_observers_.erase( - std::remove_if( - stream_observers_.begin(), stream_observers_.end(), - [local_stream](const std::unique_ptr& observer) { - return observer->stream()->id().compare(local_stream->id()) == 0; - }), - stream_observers_.end()); - - if (IsClosed()) { - return; - } - UpdateNegotiationNeeded(); + sdp_handler_->RemoveStream(local_stream); } RTCErrorOr> PeerConnection::AddTrack( @@ -1471,126 +717,19 @@ RTCErrorOr> PeerConnection::AddTrack( LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "PeerConnection is closed."); } - if (FindSenderForTrack(track)) { + if (rtp_manager()->FindSenderForTrack(track)) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "Sender already exists for track " + track->id() + "."); } - auto sender_or_error = - (IsUnifiedPlan() ? 
AddTrackUnifiedPlan(track, stream_ids) - : AddTrackPlanB(track, stream_ids)); + auto sender_or_error = rtp_manager()->AddTrack(track, stream_ids); if (sender_or_error.ok()) { - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); stats_->AddTrack(track); } return sender_or_error; } -RTCErrorOr> -PeerConnection::AddTrackPlanB( - rtc::scoped_refptr track, - const std::vector& stream_ids) { - if (stream_ids.size() > 1u) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, - "AddTrack with more than one stream is not " - "supported with Plan B semantics."); - } - std::vector adjusted_stream_ids = stream_ids; - if (adjusted_stream_ids.empty()) { - adjusted_stream_ids.push_back(rtc::CreateRandomUuid()); - } - cricket::MediaType media_type = - (track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO); - auto new_sender = - CreateSender(media_type, track->id(), track, adjusted_stream_ids, {}); - if (track->kind() == MediaStreamTrackInterface::kAudioKind) { - new_sender->internal()->SetMediaChannel(voice_media_channel()); - GetAudioTransceiver()->internal()->AddSender(new_sender); - const RtpSenderInfo* sender_info = - FindSenderInfo(local_audio_sender_infos_, - new_sender->internal()->stream_ids()[0], track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } - } else { - RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind()); - new_sender->internal()->SetMediaChannel(video_media_channel()); - GetVideoTransceiver()->internal()->AddSender(new_sender); - const RtpSenderInfo* sender_info = - FindSenderInfo(local_video_sender_infos_, - new_sender->internal()->stream_ids()[0], track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } - } - return rtc::scoped_refptr(new_sender); -} - -RTCErrorOr> -PeerConnection::AddTrackUnifiedPlan( - rtc::scoped_refptr track, - const std::vector& stream_ids) { - auto 
transceiver = FindFirstTransceiverForAddedTrack(track); - if (transceiver) { - RTC_LOG(LS_INFO) << "Reusing an existing " - << cricket::MediaTypeToString(transceiver->media_type()) - << " transceiver for AddTrack."; - if (transceiver->stopping()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "The existing transceiver is stopping."); - } - - if (transceiver->direction() == RtpTransceiverDirection::kRecvOnly) { - transceiver->internal()->set_direction( - RtpTransceiverDirection::kSendRecv); - } else if (transceiver->direction() == RtpTransceiverDirection::kInactive) { - transceiver->internal()->set_direction( - RtpTransceiverDirection::kSendOnly); - } - transceiver->sender()->SetTrack(track); - transceiver->internal()->sender_internal()->set_stream_ids(stream_ids); - transceiver->internal()->set_reused_for_addtrack(true); - } else { - cricket::MediaType media_type = - (track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO); - RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type) - << " transceiver in response to a call to AddTrack."; - std::string sender_id = track->id(); - // Avoid creating a sender with an existing ID by generating a random ID. - // This can happen if this is the second time AddTrack has created a sender - // for this track. 
- if (FindSenderById(sender_id)) { - sender_id = rtc::CreateRandomUuid(); - } - auto sender = CreateSender(media_type, sender_id, track, stream_ids, {}); - auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); - transceiver = CreateAndAddTransceiver(sender, receiver); - transceiver->internal()->set_created_by_addtrack(true); - transceiver->internal()->set_direction(RtpTransceiverDirection::kSendRecv); - } - return transceiver->sender(); -} - -rtc::scoped_refptr> -PeerConnection::FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track) { - RTC_DCHECK(track); - for (auto transceiver : transceivers_) { - if (!transceiver->sender()->track() && - cricket::MediaTypeToString(transceiver->media_type()) == - track->kind() && - !transceiver->internal()->has_ever_been_used_to_send() && - !transceiver->stopped()) { - return transceiver; - } - } - return nullptr; -} - bool PeerConnection::RemoveTrack(RtpSenderInterface* sender) { TRACE_EVENT0("webrtc", "PeerConnection::RemoveTrack"); return RemoveTrackNew(sender).ok(); @@ -1622,10 +761,12 @@ RTCError PeerConnection::RemoveTrackNew( } else { bool removed; if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { - removed = GetAudioTransceiver()->internal()->RemoveSender(sender); + removed = rtp_manager()->GetAudioTransceiver()->internal()->RemoveSender( + sender); } else { RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, sender->media_type()); - removed = GetVideoTransceiver()->internal()->RemoveSender(sender); + removed = rtp_manager()->GetVideoTransceiver()->internal()->RemoveSender( + sender); } if (!removed) { LOG_AND_RETURN_ERROR( @@ -1633,19 +774,14 @@ RTCError PeerConnection::RemoveTrackNew( "Couldn't find sender " + sender->id() + " to remove."); } } - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); return RTCError::OK(); } rtc::scoped_refptr> PeerConnection::FindTransceiverBySender( rtc::scoped_refptr sender) { - for (auto transceiver : transceivers_) { - if (transceiver->sender() == 
sender) { - return transceiver; - } - } - return nullptr; + return rtp_manager()->transceivers()->FindBySender(sender); } RTCErrorOr> @@ -1701,6 +837,7 @@ PeerConnection::AddTransceiver( rtc::scoped_refptr track, const RtpTransceiverInit& init, bool update_negotiation_needed) { + RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK((media_type == cricket::MEDIA_TYPE_AUDIO || media_type == cricket::MEDIA_TYPE_VIDEO)); if (track) { @@ -1780,100 +917,27 @@ PeerConnection::AddTransceiver( << " transceiver in response to a call to AddTransceiver."; // Set the sender ID equal to the track ID if the track is specified unless // that sender ID is already in use. - std::string sender_id = - (track && !FindSenderById(track->id()) ? track->id() - : rtc::CreateRandomUuid()); - auto sender = CreateSender(media_type, sender_id, track, init.stream_ids, - parameters.encodings); - auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); - auto transceiver = CreateAndAddTransceiver(sender, receiver); + std::string sender_id = (track && !rtp_manager()->FindSenderById(track->id()) + ? 
track->id() + : rtc::CreateRandomUuid()); + auto sender = rtp_manager()->CreateSender( + media_type, sender_id, track, init.stream_ids, parameters.encodings); + auto receiver = + rtp_manager()->CreateReceiver(media_type, rtc::CreateRandomUuid()); + auto transceiver = rtp_manager()->CreateAndAddTransceiver(sender, receiver); transceiver->internal()->set_direction(init.direction); if (update_negotiation_needed) { - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); } return rtc::scoped_refptr(transceiver); } -rtc::scoped_refptr> -PeerConnection::CreateSender( - cricket::MediaType media_type, - const std::string& id, - rtc::scoped_refptr track, - const std::vector& stream_ids, - const std::vector& send_encodings) { - RTC_DCHECK_RUN_ON(signaling_thread()); - rtc::scoped_refptr> sender; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - RTC_DCHECK(!track || - (track->kind() == MediaStreamTrackInterface::kAudioKind)); - sender = RtpSenderProxyWithInternal::Create( - signaling_thread(), - AudioRtpSender::Create(worker_thread(), id, stats_.get(), this)); - NoteUsageEvent(UsageEvent::AUDIO_ADDED); - } else { - RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); - RTC_DCHECK(!track || - (track->kind() == MediaStreamTrackInterface::kVideoKind)); - sender = RtpSenderProxyWithInternal::Create( - signaling_thread(), VideoRtpSender::Create(worker_thread(), id, this)); - NoteUsageEvent(UsageEvent::VIDEO_ADDED); - } - bool set_track_succeeded = sender->SetTrack(track); - RTC_DCHECK(set_track_succeeded); - sender->internal()->set_stream_ids(stream_ids); - sender->internal()->set_init_send_encodings(send_encodings); - return sender; -} - -rtc::scoped_refptr> -PeerConnection::CreateReceiver(cricket::MediaType media_type, - const std::string& receiver_id) { - rtc::scoped_refptr> - receiver; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), new AudioRtpReceiver(worker_thread(), receiver_id, - 
std::vector({}))); - NoteUsageEvent(UsageEvent::AUDIO_ADDED); - } else { - RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); - receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), new VideoRtpReceiver(worker_thread(), receiver_id, - std::vector({}))); - NoteUsageEvent(UsageEvent::VIDEO_ADDED); - } - return receiver; -} - -rtc::scoped_refptr> -PeerConnection::CreateAndAddTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> - receiver) { - // Ensure that the new sender does not have an ID that is already in use by - // another sender. - // Allow receiver IDs to conflict since those come from remote SDP (which - // could be invalid, but should not cause a crash). - RTC_DCHECK(!FindSenderById(sender->id())); - auto transceiver = RtpTransceiverProxyWithInternal::Create( - signaling_thread(), - new RtpTransceiver( - sender, receiver, channel_manager(), - sender->media_type() == cricket::MEDIA_TYPE_AUDIO - ? channel_manager()->GetSupportedAudioRtpHeaderExtensions() - : channel_manager()->GetSupportedVideoRtpHeaderExtensions())); - transceivers_.push_back(transceiver); - transceiver->internal()->SignalNegotiationNeeded.connect( - this, &PeerConnection::OnNegotiationNeeded); - return transceiver; -} - void PeerConnection::OnNegotiationNeeded() { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsClosed()); - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); } rtc::scoped_refptr PeerConnection::CreateSender( @@ -1904,18 +968,18 @@ rtc::scoped_refptr PeerConnection::CreateSender( rtc::scoped_refptr> new_sender; if (kind == MediaStreamTrackInterface::kAudioKind) { auto audio_sender = AudioRtpSender::Create( - worker_thread(), rtc::CreateRandomUuid(), stats_.get(), this); - audio_sender->SetMediaChannel(voice_media_channel()); + worker_thread(), rtc::CreateRandomUuid(), stats_.get(), rtp_manager()); + audio_sender->SetMediaChannel(rtp_manager()->voice_media_channel()); new_sender = RtpSenderProxyWithInternal::Create( 
signaling_thread(), audio_sender); - GetAudioTransceiver()->internal()->AddSender(new_sender); + rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender); } else if (kind == MediaStreamTrackInterface::kVideoKind) { - auto video_sender = - VideoRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(), this); - video_sender->SetMediaChannel(video_media_channel()); + auto video_sender = VideoRtpSender::Create( + worker_thread(), rtc::CreateRandomUuid(), rtp_manager()); + video_sender->SetMediaChannel(rtp_manager()->video_media_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), video_sender); - GetVideoTransceiver()->internal()->AddSender(new_sender); + rtp_manager()->GetVideoTransceiver()->internal()->AddSender(new_sender); } else { RTC_LOG(LS_ERROR) << "CreateSender called with invalid kind: " << kind; return nullptr; @@ -1929,67 +993,30 @@ std::vector> PeerConnection::GetSenders() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> ret; - for (const auto& sender : GetSendersInternal()) { + for (const auto& sender : rtp_manager()->GetSendersInternal()) { ret.push_back(sender); } return ret; } -std::vector>> -PeerConnection::GetSendersInternal() const { - std::vector>> - all_senders; - for (const auto& transceiver : transceivers_) { - if (IsUnifiedPlan() && transceiver->internal()->stopped()) - continue; - - auto senders = transceiver->internal()->senders(); - all_senders.insert(all_senders.end(), senders.begin(), senders.end()); - } - return all_senders; -} - std::vector> PeerConnection::GetReceivers() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> ret; - for (const auto& receiver : GetReceiversInternal()) { + for (const auto& receiver : rtp_manager()->GetReceiversInternal()) { ret.push_back(receiver); } return ret; } -std::vector< - rtc::scoped_refptr>> -PeerConnection::GetReceiversInternal() const { - std::vector< - rtc::scoped_refptr>> - all_receivers; - for (const auto& transceiver : 
transceivers_) { - if (IsUnifiedPlan() && transceiver->internal()->stopped()) - continue; - - auto receivers = transceiver->internal()->receivers(); - all_receivers.insert(all_receivers.end(), receivers.begin(), - receivers.end()); - } - return all_receivers; -} - std::vector> PeerConnection::GetTransceivers() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(IsUnifiedPlan()) << "GetTransceivers is only supported with Unified Plan SdpSemantics."; std::vector> all_transceivers; - for (const auto& transceiver : transceivers_) { - // Temporary fix: Do not show stopped transceivers. - // The long term fix is to remove them from transceivers_, but this - // turns out to cause issues with audio channel lifetimes. - // TODO(https://crbug.com/webrtc/11840): Fix issue. - if (!transceiver->stopped()) { - all_transceivers.push_back(transceiver); - } + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + all_transceivers.push_back(transceiver); } return all_transceivers; } @@ -2012,8 +1039,7 @@ bool PeerConnection::GetStats(StatsObserver* observer, << track->id(); return false; } - signaling_thread()->Post(RTC_FROM_HERE, this, MSG_GETSTATS, - new GetStatsMsg(observer, track)); + message_handler_.PostGetStats(observer, stats_.get(), track); return true; } @@ -2034,7 +1060,8 @@ void PeerConnection::GetStats( RTC_DCHECK(stats_collector_); rtc::scoped_refptr internal_sender; if (selector) { - for (const auto& proxy_transceiver : transceivers_) { + for (const auto& proxy_transceiver : + rtp_manager()->transceivers()->List()) { for (const auto& proxy_sender : proxy_transceiver->internal()->senders()) { if (proxy_sender == selector) { @@ -2063,7 +1090,8 @@ void PeerConnection::GetStats( RTC_DCHECK(stats_collector_); rtc::scoped_refptr internal_receiver; if (selector) { - for (const auto& proxy_transceiver : transceivers_) { + for (const auto& proxy_transceiver : + rtp_manager()->transceivers()->List()) { for (const auto& proxy_receiver : 
proxy_transceiver->internal()->receivers()) { if (proxy_receiver == selector) { @@ -2085,7 +1113,7 @@ void PeerConnection::GetStats( PeerConnectionInterface::SignalingState PeerConnection::signaling_state() { RTC_DCHECK_RUN_ON(signaling_thread()); - return signaling_state_; + return sdp_handler_->signaling_state(); } PeerConnectionInterface::IceConnectionState @@ -2114,9 +1142,9 @@ PeerConnection::ice_gathering_state() { absl::optional PeerConnection::can_trickle_ice_candidates() { RTC_DCHECK_RUN_ON(signaling_thread()); - SessionDescriptionInterface* description = current_remote_description_.get(); + const SessionDescriptionInterface* description = current_remote_description(); if (!description) { - description = pending_remote_description_.get(); + description = pending_remote_description(); } if (!description) { return absl::nullopt; @@ -2151,7 +1179,7 @@ rtc::scoped_refptr PeerConnection::CreateDataChannel( // Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or // the first SCTP DataChannel. if (data_channel_type() == cricket::DCT_RTP || first_datachannel) { - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); } NoteUsageEvent(UsageEvent::DATA_ADDED); return channel; @@ -2159,1768 +1187,59 @@ rtc::scoped_refptr PeerConnection::CreateDataChannel( void PeerConnection::RestartIce() { RTC_DCHECK_RUN_ON(signaling_thread()); - local_ice_credentials_to_replace_->SetIceCredentialsFromLocalDescriptions( - current_local_description_.get(), pending_local_description_.get()); - UpdateNegotiationNeeded(); + sdp_handler_->RestartIce(); } void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. 
- operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - options](std::function operations_chain_callback) { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - observer_refptr->OnFailure( - RTCError(RTCErrorType::INTERNAL_ERROR, - "CreateOffer failed because the session was shut down")); - operations_chain_callback(); - return; - } - // The operation completes asynchronously when the wrapper is invoked. - rtc::scoped_refptr - observer_wrapper(new rtc::RefCountedObject< - CreateSessionDescriptionObserverOperationWrapper>( - std::move(observer_refptr), - std::move(operations_chain_callback))); - this_weak_ptr->DoCreateOffer(options, observer_wrapper); - }); -} - -void PeerConnection::DoCreateOffer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer) { - RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoCreateOffer"); - - if (!observer) { - RTC_LOG(LS_ERROR) << "CreateOffer - observer is NULL."; - return; - } - - if (IsClosed()) { - std::string error = "CreateOffer called when PeerConnection is closed."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); - return; - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. 
- if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "CreateOffer: " << error_message; - PostCreateSessionDescriptionFailure( - observer, - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - - if (!ValidateOfferAnswerOptions(options)) { - std::string error = "CreateOffer called with invalid options."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_PARAMETER, std::move(error))); - return; - } - - // Legacy handling for offer_to_receive_audio and offer_to_receive_video. - // Specified in WebRTC section 4.4.3.2 "Legacy configuration extensions". - if (IsUnifiedPlan()) { - RTCError error = HandleLegacyOfferOptions(options); - if (!error.ok()) { - PostCreateSessionDescriptionFailure(observer, std::move(error)); - return; - } - } - - cricket::MediaSessionOptions session_options; - GetOptionsForOffer(options, &session_options); - webrtc_session_desc_factory_->CreateOffer(observer, options, session_options); -} - -RTCError PeerConnection::HandleLegacyOfferOptions( - const RTCOfferAnswerOptions& options) { - RTC_DCHECK(IsUnifiedPlan()); - - if (options.offer_to_receive_audio == 0) { - RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MEDIA_TYPE_AUDIO); - } else if (options.offer_to_receive_audio == 1) { - AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_AUDIO); - } else if (options.offer_to_receive_audio > 1) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, - "offer_to_receive_audio > 1 is not supported."); - } - - if (options.offer_to_receive_video == 0) { - RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MEDIA_TYPE_VIDEO); - } else if (options.offer_to_receive_video == 1) { - AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); - } else if (options.offer_to_receive_video > 1) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, - 
"offer_to_receive_video > 1 is not supported."); - } - - return RTCError::OK(); -} - -void PeerConnection::RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MediaType media_type) { - for (const auto& transceiver : GetReceivingTransceiversOfType(media_type)) { - RtpTransceiverDirection new_direction = - RtpTransceiverDirectionWithRecvSet(transceiver->direction(), false); - if (new_direction != transceiver->direction()) { - RTC_LOG(LS_INFO) << "Changing " << cricket::MediaTypeToString(media_type) - << " transceiver (MID=" - << transceiver->mid().value_or("") << ") from " - << RtpTransceiverDirectionToString( - transceiver->direction()) - << " to " - << RtpTransceiverDirectionToString(new_direction) - << " since CreateOffer specified offer_to_receive=0"; - transceiver->internal()->set_direction(new_direction); - } - } -} - -void PeerConnection::AddUpToOneReceivingTransceiverOfType( - cricket::MediaType media_type) { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (GetReceivingTransceiversOfType(media_type).empty()) { - RTC_LOG(LS_INFO) - << "Adding one recvonly " << cricket::MediaTypeToString(media_type) - << " transceiver since CreateOffer specified offer_to_receive=1"; - RtpTransceiverInit init; - init.direction = RtpTransceiverDirection::kRecvOnly; - AddTransceiver(media_type, nullptr, init, - /*update_negotiation_needed=*/false); - } -} - -std::vector>> -PeerConnection::GetReceivingTransceiversOfType(cricket::MediaType media_type) { - std::vector< - rtc::scoped_refptr>> - receiving_transceivers; - for (const auto& transceiver : transceivers_) { - if (!transceiver->stopped() && transceiver->media_type() == media_type && - RtpTransceiverDirectionHasRecv(transceiver->direction())) { - receiving_transceivers.push_back(transceiver); - } - } - return receiving_transceivers; + sdp_handler_->CreateOffer(observer, options); } void PeerConnection::CreateAnswer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) { 
RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - options](std::function operations_chain_callback) { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - observer_refptr->OnFailure(RTCError( - RTCErrorType::INTERNAL_ERROR, - "CreateAnswer failed because the session was shut down")); - operations_chain_callback(); - return; - } - // The operation completes asynchronously when the wrapper is invoked. - rtc::scoped_refptr - observer_wrapper(new rtc::RefCountedObject< - CreateSessionDescriptionObserverOperationWrapper>( - std::move(observer_refptr), - std::move(operations_chain_callback))); - this_weak_ptr->DoCreateAnswer(options, observer_wrapper); - }); -} - -void PeerConnection::DoCreateAnswer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer) { - RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoCreateAnswer"); - if (!observer) { - RTC_LOG(LS_ERROR) << "CreateAnswer - observer is NULL."; - return; - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. 
- if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "CreateAnswer: " << error_message; - PostCreateSessionDescriptionFailure( - observer, - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - - if (!(signaling_state_ == kHaveRemoteOffer || - signaling_state_ == kHaveLocalPrAnswer)) { - std::string error = - "PeerConnection cannot create an answer in a state other than " - "have-remote-offer or have-local-pranswer."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); - return; - } - - // The remote description should be set if we're in the right state. - RTC_DCHECK(remote_description()); - - if (IsUnifiedPlan()) { - if (options.offer_to_receive_audio != RTCOfferAnswerOptions::kUndefined) { - RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_audio is not " - "supported with Unified Plan semantics. Use the " - "RtpTransceiver API instead."; - } - if (options.offer_to_receive_video != RTCOfferAnswerOptions::kUndefined) { - RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_video is not " - "supported with Unified Plan semantics. Use the " - "RtpTransceiver API instead."; - } - } - - cricket::MediaSessionOptions session_options; - GetOptionsForAnswer(options, &session_options); - - webrtc_session_desc_factory_->CreateAnswer(observer, session_options); + sdp_handler_->CreateAnswer(observer, options); } void PeerConnection::SetLocalDescription( SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc_ptr) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. 
- operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - desc = std::unique_ptr(desc_ptr)]( - std::function operations_chain_callback) mutable { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - // For consistency with SetSessionDescriptionObserverAdapter whose - // posted messages doesn't get processed when the PC is destroyed, we - // do not inform |observer_refptr| that the operation failed. - operations_chain_callback(); - return; - } - // SetSessionDescriptionObserverAdapter takes care of making sure the - // |observer_refptr| is invoked in a posted message. - this_weak_ptr->DoSetLocalDescription( - std::move(desc), - rtc::scoped_refptr( - new rtc::RefCountedObject( - this_weak_ptr, observer_refptr))); - // For backwards-compatability reasons, we declare the operation as - // completed here (rather than in a post), so that the operation chain - // is not blocked by this operation when the observer is invoked. This - // allows the observer to trigger subsequent offer/answer operations - // synchronously if the operation chain is now empty. - operations_chain_callback(); - }); + sdp_handler_->SetLocalDescription(observer, desc_ptr); } void PeerConnection::SetLocalDescription( std::unique_ptr desc, rtc::scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, - desc = std::move(desc)]( - std::function operations_chain_callback) mutable { - // Abort early if |this_weak_ptr| is no longer valid. 
- if (!this_weak_ptr) { - observer->OnSetLocalDescriptionComplete(RTCError( - RTCErrorType::INTERNAL_ERROR, - "SetLocalDescription failed because the session was shut down")); - operations_chain_callback(); - return; - } - this_weak_ptr->DoSetLocalDescription(std::move(desc), observer); - // DoSetLocalDescription() is implemented as a synchronous operation. - // The |observer| will already have been informed that it completed, and - // we can mark this operation as complete without any loose ends. - operations_chain_callback(); - }); + sdp_handler_->SetLocalDescription(std::move(desc), observer); } void PeerConnection::SetLocalDescription( SetSessionDescriptionObserver* observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - SetLocalDescription( - new rtc::RefCountedObject( - weak_ptr_factory_.GetWeakPtr(), observer)); + sdp_handler_->SetLocalDescription(observer); } void PeerConnection::SetLocalDescription( rtc::scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - // The |create_sdp_observer| handles performing DoSetLocalDescription() with - // the resulting description as well as completing the operation. - rtc::scoped_refptr - create_sdp_observer( - new rtc::RefCountedObject( - weak_ptr_factory_.GetWeakPtr(), observer)); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - create_sdp_observer](std::function operations_chain_callback) { - // The |create_sdp_observer| is responsible for completing the - // operation. - create_sdp_observer->SetOperationCompleteCallback( - std::move(operations_chain_callback)); - // Abort early if |this_weak_ptr| is no longer valid. This triggers the - // same code path as if DoCreateOffer() or DoCreateAnswer() failed. 
- if (!this_weak_ptr) { - create_sdp_observer->OnFailure(RTCError( - RTCErrorType::INTERNAL_ERROR, - "SetLocalDescription failed because the session was shut down")); - return; - } - switch (this_weak_ptr->signaling_state()) { - case PeerConnectionInterface::kStable: - case PeerConnectionInterface::kHaveLocalOffer: - case PeerConnectionInterface::kHaveRemotePrAnswer: - // TODO(hbos): If [LastCreatedOffer] exists and still represents the - // current state of the system, use that instead of creating another - // offer. - this_weak_ptr->DoCreateOffer(RTCOfferAnswerOptions(), - create_sdp_observer); - break; - case PeerConnectionInterface::kHaveLocalPrAnswer: - case PeerConnectionInterface::kHaveRemoteOffer: - // TODO(hbos): If [LastCreatedAnswer] exists and still represents - // the current state of the system, use that instead of creating - // another answer. - this_weak_ptr->DoCreateAnswer(RTCOfferAnswerOptions(), - create_sdp_observer); - break; - case PeerConnectionInterface::kClosed: - create_sdp_observer->OnFailure(RTCError( - RTCErrorType::INVALID_STATE, - "SetLocalDescription called when PeerConnection is closed.")); - break; - } - }); -} - -void PeerConnection::DoSetLocalDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) { - RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoSetLocalDescription"); - - if (!observer) { - RTC_LOG(LS_ERROR) << "SetLocalDescription - observer is NULL."; - return; - } - - if (!desc) { - observer->OnSetLocalDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, "SessionDescription is NULL.")); - return; - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. 
- if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "SetLocalDescription: " << error_message; - observer->OnSetLocalDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - - // For SLD we support only explicit rollback. - if (desc->GetType() == SdpType::kRollback) { - if (IsUnifiedPlan()) { - observer->OnSetLocalDescriptionComplete(Rollback(desc->GetType())); - } else { - observer->OnSetLocalDescriptionComplete( - RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Rollback not supported in Plan B")); - } - return; - } - - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL); - if (!error.ok()) { - std::string error_message = GetSetDescriptionErrorMessage( - cricket::CS_LOCAL, desc->GetType(), error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetLocalDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - - // Grab the description type before moving ownership to ApplyLocalDescription, - // which may destroy it before returning. - const SdpType type = desc->GetType(); - - error = ApplyLocalDescription(std::move(desc)); - // |desc| may be destroyed at this point. - - if (!error.ok()) { - // If ApplyLocalDescription fails, the PeerConnection could be in an - // inconsistent state, so act conservatively here and set the session error - // so that future calls to SetLocalDescription/SetRemoteDescription fail. 
- SetSessionError(SessionError::kContent, error.message()); - std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_LOCAL, type, error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetLocalDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - RTC_DCHECK(local_description()); - - if (local_description()->GetType() == SdpType::kAnswer) { - // 3.2.10.1: For each transceiver in the connection's set of transceivers - // run the following steps: - if (IsUnifiedPlan()) { - for (auto it = transceivers_.begin(); it != transceivers_.end();) { - const auto& transceiver = *it; - // 3.2.10.1.1: If transceiver is stopped, associated with an m= section - // and the associated m= section is rejected in - // connection.[[CurrentLocalDescription]] or - // connection.[[CurrentRemoteDescription]], remove the - // transceiver from the connection's set of transceivers. - if (transceiver->stopped()) { - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, local_description()); - - if (content && content->rejected) { - RTC_LOG(LS_INFO) << "Dissociating transceiver" - << " since the media section is being recycled."; - (*it)->internal()->set_mid(absl::nullopt); - (*it)->internal()->set_mline_index(absl::nullopt); - it = transceivers_.erase(it); - } else { - ++it; - } - } else { - ++it; - } - } - } - - // TODO(deadbeef): We already had to hop to the network thread for - // MaybeStartGathering... - network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator_.get())); - // Make UMA notes about what was agreed to. 
- ReportNegotiatedSdpSemantics(*local_description()); - } - - if (IsUnifiedPlan()) { - bool was_negotiation_needed = is_negotiation_needed_; - UpdateNegotiationNeeded(); - if (signaling_state() == kStable && was_negotiation_needed && - is_negotiation_needed_) { - Observer()->OnRenegotiationNeeded(); - } - } - - observer->OnSetLocalDescriptionComplete(RTCError::OK()); - NoteUsageEvent(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED); - - // MaybeStartGathering needs to be called after informing the observer so that - // we don't signal any candidates before signaling that SetLocalDescription - // completed. - transport_controller_->MaybeStartGathering(); -} - -RTCError PeerConnection::ApplyLocalDescription( - std::unique_ptr desc) { - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(desc); - - // Update stats here so that we have the most recent stats for tracks and - // streams that might be removed by updating the session description. - stats_->UpdateStats(kStatsOutputLevelStandard); - - // Take a reference to the old local description since it's used below to - // compare against the new local description. When setting the new local - // description, grab ownership of the replaced session description in case it - // is the same as |old_local_description|, to keep it alive for the duration - // of the method. - const SessionDescriptionInterface* old_local_description = - local_description(); - std::unique_ptr replaced_local_description; - SdpType type = desc->GetType(); - if (type == SdpType::kAnswer) { - replaced_local_description = pending_local_description_ - ? 
std::move(pending_local_description_) - : std::move(current_local_description_); - current_local_description_ = std::move(desc); - pending_local_description_ = nullptr; - current_remote_description_ = std::move(pending_remote_description_); - } else { - replaced_local_description = std::move(pending_local_description_); - pending_local_description_ = std::move(desc); - } - // The session description to apply now must be accessed by - // |local_description()|. - RTC_DCHECK(local_description()); - - // Report statistics about any use of simulcast. - ReportSimulcastApiVersion(kSimulcastVersionApplyLocalDescription, - *local_description()->description()); - - if (!is_caller_) { - if (remote_description()) { - // Remote description was applied first, so this PC is the callee. - is_caller_ = false; - } else { - // Local description is applied first, so this PC is the caller. - is_caller_ = true; - } - } - - RTCError error = PushdownTransportDescription(cricket::CS_LOCAL, type); - if (!error.ok()) { - return error; - } - - if (IsUnifiedPlan()) { - RTCError error = UpdateTransceiversAndDataChannels( - cricket::CS_LOCAL, *local_description(), old_local_description, - remote_description()); - if (!error.ok()) { - return error; - } - std::vector> remove_list; - std::vector> removed_streams; - for (const auto& transceiver : transceivers_) { - if (transceiver->stopped()) { - continue; - } - - // 2.2.7.1.1.(6-9): Set sender and receiver's transport slots. - // Note that code paths that don't set MID won't be able to use - // information about DTLS transports. 
- if (transceiver->mid()) { - auto dtls_transport = - LookupDtlsTransportByMidInternal(*transceiver->mid()); - transceiver->internal()->sender_internal()->set_transport( - dtls_transport); - transceiver->internal()->receiver_internal()->set_transport( - dtls_transport); - } - - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, local_description()); - if (!content) { - continue; - } - const MediaContentDescription* media_desc = content->media_description(); - // 2.2.7.1.6: If description is of type "answer" or "pranswer", then run - // the following steps: - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // 2.2.7.1.6.1: If direction is "sendonly" or "inactive", and - // transceiver's [[FiredDirection]] slot is either "sendrecv" or - // "recvonly", process the removal of a remote track for the media - // description, given transceiver, removeList, and muteTracks. - if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) && - (transceiver->internal()->fired_direction() && - RtpTransceiverDirectionHasRecv( - *transceiver->internal()->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver, &remove_list, - &removed_streams); - } - // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and - // [[FiredDirection]] slots to direction. - transceiver->internal()->set_current_direction(media_desc->direction()); - transceiver->internal()->set_fired_direction(media_desc->direction()); - } - } - auto observer = Observer(); - for (const auto& transceiver : remove_list) { - observer->OnRemoveTrack(transceiver->receiver()); - } - for (const auto& stream : removed_streams) { - observer->OnRemoveStream(stream); - } - } else { - // Media channels will be created only when offer is set. These may use new - // transports just created by PushdownTransportDescription. - if (type == SdpType::kOffer) { - // TODO(bugs.webrtc.org/4676) - Handle CreateChannel failure, as new local - // description is applied. 
Restore back to old description. - RTCError error = CreateChannels(*local_description()->description()); - if (!error.ok()) { - return error; - } - } - // Remove unused channels if MediaContentDescription is rejected. - RemoveUnusedChannels(local_description()->description()); - } - - error = UpdateSessionState(type, cricket::CS_LOCAL, - local_description()->description()); - if (!error.ok()) { - return error; - } - - if (remote_description()) { - // Now that we have a local description, we can push down remote candidates. - UseCandidatesInSessionDescription(remote_description()); - } - - pending_ice_restarts_.clear(); - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } - - // If setting the description decided our SSL role, allocate any necessary - // SCTP sids. - rtc::SSLRole role; - if (IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) { - data_channel_controller_.AllocateSctpSids(role); - } - - if (IsUnifiedPlan()) { - for (const auto& transceiver : transceivers_) { - if (transceiver->stopped()) { - continue; - } - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, local_description()); - if (!content) { - continue; - } - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (content->rejected || !channel || channel->local_streams().empty()) { - // 0 is a special value meaning "this sender has no associated send - // stream". Need to call this so the sender won't attempt to configure - // a no longer existing stream and run into DCHECKs in the lower - // layers. - transceiver->internal()->sender_internal()->SetSsrc(0); - } else { - // Get the StreamParams from the channel which could generate SSRCs. 
- const std::vector& streams = channel->local_streams(); - transceiver->internal()->sender_internal()->set_stream_ids( - streams[0].stream_ids()); - transceiver->internal()->sender_internal()->SetSsrc( - streams[0].first_ssrc()); - } - } - } else { - // Plan B semantics. - - // Update state and SSRC of local MediaStreams and DataChannels based on the - // local session description. - const cricket::ContentInfo* audio_content = - GetFirstAudioContent(local_description()->description()); - if (audio_content) { - if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); - } else { - const cricket::AudioContentDescription* audio_desc = - audio_content->media_description()->as_audio(); - UpdateLocalSenders(audio_desc->streams(), audio_desc->type()); - } - } - - const cricket::ContentInfo* video_content = - GetFirstVideoContent(local_description()->description()); - if (video_content) { - if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); - } else { - const cricket::VideoContentDescription* video_desc = - video_content->media_description()->as_video(); - UpdateLocalSenders(video_desc->streams(), video_desc->type()); - } - } - } - - const cricket::ContentInfo* data_content = - GetFirstDataContent(local_description()->description()); - if (data_content) { - const cricket::RtpDataContentDescription* rtp_data_desc = - data_content->media_description()->as_rtp_data(); - // rtp_data_desc will be null if this is an SCTP description. - if (rtp_data_desc) { - data_channel_controller_.UpdateLocalRtpDataChannels( - rtp_data_desc->streams()); - } - } - - if (type == SdpType::kAnswer && - local_ice_credentials_to_replace_->SatisfiesIceRestart( - *current_local_description_)) { - local_ice_credentials_to_replace_->ClearIceCredentials(); - } - - return RTCError::OK(); -} - -// The SDP parser used to populate these values by default for the 'content -// name' if an a=mid line was absent. 
-static absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { - switch (media_type) { - case cricket::MEDIA_TYPE_AUDIO: - return cricket::CN_AUDIO; - case cricket::MEDIA_TYPE_VIDEO: - return cricket::CN_VIDEO; - case cricket::MEDIA_TYPE_DATA: - return cricket::CN_DATA; - } - RTC_NOTREACHED(); - return ""; -} - -void PeerConnection::FillInMissingRemoteMids( - cricket::SessionDescription* new_remote_description) { - RTC_DCHECK(new_remote_description); - const cricket::ContentInfos no_infos; - const cricket::ContentInfos& local_contents = - (local_description() ? local_description()->description()->contents() - : no_infos); - const cricket::ContentInfos& remote_contents = - (remote_description() ? remote_description()->description()->contents() - : no_infos); - for (size_t i = 0; i < new_remote_description->contents().size(); ++i) { - cricket::ContentInfo& content = new_remote_description->contents()[i]; - if (!content.name.empty()) { - continue; - } - std::string new_mid; - absl::string_view source_explanation; - if (IsUnifiedPlan()) { - if (i < local_contents.size()) { - new_mid = local_contents[i].name; - source_explanation = "from the matching local media section"; - } else if (i < remote_contents.size()) { - new_mid = remote_contents[i].name; - source_explanation = "from the matching previous remote media section"; - } else { - new_mid = mid_generator_(); - source_explanation = "generated just now"; - } - } else { - new_mid = std::string( - GetDefaultMidForPlanB(content.media_description()->type())); - source_explanation = "to match pre-existing behavior"; - } - RTC_DCHECK(!new_mid.empty()); - content.name = new_mid; - new_remote_description->transport_infos()[i].content_name = new_mid; - RTC_LOG(LS_INFO) << "SetRemoteDescription: Remote media section at i=" << i - << " is missing an a=mid line. 
Filling in the value '" - << new_mid << "' " << source_explanation << "."; - } + sdp_handler_->SetLocalDescription(observer); } void PeerConnection::SetRemoteDescription( SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc_ptr) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - desc = std::unique_ptr(desc_ptr)]( - std::function operations_chain_callback) mutable { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - // For consistency with SetSessionDescriptionObserverAdapter whose - // posted messages doesn't get processed when the PC is destroyed, we - // do not inform |observer_refptr| that the operation failed. - operations_chain_callback(); - return; - } - // SetSessionDescriptionObserverAdapter takes care of making sure the - // |observer_refptr| is invoked in a posted message. - this_weak_ptr->DoSetRemoteDescription( - std::move(desc), - rtc::scoped_refptr( - new rtc::RefCountedObject( - this_weak_ptr, observer_refptr))); - // For backwards-compatability reasons, we declare the operation as - // completed here (rather than in a post), so that the operation chain - // is not blocked by this operation when the observer is invoked. This - // allows the observer to trigger subsequent offer/answer operations - // synchronously if the operation chain is now empty. - operations_chain_callback(); - }); + sdp_handler_->SetRemoteDescription(observer, desc_ptr); } void PeerConnection::SetRemoteDescription( std::unique_ptr desc, rtc::scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. 
If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, - desc = std::move(desc)]( - std::function operations_chain_callback) mutable { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - observer->OnSetRemoteDescriptionComplete(RTCError( - RTCErrorType::INTERNAL_ERROR, - "SetRemoteDescription failed because the session was shut down")); - operations_chain_callback(); - return; - } - this_weak_ptr->DoSetRemoteDescription(std::move(desc), - std::move(observer)); - // DoSetRemoteDescription() is implemented as a synchronous operation. - // The |observer| will already have been informed that it completed, and - // we can mark this operation as complete without any loose ends. - operations_chain_callback(); - }); -} - -void PeerConnection::DoSetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) { - RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoSetRemoteDescription"); - - if (!observer) { - RTC_LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; - return; - } - - if (!desc) { - observer->OnSetRemoteDescriptionComplete(RTCError( - RTCErrorType::INVALID_PARAMETER, "SessionDescription is NULL.")); - return; - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. 
- if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "SetRemoteDescription: " << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - if (IsUnifiedPlan()) { - if (configuration_.enable_implicit_rollback) { - if (desc->GetType() == SdpType::kOffer && - signaling_state() == kHaveLocalOffer) { - Rollback(desc->GetType()); - } - } - // Explicit rollback. - if (desc->GetType() == SdpType::kRollback) { - observer->OnSetRemoteDescriptionComplete(Rollback(desc->GetType())); - return; - } - } else if (desc->GetType() == SdpType::kRollback) { - observer->OnSetRemoteDescriptionComplete( - RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Rollback not supported in Plan B")); - return; - } - if (desc->GetType() == SdpType::kOffer) { - // Report to UMA the format of the received offer. - ReportSdpFormatReceived(*desc); - } - - // Handle remote descriptions missing a=mid lines for interop with legacy end - // points. - FillInMissingRemoteMids(desc->description()); - - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE); - if (!error.ok()) { - std::string error_message = GetSetDescriptionErrorMessage( - cricket::CS_REMOTE, desc->GetType(), error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(error.type(), std::move(error_message))); - return; - } - - // Grab the description type before moving ownership to - // ApplyRemoteDescription, which may destroy it before returning. - const SdpType type = desc->GetType(); - - error = ApplyRemoteDescription(std::move(desc)); - // |desc| may be destroyed at this point. - - if (!error.ok()) { - // If ApplyRemoteDescription fails, the PeerConnection could be in an - // inconsistent state, so act conservatively here and set the session error - // so that future calls to SetLocalDescription/SetRemoteDescription fail. 
- SetSessionError(SessionError::kContent, error.message()); - std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type, error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(error.type(), std::move(error_message))); - return; - } - RTC_DCHECK(remote_description()); - - if (type == SdpType::kAnswer) { - // TODO(deadbeef): We already had to hop to the network thread for - // MaybeStartGathering... - network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator_.get())); - // Make UMA notes about what was agreed to. - ReportNegotiatedSdpSemantics(*remote_description()); - } - - if (IsUnifiedPlan()) { - bool was_negotiation_needed = is_negotiation_needed_; - UpdateNegotiationNeeded(); - if (signaling_state() == kStable && was_negotiation_needed && - is_negotiation_needed_) { - Observer()->OnRenegotiationNeeded(); - } - } - - observer->OnSetRemoteDescriptionComplete(RTCError::OK()); - NoteUsageEvent(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED); -} - -RTCError PeerConnection::ApplyRemoteDescription( - std::unique_ptr desc) { - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(desc); - - // Update stats here so that we have the most recent stats for tracks and - // streams that might be removed by updating the session description. - stats_->UpdateStats(kStatsOutputLevelStandard); - - // Take a reference to the old remote description since it's used below to - // compare against the new remote description. When setting the new remote - // description, grab ownership of the replaced session description in case it - // is the same as |old_remote_description|, to keep it alive for the duration - // of the method. 
- const SessionDescriptionInterface* old_remote_description = - remote_description(); - std::unique_ptr replaced_remote_description; - SdpType type = desc->GetType(); - if (type == SdpType::kAnswer) { - replaced_remote_description = pending_remote_description_ - ? std::move(pending_remote_description_) - : std::move(current_remote_description_); - current_remote_description_ = std::move(desc); - pending_remote_description_ = nullptr; - current_local_description_ = std::move(pending_local_description_); - } else { - replaced_remote_description = std::move(pending_remote_description_); - pending_remote_description_ = std::move(desc); - } - // The session description to apply now must be accessed by - // |remote_description()|. - RTC_DCHECK(remote_description()); - - // Report statistics about any use of simulcast. - ReportSimulcastApiVersion(kSimulcastVersionApplyRemoteDescription, - *remote_description()->description()); - - RTCError error = PushdownTransportDescription(cricket::CS_REMOTE, type); - if (!error.ok()) { - return error; - } - // Transport and Media channels will be created only when offer is set. - if (IsUnifiedPlan()) { - RTCError error = UpdateTransceiversAndDataChannels( - cricket::CS_REMOTE, *remote_description(), local_description(), - old_remote_description); - if (!error.ok()) { - return error; - } - } else { - // Media channels will be created only when offer is set. These may use new - // transports just created by PushdownTransportDescription. - if (type == SdpType::kOffer) { - // TODO(mallinath) - Handle CreateChannel failure, as new local - // description is applied. Restore back to old description. - RTCError error = CreateChannels(*remote_description()->description()); - if (!error.ok()) { - return error; - } - } - // Remove unused channels if MediaContentDescription is rejected. 
- RemoveUnusedChannels(remote_description()->description()); - } - - // NOTE: Candidates allocation will be initiated only when - // SetLocalDescription is called. - error = UpdateSessionState(type, cricket::CS_REMOTE, - remote_description()->description()); - if (!error.ok()) { - return error; - } - - if (local_description() && - !UseCandidatesInSessionDescription(remote_description())) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidCandidates); - } - - if (old_remote_description) { - for (const cricket::ContentInfo& content : - old_remote_description->description()->contents()) { - // Check if this new SessionDescription contains new ICE ufrag and - // password that indicates the remote peer requests an ICE restart. - // TODO(deadbeef): When we start storing both the current and pending - // remote description, this should reset pending_ice_restarts and compare - // against the current description. - if (CheckForRemoteIceRestart(old_remote_description, remote_description(), - content.name)) { - if (type == SdpType::kOffer) { - pending_ice_restarts_.insert(content.name); - } - } else { - // We retain all received candidates only if ICE is not restarted. - // When ICE is restarted, all previous candidates belong to an old - // generation and should not be kept. - // TODO(deadbeef): This goes against the W3C spec which says the remote - // description should only contain candidates from the last set remote - // description plus any candidates added since then. We should remove - // this once we're sure it won't break anything. 
- WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( - old_remote_description, content.name, mutable_remote_description()); - } - } - } - - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } - - // Set the the ICE connection state to connecting since the connection may - // become writable with peer reflexive candidates before any remote candidate - // is signaled. - // TODO(pthatcher): This is a short-term solution for crbug/446908. A real fix - // is to have a new signal the indicates a change in checking state from the - // transport and expose a new checking() member from transport that can be - // read to determine the current checking state. The existing SignalConnecting - // actually means "gathering candidates", so cannot be be used here. - if (remote_description()->GetType() != SdpType::kOffer && - remote_description()->number_of_mediasections() > 0u && - ice_connection_state() == PeerConnectionInterface::kIceConnectionNew) { - SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); - } - - // If setting the description decided our SSL role, allocate any necessary - // SCTP sids. 
- rtc::SSLRole role; - if (IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) { - data_channel_controller_.AllocateSctpSids(role); - } - - if (IsUnifiedPlan()) { - std::vector> - now_receiving_transceivers; - std::vector> remove_list; - std::vector> added_streams; - std::vector> removed_streams; - for (const auto& transceiver : transceivers_) { - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, remote_description()); - if (!content) { - continue; - } - const MediaContentDescription* media_desc = content->media_description(); - RtpTransceiverDirection local_direction = - RtpTransceiverDirectionReversed(media_desc->direction()); - // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the - // RTCSessionDescription: Set the associated remote streams given - // transceiver.[[Receiver]], msids, addList, and removeList". - // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription - if (RtpTransceiverDirectionHasRecv(local_direction)) { - std::vector stream_ids; - if (!media_desc->streams().empty()) { - // The remote description has signaled the stream IDs. - stream_ids = media_desc->streams()[0].stream_ids(); - } - transceiver_stable_states_by_transceivers_[transceiver] - .SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); - - RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name - << " (" << GetStreamIdsString(stream_ids) << ")."; - SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), - stream_ids, &added_streams, - &removed_streams); - // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 - // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, - // and transceiver's current direction is neither sendrecv nor recvonly, - // process the addition of a remote track for the media description. 
- if (!transceiver->fired_direction() || - !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { - RTC_LOG(LS_INFO) - << "Processing the addition of a remote track for MID=" - << content->name << "."; - now_receiving_transceivers.push_back(transceiver); - } - } - // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's - // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the - // removal of a remote track for the media description, given transceiver, - // removeList, and muteTracks. - if (!RtpTransceiverDirectionHasRecv(local_direction) && - (transceiver->fired_direction() && - RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver, &remove_list, - &removed_streams); - } - // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. - transceiver->internal()->set_fired_direction(local_direction); - // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run - // the following steps: - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to - // direction. - transceiver->internal()->set_current_direction(local_direction); - // 2.2.8.1.11.[3-6]: Set the transport internal slots. - if (transceiver->mid()) { - auto dtls_transport = - LookupDtlsTransportByMidInternal(*transceiver->mid()); - transceiver->internal()->sender_internal()->set_transport( - dtls_transport); - transceiver->internal()->receiver_internal()->set_transport( - dtls_transport); - } - } - // 2.2.8.1.12: If the media description is rejected, and transceiver is - // not already stopped, stop the RTCRtpTransceiver transceiver. 
- if (content->rejected && !transceiver->stopped()) { - RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name - << " since the media section was rejected."; - transceiver->StopInternal(); - } - if (!content->rejected && - RtpTransceiverDirectionHasRecv(local_direction)) { - if (!media_desc->streams().empty() && - media_desc->streams()[0].has_ssrcs()) { - uint32_t ssrc = media_desc->streams()[0].first_ssrc(); - transceiver->internal()->receiver_internal()->SetupMediaChannel(ssrc); - } else { - transceiver->internal() - ->receiver_internal() - ->SetupUnsignaledMediaChannel(); - } - } - } - // Once all processing has finished, fire off callbacks. - auto observer = Observer(); - for (const auto& transceiver : now_receiving_transceivers) { - stats_->AddTrack(transceiver->receiver()->track()); - observer->OnTrack(transceiver); - observer->OnAddTrack(transceiver->receiver(), - transceiver->receiver()->streams()); - } - for (const auto& stream : added_streams) { - observer->OnAddStream(stream); - } - for (const auto& transceiver : remove_list) { - observer->OnRemoveTrack(transceiver->receiver()); - } - for (const auto& stream : removed_streams) { - observer->OnRemoveStream(stream); - } - } - - const cricket::ContentInfo* audio_content = - GetFirstAudioContent(remote_description()->description()); - const cricket::ContentInfo* video_content = - GetFirstVideoContent(remote_description()->description()); - const cricket::AudioContentDescription* audio_desc = - GetFirstAudioContentDescription(remote_description()->description()); - const cricket::VideoContentDescription* video_desc = - GetFirstVideoContentDescription(remote_description()->description()); - const cricket::RtpDataContentDescription* rtp_data_desc = - GetFirstRtpDataContentDescription(remote_description()->description()); - - // Check if the descriptions include streams, just in case the peer supports - // MSID, but doesn't indicate so with "a=msid-semantic". 
- if (remote_description()->description()->msid_supported() || - (audio_desc && !audio_desc->streams().empty()) || - (video_desc && !video_desc->streams().empty())) { - remote_peer_supports_msid_ = true; - } - - // We wait to signal new streams until we finish processing the description, - // since only at that point will new streams have all their tracks. - rtc::scoped_refptr new_streams(StreamCollection::Create()); - - if (!IsUnifiedPlan()) { - // TODO(steveanton): When removing RTP senders/receivers in response to a - // rejected media section, there is some cleanup logic that expects the - // voice/ video channel to still be set. But in this method the voice/video - // channel would have been destroyed by the SetRemoteDescription caller - // above so the cleanup that relies on them fails to run. The RemoveSenders - // calls should be moved to right before the DestroyChannel calls to fix - // this. - - // Find all audio rtp streams and create corresponding remote AudioTracks - // and MediaStreams. - if (audio_content) { - if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); - } else { - bool default_audio_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(audio_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(audio_desc), - default_audio_track_needed, audio_desc->type(), - new_streams); - } - } - - // Find all video rtp streams and create corresponding remote VideoTracks - // and MediaStreams. - if (video_content) { - if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); - } else { - bool default_video_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(video_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(video_desc), - default_video_track_needed, video_desc->type(), - new_streams); - } - } - - // If this is an RTP data transport, update the DataChannels with the - // information from the remote peer. 
- if (rtp_data_desc) { - data_channel_controller_.UpdateRemoteRtpDataChannels( - GetActiveStreams(rtp_data_desc)); - } - - // Iterate new_streams and notify the observer about new MediaStreams. - auto observer = Observer(); - for (size_t i = 0; i < new_streams->count(); ++i) { - MediaStreamInterface* new_stream = new_streams->at(i); - stats_->AddStream(new_stream); - observer->OnAddStream( - rtc::scoped_refptr(new_stream)); - } - - UpdateEndedRemoteMediaStreams(); - } - - if (type == SdpType::kAnswer && - local_ice_credentials_to_replace_->SatisfiesIceRestart( - *current_local_description_)) { - local_ice_credentials_to_replace_->ClearIceCredentials(); - } - - return RTCError::OK(); -} - -void PeerConnection::SetAssociatedRemoteStreams( - rtc::scoped_refptr receiver, - const std::vector& stream_ids, - std::vector>* added_streams, - std::vector>* removed_streams) { - std::vector> media_streams; - for (const std::string& stream_id : stream_ids) { - rtc::scoped_refptr stream = - remote_streams_->find(stream_id); - if (!stream) { - stream = MediaStreamProxy::Create(rtc::Thread::Current(), - MediaStream::Create(stream_id)); - remote_streams_->AddStream(stream); - added_streams->push_back(stream); - } - media_streams.push_back(stream); - } - // Special case: "a=msid" missing, use random stream ID. - if (media_streams.empty() && - !(remote_description()->description()->msid_signaling() & - cricket::kMsidSignalingMediaSection)) { - if (!missing_msid_default_stream_) { - missing_msid_default_stream_ = MediaStreamProxy::Create( - rtc::Thread::Current(), MediaStream::Create(rtc::CreateRandomUuid())); - added_streams->push_back(missing_msid_default_stream_); - } - media_streams.push_back(missing_msid_default_stream_); - } - std::vector> previous_streams = - receiver->streams(); - // SetStreams() will add/remove the receiver's track to/from the streams. 
This - // differs from the spec - the spec uses an "addList" and "removeList" to - // update the stream-track relationships in a later step. We do this earlier, - // changing the order of things, but the end-result is the same. - // TODO(hbos): When we remove remote_streams(), use set_stream_ids() - // instead. https://crbug.com/webrtc/9480 - receiver->SetStreams(media_streams); - RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); -} - -void PeerConnection::ProcessRemovalOfRemoteTrack( - rtc::scoped_refptr> - transceiver, - std::vector>* remove_list, - std::vector>* removed_streams) { - RTC_DCHECK(transceiver->mid()); - RTC_LOG(LS_INFO) << "Processing the removal of a track for MID=" - << *transceiver->mid(); - std::vector> previous_streams = - transceiver->internal()->receiver_internal()->streams(); - // This will remove the remote track from the streams. - transceiver->internal()->receiver_internal()->set_stream_ids({}); - remove_list->push_back(transceiver); - RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); -} - -void PeerConnection::RemoveRemoteStreamsIfEmpty( - const std::vector>& remote_streams, - std::vector>* removed_streams) { - // TODO(https://crbug.com/webrtc/9480): When we use stream IDs instead of - // streams, see if the stream was removed by checking if this was the last - // receiver with that stream ID. 
- for (const auto& remote_stream : remote_streams) { - if (remote_stream->GetAudioTracks().empty() && - remote_stream->GetVideoTracks().empty()) { - remote_streams_->RemoveStream(remote_stream); - removed_streams->push_back(remote_stream); - } - } -} - -RTCError PeerConnection::UpdateTransceiversAndDataChannels( - cricket::ContentSource source, - const SessionDescriptionInterface& new_session, - const SessionDescriptionInterface* old_local_description, - const SessionDescriptionInterface* old_remote_description) { - RTC_DCHECK(IsUnifiedPlan()); - - const cricket::ContentGroup* bundle_group = nullptr; - if (new_session.GetType() == SdpType::kOffer) { - auto bundle_group_or_error = - GetEarlyBundleGroup(*new_session.description()); - if (!bundle_group_or_error.ok()) { - return bundle_group_or_error.MoveError(); - } - bundle_group = bundle_group_or_error.MoveValue(); - } - - const ContentInfos& new_contents = new_session.description()->contents(); - for (size_t i = 0; i < new_contents.size(); ++i) { - const cricket::ContentInfo& new_content = new_contents[i]; - cricket::MediaType media_type = new_content.media_description()->type(); - mid_generator_.AddKnownId(new_content.name); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - const cricket::ContentInfo* old_local_content = nullptr; - if (old_local_description && - i < old_local_description->description()->contents().size()) { - old_local_content = - &old_local_description->description()->contents()[i]; - } - const cricket::ContentInfo* old_remote_content = nullptr; - if (old_remote_description && - i < old_remote_description->description()->contents().size()) { - old_remote_content = - &old_remote_description->description()->contents()[i]; - } - auto transceiver_or_error = - AssociateTransceiver(source, new_session.GetType(), i, new_content, - old_local_content, old_remote_content); - if (!transceiver_or_error.ok()) { - return transceiver_or_error.MoveError(); - } - auto 
transceiver = transceiver_or_error.MoveValue(); - RTCError error = - UpdateTransceiverChannel(transceiver, new_content, bundle_group); - if (!error.ok()) { - return error; - } - } else if (media_type == cricket::MEDIA_TYPE_DATA) { - if (GetDataMid() && new_content.name != *GetDataMid()) { - // Ignore all but the first data section. - RTC_LOG(LS_INFO) << "Ignoring data media section with MID=" - << new_content.name; - continue; - } - RTCError error = UpdateDataChannel(source, new_content, bundle_group); - if (!error.ok()) { - return error; - } - } else { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Unknown section type."); - } - } - - return RTCError::OK(); -} - -RTCError PeerConnection::UpdateTransceiverChannel( - rtc::scoped_refptr> - transceiver, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) { - RTC_DCHECK(IsUnifiedPlan()); - RTC_DCHECK(transceiver); - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (content.rejected) { - if (channel) { - transceiver->internal()->SetChannel(nullptr); - DestroyChannelInterface(channel); - } - } else { - if (!channel) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - channel = CreateVoiceChannel(content.name); - } else { - RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->media_type()); - channel = CreateVideoChannel(content.name); - } - if (!channel) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INTERNAL_ERROR, - "Failed to create channel for mid=" + content.name); - } - transceiver->internal()->SetChannel(channel); - } - } - return RTCError::OK(); -} - -RTCError PeerConnection::UpdateDataChannel( - cricket::ContentSource source, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) { - if (data_channel_type() == cricket::DCT_NONE) { - // If data channels are disabled, ignore this media section. CreateAnswer - // will take care of rejecting it. 
- return RTCError::OK(); - } - if (content.rejected) { - RTC_LOG(LS_INFO) << "Rejected data channel, mid=" << content.mid(); - DestroyDataChannelTransport(); - } else { - if (!data_channel_controller_.rtp_data_channel() && - !data_channel_controller_.data_channel_transport()) { - RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid(); - if (!CreateDataChannel(content.name)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); - } - } - if (source == cricket::CS_REMOTE) { - const MediaContentDescription* data_desc = content.media_description(); - if (data_desc && cricket::IsRtpProtocol(data_desc->protocol())) { - data_channel_controller_.UpdateRemoteRtpDataChannels( - GetActiveStreams(data_desc)); - } - } - } - return RTCError::OK(); -} - -// This method will extract any send encodings that were sent by the remote -// connection. This is currently only relevant for Simulcast scenario (where -// the number of layers may be communicated by the server). -static std::vector GetSendEncodingsFromRemoteDescription( - const MediaContentDescription& desc) { - if (!desc.HasSimulcast()) { - return {}; - } - std::vector result; - const SimulcastDescription& simulcast = desc.simulcast_description(); - - // This is a remote description, the parameters we are after should appear - // as receive streams. - for (const auto& alternatives : simulcast.receive_layers()) { - RTC_DCHECK(!alternatives.empty()); - // There is currently no way to specify or choose from alternatives. - // We will always use the first alternative, which is the most preferred. 
- const SimulcastLayer& layer = alternatives[0]; - RtpEncodingParameters parameters; - parameters.rid = layer.rid; - parameters.active = !layer.is_paused; - result.push_back(parameters); - } - - return result; -} - -static RTCError UpdateSimulcastLayerStatusInSender( - const std::vector& layers, - rtc::scoped_refptr sender) { - RTC_DCHECK(sender); - RtpParameters parameters = sender->GetParametersInternal(); - std::vector disabled_layers; - - // The simulcast envelope cannot be changed, only the status of the streams. - // So we will iterate over the send encodings rather than the layers. - for (RtpEncodingParameters& encoding : parameters.encodings) { - auto iter = std::find_if(layers.begin(), layers.end(), - [&encoding](const SimulcastLayer& layer) { - return layer.rid == encoding.rid; - }); - // A layer that cannot be found may have been removed by the remote party. - if (iter == layers.end()) { - disabled_layers.push_back(encoding.rid); - continue; - } - - encoding.active = !iter->is_paused; - } - - RTCError result = sender->SetParametersInternal(parameters); - if (result.ok()) { - result = sender->DisableEncodingLayers(disabled_layers); - } - - return result; -} - -static bool SimulcastIsRejected( - const ContentInfo* local_content, - const MediaContentDescription& answer_media_desc) { - bool simulcast_offered = local_content && - local_content->media_description() && - local_content->media_description()->HasSimulcast(); - bool simulcast_answered = answer_media_desc.HasSimulcast(); - bool rids_supported = RtpExtension::FindHeaderExtensionByUri( - answer_media_desc.rtp_header_extensions(), RtpExtension::kRidUri); - return simulcast_offered && (!simulcast_answered || !rids_supported); -} - -static RTCError DisableSimulcastInSender( - rtc::scoped_refptr sender) { - RTC_DCHECK(sender); - RtpParameters parameters = sender->GetParametersInternal(); - if (parameters.encodings.size() <= 1) { - return RTCError::OK(); - } - - std::vector disabled_layers; - 
std::transform( - parameters.encodings.begin() + 1, parameters.encodings.end(), - std::back_inserter(disabled_layers), - [](const RtpEncodingParameters& encoding) { return encoding.rid; }); - return sender->DisableEncodingLayers(disabled_layers); -} - -RTCErrorOr>> -PeerConnection::AssociateTransceiver(cricket::ContentSource source, - SdpType type, - size_t mline_index, - const ContentInfo& content, - const ContentInfo* old_local_content, - const ContentInfo* old_remote_content) { - RTC_DCHECK(IsUnifiedPlan()); - // If this is an offer then the m= section might be recycled. If the m= - // section is being recycled (defined as: rejected in the current local or - // remote description and not rejected in new description), dissociate the - // currently associated RtpTransceiver by setting its mid property to null, - // and discard the mapping between the transceiver and its m= section index. - if (IsMediaSectionBeingRecycled(type, content, old_local_content, - old_remote_content)) { - // We want to dissociate the transceiver that has the rejected mid. - const std::string& old_mid = - (old_local_content && old_local_content->rejected) - ? old_local_content->name - : old_remote_content->name; - auto old_transceiver = GetAssociatedTransceiver(old_mid); - if (old_transceiver) { - RTC_LOG(LS_INFO) << "Dissociating transceiver for MID=" << old_mid - << " since the media section is being recycled."; - old_transceiver->internal()->set_mid(absl::nullopt); - old_transceiver->internal()->set_mline_index(absl::nullopt); - } - } - const MediaContentDescription* media_desc = content.media_description(); - auto transceiver = GetAssociatedTransceiver(content.name); - if (source == cricket::CS_LOCAL) { - // Find the RtpTransceiver that corresponds to this m= section, using the - // mapping between transceivers and m= section indices established when - // creating the offer. 
- if (!transceiver) { - transceiver = GetTransceiverByMLineIndex(mline_index); - } - if (!transceiver) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Unknown transceiver"); - } - } else { - RTC_DCHECK_EQ(source, cricket::CS_REMOTE); - // If the m= section is sendrecv or recvonly, and there are RtpTransceivers - // of the same type... - // When simulcast is requested, a transceiver cannot be associated because - // AddTrack cannot be called to initialize it. - if (!transceiver && - RtpTransceiverDirectionHasRecv(media_desc->direction()) && - !media_desc->HasSimulcast()) { - transceiver = FindAvailableTransceiverToReceive(media_desc->type()); - } - // If no RtpTransceiver was found in the previous step, create one with a - // recvonly direction. - if (!transceiver) { - RTC_LOG(LS_INFO) << "Adding " - << cricket::MediaTypeToString(media_desc->type()) - << " transceiver for MID=" << content.name - << " at i=" << mline_index - << " in response to the remote description."; - std::string sender_id = rtc::CreateRandomUuid(); - std::vector send_encodings = - GetSendEncodingsFromRemoteDescription(*media_desc); - auto sender = CreateSender(media_desc->type(), sender_id, nullptr, {}, - send_encodings); - std::string receiver_id; - if (!media_desc->streams().empty()) { - receiver_id = media_desc->streams()[0].id; - } else { - receiver_id = rtc::CreateRandomUuid(); - } - auto receiver = CreateReceiver(media_desc->type(), receiver_id); - transceiver = CreateAndAddTransceiver(sender, receiver); - transceiver->internal()->set_direction( - RtpTransceiverDirection::kRecvOnly); - if (type == SdpType::kOffer) { - transceiver_stable_states_by_transceivers_[transceiver] - .set_newly_created(); - } - } - // Check if the offer indicated simulcast but the answer rejected it. - // This can happen when simulcast is not supported on the remote party. 
- if (SimulcastIsRejected(old_local_content, *media_desc)) { - RTC_HISTOGRAM_BOOLEAN(kSimulcastDisabled, true); - RTCError error = - DisableSimulcastInSender(transceiver->internal()->sender_internal()); - if (!error.ok()) { - RTC_LOG(LS_ERROR) << "Failed to remove rejected simulcast."; - return std::move(error); - } - } - } - RTC_DCHECK(transceiver); - if (transceiver->media_type() != media_desc->type()) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Transceiver type does not match media description type."); - } - if (media_desc->HasSimulcast()) { - std::vector layers = - source == cricket::CS_LOCAL - ? media_desc->simulcast_description().send_layers().GetAllLayers() - : media_desc->simulcast_description() - .receive_layers() - .GetAllLayers(); - RTCError error = UpdateSimulcastLayerStatusInSender( - layers, transceiver->internal()->sender_internal()); - if (!error.ok()) { - RTC_LOG(LS_ERROR) << "Failed updating status for simulcast layers."; - return std::move(error); - } - } - if (type == SdpType::kOffer) { - bool state_changes = transceiver->internal()->mid() != content.name || - transceiver->internal()->mline_index() != mline_index; - if (state_changes) { - transceiver_stable_states_by_transceivers_[transceiver] - .SetMSectionIfUnset(transceiver->internal()->mid(), - transceiver->internal()->mline_index()); - } - } - // Associate the found or created RtpTransceiver with the m= section by - // setting the value of the RtpTransceiver's mid property to the MID of the m= - // section, and establish a mapping between the transceiver and the index of - // the m= section. 
- transceiver->internal()->set_mid(content.name); - transceiver->internal()->set_mline_index(mline_index); - return std::move(transceiver); -} - -rtc::scoped_refptr> -PeerConnection::GetAssociatedTransceiver(const std::string& mid) const { - RTC_DCHECK(IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->mid() == mid) { - return transceiver; - } - } - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::GetTransceiverByMLineIndex(size_t mline_index) const { - RTC_DCHECK(IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->internal()->mline_index() == mline_index) { - return transceiver; - } - } - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::FindAvailableTransceiverToReceive( - cricket::MediaType media_type) const { - RTC_DCHECK(IsUnifiedPlan()); - // From JSEP section 5.10 (Applying a Remote Description): - // If the m= section is sendrecv or recvonly, and there are RtpTransceivers of - // the same type that were added to the PeerConnection by addTrack and are not - // associated with any m= section and are not stopped, find the first such - // RtpTransceiver. - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == media_type && - transceiver->internal()->created_by_addtrack() && !transceiver->mid() && - !transceiver->stopped()) { - return transceiver; - } - } - return nullptr; -} - -const cricket::ContentInfo* PeerConnection::FindMediaSectionForTransceiver( - rtc::scoped_refptr> - transceiver, - const SessionDescriptionInterface* sdesc) const { - RTC_DCHECK(transceiver); - RTC_DCHECK(sdesc); - if (IsUnifiedPlan()) { - if (!transceiver->internal()->mid()) { - // This transceiver is not associated with a media section yet. - return nullptr; - } - return sdesc->description()->GetContentByName( - *transceiver->internal()->mid()); - } else { - // Plan B only allows at most one audio and one video section, so use the - // first media section of that type. 
- return cricket::GetFirstMediaContent(sdesc->description()->contents(), - transceiver->media_type()); - } + sdp_handler_->SetRemoteDescription(std::move(desc), observer); } PeerConnectionInterface::RTCConfiguration PeerConnection::GetConfiguration() { @@ -4058,11 +1377,24 @@ RTCError PeerConnection::SetConfiguration( } if (modified_config.allow_codec_switching.has_value()) { - cricket::VideoMediaChannel* video_channel = video_media_channel(); - if (video_channel) { - video_channel->SetVideoCodecSwitchingEnabled( - *modified_config.allow_codec_switching); + std::vector channels; + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) + continue; + + auto* video_channel = static_cast( + transceiver->internal()->channel()); + if (video_channel) + channels.push_back(video_channel->media_channel()); } + + worker_thread()->Invoke( + RTC_FROM_HERE, + [channels = std::move(channels), + allow_codec_switching = *modified_config.allow_codec_switching]() { + for (auto* ch : channels) + ch->SetVideoCodecSwitchingEnabled(allow_codec_switching); + }); } configuration_ = modified_config; @@ -4072,124 +1404,21 @@ RTCError PeerConnection::SetConfiguration( bool PeerConnection::AddIceCandidate( const IceCandidateInterface* ice_candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::AddIceCandidate"); - if (IsClosed()) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: PeerConnection is closed."; - NoteAddIceCandidateResult(kAddIceCandidateFailClosed); - return false; - } - - if (!remote_description()) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: ICE candidates can't be added " - "without any remote session description."; - NoteAddIceCandidateResult(kAddIceCandidateFailNoRemoteDescription); - return false; - } - - if (!ice_candidate) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate is null."; - NoteAddIceCandidateResult(kAddIceCandidateFailNullCandidate); - return false; - 
} - - bool valid = false; - bool ready = ReadyToUseRemoteCandidate(ice_candidate, nullptr, &valid); - if (!valid) { - NoteAddIceCandidateResult(kAddIceCandidateFailNotValid); - return false; - } - - // Add this candidate to the remote session description. - if (!mutable_remote_description()->AddCandidate(ice_candidate)) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate cannot be used."; - NoteAddIceCandidateResult(kAddIceCandidateFailInAddition); - return false; - } - - if (ready) { - bool result = UseCandidate(ice_candidate); - if (result) { - NoteUsageEvent(UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED); - NoteAddIceCandidateResult(kAddIceCandidateSuccess); - } else { - NoteAddIceCandidateResult(kAddIceCandidateFailNotUsable); - } - return result; - } else { - RTC_LOG(LS_INFO) << "AddIceCandidate: Not ready to use candidate."; - NoteAddIceCandidateResult(kAddIceCandidateFailNotReady); - return true; - } + return sdp_handler_->AddIceCandidate(ice_candidate); } void PeerConnection::AddIceCandidate( std::unique_ptr candidate, std::function callback) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - candidate = std::move(candidate), callback = std::move(callback)]( - std::function operations_chain_callback) { - if (!this_weak_ptr) { - operations_chain_callback(); - callback(RTCError( - RTCErrorType::INVALID_STATE, - "AddIceCandidate failed because the session was shut down")); - return; - } - if (!this_weak_ptr->AddIceCandidate(candidate.get())) { - operations_chain_callback(); - // Fail with an error type and message consistent with Chromium. - // TODO(hbos): Fail with error types according to spec. 
- callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Error processing ICE candidate")); - return; - } - operations_chain_callback(); - callback(RTCError::OK()); - }); + sdp_handler_->AddIceCandidate(std::move(candidate), callback); } bool PeerConnection::RemoveIceCandidates( const std::vector& candidates) { TRACE_EVENT0("webrtc", "PeerConnection::RemoveIceCandidates"); RTC_DCHECK_RUN_ON(signaling_thread()); - if (IsClosed()) { - RTC_LOG(LS_ERROR) << "RemoveIceCandidates: PeerConnection is closed."; - return false; - } - - if (!remote_description()) { - RTC_LOG(LS_ERROR) << "RemoveIceCandidates: ICE candidates can't be removed " - "without any remote session description."; - return false; - } - - if (candidates.empty()) { - RTC_LOG(LS_ERROR) << "RemoveIceCandidates: candidates are empty."; - return false; - } - - size_t number_removed = - mutable_remote_description()->RemoveCandidates(candidates); - if (number_removed != candidates.size()) { - RTC_LOG(LS_ERROR) - << "RemoveIceCandidates: Failed to remove candidates. Requested " - << candidates.size() << " but only " << number_removed - << " are removed."; - } - - // Remove the candidates from the transport controller. 
- RTCError error = transport_controller_->RemoveRemoteCandidates(candidates); - if (!error.ok()) { - RTC_LOG(LS_ERROR) - << "RemoveIceCandidates: Error when removing remote candidates: " - << error.message(); - } - return true; + return sdp_handler_->RemoveIceCandidates(candidates); } RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) { @@ -4242,7 +1471,7 @@ void PeerConnection::SetAudioPlayout(bool playout) { return; } auto audio_state = - factory_->channel_manager()->media_engine()->voice().GetAudioState(); + context_->channel_manager()->media_engine()->voice().GetAudioState(); audio_state->SetPlayout(playout); } @@ -4254,40 +1483,10 @@ void PeerConnection::SetAudioRecording(bool recording) { return; } auto audio_state = - factory_->channel_manager()->media_engine()->voice().GetAudioState(); + context_->channel_manager()->media_engine()->voice().GetAudioState(); audio_state->SetRecording(recording); } -std::unique_ptr -PeerConnection::GetRemoteAudioSSLCertificate() { - std::unique_ptr chain = GetRemoteAudioSSLCertChain(); - if (!chain || !chain->GetSize()) { - return nullptr; - } - return chain->Get(0).Clone(); -} - -std::unique_ptr -PeerConnection::GetRemoteAudioSSLCertChain() { - RTC_DCHECK_RUN_ON(signaling_thread()); - auto audio_transceiver = GetFirstAudioTransceiver(); - if (!audio_transceiver || !audio_transceiver->internal()->channel()) { - return nullptr; - } - return transport_controller_->GetRemoteSSLCertChain( - audio_transceiver->internal()->channel()->transport_name()); -} - -rtc::scoped_refptr> -PeerConnection::GetFirstAudioTransceiver() const { - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - return transceiver; - } - } - return nullptr; -} - void PeerConnection::AddAdaptationResource( rtc::scoped_refptr resource) { if (!worker_thread()->IsCurrent()) { @@ -4315,7 +1514,8 @@ bool PeerConnection::StartRtcEventLog(std::unique_ptr output, bool PeerConnection::StartRtcEventLog( 
std::unique_ptr output) { int64_t output_period_ms = webrtc::RtcEventLog::kImmediateOutput; - if (field_trial::IsEnabled("WebRTC-RtcEventLogNewFormat")) { + if (absl::StartsWith(context_->trials().Lookup("WebRTC-RtcEventLogNewFormat"), + "Enabled")) { output_period_ms = 5000; } return StartRtcEventLog(std::move(output), output_period_ms); @@ -4349,51 +1549,61 @@ rtc::scoped_refptr PeerConnection::GetSctpTransport() const SessionDescriptionInterface* PeerConnection::local_description() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_local_description_ ? pending_local_description_.get() - : current_local_description_.get(); + return sdp_handler_->local_description(); } const SessionDescriptionInterface* PeerConnection::remote_description() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_remote_description_ ? pending_remote_description_.get() - : current_remote_description_.get(); + return sdp_handler_->remote_description(); } const SessionDescriptionInterface* PeerConnection::current_local_description() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return current_local_description_.get(); + return sdp_handler_->current_local_description(); } const SessionDescriptionInterface* PeerConnection::current_remote_description() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return current_remote_description_.get(); + return sdp_handler_->current_remote_description(); } const SessionDescriptionInterface* PeerConnection::pending_local_description() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_local_description_.get(); + return sdp_handler_->pending_local_description(); } const SessionDescriptionInterface* PeerConnection::pending_remote_description() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_remote_description_.get(); + return sdp_handler_->pending_remote_description(); } void PeerConnection::Close() { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "PeerConnection::Close"); + + if 
(IsClosed()) { + return; + } // Update stats here so that we have the most recent stats for tracks and // streams before the channels are closed. stats_->UpdateStats(kStatsOutputLevelStandard); - ChangeSignalingState(PeerConnectionInterface::kClosed); + ice_connection_state_ = PeerConnectionInterface::kIceConnectionClosed; + Observer()->OnIceConnectionChange(ice_connection_state_); + standardized_ice_connection_state_ = + PeerConnectionInterface::IceConnectionState::kIceConnectionClosed; + connection_state_ = PeerConnectionInterface::PeerConnectionState::kClosed; + Observer()->OnConnectionChange(connection_state_); + + sdp_handler_->Close(); + NoteUsageEvent(UsageEvent::CLOSE_CALLED); - for (const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { transceiver->internal()->SetPeerConnectionClosed(); if (!transceiver->stopped()) transceiver->StopInternal(); @@ -4407,15 +1617,16 @@ void PeerConnection::Close() { // Don't destroy BaseChannels until after stats has been cleaned up so that // the last stats request can still read from the channels. - DestroyAllChannels(); + sdp_handler_->DestroyAllChannels(); // The event log is used in the transport controller, which must be outlived // by the former. CreateOffer by the peer connection is implemented // asynchronously and if the peer connection is closed without resetting the // WebRTC session description factory, the session description factory would // call the transport controller. - webrtc_session_desc_factory_.reset(); + sdp_handler_->ResetSessionDescFactory(); transport_controller_.reset(); + rtp_manager_->Close(); network_thread()->Invoke( RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, @@ -4423,6 +1634,7 @@ void PeerConnection::Close() { worker_thread()->Invoke(RTC_FROM_HERE, [this] { RTC_DCHECK_RUN_ON(worker_thread()); + call_safety_.reset(); call_.reset(); // The event log must outlive call (and any other object that uses it). 
event_log_.reset(); @@ -4433,215 +1645,8 @@ void PeerConnection::Close() { observer_ = nullptr; } -void PeerConnection::OnMessage(rtc::Message* msg) { - RTC_DCHECK_RUN_ON(signaling_thread()); - switch (msg->message_id) { - case MSG_SET_SESSIONDESCRIPTION_SUCCESS: { - SetSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnSuccess(); - delete param; - break; - } - case MSG_SET_SESSIONDESCRIPTION_FAILED: { - SetSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnFailure(std::move(param->error)); - delete param; - break; - } - case MSG_CREATE_SESSIONDESCRIPTION_FAILED: { - CreateSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnFailure(std::move(param->error)); - delete param; - break; - } - case MSG_GETSTATS: { - GetStatsMsg* param = static_cast(msg->pdata); - StatsReports reports; - stats_->GetStats(param->track, &reports); - param->observer->OnComplete(reports); - delete param; - break; - } - case MSG_REPORT_USAGE_PATTERN: { - ReportUsagePattern(); - break; - } - default: - RTC_NOTREACHED() << "Not implemented"; - break; - } -} - -cricket::VoiceMediaChannel* PeerConnection::voice_media_channel() const { - RTC_DCHECK(!IsUnifiedPlan()); - auto* voice_channel = static_cast( - GetAudioTransceiver()->internal()->channel()); - if (voice_channel) { - return voice_channel->media_channel(); - } else { - return nullptr; - } -} - -cricket::VideoMediaChannel* PeerConnection::video_media_channel() const { - RTC_DCHECK(!IsUnifiedPlan()); - auto* video_channel = static_cast( - GetVideoTransceiver()->internal()->channel()); - if (video_channel) { - return video_channel->media_channel(); - } else { - return nullptr; - } -} - -void PeerConnection::CreateAudioReceiver( - MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) { - std::vector> streams; - streams.push_back(rtc::scoped_refptr(stream)); - // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use - // the 
constructor taking stream IDs instead. - auto* audio_receiver = new AudioRtpReceiver( - worker_thread(), remote_sender_info.sender_id, streams); - audio_receiver->SetMediaChannel(voice_media_channel()); - if (remote_sender_info.sender_id == kDefaultAudioSenderId) { - audio_receiver->SetupUnsignaledMediaChannel(); - } else { - audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); - } - auto receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), audio_receiver); - GetAudioTransceiver()->internal()->AddReceiver(receiver); - Observer()->OnAddTrack(receiver, streams); - NoteUsageEvent(UsageEvent::AUDIO_ADDED); -} - -void PeerConnection::CreateVideoReceiver( - MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) { - std::vector> streams; - streams.push_back(rtc::scoped_refptr(stream)); - // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use - // the constructor taking stream IDs instead. - auto* video_receiver = new VideoRtpReceiver( - worker_thread(), remote_sender_info.sender_id, streams); - video_receiver->SetMediaChannel(video_media_channel()); - if (remote_sender_info.sender_id == kDefaultVideoSenderId) { - video_receiver->SetupUnsignaledMediaChannel(); - } else { - video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); - } - auto receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), video_receiver); - GetVideoTransceiver()->internal()->AddReceiver(receiver); - Observer()->OnAddTrack(receiver, streams); - NoteUsageEvent(UsageEvent::VIDEO_ADDED); -} - -// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote -// description. 
-rtc::scoped_refptr PeerConnection::RemoveAndStopReceiver( - const RtpSenderInfo& remote_sender_info) { - auto receiver = FindReceiverById(remote_sender_info.sender_id); - if (!receiver) { - RTC_LOG(LS_WARNING) << "RtpReceiver for track with id " - << remote_sender_info.sender_id << " doesn't exist."; - return nullptr; - } - if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - GetAudioTransceiver()->internal()->RemoveReceiver(receiver); - } else { - GetVideoTransceiver()->internal()->RemoveReceiver(receiver); - } - return receiver; -} - -void PeerConnection::AddAudioTrack(AudioTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - RTC_DCHECK(track); - RTC_DCHECK(stream); - auto sender = FindSenderForTrack(track); - if (sender) { - // We already have a sender for this track, so just change the stream_id - // so that it's correct in the next call to CreateOffer. - sender->internal()->set_stream_ids({stream->id()}); - return; - } - - // Normal case; we've never seen this track before. - auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), track, - {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(voice_media_channel()); - GetAudioTransceiver()->internal()->AddSender(new_sender); - // If the sender has already been configured in SDP, we call SetSsrc, - // which will connect the sender to the underlying transport. This can - // occur if a local session description that contains the ID of the sender - // is set before AddStream is called. It can also occur if the local - // session description is not changed and RemoveStream is called, and - // later AddStream is called again with the same stream. 
- const RtpSenderInfo* sender_info = - FindSenderInfo(local_audio_sender_infos_, stream->id(), track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } -} - -// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around -// indefinitely, when we have unified plan SDP. -void PeerConnection::RemoveAudioTrack(AudioTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - auto sender = FindSenderForTrack(track); - if (!sender) { - RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() - << " doesn't exist."; - return; - } - GetAudioTransceiver()->internal()->RemoveSender(sender); -} - -void PeerConnection::AddVideoTrack(VideoTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - RTC_DCHECK(track); - RTC_DCHECK(stream); - auto sender = FindSenderForTrack(track); - if (sender) { - // We already have a sender for this track, so just change the stream_id - // so that it's correct in the next call to CreateOffer. - sender->internal()->set_stream_ids({stream->id()}); - return; - } - - // Normal case; we've never seen this track before. 
- auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), track, - {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(video_media_channel()); - GetVideoTransceiver()->internal()->AddSender(new_sender); - const RtpSenderInfo* sender_info = - FindSenderInfo(local_video_sender_infos_, stream->id(), track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } -} - -void PeerConnection::RemoveVideoTrack(VideoTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - auto sender = FindSenderForTrack(track); - if (!sender) { - RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() - << " doesn't exist."; - return; - } - GetVideoTransceiver()->internal()->RemoveSender(sender); -} - void PeerConnection::SetIceConnectionState(IceConnectionState new_state) { + RTC_DCHECK_RUN_ON(signaling_thread()); if (ice_connection_state_ == new_state) { return; } @@ -4743,623 +1748,8 @@ void PeerConnection::OnSelectedCandidatePairChanged( Observer()->OnIceSelectedCandidatePairChanged(event); } -void PeerConnection::ChangeSignalingState( - PeerConnectionInterface::SignalingState signaling_state) { - if (signaling_state_ == signaling_state) { - return; - } - RTC_LOG(LS_INFO) << "Session: " << session_id() << " Old state: " - << GetSignalingStateString(signaling_state_) - << " New state: " - << GetSignalingStateString(signaling_state); - signaling_state_ = signaling_state; - if (signaling_state == kClosed) { - ice_connection_state_ = kIceConnectionClosed; - Observer()->OnIceConnectionChange(ice_connection_state_); - standardized_ice_connection_state_ = - PeerConnectionInterface::IceConnectionState::kIceConnectionClosed; - connection_state_ = PeerConnectionInterface::PeerConnectionState::kClosed; - Observer()->OnConnectionChange(connection_state_); - } - Observer()->OnSignalingChange(signaling_state_); -} - -void PeerConnection::OnAudioTrackAdded(AudioTrackInterface* track, - 
MediaStreamInterface* stream) { - if (IsClosed()) { - return; - } - AddAudioTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::OnAudioTrackRemoved(AudioTrackInterface* track, - MediaStreamInterface* stream) { - if (IsClosed()) { - return; - } - RemoveAudioTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::OnVideoTrackAdded(VideoTrackInterface* track, - MediaStreamInterface* stream) { - if (IsClosed()) { - return; - } - AddVideoTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::OnVideoTrackRemoved(VideoTrackInterface* track, - MediaStreamInterface* stream) { - if (IsClosed()) { - return; - } - RemoveVideoTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::PostSetSessionDescriptionSuccess( - SetSessionDescriptionObserver* observer) { - SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg); -} - -void PeerConnection::PostSetSessionDescriptionFailure( - SetSessionDescriptionObserver* observer, - RTCError&& error) { - RTC_DCHECK(!error.ok()); - SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); - msg->error = std::move(error); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_SET_SESSIONDESCRIPTION_FAILED, msg); -} - -void PeerConnection::PostCreateSessionDescriptionFailure( - CreateSessionDescriptionObserver* observer, - RTCError error) { - RTC_DCHECK(!error.ok()); - CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer); - msg->error = std::move(error); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg); -} - -void PeerConnection::GetOptionsForOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - ExtractSharedMediaSessionOptions(offer_answer_options, session_options); - - if 
(IsUnifiedPlan()) { - GetOptionsForUnifiedPlanOffer(offer_answer_options, session_options); - } else { - GetOptionsForPlanBOffer(offer_answer_options, session_options); - } - - // Intentionally unset the data channel type for RTP data channel with the - // second condition. Otherwise the RTP data channels would be successfully - // negotiated by default and the unit tests in WebRtcDataBrowserTest will fail - // when building with chromium. We want to leave RTP data channels broken, so - // people won't try to use them. - if (data_channel_controller_.HasRtpDataChannels() || - data_channel_type() != cricket::DCT_RTP) { - session_options->data_channel_type = data_channel_type(); - } - - // Apply ICE restart flag and renomination flag. - bool ice_restart = offer_answer_options.ice_restart || - local_ice_credentials_to_replace_->HasIceCredentials(); - for (auto& options : session_options->media_description_options) { - options.transport_options.ice_restart = ice_restart; - options.transport_options.enable_ice_renomination = - configuration_.enable_ice_renomination; - } - - session_options->rtcp_cname = rtcp_cname_; - session_options->crypto_options = GetCryptoOptions(); - session_options->pooled_ice_credentials = - network_thread()->Invoke>( - RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, - port_allocator_.get())); - session_options->offer_extmap_allow_mixed = - configuration_.offer_extmap_allow_mixed; - - // Allow fallback for using obsolete SCTP syntax. - // Note that the default in |session_options| is true, while - // the default in |options| is false. - session_options->use_obsolete_sctp_sdp = - offer_answer_options.use_obsolete_sctp_sdp; -} - -void PeerConnection::GetOptionsForPlanBOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Figure out transceiver directional preferences. 
- bool send_audio = !GetAudioTransceiver()->internal()->senders().empty(); - bool send_video = !GetVideoTransceiver()->internal()->senders().empty(); - - // By default, generate sendrecv/recvonly m= sections. - bool recv_audio = true; - bool recv_video = true; - - // By default, only offer a new m= section if we have media to send with it. - bool offer_new_audio_description = send_audio; - bool offer_new_video_description = send_video; - bool offer_new_data_description = data_channel_controller_.HasDataChannels(); - - // The "offer_to_receive_X" options allow those defaults to be overridden. - if (offer_answer_options.offer_to_receive_audio != - RTCOfferAnswerOptions::kUndefined) { - recv_audio = (offer_answer_options.offer_to_receive_audio > 0); - offer_new_audio_description = - offer_new_audio_description || - (offer_answer_options.offer_to_receive_audio > 0); - } - if (offer_answer_options.offer_to_receive_video != - RTCOfferAnswerOptions::kUndefined) { - recv_video = (offer_answer_options.offer_to_receive_video > 0); - offer_new_video_description = - offer_new_video_description || - (offer_answer_options.offer_to_receive_video > 0); - } - - absl::optional audio_index; - absl::optional video_index; - absl::optional data_index; - // If a current description exists, generate m= sections in the same order, - // using the first audio/video/data section that appears and rejecting - // extraneous ones. - if (local_description()) { - GenerateMediaDescriptionOptions( - local_description(), - RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), - RtpTransceiverDirectionFromSendRecv(send_video, recv_video), - &audio_index, &video_index, &data_index, session_options); - } - - // Add audio/video/data m= sections to the end if needed. 
- if (!audio_index && offer_new_audio_description) { - cricket::MediaDescriptionOptions options( - cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO, - RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false); - options.header_extensions = - channel_manager()->GetSupportedAudioRtpHeaderExtensions(); - session_options->media_description_options.push_back(options); - audio_index = session_options->media_description_options.size() - 1; - } - if (!video_index && offer_new_video_description) { - cricket::MediaDescriptionOptions options( - cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO, - RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false); - options.header_extensions = - channel_manager()->GetSupportedVideoRtpHeaderExtensions(); - session_options->media_description_options.push_back(options); - video_index = session_options->media_description_options.size() - 1; - } - if (!data_index && offer_new_data_description) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(cricket::CN_DATA)); - data_index = session_options->media_description_options.size() - 1; - } - - cricket::MediaDescriptionOptions* audio_media_description_options = - !audio_index ? nullptr - : &session_options->media_description_options[*audio_index]; - cricket::MediaDescriptionOptions* video_media_description_options = - !video_index ? nullptr - : &session_options->media_description_options[*video_index]; - - AddPlanBRtpSenderOptions(GetSendersInternal(), - audio_media_description_options, - video_media_description_options, - offer_answer_options.num_simulcast_layers); -} - -static cricket::MediaDescriptionOptions -GetMediaDescriptionOptionsForTransceiver( - rtc::scoped_refptr> - transceiver, - const std::string& mid, - bool is_create_offer) { - // NOTE: a stopping transceiver should be treated as a stopped one in - // createOffer as specified in - // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. 
- bool stopped = - is_create_offer ? transceiver->stopping() : transceiver->stopped(); - cricket::MediaDescriptionOptions media_description_options( - transceiver->media_type(), mid, transceiver->direction(), stopped); - media_description_options.codec_preferences = - transceiver->codec_preferences(); - media_description_options.header_extensions = - transceiver->HeaderExtensionsToOffer(); - // This behavior is specified in JSEP. The gist is that: - // 1. The MSID is included if the RtpTransceiver's direction is sendonly or - // sendrecv. - // 2. If the MSID is included, then it must be included in any subsequent - // offer/answer exactly the same until the RtpTransceiver is stopped. - if (stopped || (!RtpTransceiverDirectionHasSend(transceiver->direction()) && - !transceiver->internal()->has_ever_been_used_to_send())) { - return media_description_options; - } - - cricket::SenderOptions sender_options; - sender_options.track_id = transceiver->sender()->id(); - sender_options.stream_ids = transceiver->sender()->stream_ids(); - - // The following sets up RIDs and Simulcast. - // RIDs are included if Simulcast is requested or if any RID was specified. - RtpParameters send_parameters = - transceiver->internal()->sender_internal()->GetParametersInternal(); - bool has_rids = std::any_of(send_parameters.encodings.begin(), - send_parameters.encodings.end(), - [](const RtpEncodingParameters& encoding) { - return !encoding.rid.empty(); - }); - - std::vector send_rids; - SimulcastLayerList send_layers; - for (const RtpEncodingParameters& encoding : send_parameters.encodings) { - if (encoding.rid.empty()) { - continue; - } - send_rids.push_back(RidDescription(encoding.rid, RidDirection::kSend)); - send_layers.AddLayer(SimulcastLayer(encoding.rid, !encoding.active)); - } - - if (has_rids) { - sender_options.rids = send_rids; - } - - sender_options.simulcast_layers = send_layers; - // When RIDs are configured, we must set num_sim_layers to 0 to. 
- // Otherwise, num_sim_layers must be 1 because either there is no - // simulcast, or simulcast is acheived by munging the SDP. - sender_options.num_sim_layers = has_rids ? 0 : 1; - media_description_options.sender_options.push_back(sender_options); - - return media_description_options; -} - -// Returns the ContentInfo at mline index |i|, or null if none exists. -static const ContentInfo* GetContentByIndex( - const SessionDescriptionInterface* sdesc, - size_t i) { - if (!sdesc) { - return nullptr; - } - const ContentInfos& contents = sdesc->description()->contents(); - return (i < contents.size() ? &contents[i] : nullptr); -} - -void PeerConnection::GetOptionsForUnifiedPlanOffer( - const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Rules for generating an offer are dictated by JSEP sections 5.2.1 (Initial - // Offers) and 5.2.2 (Subsequent Offers). - RTC_DCHECK_EQ(session_options->media_description_options.size(), 0); - const ContentInfos no_infos; - const ContentInfos& local_contents = - (local_description() ? local_description()->description()->contents() - : no_infos); - const ContentInfos& remote_contents = - (remote_description() ? remote_description()->description()->contents() - : no_infos); - // The mline indices that can be recycled. New transceivers should reuse these - // slots first. - std::queue recycleable_mline_indices; - // First, go through each media section that exists in either the local or - // remote description and generate a media section in this offer for the - // associated transceiver. If a media section can be recycled, generate a - // default, rejected media section here that can be later overwritten. - for (size_t i = 0; - i < std::max(local_contents.size(), remote_contents.size()); ++i) { - // Either |local_content| or |remote_content| is non-null. - const ContentInfo* local_content = - (i < local_contents.size() ? 
&local_contents[i] : nullptr); - const ContentInfo* current_local_content = - GetContentByIndex(current_local_description(), i); - const ContentInfo* remote_content = - (i < remote_contents.size() ? &remote_contents[i] : nullptr); - const ContentInfo* current_remote_content = - GetContentByIndex(current_remote_description(), i); - bool had_been_rejected = - (current_local_content && current_local_content->rejected) || - (current_remote_content && current_remote_content->rejected); - const std::string& mid = - (local_content ? local_content->name : remote_content->name); - cricket::MediaType media_type = - (local_content ? local_content->media_description()->type() - : remote_content->media_description()->type()); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - // A media section is considered eligible for recycling if it is marked as - // rejected in either the current local or current remote description. - auto transceiver = GetAssociatedTransceiver(mid); - if (!transceiver) { - // No associated transceiver. The media section has been stopped. - recycleable_mline_indices.push(i); - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(media_type, mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); - } else { - // NOTE: a stopping transceiver should be treated as a stopped one in - // createOffer as specified in - // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. 
- if (had_been_rejected && transceiver->stopping()) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - transceiver->media_type(), mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); - recycleable_mline_indices.push(i); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForTransceiver( - transceiver, mid, - /*is_create_offer=*/true)); - // CreateOffer shouldn't really cause any state changes in - // PeerConnection, but we need a way to match new transceivers to new - // media sections in SetLocalDescription and JSEP specifies this is - // done by recording the index of the media section generated for the - // transceiver in the offer. - transceiver->internal()->set_mline_index(i); - } - } - } else { - RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); - if (had_been_rejected) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(mid)); - } else { - RTC_CHECK(GetDataMid()); - if (mid == *GetDataMid()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(mid)); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(mid)); - } - } - } - } - - // Next, look for transceivers that are newly added (that is, are not stopped - // and not associated). Reuse media sections marked as recyclable first, - // otherwise append to the end of the offer. New media sections should be - // added in the order they were added to the PeerConnection. 
- for (const auto& transceiver : transceivers_) { - if (transceiver->mid() || transceiver->stopping()) { - continue; - } - size_t mline_index; - if (!recycleable_mline_indices.empty()) { - mline_index = recycleable_mline_indices.front(); - recycleable_mline_indices.pop(); - session_options->media_description_options[mline_index] = - GetMediaDescriptionOptionsForTransceiver( - transceiver, mid_generator_(), /*is_create_offer=*/true); - } else { - mline_index = session_options->media_description_options.size(); - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForTransceiver( - transceiver, mid_generator_(), /*is_create_offer=*/true)); - } - // See comment above for why CreateOffer changes the transceiver's state. - transceiver->internal()->set_mline_index(mline_index); - } - // Lastly, add a m-section if we have local data channels and an m section - // does not already exist. - if (!GetDataMid() && data_channel_controller_.HasDataChannels()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(mid_generator_())); - } -} - -void PeerConnection::GetOptionsForAnswer( - const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - ExtractSharedMediaSessionOptions(offer_answer_options, session_options); - - if (IsUnifiedPlan()) { - GetOptionsForUnifiedPlanAnswer(offer_answer_options, session_options); - } else { - GetOptionsForPlanBAnswer(offer_answer_options, session_options); - } - - // Intentionally unset the data channel type for RTP data channel. Otherwise - // the RTP data channels would be successfully negotiated by default and the - // unit tests in WebRtcDataBrowserTest will fail when building with chromium. - // We want to leave RTP data channels broken, so people won't try to use them. 
- if (data_channel_controller_.HasRtpDataChannels() || - data_channel_type() != cricket::DCT_RTP) { - session_options->data_channel_type = data_channel_type(); - } - - // Apply ICE renomination flag. - for (auto& options : session_options->media_description_options) { - options.transport_options.enable_ice_renomination = - configuration_.enable_ice_renomination; - } - - session_options->rtcp_cname = rtcp_cname_; - session_options->crypto_options = GetCryptoOptions(); - session_options->pooled_ice_credentials = - network_thread()->Invoke>( - RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, - port_allocator_.get())); -} - -void PeerConnection::GetOptionsForPlanBAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Figure out transceiver directional preferences. - bool send_audio = !GetAudioTransceiver()->internal()->senders().empty(); - bool send_video = !GetVideoTransceiver()->internal()->senders().empty(); - - // By default, generate sendrecv/recvonly m= sections. The direction is also - // restricted by the direction in the offer. - bool recv_audio = true; - bool recv_video = true; - - // The "offer_to_receive_X" options allow those defaults to be overridden. - if (offer_answer_options.offer_to_receive_audio != - RTCOfferAnswerOptions::kUndefined) { - recv_audio = (offer_answer_options.offer_to_receive_audio > 0); - } - if (offer_answer_options.offer_to_receive_video != - RTCOfferAnswerOptions::kUndefined) { - recv_video = (offer_answer_options.offer_to_receive_video > 0); - } - - absl::optional audio_index; - absl::optional video_index; - absl::optional data_index; - - // Generate m= sections that match those in the offer. - // Note that mediasession.cc will handle intersection our preferred - // direction with the offered direction. 
- GenerateMediaDescriptionOptions( - remote_description(), - RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), - RtpTransceiverDirectionFromSendRecv(send_video, recv_video), &audio_index, - &video_index, &data_index, session_options); - - cricket::MediaDescriptionOptions* audio_media_description_options = - !audio_index ? nullptr - : &session_options->media_description_options[*audio_index]; - cricket::MediaDescriptionOptions* video_media_description_options = - !video_index ? nullptr - : &session_options->media_description_options[*video_index]; - - AddPlanBRtpSenderOptions(GetSendersInternal(), - audio_media_description_options, - video_media_description_options, - offer_answer_options.num_simulcast_layers); -} - -void PeerConnection::GetOptionsForUnifiedPlanAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Rules for generating an answer are dictated by JSEP sections 5.3.1 (Initial - // Answers) and 5.3.2 (Subsequent Answers). - RTC_DCHECK(remote_description()); - RTC_DCHECK(remote_description()->GetType() == SdpType::kOffer); - for (const ContentInfo& content : - remote_description()->description()->contents()) { - cricket::MediaType media_type = content.media_description()->type(); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - auto transceiver = GetAssociatedTransceiver(content.name); - RTC_CHECK(transceiver); - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForTransceiver(transceiver, content.name, - /*is_create_offer=*/false)); - } else { - RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); - // Reject all data sections if data channels are disabled. - // Reject a data section if it has already been rejected. - // Reject all data sections except for the first one. 
- if (data_channel_type() == cricket::DCT_NONE || content.rejected || - content.name != *GetDataMid()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(content.name)); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(content.name)); - } - } - } -} - -void PeerConnection::GenerateMediaDescriptionOptions( - const SessionDescriptionInterface* session_desc, - RtpTransceiverDirection audio_direction, - RtpTransceiverDirection video_direction, - absl::optional* audio_index, - absl::optional* video_index, - absl::optional* data_index, - cricket::MediaSessionOptions* session_options) { - for (const cricket::ContentInfo& content : - session_desc->description()->contents()) { - if (IsAudioContent(&content)) { - // If we already have an audio m= section, reject this extra one. - if (*audio_index) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_AUDIO, content.name, - RtpTransceiverDirection::kInactive, /*stopped=*/true)); - } else { - bool stopped = (audio_direction == RtpTransceiverDirection::kInactive); - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_AUDIO, - content.name, audio_direction, - stopped)); - *audio_index = session_options->media_description_options.size() - 1; - } - session_options->media_description_options.back().header_extensions = - channel_manager()->GetSupportedAudioRtpHeaderExtensions(); - } else if (IsVideoContent(&content)) { - // If we already have an video m= section, reject this extra one. 
- if (*video_index) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_VIDEO, content.name, - RtpTransceiverDirection::kInactive, /*stopped=*/true)); - } else { - bool stopped = (video_direction == RtpTransceiverDirection::kInactive); - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_VIDEO, - content.name, video_direction, - stopped)); - *video_index = session_options->media_description_options.size() - 1; - } - session_options->media_description_options.back().header_extensions = - channel_manager()->GetSupportedVideoRtpHeaderExtensions(); - } else { - RTC_DCHECK(IsDataContent(&content)); - // If we already have an data m= section, reject this extra one. - if (*data_index) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(content.name)); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(content.name)); - *data_index = session_options->media_description_options.size() - 1; - } - } - } -} - -cricket::MediaDescriptionOptions -PeerConnection::GetMediaDescriptionOptionsForActiveData( - const std::string& mid) const { - // Direction for data sections is meaningless, but legacy endpoints might - // expect sendrecv. 
- cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, - RtpTransceiverDirection::kSendRecv, - /*stopped=*/false); - AddRtpDataChannelOptions(*data_channel_controller_.rtp_data_channels(), - &options); - return options; -} - -cricket::MediaDescriptionOptions -PeerConnection::GetMediaDescriptionOptionsForRejectedData( - const std::string& mid) const { - cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true); - AddRtpDataChannelOptions(*data_channel_controller_.rtp_data_channels(), - &options); - return options; -} - absl::optional PeerConnection::GetDataMid() const { + RTC_DCHECK_RUN_ON(signaling_thread()); switch (data_channel_type()) { case cricket::DCT_RTP: if (!data_channel_controller_.rtp_data_channel()) { @@ -5373,259 +1763,6 @@ absl::optional PeerConnection::GetDataMid() const { } } -void PeerConnection::RemoveSenders(cricket::MediaType media_type) { - UpdateLocalSenders(std::vector(), media_type); - UpdateRemoteSendersList(std::vector(), false, - media_type, nullptr); -} - -void PeerConnection::UpdateRemoteSendersList( - const cricket::StreamParamsVec& streams, - bool default_sender_needed, - cricket::MediaType media_type, - StreamCollection* new_streams) { - RTC_DCHECK(!IsUnifiedPlan()); - - std::vector* current_senders = - GetRemoteSenderInfos(media_type); - - // Find removed senders. I.e., senders where the sender id or ssrc don't match - // the new StreamParam. - for (auto sender_it = current_senders->begin(); - sender_it != current_senders->end(); - /* incremented manually */) { - const RtpSenderInfo& info = *sender_it; - const cricket::StreamParams* params = - cricket::GetStreamBySsrc(streams, info.first_ssrc); - std::string params_stream_id; - if (params) { - params_stream_id = - (!params->first_stream_id().empty() ? 
params->first_stream_id() - : kDefaultStreamId); - } - bool sender_exists = params && params->id == info.sender_id && - params_stream_id == info.stream_id; - // If this is a default track, and we still need it, don't remove it. - if ((info.stream_id == kDefaultStreamId && default_sender_needed) || - sender_exists) { - ++sender_it; - } else { - OnRemoteSenderRemoved(info, media_type); - sender_it = current_senders->erase(sender_it); - } - } - - // Find new and active senders. - for (const cricket::StreamParams& params : streams) { - if (!params.has_ssrcs()) { - // The remote endpoint has streams, but didn't signal ssrcs. For an active - // sender, this means it is coming from a Unified Plan endpoint,so we just - // create a default. - default_sender_needed = true; - break; - } - - // |params.id| is the sender id and the stream id uses the first of - // |params.stream_ids|. The remote description could come from a Unified - // Plan endpoint, with multiple or no stream_ids() signaled. Since this is - // not supported in Plan B, we just take the first here and create the - // default stream ID if none is specified. - const std::string& stream_id = - (!params.first_stream_id().empty() ? params.first_stream_id() - : kDefaultStreamId); - const std::string& sender_id = params.id; - uint32_t ssrc = params.first_ssrc(); - - rtc::scoped_refptr stream = - remote_streams_->find(stream_id); - if (!stream) { - // This is a new MediaStream. Create a new remote MediaStream. - stream = MediaStreamProxy::Create(rtc::Thread::Current(), - MediaStream::Create(stream_id)); - remote_streams_->AddStream(stream); - new_streams->AddStream(stream); - } - - const RtpSenderInfo* sender_info = - FindSenderInfo(*current_senders, stream_id, sender_id); - if (!sender_info) { - current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); - OnRemoteSenderAdded(current_senders->back(), media_type); - } - } - - // Add default sender if necessary. 
- if (default_sender_needed) { - rtc::scoped_refptr default_stream = - remote_streams_->find(kDefaultStreamId); - if (!default_stream) { - // Create the new default MediaStream. - default_stream = MediaStreamProxy::Create( - rtc::Thread::Current(), MediaStream::Create(kDefaultStreamId)); - remote_streams_->AddStream(default_stream); - new_streams->AddStream(default_stream); - } - std::string default_sender_id = (media_type == cricket::MEDIA_TYPE_AUDIO) - ? kDefaultAudioSenderId - : kDefaultVideoSenderId; - const RtpSenderInfo* default_sender_info = - FindSenderInfo(*current_senders, kDefaultStreamId, default_sender_id); - if (!default_sender_info) { - current_senders->push_back( - RtpSenderInfo(kDefaultStreamId, default_sender_id, /*ssrc=*/0)); - OnRemoteSenderAdded(current_senders->back(), media_type); - } - } -} - -void PeerConnection::OnRemoteSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - RTC_LOG(LS_INFO) << "Creating " << cricket::MediaTypeToString(media_type) - << " receiver for track_id=" << sender_info.sender_id - << " and stream_id=" << sender_info.stream_id; - - MediaStreamInterface* stream = remote_streams_->find(sender_info.stream_id); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - CreateAudioReceiver(stream, sender_info); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - CreateVideoReceiver(stream, sender_info); - } else { - RTC_NOTREACHED() << "Invalid media type"; - } -} - -void PeerConnection::OnRemoteSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - RTC_LOG(LS_INFO) << "Removing " << cricket::MediaTypeToString(media_type) - << " receiver for track_id=" << sender_info.sender_id - << " and stream_id=" << sender_info.stream_id; - - MediaStreamInterface* stream = remote_streams_->find(sender_info.stream_id); - - rtc::scoped_refptr receiver; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - // When the MediaEngine audio channel is destroyed, the RemoteAudioSource - // 
will be notified which will end the AudioRtpReceiver::track(). - receiver = RemoveAndStopReceiver(sender_info); - rtc::scoped_refptr audio_track = - stream->FindAudioTrack(sender_info.sender_id); - if (audio_track) { - stream->RemoveTrack(audio_track); - } - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - // Stopping or destroying a VideoRtpReceiver will end the - // VideoRtpReceiver::track(). - receiver = RemoveAndStopReceiver(sender_info); - rtc::scoped_refptr video_track = - stream->FindVideoTrack(sender_info.sender_id); - if (video_track) { - // There's no guarantee the track is still available, e.g. the track may - // have been removed from the stream by an application. - stream->RemoveTrack(video_track); - } - } else { - RTC_NOTREACHED() << "Invalid media type"; - } - if (receiver) { - Observer()->OnRemoveTrack(receiver); - } -} - -void PeerConnection::UpdateEndedRemoteMediaStreams() { - std::vector> streams_to_remove; - for (size_t i = 0; i < remote_streams_->count(); ++i) { - MediaStreamInterface* stream = remote_streams_->at(i); - if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { - streams_to_remove.push_back(stream); - } - } - - for (auto& stream : streams_to_remove) { - remote_streams_->RemoveStream(stream); - Observer()->OnRemoveStream(std::move(stream)); - } -} - -void PeerConnection::UpdateLocalSenders( - const std::vector& streams, - cricket::MediaType media_type) { - std::vector* current_senders = GetLocalSenderInfos(media_type); - - // Find removed tracks. I.e., tracks where the track id, stream id or ssrc - // don't match the new StreamParam. 
- for (auto sender_it = current_senders->begin(); - sender_it != current_senders->end(); - /* incremented manually */) { - const RtpSenderInfo& info = *sender_it; - const cricket::StreamParams* params = - cricket::GetStreamBySsrc(streams, info.first_ssrc); - if (!params || params->id != info.sender_id || - params->first_stream_id() != info.stream_id) { - OnLocalSenderRemoved(info, media_type); - sender_it = current_senders->erase(sender_it); - } else { - ++sender_it; - } - } - - // Find new and active senders. - for (const cricket::StreamParams& params : streams) { - // The sync_label is the MediaStream label and the |stream.id| is the - // sender id. - const std::string& stream_id = params.first_stream_id(); - const std::string& sender_id = params.id; - uint32_t ssrc = params.first_ssrc(); - const RtpSenderInfo* sender_info = - FindSenderInfo(*current_senders, stream_id, sender_id); - if (!sender_info) { - current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); - OnLocalSenderAdded(current_senders->back(), media_type); - } - } -} - -void PeerConnection::OnLocalSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - RTC_DCHECK(!IsUnifiedPlan()); - auto sender = FindSenderById(sender_info.sender_id); - if (!sender) { - RTC_LOG(LS_WARNING) << "An unknown RtpSender with id " - << sender_info.sender_id - << " has been configured in the local description."; - return; - } - - if (sender->media_type() != media_type) { - RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" - " description with an unexpected media type."; - return; - } - - sender->internal()->set_stream_ids({sender_info.stream_id}); - sender->internal()->SetSsrc(sender_info.first_ssrc); -} - -void PeerConnection::OnLocalSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - auto sender = FindSenderById(sender_info.sender_id); - if (!sender) { - // This is the normal case. 
I.e., RemoveStream has been called and the - // SessionDescriptions has been renegotiated. - return; - } - - // A sender has been removed from the SessionDescription but it's still - // associated with the PeerConnection. This only occurs if the SDP doesn't - // match with the calls to CreateSender, AddStream and RemoveStream. - if (sender->media_type() != media_type) { - RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" - " description with an unexpected media type."; - return; - } - - sender->internal()->SetSsrc(0); -} - void PeerConnection::OnSctpDataChannelClosed(DataChannelInterface* channel) { // Since data_channel_controller doesn't do signals, this // signal is relayed here. @@ -5633,100 +1770,6 @@ void PeerConnection::OnSctpDataChannelClosed(DataChannelInterface* channel) { static_cast(channel)); } -rtc::scoped_refptr> -PeerConnection::GetAudioTransceiver() const { - // This method only works with Plan B SDP, where there is a single - // audio/video transceiver. - RTC_DCHECK(!IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - return transceiver; - } - } - RTC_NOTREACHED(); - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::GetVideoTransceiver() const { - // This method only works with Plan B SDP, where there is a single - // audio/video transceiver. 
- RTC_DCHECK(!IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - return transceiver; - } - } - RTC_NOTREACHED(); - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) const { - for (const auto& transceiver : transceivers_) { - for (auto sender : transceiver->internal()->senders()) { - if (sender->track() == track) { - return sender; - } - } - } - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::FindSenderById(const std::string& sender_id) const { - for (const auto& transceiver : transceivers_) { - for (auto sender : transceiver->internal()->senders()) { - if (sender->id() == sender_id) { - return sender; - } - } - } - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::FindReceiverById(const std::string& receiver_id) const { - for (const auto& transceiver : transceivers_) { - for (auto receiver : transceiver->internal()->receivers()) { - if (receiver->id() == receiver_id) { - return receiver; - } - } - } - return nullptr; -} - -std::vector* -PeerConnection::GetRemoteSenderInfos(cricket::MediaType media_type) { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO); - return (media_type == cricket::MEDIA_TYPE_AUDIO) - ? &remote_audio_sender_infos_ - : &remote_video_sender_infos_; -} - -std::vector* PeerConnection::GetLocalSenderInfos( - cricket::MediaType media_type) { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO); - return (media_type == cricket::MEDIA_TYPE_AUDIO) ? 
&local_audio_sender_infos_ - : &local_video_sender_infos_; -} - -const PeerConnection::RtpSenderInfo* PeerConnection::FindSenderInfo( - const std::vector& infos, - const std::string& stream_id, - const std::string sender_id) const { - for (const RtpSenderInfo& sender_info : infos) { - if (sender_info.stream_id == stream_id && - sender_info.sender_id == sender_id) { - return &sender_info; - } - } - return nullptr; -} - SctpDataChannel* PeerConnection::FindDataChannelBySid(int sid) const { return data_channel_controller_.FindDataChannelBySid(sid); } @@ -5749,12 +1792,10 @@ PeerConnection::InitializePortAllocator_n( // by experiment. if (configuration.disable_ipv6) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); - } else if (absl::StartsWith( - webrtc::field_trial::FindFullName("WebRTC-IPv6Default"), - "Disabled")) { + } else if (absl::StartsWith(context_->trials().Lookup("WebRTC-IPv6Default"), + "Disabled")) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); } - if (configuration.disable_ipv6_on_wifi) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); RTC_LOG(LS_INFO) << "IPv6 candidates on Wi-Fi are disabled."; @@ -5809,6 +1850,7 @@ bool PeerConnection::ReconfigurePortAllocator_n( webrtc::TurnCustomizer* turn_customizer, absl::optional stun_candidate_keepalive_interval, bool have_local_description) { + RTC_DCHECK_RUN_ON(network_thread()); port_allocator_->SetCandidateFilter( ConvertIceTransportTypeToCandidateFilter(type)); // According to JSEP, after setLocalDescription, changing the candidate pool @@ -5831,7 +1873,7 @@ bool PeerConnection::ReconfigurePortAllocator_n( } cricket::ChannelManager* PeerConnection::channel_manager() const { - return factory_->channel_manager(); + return context_->channel_manager(); } bool PeerConnection::StartRtcEventLog_w( @@ -5853,7 +1895,7 @@ void PeerConnection::StopRtcEventLog_w() { cricket::ChannelInterface* PeerConnection::GetChannel( const std::string& content_name) { - for 
(const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (channel && channel->content_name() == content_name) { return channel; @@ -5883,8 +1925,9 @@ bool PeerConnection::GetSctpSslRole(rtc::SSLRole* role) { absl::optional dtls_role; if (sctp_mid_s_) { dtls_role = transport_controller_->GetDtlsRole(*sctp_mid_s_); - if (!dtls_role && is_caller_.has_value()) { - dtls_role = *is_caller_ ? rtc::SSL_SERVER : rtc::SSL_CLIENT; + if (!dtls_role && sdp_handler_->is_caller().has_value()) { + dtls_role = + *sdp_handler_->is_caller() ? rtc::SSL_SERVER : rtc::SSL_CLIENT; } *role = *dtls_role; return true; @@ -5910,155 +1953,6 @@ bool PeerConnection::GetSslRole(const std::string& content_name, return false; } -void PeerConnection::SetSessionError(SessionError error, - const std::string& error_desc) { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (error != session_error_) { - session_error_ = error; - session_error_desc_ = error_desc; - } -} - -RTCError PeerConnection::UpdateSessionState( - SdpType type, - cricket::ContentSource source, - const cricket::SessionDescription* description) { - RTC_DCHECK_RUN_ON(signaling_thread()); - - // If there's already a pending error then no state transition should happen. - // But all call-sites should be verifying this before calling us! - RTC_DCHECK(session_error() == SessionError::kNone); - - // If this is answer-ish we're ready to let media flow. - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - EnableSending(); - } - - // Update the signaling state according to the specified state machine (see - // https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum). - if (type == SdpType::kOffer) { - ChangeSignalingState(source == cricket::CS_LOCAL - ? 
PeerConnectionInterface::kHaveLocalOffer - : PeerConnectionInterface::kHaveRemoteOffer); - } else if (type == SdpType::kPrAnswer) { - ChangeSignalingState(source == cricket::CS_LOCAL - ? PeerConnectionInterface::kHaveLocalPrAnswer - : PeerConnectionInterface::kHaveRemotePrAnswer); - } else { - RTC_DCHECK(type == SdpType::kAnswer); - ChangeSignalingState(PeerConnectionInterface::kStable); - transceiver_stable_states_by_transceivers_.clear(); - have_pending_rtp_data_channel_ = false; - } - - // Update internal objects according to the session description's media - // descriptions. - RTCError error = PushdownMediaDescription(type, source); - if (!error.ok()) { - return error; - } - - return RTCError::OK(); -} - -RTCError PeerConnection::PushdownMediaDescription( - SdpType type, - cricket::ContentSource source) { - const SessionDescriptionInterface* sdesc = - (source == cricket::CS_LOCAL ? local_description() - : remote_description()); - RTC_DCHECK(sdesc); - - // Push down the new SDP media section for each audio/video transceiver. - for (const auto& transceiver : transceivers_) { - const ContentInfo* content_info = - FindMediaSectionForTransceiver(transceiver, sdesc); - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (!channel || !content_info || content_info->rejected) { - continue; - } - const MediaContentDescription* content_desc = - content_info->media_description(); - if (!content_desc) { - continue; - } - std::string error; - bool success = (source == cricket::CS_LOCAL) - ? channel->SetLocalContent(content_desc, type, &error) - : channel->SetRemoteContent(content_desc, type, &error); - if (!success) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); - } - } - - // If using the RtpDataChannel, push down the new SDP section for it too. 
- if (data_channel_controller_.rtp_data_channel()) { - const ContentInfo* data_content = - cricket::GetFirstDataContent(sdesc->description()); - if (data_content && !data_content->rejected) { - const MediaContentDescription* data_desc = - data_content->media_description(); - if (data_desc) { - std::string error; - bool success = - (source == cricket::CS_LOCAL) - ? data_channel_controller_.rtp_data_channel()->SetLocalContent( - data_desc, type, &error) - : data_channel_controller_.rtp_data_channel()->SetRemoteContent( - data_desc, type, &error); - if (!success) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); - } - } - } - } - - // Need complete offer/answer with an SCTP m= section before starting SCTP, - // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19 - if (sctp_mid_s_ && local_description() && remote_description()) { - rtc::scoped_refptr sctp_transport = - transport_controller_->GetSctpTransport(*sctp_mid_s_); - auto local_sctp_description = cricket::GetFirstSctpDataContentDescription( - local_description()->description()); - auto remote_sctp_description = cricket::GetFirstSctpDataContentDescription( - remote_description()->description()); - if (sctp_transport && local_sctp_description && remote_sctp_description) { - int max_message_size; - // A remote max message size of zero means "any size supported". - // We configure the connection with our own max message size. 
- if (remote_sctp_description->max_message_size() == 0) { - max_message_size = local_sctp_description->max_message_size(); - } else { - max_message_size = - std::min(local_sctp_description->max_message_size(), - remote_sctp_description->max_message_size()); - } - sctp_transport->Start(local_sctp_description->port(), - remote_sctp_description->port(), max_message_size); - } - } - - return RTCError::OK(); -} - -RTCError PeerConnection::PushdownTransportDescription( - cricket::ContentSource source, - SdpType type) { - RTC_DCHECK_RUN_ON(signaling_thread()); - - if (source == cricket::CS_LOCAL) { - const SessionDescriptionInterface* sdesc = local_description(); - RTC_DCHECK(sdesc); - return transport_controller_->SetLocalDescription(type, - sdesc->description()); - } else { - const SessionDescriptionInterface* sdesc = remote_description(); - RTC_DCHECK(sdesc); - return transport_controller_->SetRemoteDescription(type, - sdesc->description()); - } -} - bool PeerConnection::GetTransportDescription( const SessionDescription* description, const std::string& content_name, @@ -6075,47 +1969,6 @@ bool PeerConnection::GetTransportDescription( return true; } -cricket::IceConfig PeerConnection::ParseIceConfig( - const PeerConnectionInterface::RTCConfiguration& config) const { - cricket::ContinualGatheringPolicy gathering_policy; - switch (config.continual_gathering_policy) { - case PeerConnectionInterface::GATHER_ONCE: - gathering_policy = cricket::GATHER_ONCE; - break; - case PeerConnectionInterface::GATHER_CONTINUALLY: - gathering_policy = cricket::GATHER_CONTINUALLY; - break; - default: - RTC_NOTREACHED(); - gathering_policy = cricket::GATHER_ONCE; - } - - cricket::IceConfig ice_config; - ice_config.receiving_timeout = RTCConfigurationToIceConfigOptionalInt( - config.ice_connection_receiving_timeout); - ice_config.prioritize_most_likely_candidate_pairs = - config.prioritize_most_likely_ice_candidate_pairs; - ice_config.backup_connection_ping_interval = - 
RTCConfigurationToIceConfigOptionalInt( - config.ice_backup_candidate_pair_ping_interval); - ice_config.continual_gathering_policy = gathering_policy; - ice_config.presume_writable_when_fully_relayed = - config.presume_writable_when_fully_relayed; - ice_config.surface_ice_candidates_on_ice_transport_type_changed = - config.surface_ice_candidates_on_ice_transport_type_changed; - ice_config.ice_check_interval_strong_connectivity = - config.ice_check_interval_strong_connectivity; - ice_config.ice_check_interval_weak_connectivity = - config.ice_check_interval_weak_connectivity; - ice_config.ice_check_min_interval = config.ice_check_min_interval; - ice_config.ice_unwritable_timeout = config.ice_unwritable_timeout; - ice_config.ice_unwritable_min_checks = config.ice_unwritable_min_checks; - ice_config.ice_inactive_timeout = config.ice_inactive_timeout; - ice_config.stun_keepalive_interval = config.stun_candidate_keepalive_interval; - ice_config.network_preference = config.network_preference; - return ice_config; -} - std::vector PeerConnection::GetDataChannelStats() const { RTC_DCHECK_RUN_ON(signaling_thread()); return data_channel_controller_.GetDataChannelStats(); @@ -6146,7 +1999,7 @@ std::map PeerConnection::GetTransportNamesByMid() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::map transport_names_by_mid; - for (const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (channel) { transport_names_by_mid[channel->content_name()] = @@ -6212,24 +2065,13 @@ cricket::DataChannelType PeerConnection::data_channel_type() const { bool PeerConnection::IceRestartPending(const std::string& content_name) const { RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_ice_restarts_.find(content_name) != - pending_ice_restarts_.end(); + return sdp_handler_->IceRestartPending(content_name); } bool PeerConnection::NeedsIceRestart(const 
std::string& content_name) const { return transport_controller_->NeedsIceRestart(content_name); } -void PeerConnection::OnCertificateReady( - const rtc::scoped_refptr& certificate) { - transport_controller_->SetLocalCertificate(certificate); -} - -void PeerConnection::OnDtlsSrtpSetupFailure(cricket::BaseChannel*, bool rtcp) { - SetSessionError(SessionError::kTransport, - rtcp ? kDtlsSrtpSetupFailureRtcp : kDtlsSrtpSetupFailureRtp); -} - void PeerConnection::OnTransportControllerConnectionState( cricket::IceConnectionState state) { switch (state) { @@ -6291,9 +2133,7 @@ void PeerConnection::OnTransportControllerCandidatesGathered( // Use transport_name as the candidate media id. std::unique_ptr candidate( new JsepIceCandidate(transport_name, sdp_mline_index, *citer)); - if (local_description()) { - mutable_local_description()->AddCandidate(candidate.get()); - } + sdp_handler_->AddLocalIceCandidate(candidate.get()); OnIceCandidate(std::move(candidate)); } } @@ -6315,10 +2155,7 @@ void PeerConnection::OnTransportControllerCandidatesRemoved( return; } } - - if (local_description()) { - mutable_local_description()->RemoveCandidates(candidates); - } + sdp_handler_->RemoveLocalIceCandidates(candidates); OnIceCandidatesRemoved(candidates); } @@ -6327,6 +2164,10 @@ void PeerConnection::OnTransportControllerCandidateChanged( OnSelectedCandidatePairChanged(event); } +void PeerConnection::OnErrorDemuxingPacket(uint32_t ssrc) { + message_handler_.PostErrorDemuxingPacket(demuxing_observer_, ssrc); +} + void PeerConnection::OnTransportControllerDtlsHandshakeError( rtc::SSLHandshakeError error) { RTC_HISTOGRAM_ENUMERATION( @@ -6334,20 +2175,6 @@ void PeerConnection::OnTransportControllerDtlsHandshakeError( static_cast(rtc::SSLHandshakeError::MAX_VALUE)); } -void PeerConnection::EnableSending() { - for (const auto& transceiver : transceivers_) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel && !channel->enabled()) { - 
channel->Enable(true); - } - } - - if (data_channel_controller_.rtp_data_channel() && - !data_channel_controller_.rtp_data_channel()->enabled()) { - data_channel_controller_.rtp_data_channel()->Enable(true); - } -} - // Returns the media index for a local ice candidate given the content name. bool PeerConnection::GetLocalCandidateMediaIndex( const std::string& content_name, @@ -6368,258 +2195,6 @@ bool PeerConnection::GetLocalCandidateMediaIndex( return content_found; } -bool PeerConnection::UseCandidatesInSessionDescription( - const SessionDescriptionInterface* remote_desc) { - if (!remote_desc) { - return true; - } - bool ret = true; - - for (size_t m = 0; m < remote_desc->number_of_mediasections(); ++m) { - const IceCandidateCollection* candidates = remote_desc->candidates(m); - for (size_t n = 0; n < candidates->count(); ++n) { - const IceCandidateInterface* candidate = candidates->at(n); - bool valid = false; - if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) { - if (valid) { - RTC_LOG(LS_INFO) - << "UseCandidatesInSessionDescription: Not ready to use " - "candidate."; - } - continue; - } - ret = UseCandidate(candidate); - if (!ret) { - break; - } - } - } - return ret; -} - -bool PeerConnection::UseCandidate(const IceCandidateInterface* candidate) { - RTCErrorOr result = - FindContentInfo(remote_description(), candidate); - if (!result.ok()) { - RTC_LOG(LS_ERROR) << "UseCandidate: Invalid candidate. " - << result.error().message(); - return false; - } - std::vector candidates; - candidates.push_back(candidate->candidate()); - // Invoking BaseSession method to handle remote candidates. - RTCError error = transport_controller_->AddRemoteCandidates( - result.value()->name, candidates); - if (error.ok()) { - ReportRemoteIceCandidateAdded(candidate->candidate()); - // Candidates successfully submitted for checking. 
- if (ice_connection_state_ == PeerConnectionInterface::kIceConnectionNew || - ice_connection_state_ == - PeerConnectionInterface::kIceConnectionDisconnected) { - // If state is New, then the session has just gotten its first remote ICE - // candidates, so go to Checking. - // If state is Disconnected, the session is re-using old candidates or - // receiving additional ones, so go to Checking. - // If state is Connected, stay Connected. - // TODO(bemasc): If state is Connected, and the new candidates are for a - // newly added transport, then the state actually _should_ move to - // checking. Add a way to distinguish that case. - SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); - } - // TODO(bemasc): If state is Completed, go back to Connected. - } else { - RTC_LOG(LS_WARNING) << error.message(); - } - return true; -} - -RTCErrorOr PeerConnection::FindContentInfo( - const SessionDescriptionInterface* description, - const IceCandidateInterface* candidate) { - if (candidate->sdp_mline_index() >= 0) { - size_t mediacontent_index = - static_cast(candidate->sdp_mline_index()); - size_t content_size = description->description()->contents().size(); - if (mediacontent_index < content_size) { - return &description->description()->contents()[mediacontent_index]; - } else { - return RTCError(RTCErrorType::INVALID_RANGE, - "Media line index (" + - rtc::ToString(candidate->sdp_mline_index()) + - ") out of range (number of mlines: " + - rtc::ToString(content_size) + ")."); - } - } else if (!candidate->sdp_mid().empty()) { - auto& contents = description->description()->contents(); - auto it = absl::c_find_if( - contents, [candidate](const cricket::ContentInfo& content_info) { - return content_info.mid() == candidate->sdp_mid(); - }); - if (it == contents.end()) { - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "Mid " + candidate->sdp_mid() + - " specified but no media section with that mid found."); - } else { - return &*it; - } - } - - return 
RTCError(RTCErrorType::INVALID_PARAMETER, - "Neither sdp_mline_index nor sdp_mid specified."); -} - -void PeerConnection::RemoveUnusedChannels(const SessionDescription* desc) { - // Destroy video channel first since it may have a pointer to the - // voice channel. - const cricket::ContentInfo* video_info = cricket::GetFirstVideoContent(desc); - if (!video_info || video_info->rejected) { - DestroyTransceiverChannel(GetVideoTransceiver()); - } - - const cricket::ContentInfo* audio_info = cricket::GetFirstAudioContent(desc); - if (!audio_info || audio_info->rejected) { - DestroyTransceiverChannel(GetAudioTransceiver()); - } - - const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc); - if (!data_info || data_info->rejected) { - DestroyDataChannelTransport(); - } -} - -RTCErrorOr PeerConnection::GetEarlyBundleGroup( - const SessionDescription& desc) const { - const cricket::ContentGroup* bundle_group = nullptr; - if (configuration_.bundle_policy == - PeerConnectionInterface::kBundlePolicyMaxBundle) { - bundle_group = desc.GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - if (!bundle_group) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "max-bundle configured but session description " - "has no BUNDLE group"); - } - } - return bundle_group; -} - -RTCError PeerConnection::CreateChannels(const SessionDescription& desc) { - // Creating the media channels. Transports should already have been created - // at this point. 
- const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(&desc); - if (voice && !voice->rejected && - !GetAudioTransceiver()->internal()->channel()) { - cricket::VoiceChannel* voice_channel = CreateVoiceChannel(voice->name); - if (!voice_channel) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create voice channel."); - } - GetAudioTransceiver()->internal()->SetChannel(voice_channel); - } - - const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc); - if (video && !video->rejected && - !GetVideoTransceiver()->internal()->channel()) { - cricket::VideoChannel* video_channel = CreateVideoChannel(video->name); - if (!video_channel) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create video channel."); - } - GetVideoTransceiver()->internal()->SetChannel(video_channel); - } - - const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); - if (data_channel_type() != cricket::DCT_NONE && data && !data->rejected && - !data_channel_controller_.rtp_data_channel() && - !data_channel_controller_.data_channel_transport()) { - if (!CreateDataChannel(data->name)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); - } - } - - return RTCError::OK(); -} - -// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. 
-cricket::VoiceChannel* PeerConnection::CreateVoiceChannel( - const std::string& mid) { - RtpTransportInternal* rtp_transport = GetRtpTransport(mid); - - cricket::VoiceChannel* voice_channel = channel_manager()->CreateVoiceChannel( - call_ptr_, configuration_.media_config, rtp_transport, signaling_thread(), - mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_, - audio_options_); - if (!voice_channel) { - return nullptr; - } - voice_channel->SignalDtlsSrtpSetupFailure.connect( - this, &PeerConnection::OnDtlsSrtpSetupFailure); - voice_channel->SignalSentPacket.connect(this, - &PeerConnection::OnSentPacket_w); - voice_channel->SetRtpTransport(rtp_transport); - - return voice_channel; -} - -// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. -cricket::VideoChannel* PeerConnection::CreateVideoChannel( - const std::string& mid) { - RtpTransportInternal* rtp_transport = GetRtpTransport(mid); - - cricket::VideoChannel* video_channel = channel_manager()->CreateVideoChannel( - call_ptr_, configuration_.media_config, rtp_transport, signaling_thread(), - mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_, video_options_, - video_bitrate_allocator_factory_.get()); - if (!video_channel) { - return nullptr; - } - video_channel->SignalDtlsSrtpSetupFailure.connect( - this, &PeerConnection::OnDtlsSrtpSetupFailure); - video_channel->SignalSentPacket.connect(this, - &PeerConnection::OnSentPacket_w); - video_channel->SetRtpTransport(rtp_transport); - - return video_channel; -} - -bool PeerConnection::CreateDataChannel(const std::string& mid) { - switch (data_channel_type()) { - case cricket::DCT_SCTP: - if (network_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::SetupDataChannelTransport_n, this, - mid))) { - sctp_mid_s_ = mid; - } else { - return false; - } - return true; - case cricket::DCT_RTP: - default: - RtpTransportInternal* rtp_transport = GetRtpTransport(mid); - // TODO(bugs.webrtc.org/9987): set_rtp_data_channel() should be 
called on - // the network thread like set_data_channel_transport is. - data_channel_controller_.set_rtp_data_channel( - channel_manager()->CreateRtpDataChannel( - configuration_.media_config, rtp_transport, signaling_thread(), - mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_)); - if (!data_channel_controller_.rtp_data_channel()) { - return false; - } - data_channel_controller_.rtp_data_channel() - ->SignalDtlsSrtpSetupFailure.connect( - this, &PeerConnection::OnDtlsSrtpSetupFailure); - data_channel_controller_.rtp_data_channel()->SignalSentPacket.connect( - this, &PeerConnection::OnSentPacket_w); - data_channel_controller_.rtp_data_channel()->SetRtpTransport( - rtp_transport); - have_pending_rtp_data_channel_ = true; - return true; - } - return false; -} - Call::Stats PeerConnection::GetCallStats() { if (!worker_thread()->IsCurrent()) { return worker_thread()->Invoke( @@ -6695,178 +2270,6 @@ bool PeerConnection::ValidateBundleSettings(const SessionDescription* desc) { return true; } -bool PeerConnection::HasRtcpMuxEnabled(const cricket::ContentInfo* content) { - return content->media_description()->rtcp_mux(); -} - -static RTCError ValidateMids(const cricket::SessionDescription& description) { - std::set mids; - for (const cricket::ContentInfo& content : description.contents()) { - if (content.name.empty()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "A media section is missing a MID attribute."); - } - if (!mids.insert(content.name).second) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Duplicate a=mid value '" + content.name + "'."); - } - } - return RTCError::OK(); -} - -RTCError PeerConnection::ValidateSessionDescription( - const SessionDescriptionInterface* sdesc, - cricket::ContentSource source) { - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } - - if (!sdesc || !sdesc->description()) { - 
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); - } - - SdpType type = sdesc->GetType(); - if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || - (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_STATE, - "Called in wrong state: " + GetSignalingStateString(signaling_state())); - } - - RTCError error = ValidateMids(*sdesc->description()); - if (!error.ok()) { - return error; - } - - // Verify crypto settings. - std::string crypto_error; - if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED || - dtls_enabled_) { - RTCError crypto_error = VerifyCrypto(sdesc->description(), dtls_enabled_); - if (!crypto_error.ok()) { - return crypto_error; - } - } - - // Verify ice-ufrag and ice-pwd. - if (!VerifyIceUfragPwdPresent(sdesc->description())) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kSdpWithoutIceUfragPwd); - } - - if (!ValidateBundleSettings(sdesc->description())) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kBundleWithoutRtcpMux); - } - - // TODO(skvlad): When the local rtcp-mux policy is Require, reject any - // m-lines that do not rtcp-mux enabled. - - // Verify m-lines in Answer when compared against Offer. - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // With an answer we want to compare the new answer session description with - // the offer's session description from the current negotiation. - const cricket::SessionDescription* offer_desc = - (source == cricket::CS_LOCAL) ? remote_description()->description() - : local_description()->description(); - if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || - !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), - type)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInAnswer); - } - } else { - // The re-offers should respect the order of m= sections in current - // description. 
See RFC3264 Section 8 paragraph 4 for more details. - // With a re-offer, either the current local or current remote descriptions - // could be the most up to date, so we would like to check against both of - // them if they exist. It could be the case that one of them has a 0 port - // for a media section, but the other does not. This is important to check - // against in the case that we are recycling an m= section. - const cricket::SessionDescription* current_desc = nullptr; - const cricket::SessionDescription* secondary_current_desc = nullptr; - if (local_description()) { - current_desc = local_description()->description(); - if (remote_description()) { - secondary_current_desc = remote_description()->description(); - } - } else if (remote_description()) { - current_desc = remote_description()->description(); - } - if (current_desc && - !MediaSectionsInSameOrder(*current_desc, secondary_current_desc, - *sdesc->description(), type)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInSubsequentOffer); - } - } - - if (IsUnifiedPlan()) { - // Ensure that each audio and video media section has at most one - // "StreamParams". This will return an error if receiving a session - // description from a "Plan B" endpoint which adds multiple tracks of the - // same type. With Unified Plan, there can only be at most one track per - // media section. 
- for (const ContentInfo& content : sdesc->description()->contents()) { - const MediaContentDescription& desc = *content.media_description(); - if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || - desc.type() == cricket::MEDIA_TYPE_VIDEO) && - desc.streams().size() > 1u) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Media section has more than one track specified " - "with a=ssrc lines which is not supported with " - "Unified Plan."); - } - } - } - - return RTCError::OK(); -} - -bool PeerConnection::ExpectSetLocalDescription(SdpType type) { - PeerConnectionInterface::SignalingState state = signaling_state(); - if (type == SdpType::kOffer) { - return (state == PeerConnectionInterface::kStable) || - (state == PeerConnectionInterface::kHaveLocalOffer); - } else { - RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); - return (state == PeerConnectionInterface::kHaveRemoteOffer) || - (state == PeerConnectionInterface::kHaveLocalPrAnswer); - } -} - -bool PeerConnection::ExpectSetRemoteDescription(SdpType type) { - PeerConnectionInterface::SignalingState state = signaling_state(); - if (type == SdpType::kOffer) { - return (state == PeerConnectionInterface::kStable) || - (state == PeerConnectionInterface::kHaveRemoteOffer); - } else { - RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); - return (state == PeerConnectionInterface::kHaveLocalOffer) || - (state == PeerConnectionInterface::kHaveRemotePrAnswer); - } -} - -const char* PeerConnection::SessionErrorToString(SessionError error) const { - switch (error) { - case SessionError::kNone: - return "ERROR_NONE"; - case SessionError::kContent: - return "ERROR_CONTENT"; - case SessionError::kTransport: - return "ERROR_TRANSPORT"; - } - RTC_NOTREACHED(); - return ""; -} - -std::string PeerConnection::GetSessionErrorMsg() { - rtc::StringBuilder desc; - desc << kSessionError << SessionErrorToString(session_error()) << ". 
"; - desc << kSessionErrorDesc << session_error_desc() << "."; - return desc.Release(); -} - void PeerConnection::ReportSdpFormatReceived( const SessionDescriptionInterface& remote_offer) { int num_audio_mlines = 0; @@ -6893,8 +2296,21 @@ void PeerConnection::ReportSdpFormatReceived( } else if (num_audio_tracks > 0 || num_video_tracks > 0) { format = kSdpFormatReceivedSimple; } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceived", format, - kSdpFormatReceivedMax); + switch (remote_offer.GetType()) { + case SdpType::kOffer: + // Historically only offers were counted. + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceived", + format, kSdpFormatReceivedMax); + break; + case SdpType::kAnswer: + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceivedAnswer", + format, kSdpFormatReceivedMax); + break; + default: + RTC_LOG(LS_ERROR) << "Can not report SdpFormatReceived for " + << SdpTypeToString(remote_offer.GetType()); + break; + } } void PeerConnection::ReportIceCandidateCollected( @@ -6911,110 +2327,19 @@ void PeerConnection::ReportIceCandidateCollected( } } -void PeerConnection::ReportRemoteIceCandidateAdded( - const cricket::Candidate& candidate) { - NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED); - if (candidate.address().IsPrivateIP()) { - NoteUsageEvent(UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED); - } - if (candidate.address().IsUnresolvedIP()) { - NoteUsageEvent(UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED); - } - if (candidate.address().family() == AF_INET6) { - NoteUsageEvent(UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED); - } -} - void PeerConnection::NoteUsageEvent(UsageEvent event) { RTC_DCHECK_RUN_ON(signaling_thread()); - usage_event_accumulator_ |= static_cast(event); + usage_pattern_.NoteUsageEvent(event); } void PeerConnection::ReportUsagePattern() const { - RTC_DLOG(LS_INFO) << "Usage signature is " << usage_event_accumulator_; - RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.PeerConnection.UsagePattern", - 
usage_event_accumulator_, - static_cast(UsageEvent::MAX_VALUE)); - const int bad_bits = - static_cast(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED) | - static_cast(UsageEvent::CANDIDATE_COLLECTED); - const int good_bits = - static_cast(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED) | - static_cast(UsageEvent::REMOTE_CANDIDATE_ADDED) | - static_cast(UsageEvent::ICE_STATE_CONNECTED); - if ((usage_event_accumulator_ & bad_bits) == bad_bits && - (usage_event_accumulator_ & good_bits) == 0) { - // If called after close(), we can't report, because observer may have - // been deallocated, and therefore pointer is null. Write to log instead. - if (observer_) { - Observer()->OnInterestingUsage(usage_event_accumulator_); - } else { - RTC_LOG(LS_INFO) << "Interesting usage signature " - << usage_event_accumulator_ - << " observed after observer shutdown"; - } - } -} - -void PeerConnection::ReportNegotiatedSdpSemantics( - const SessionDescriptionInterface& answer) { - SdpSemanticNegotiated semantics_negotiated; - switch (answer.description()->msid_signaling()) { - case 0: - semantics_negotiated = kSdpSemanticNegotiatedNone; - break; - case cricket::kMsidSignalingMediaSection: - semantics_negotiated = kSdpSemanticNegotiatedUnifiedPlan; - break; - case cricket::kMsidSignalingSsrcAttribute: - semantics_negotiated = kSdpSemanticNegotiatedPlanB; - break; - case cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute: - semantics_negotiated = kSdpSemanticNegotiatedMixed; - break; - default: - RTC_NOTREACHED(); - } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpSemanticNegotiated", - semantics_negotiated, kSdpSemanticNegotiatedMax); -} - -// We need to check the local/remote description for the Transport instead of -// the session, because a new Transport added during renegotiation may have -// them unset while the session has them set from the previous negotiation. 
-// Not doing so may trigger the auto generation of transport description and -// mess up DTLS identity information, ICE credential, etc. -bool PeerConnection::ReadyToUseRemoteCandidate( - const IceCandidateInterface* candidate, - const SessionDescriptionInterface* remote_desc, - bool* valid) { - *valid = true; - - const SessionDescriptionInterface* current_remote_desc = - remote_desc ? remote_desc : remote_description(); - - if (!current_remote_desc) { - return false; - } - - RTCErrorOr result = - FindContentInfo(current_remote_desc, candidate); - if (!result.ok()) { - RTC_LOG(LS_ERROR) << "ReadyToUseRemoteCandidate: Invalid candidate. " - << result.error().message(); - - *valid = false; - return false; - } - - std::string transport_name = GetTransportName(result.value()->name); - return !transport_name.empty(); + usage_pattern_.ReportUsagePattern(observer_); } bool PeerConnection::SrtpRequired() const { return (dtls_enabled_ || - webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED); + sdp_handler_->webrtc_session_desc_factory()->SdesPolicy() == + cricket::SEC_REQUIRED); } void PeerConnection::OnTransportControllerGatheringState( @@ -7024,13 +2349,18 @@ void PeerConnection::OnTransportControllerGatheringState( OnIceGatheringChange(PeerConnectionInterface::kIceGatheringGathering); } else if (state == cricket::kIceGatheringComplete) { OnIceGatheringChange(PeerConnectionInterface::kIceGatheringComplete); + } else if (state == cricket::kIceGatheringNew) { + OnIceGatheringChange(PeerConnectionInterface::kIceGatheringNew); + } else { + RTC_LOG(LS_ERROR) << "Unknown state received: " << state; + RTC_NOTREACHED(); } } void PeerConnection::ReportTransportStats() { std::map> media_types_by_transport_name; - for (const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { if (transceiver->internal()->channel()) { const std::string& transport_name = transceiver->internal()->channel()->transport_name(); 
@@ -7086,7 +2416,7 @@ void PeerConnection::ReportBestConnectionState( GetIceCandidatePairCounter(local, remote), kIceCandidatePairMax); } else { - RTC_CHECK(0); + RTC_CHECK_NOTREACHED(); } // Increment the counter for IP type. @@ -7179,79 +2509,6 @@ void PeerConnection::OnSentPacket_w(const rtc::SentPacket& sent_packet) { call_->OnSentPacket(sent_packet); } -const std::string PeerConnection::GetTransportName( - const std::string& content_name) { - cricket::ChannelInterface* channel = GetChannel(content_name); - if (channel) { - return channel->transport_name(); - } - if (data_channel_controller_.data_channel_transport()) { - RTC_DCHECK(sctp_mid_s_); - if (content_name == *sctp_mid_s_) { - return *sctp_transport_name(); - } - } - // Return an empty string if failed to retrieve the transport name. - return ""; -} - -void PeerConnection::DestroyTransceiverChannel( - rtc::scoped_refptr> - transceiver) { - RTC_DCHECK(transceiver); - - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel) { - transceiver->internal()->SetChannel(nullptr); - DestroyChannelInterface(channel); - } -} - -void PeerConnection::DestroyDataChannelTransport() { - if (data_channel_controller_.rtp_data_channel()) { - data_channel_controller_.OnTransportChannelClosed(); - DestroyChannelInterface(data_channel_controller_.rtp_data_channel()); - data_channel_controller_.set_rtp_data_channel(nullptr); - } - - // Note: Cannot use rtc::Bind to create a functor to invoke because it will - // grab a reference to this PeerConnection. If this is called from the - // PeerConnection destructor, the RefCountedObject vtable will have already - // been destroyed (since it is a subclass of PeerConnection) and using - // rtc::Bind will cause "Pure virtual function called" error to appear. 
- - if (sctp_mid_s_) { - data_channel_controller_.OnTransportChannelClosed(); - network_thread()->Invoke(RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(network_thread()); - TeardownDataChannelTransport_n(); - }); - sctp_mid_s_.reset(); - } -} - -void PeerConnection::DestroyChannelInterface( - cricket::ChannelInterface* channel) { - RTC_DCHECK(channel); - switch (channel->media_type()) { - case cricket::MEDIA_TYPE_AUDIO: - channel_manager()->DestroyVoiceChannel( - static_cast(channel)); - break; - case cricket::MEDIA_TYPE_VIDEO: - channel_manager()->DestroyVideoChannel( - static_cast(channel)); - break; - case cricket::MEDIA_TYPE_DATA: - channel_manager()->DestroyRtpDataChannel( - static_cast(channel)); - break; - default: - RTC_NOTREACHED() << "Unknown media type: " << channel->media_type(); - break; - } -} - bool PeerConnection::OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, @@ -7269,12 +2526,6 @@ bool PeerConnection::OnTransportChanged( return ret; } -void PeerConnection::OnSetStreams() { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (IsUnifiedPlan()) - UpdateNegotiationNeeded(); -} - PeerConnectionObserver* PeerConnection::Observer() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(observer_); @@ -7282,11 +2533,12 @@ PeerConnectionObserver* PeerConnection::Observer() const { } CryptoOptions PeerConnection::GetCryptoOptions() { + RTC_DCHECK_RUN_ON(signaling_thread()); // TODO(bugs.webrtc.org/9891) - Remove PeerConnectionFactory::CryptoOptions // after it has been removed. return configuration_.crypto_options.has_value() ? 
*configuration_.crypto_options - : factory_->options().crypto_options; + : options_.crypto_options; } void PeerConnection::ClearStatsCache() { @@ -7296,283 +2548,52 @@ void PeerConnection::ClearStatsCache() { } } +bool PeerConnection::ShouldFireNegotiationNeededEvent(uint32_t event_id) { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sdp_handler_->ShouldFireNegotiationNeededEvent(event_id); +} + void PeerConnection::RequestUsagePatternReportForTesting() { - signaling_thread()->Post(RTC_FROM_HERE, this, MSG_REPORT_USAGE_PATTERN, - nullptr); + message_handler_.RequestUsagePatternReport( + [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ReportUsagePattern(); + }, + /* delay_ms= */ 0); } -void PeerConnection::UpdateNegotiationNeeded() { +std::function +PeerConnection::InitializeRtcpCallback() { RTC_DCHECK_RUN_ON(signaling_thread()); - if (!IsUnifiedPlan()) { - Observer()->OnRenegotiationNeeded(); - return; - } - // If connection's [[IsClosed]] slot is true, abort these steps. - if (IsClosed()) - return; + auto flag = + worker_thread()->Invoke>( + RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(worker_thread()); + if (!call_) + return rtc::scoped_refptr(); + if (!call_safety_) + call_safety_.reset(new ScopedTaskSafety()); + return call_safety_->flag(); + }); - // If connection's signaling state is not "stable", abort these steps. - if (signaling_state() != kStable) - return; + if (!flag) + return [](const rtc::CopyOnWriteBuffer&, int64_t) {}; - // NOTE - // The negotiation-needed flag will be updated once the state transitions to - // "stable", as part of the steps for setting an RTCSessionDescription. - - // If the result of checking if negotiation is needed is false, clear the - // negotiation-needed flag by setting connection's [[NegotiationNeeded]] slot - // to false, and abort these steps. 
- bool is_negotiation_needed = CheckIfNegotiationIsNeeded(); - if (!is_negotiation_needed) { - is_negotiation_needed_ = false; - return; - } - - // If connection's [[NegotiationNeeded]] slot is already true, abort these - // steps. - if (is_negotiation_needed_) - return; - - // Set connection's [[NegotiationNeeded]] slot to true. - is_negotiation_needed_ = true; - - // Queue a task that runs the following steps: - // If connection's [[IsClosed]] slot is true, abort these steps. - // If connection's [[NegotiationNeeded]] slot is false, abort these steps. - // Fire an event named negotiationneeded at connection. - Observer()->OnRenegotiationNeeded(); -} - -bool PeerConnection::CheckIfNegotiationIsNeeded() { - RTC_DCHECK_RUN_ON(signaling_thread()); - // 1. If any implementation-specific negotiation is required, as described at - // the start of this section, return true. - - // 2. If connection.[[LocalIceCredentialsToReplace]] is not empty, return - // true. - if (local_ice_credentials_to_replace_->HasIceCredentials()) { - return true; - } - - // 3. Let description be connection.[[CurrentLocalDescription]]. - const SessionDescriptionInterface* description = current_local_description(); - if (!description) - return true; - - // 4. If connection has created any RTCDataChannels, and no m= section in - // description has been negotiated yet for data, return true. - if (data_channel_controller_.HasSctpDataChannels()) { - if (!cricket::GetFirstDataContent(description->description()->contents())) - return true; - } - - // 5. 
For each transceiver in connection's set of transceivers, perform the - // following checks: - for (const auto& transceiver : transceivers_) { - const ContentInfo* current_local_msection = - FindTransceiverMSection(transceiver.get(), description); - - const ContentInfo* current_remote_msection = FindTransceiverMSection( - transceiver.get(), current_remote_description()); - - // 5.4 If transceiver is stopped and is associated with an m= section, - // but the associated m= section is not yet rejected in - // connection.[[CurrentLocalDescription]] or - // connection.[[CurrentRemoteDescription]], return true. - if (transceiver->stopped()) { - RTC_DCHECK(transceiver->stopping()); - if (current_local_msection && !current_local_msection->rejected && - ((current_remote_msection && !current_remote_msection->rejected) || - !current_remote_msection)) { - return true; - } - continue; - } - - // 5.1 If transceiver.[[Stopping]] is true and transceiver.[[Stopped]] is - // false, return true. - if (transceiver->stopping() && !transceiver->stopped()) - return true; - - // 5.2 If transceiver isn't stopped and isn't yet associated with an m= - // section in description, return true. - if (!current_local_msection) - return true; - - const MediaContentDescription* current_local_media_description = - current_local_msection->media_description(); - // 5.3 If transceiver isn't stopped and is associated with an m= section - // in description then perform the following checks: - - // 5.3.1 If transceiver.[[Direction]] is "sendrecv" or "sendonly", and the - // associated m= section in description either doesn't contain a single - // "a=msid" line, or the number of MSIDs from the "a=msid" lines in this - // m= section, or the MSID values themselves, differ from what is in - // transceiver.sender.[[AssociatedMediaStreamIds]], return true. 
- if (RtpTransceiverDirectionHasSend(transceiver->direction())) { - if (current_local_media_description->streams().size() == 0) - return true; - - std::vector msection_msids; - for (const auto& stream : current_local_media_description->streams()) { - for (const std::string& msid : stream.stream_ids()) - msection_msids.push_back(msid); - } - - std::vector transceiver_msids = - transceiver->sender()->stream_ids(); - if (msection_msids.size() != transceiver_msids.size()) - return true; - - absl::c_sort(transceiver_msids); - absl::c_sort(msection_msids); - if (transceiver_msids != msection_msids) - return true; - } - - // 5.3.2 If description is of type "offer", and the direction of the - // associated m= section in neither connection.[[CurrentLocalDescription]] - // nor connection.[[CurrentRemoteDescription]] matches - // transceiver.[[Direction]], return true. - if (description->GetType() == SdpType::kOffer) { - if (!current_remote_description()) - return true; - - if (!current_remote_msection) - return true; - - RtpTransceiverDirection current_local_direction = - current_local_media_description->direction(); - RtpTransceiverDirection current_remote_direction = - current_remote_msection->media_description()->direction(); - if (transceiver->direction() != current_local_direction && - transceiver->direction() != - RtpTransceiverDirectionReversed(current_remote_direction)) { - return true; - } - } - - // 5.3.3 If description is of type "answer", and the direction of the - // associated m= section in the description does not match - // transceiver.[[Direction]] intersected with the offered direction (as - // described in [JSEP] (section 5.3.1.)), return true. - if (description->GetType() == SdpType::kAnswer) { - if (!remote_description()) - return true; - - const ContentInfo* offered_remote_msection = - FindTransceiverMSection(transceiver.get(), remote_description()); - - RtpTransceiverDirection offered_direction = - offered_remote_msection - ? 
offered_remote_msection->media_description()->direction() - : RtpTransceiverDirection::kInactive; - - if (current_local_media_description->direction() != - (RtpTransceiverDirectionIntersection( - transceiver->direction(), - RtpTransceiverDirectionReversed(offered_direction)))) { - return true; - } - } - } - - // If all the preceding checks were performed and true was not returned, - // nothing remains to be negotiated; return false. - return false; -} - -RTCError PeerConnection::Rollback(SdpType sdp_type) { - auto state = signaling_state(); - if (state != PeerConnectionInterface::kHaveLocalOffer && - state != PeerConnectionInterface::kHaveRemoteOffer) { - return RTCError(RTCErrorType::INVALID_STATE, - "Called in wrong signalingState: " + - GetSignalingStateString(signaling_state())); - } - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(IsUnifiedPlan()); - std::vector> all_added_streams; - std::vector> all_removed_streams; - std::vector> removed_receivers; - - for (auto&& transceivers_stable_state_pair : - transceiver_stable_states_by_transceivers_) { - auto transceiver = transceivers_stable_state_pair.first; - auto state = transceivers_stable_state_pair.second; - - if (state.remote_stream_ids()) { - std::vector> added_streams; - std::vector> removed_streams; - SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), - state.remote_stream_ids().value(), - &added_streams, &removed_streams); - all_added_streams.insert(all_added_streams.end(), added_streams.begin(), - added_streams.end()); - all_removed_streams.insert(all_removed_streams.end(), - removed_streams.begin(), - removed_streams.end()); - if (!state.has_m_section() && !state.newly_created()) { - continue; - } - } - - RTC_DCHECK(transceiver->internal()->mid().has_value()); - DestroyTransceiverChannel(transceiver); - - if (signaling_state() == PeerConnectionInterface::kHaveRemoteOffer && - transceiver->receiver()) { - removed_receivers.push_back(transceiver->receiver()); - } - if 
(state.newly_created()) { - if (transceiver->internal()->reused_for_addtrack()) { - transceiver->internal()->set_created_by_addtrack(true); - } else { - int remaining_transceiver_count = 0; - for (auto&& t : transceivers_) { - if (t != transceiver) { - transceivers_[remaining_transceiver_count++] = t; - } - } - transceivers_.resize(remaining_transceiver_count); - } - } - transceiver->internal()->sender_internal()->set_transport(nullptr); - transceiver->internal()->receiver_internal()->set_transport(nullptr); - transceiver->internal()->set_mid(state.mid()); - transceiver->internal()->set_mline_index(state.mline_index()); - } - transport_controller_->RollbackTransports(); - if (have_pending_rtp_data_channel_) { - DestroyDataChannelTransport(); - have_pending_rtp_data_channel_ = false; - } - transceiver_stable_states_by_transceivers_.clear(); - pending_local_description_.reset(); - pending_remote_description_.reset(); - ChangeSignalingState(PeerConnectionInterface::kStable); - - // Once all processing has finished, fire off callbacks. - for (const auto& receiver : removed_receivers) { - Observer()->OnRemoveTrack(receiver); - } - for (const auto& stream : all_added_streams) { - Observer()->OnAddStream(stream); - } - for (const auto& stream : all_removed_streams) { - Observer()->OnRemoveStream(stream); - } - - // The assumption is that in case of implicit rollback UpdateNegotiationNeeded - // gets called in SetRemoteDescription. - if (sdp_type == SdpType::kRollback) { - UpdateNegotiationNeeded(); - if (is_negotiation_needed_) { - Observer()->OnRenegotiationNeeded(); - } - } - return RTCError::OK(); + return [this, flag = std::move(flag)](const rtc::CopyOnWriteBuffer& packet, + int64_t packet_time_us) { + RTC_DCHECK_RUN_ON(network_thread()); + // TODO(bugs.webrtc.org/11993): We should actually be delivering this call + // directly to the Call class somehow directly on the network thread and not + // incur this hop here. 
The DeliverPacket() method will eventually just have + // to hop back over to the network thread. + worker_thread()->PostTask(ToQueuedTask(flag, [this, packet, + packet_time_us] { + RTC_DCHECK_RUN_ON(worker_thread()); + call_->Receiver()->DeliverPacket(MediaType::ANY, packet, packet_time_us); + })); + }; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h index 41cb68c64..9c0541cc4 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h @@ -11,6 +11,8 @@ #ifndef PC_PEER_CONNECTION_H_ #define PC_PEER_CONNECTION_H_ +#include +#include #include #include #include @@ -18,33 +20,86 @@ #include #include +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/async_resolver_factory.h" +#include "api/audio_options.h" +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/packet_socket_factory.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/transport/bitrate_settings.h" #include "api/transport/data_channel_transport_interface.h" +#include "api/transport/enums.h" #include "api/turn_customizer.h" +#include 
"api/video/video_bitrate_allocator_factory.h" +#include "call/call.h" +#include "media/base/media_channel.h" +#include "media/base/media_engine.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" +#include "pc/channel.h" +#include "pc/channel_interface.h" +#include "pc/channel_manager.h" +#include "pc/connection_context.h" #include "pc/data_channel_controller.h" -#include "pc/ice_server_parsing.h" +#include "pc/data_channel_utils.h" +#include "pc/dtls_transport.h" #include "pc/jsep_transport_controller.h" -#include "pc/peer_connection_factory.h" #include "pc/peer_connection_internal.h" +#include "pc/peer_connection_message_handler.h" #include "pc/rtc_stats_collector.h" +#include "pc/rtp_data_channel.h" +#include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/rtp_transport_internal.h" +#include "pc/sctp_data_channel.h" #include "pc/sctp_transport.h" +#include "pc/sdp_offer_answer.h" +#include "pc/session_description.h" #include "pc/stats_collector.h" #include "pc/stream_collection.h" -#include "pc/webrtc_session_description_factory.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/operations_chain.h" -#include "rtc_base/race_checker.h" +#include "pc/transceiver_list.h" +#include "pc/transport_stats.h" +#include "pc/usage_pattern.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/deprecation.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include 
"rtc_base/unique_id_generator.h" -#include "rtc_base/weak_ptr.h" namespace webrtc { -class MediaStreamObserver; -class VideoRtpReceiver; -class RtcEventLog; - // PeerConnection is the implementation of the PeerConnection object as defined // by the PeerConnectionInterface API surface. // The class currently is solely responsible for the following: @@ -61,62 +116,19 @@ class RtcEventLog; // - Generating stats. class PeerConnection : public PeerConnectionInternal, public JsepTransportController::Observer, - public RtpSenderBase::SetStreamsObserver, - public rtc::MessageHandler, public sigslot::has_slots<> { public: - // A bit in the usage pattern is registered when its defining event occurs at - // least once. - enum class UsageEvent : int { - TURN_SERVER_ADDED = 0x01, - STUN_SERVER_ADDED = 0x02, - DATA_ADDED = 0x04, - AUDIO_ADDED = 0x08, - VIDEO_ADDED = 0x10, - // |SetLocalDescription| returns successfully. - SET_LOCAL_DESCRIPTION_SUCCEEDED = 0x20, - // |SetRemoteDescription| returns successfully. - SET_REMOTE_DESCRIPTION_SUCCEEDED = 0x40, - // A local candidate (with type host, server-reflexive, or relay) is - // collected. - CANDIDATE_COLLECTED = 0x80, - // A remote candidate is successfully added via |AddIceCandidate|. - ADD_ICE_CANDIDATE_SUCCEEDED = 0x100, - ICE_STATE_CONNECTED = 0x200, - CLOSE_CALLED = 0x400, - // A local candidate with private IP is collected. - PRIVATE_CANDIDATE_COLLECTED = 0x800, - // A remote candidate with private IP is added, either via AddiceCandidate - // or from the remote description. - REMOTE_PRIVATE_CANDIDATE_ADDED = 0x1000, - // A local mDNS candidate is collected. - MDNS_CANDIDATE_COLLECTED = 0x2000, - // A remote mDNS candidate is added, either via AddIceCandidate or from the - // remote description. - REMOTE_MDNS_CANDIDATE_ADDED = 0x4000, - // A local candidate with IPv6 address is collected. 
- IPV6_CANDIDATE_COLLECTED = 0x8000, - // A remote candidate with IPv6 address is added, either via AddIceCandidate - // or from the remote description. - REMOTE_IPV6_CANDIDATE_ADDED = 0x10000, - // A remote candidate (with type host, server-reflexive, or relay) is - // successfully added, either via AddIceCandidate or from the remote - // description. - REMOTE_CANDIDATE_ADDED = 0x20000, - // An explicit host-host candidate pair is selected, i.e. both the local and - // the remote candidates have the host type. This does not include candidate - // pairs formed with equivalent prflx remote candidates, e.g. a host-prflx - // pair where the prflx candidate has the same base as a host candidate of - // the remote peer. - DIRECT_CONNECTION_SELECTED = 0x40000, - MAX_VALUE = 0x80000, - }; - - explicit PeerConnection(PeerConnectionFactory* factory, - std::unique_ptr event_log, - std::unique_ptr call); - - bool Initialize( + // Creates a PeerConnection and initializes it with the given values. + // If the initialization fails, the function releases the PeerConnection + // and returns nullptr. + // + // Note that the function takes ownership of dependencies, and will + // either use them or release them, whether it succeeds or fails. + static rtc::scoped_refptr Create( + rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + std::unique_ptr event_log, + std::unique_ptr call, const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies); @@ -143,18 +155,6 @@ class PeerConnection : public PeerConnectionInternal, cricket::MediaType media_type, const RtpTransceiverInit& init) override; - // Gets the DTLS SSL certificate associated with the audio transport on the - // remote side. This will become populated once the DTLS connection with the - // peer has been completed, as indicated by the ICE connection state - // transitioning to kIceConnectionCompleted. 
- // Note that this will be removed once we implement RTCDtlsTransport which - // has standardized method for getting this information. - // See https://www.w3.org/TR/webrtc/#rtcdtlstransport-interface - std::unique_ptr GetRemoteAudioSSLCertificate(); - - // Version of the above method that returns the full certificate chain. - std::unique_ptr GetRemoteAudioSSLCertChain(); - rtc::scoped_refptr CreateSender( const std::string& kind, const std::string& stream_id) override; @@ -169,6 +169,7 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr CreateDataChannel( const std::string& label, const DataChannelInit* config) override; + // WARNING: LEGACY. See peerconnectioninterface.h bool GetStats(StatsObserver* observer, webrtc::MediaStreamTrackInterface* track, @@ -261,14 +262,14 @@ class PeerConnection : public PeerConnectionInternal, void Close() override; rtc::Thread* signaling_thread() const final { - return factory_->signaling_thread(); + return context_->signaling_thread(); } // PeerConnectionInternal implementation. 
rtc::Thread* network_thread() const final { - return factory_->network_thread(); + return context_->network_thread(); } - rtc::Thread* worker_thread() const final { return factory_->worker_thread(); } + rtc::Thread* worker_thread() const final { return context_->worker_thread(); } std::string session_id() const override { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -284,7 +285,7 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr>> GetTransceiversInternal() const override { RTC_DCHECK_RUN_ON(signaling_thread()); - return transceivers_; + return rtp_manager()->transceivers()->List(); } sigslot::signal1& SignalRtpDataChannelCreated() override { @@ -324,174 +325,94 @@ class PeerConnection : public PeerConnectionInternal, PeerConnectionObserver* Observer() const; bool IsClosed() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return signaling_state_ == PeerConnectionInterface::kClosed; + return sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed; } // Get current SSL role used by SCTP's underlying transport. bool GetSctpSslRole(rtc::SSLRole* role); // Handler for the "channel closed" signal void OnSctpDataChannelClosed(DataChannelInterface* channel); - // Functions made public for testing. 
- void ReturnHistogramVeryQuicklyForTesting() { + bool ShouldFireNegotiationNeededEvent(uint32_t event_id) override; + + // Functions needed by SdpOfferAnswerHandler + StatsCollector* stats() { RTC_DCHECK_RUN_ON(signaling_thread()); - return_histogram_very_quickly_ = true; + return stats_.get(); + } + DataChannelController* data_channel_controller() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return &data_channel_controller_; + } + bool dtls_enabled() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return dtls_enabled_; + } + const PeerConnectionInterface::RTCConfiguration* configuration() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return &configuration_; } - void RequestUsagePatternReportForTesting(); absl::optional sctp_mid() { RTC_DCHECK_RUN_ON(signaling_thread()); return sctp_mid_s_; } + PeerConnectionMessageHandler* message_handler() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return &message_handler_; + } - protected: - ~PeerConnection() override; + RtpTransmissionManager* rtp_manager() { return rtp_manager_.get(); } + const RtpTransmissionManager* rtp_manager() const { + return rtp_manager_.get(); + } + cricket::ChannelManager* channel_manager() const; - private: - class ImplicitCreateSessionDescriptionObserver; - friend class ImplicitCreateSessionDescriptionObserver; - class SetSessionDescriptionObserverAdapter; - friend class SetSessionDescriptionObserverAdapter; + JsepTransportController* transport_controller() { + return transport_controller_.get(); + } + cricket::PortAllocator* port_allocator() { return port_allocator_.get(); } + Call* call_ptr() { return call_ptr_; } - // Represents the [[LocalIceCredentialsToReplace]] internal slot in the spec. - // It makes the next CreateOffer() produce new ICE credentials even if - // RTCOfferAnswerOptions::ice_restart is false. 
- // https://w3c.github.io/webrtc-pc/#dfn-localufragstoreplace - // TODO(hbos): When JsepTransportController/JsepTransport supports rollback, - // move this type of logic to JsepTransportController/JsepTransport. - class LocalIceCredentialsToReplace; + ConnectionContext* context() { return context_.get(); } + const PeerConnectionFactoryInterface::Options* options() const { + return &options_; + } + cricket::DataChannelType data_channel_type() const; + void SetIceConnectionState(IceConnectionState new_state); + void NoteUsageEvent(UsageEvent event); - struct RtpSenderInfo { - RtpSenderInfo() : first_ssrc(0) {} - RtpSenderInfo(const std::string& stream_id, - const std::string sender_id, - uint32_t ssrc) - : stream_id(stream_id), sender_id(sender_id), first_ssrc(ssrc) {} - bool operator==(const RtpSenderInfo& other) { - return this->stream_id == other.stream_id && - this->sender_id == other.sender_id && - this->first_ssrc == other.first_ssrc; - } - std::string stream_id; - std::string sender_id; - // An RtpSender can have many SSRCs. The first one is used as a sort of ID - // for communicating with the lower layers. - uint32_t first_ssrc; - }; + // Report the UMA metric SdpFormatReceived for the given remote offer. + void ReportSdpFormatReceived(const SessionDescriptionInterface& remote_offer); - // Captures partial state to be used for rollback. Applicable only in - // Unified Plan. 
- class TransceiverStableState { - public: - TransceiverStableState() {} - void set_newly_created(); - void SetMSectionIfUnset(absl::optional mid, - absl::optional mline_index); - void SetRemoteStreamIdsIfUnset(const std::vector& ids); - absl::optional mid() const { return mid_; } - absl::optional mline_index() const { return mline_index_; } - absl::optional> remote_stream_ids() const { - return remote_stream_ids_; - } - bool has_m_section() const { return has_m_section_; } - bool newly_created() const { return newly_created_; } + // Returns true if the PeerConnection is configured to use Unified Plan + // semantics for creating offers/answers and setting local/remote + // descriptions. If this is true the RtpTransceiver API will also be available + // to the user. If this is false, Plan B semantics are assumed. + // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once + // sufficient time has passed. + bool IsUnifiedPlan() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return is_unified_plan_; + } + bool ValidateBundleSettings(const cricket::SessionDescription* desc); - private: - absl::optional mid_; - absl::optional mline_index_; - absl::optional> remote_stream_ids_; - // Indicates that mid value from stable state has been captured and - // that rollback has to restore the transceiver. Also protects against - // subsequent overwrites. - bool has_m_section_ = false; - // Indicates that the transceiver was created as part of applying a - // description to track potential need for removing transceiver during - // rollback. - bool newly_created_ = false; - }; + // Returns the MID for the data section associated with either the + // RtpDataChannel or SCTP data channel, if it has been set. If no data + // channels are configured this will return nullopt. + absl::optional GetDataMid() const; - // Implements MessageHandler. 
- void OnMessage(rtc::Message* msg) override; + void SetSctpDataMid(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_mid_s_ = mid; + } + void ResetSctpDataMid() { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_mid_s_.reset(); + } - // Plan B helpers for getting the voice/video media channels for the single - // audio/video transceiver, if it exists. - cricket::VoiceMediaChannel* voice_media_channel() const - RTC_RUN_ON(signaling_thread()); - cricket::VideoMediaChannel* video_media_channel() const - RTC_RUN_ON(signaling_thread()); - - std::vector>> - GetSendersInternal() const RTC_RUN_ON(signaling_thread()); - std::vector< - rtc::scoped_refptr>> - GetReceiversInternal() const RTC_RUN_ON(signaling_thread()); - - rtc::scoped_refptr> - GetAudioTransceiver() const RTC_RUN_ON(signaling_thread()); - rtc::scoped_refptr> - GetVideoTransceiver() const RTC_RUN_ON(signaling_thread()); - - rtc::scoped_refptr> - GetFirstAudioTransceiver() const RTC_RUN_ON(signaling_thread()); - - // Implementation of the offer/answer exchange operations. These are chained - // onto the |operations_chain_| when the public CreateOffer(), CreateAnswer(), - // SetLocalDescription() and SetRemoteDescription() methods are invoked. 
- void DoCreateOffer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer); - void DoCreateAnswer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer); - void DoSetLocalDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer); - void DoSetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer); - - void CreateAudioReceiver(MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) - RTC_RUN_ON(signaling_thread()); - - void CreateVideoReceiver(MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) - RTC_RUN_ON(signaling_thread()); - rtc::scoped_refptr RemoveAndStopReceiver( - const RtpSenderInfo& remote_sender_info) RTC_RUN_ON(signaling_thread()); - - // May be called either by AddStream/RemoveStream, or when a track is - // added/removed from a stream previously added via AddStream. - void AddAudioTrack(AudioTrackInterface* track, MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void RemoveAudioTrack(AudioTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void AddVideoTrack(VideoTrackInterface* track, MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void RemoveVideoTrack(VideoTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - - // AddTrack implementation when Unified Plan is specified. - RTCErrorOr> AddTrackUnifiedPlan( - rtc::scoped_refptr track, - const std::vector& stream_ids) - RTC_RUN_ON(signaling_thread()); - // AddTrack implementation when Plan B is specified. - RTCErrorOr> AddTrackPlanB( - rtc::scoped_refptr track, - const std::vector& stream_ids) - RTC_RUN_ON(signaling_thread()); - - // Returns the first RtpTransceiver suitable for a newly added track, if such - // transceiver is available. 
- rtc::scoped_refptr> - FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track) - RTC_RUN_ON(signaling_thread()); - - rtc::scoped_refptr> - FindTransceiverBySender(rtc::scoped_refptr sender) - RTC_RUN_ON(signaling_thread()); + // Returns the CryptoOptions for this PeerConnection. This will always + // return the RTCConfiguration.crypto_options if set and will only default + // back to the PeerConnectionFactory settings if nothing was set. + CryptoOptions GetCryptoOptions(); // Internal implementation for AddTransceiver family of methods. If // |fire_callback| is set, fires OnRenegotiationNeeded callback if successful. @@ -499,28 +420,54 @@ class PeerConnection : public PeerConnectionInternal, cricket::MediaType media_type, rtc::scoped_refptr track, const RtpTransceiverInit& init, - bool fire_callback = true) RTC_RUN_ON(signaling_thread()); + bool fire_callback = true); - rtc::scoped_refptr> - CreateSender(cricket::MediaType media_type, - const std::string& id, - rtc::scoped_refptr track, - const std::vector& stream_ids, - const std::vector& send_encodings); + // Returns rtp transport, result can not be nullptr. + RtpTransportInternal* GetRtpTransport(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + auto rtp_transport = transport_controller_->GetRtpTransport(mid); + RTC_DCHECK(rtp_transport); + return rtp_transport; + } - rtc::scoped_refptr> - CreateReceiver(cricket::MediaType media_type, const std::string& receiver_id); + // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by + // this session. + bool SrtpRequired() const RTC_RUN_ON(signaling_thread()); + + void OnSentPacket_w(const rtc::SentPacket& sent_packet); + + bool SetupDataChannelTransport_n(const std::string& mid) + RTC_RUN_ON(network_thread()); + void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread()); + cricket::ChannelInterface* GetChannel(const std::string& content_name); + + // Functions made public for testing. 
+ void ReturnHistogramVeryQuicklyForTesting() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return_histogram_very_quickly_ = true; + } + void RequestUsagePatternReportForTesting(); + + protected: + // Available for rtc::scoped_refptr creation + PeerConnection(rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + bool is_unified_plan, + std::unique_ptr event_log, + std::unique_ptr call, + PeerConnectionDependencies& dependencies); + + ~PeerConnection() override; + + private: + bool Initialize( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies); - // Create a new RtpTransceiver of the given type and add it to the list of - // transceivers. rtc::scoped_refptr> - CreateAndAddTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> - receiver) RTC_RUN_ON(signaling_thread()); - - void SetIceConnectionState(IceConnectionState new_state) + FindTransceiverBySender(rtc::scoped_refptr sender) RTC_RUN_ON(signaling_thread()); + void SetStandardizedIceConnectionState( PeerConnectionInterface::IceConnectionState new_state) RTC_RUN_ON(signaling_thread()); @@ -549,304 +496,9 @@ class PeerConnection : public PeerConnectionInternal, const cricket::CandidatePairChangeEvent& event) RTC_RUN_ON(signaling_thread()); - // Update the state, signaling if necessary. - void ChangeSignalingState(SignalingState signaling_state) - RTC_RUN_ON(signaling_thread()); - - // Signals from MediaStreamObserver. 
- void OnAudioTrackAdded(AudioTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void OnAudioTrackRemoved(AudioTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void OnVideoTrackAdded(VideoTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void OnVideoTrackRemoved(VideoTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - - void PostSetSessionDescriptionSuccess( - SetSessionDescriptionObserver* observer); - void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer, - RTCError&& error); - void PostCreateSessionDescriptionFailure( - CreateSessionDescriptionObserver* observer, - RTCError error); - - // Synchronous implementations of SetLocalDescription/SetRemoteDescription - // that return an RTCError instead of invoking a callback. - RTCError ApplyLocalDescription( - std::unique_ptr desc); - RTCError ApplyRemoteDescription( - std::unique_ptr desc); - - // Updates the local RtpTransceivers according to the JSEP rules. Called as - // part of setting the local/remote description. - RTCError UpdateTransceiversAndDataChannels( - cricket::ContentSource source, - const SessionDescriptionInterface& new_session, - const SessionDescriptionInterface* old_local_description, - const SessionDescriptionInterface* old_remote_description) - RTC_RUN_ON(signaling_thread()); - - // Either creates or destroys the transceiver's BaseChannel according to the - // given media section. - RTCError UpdateTransceiverChannel( - rtc::scoped_refptr> - transceiver, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread()); - - // Either creates or destroys the local data channel according to the given - // media section. 
- RTCError UpdateDataChannel(cricket::ContentSource source, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) - RTC_RUN_ON(signaling_thread()); - - // Associate the given transceiver according to the JSEP rules. - RTCErrorOr< - rtc::scoped_refptr>> - AssociateTransceiver(cricket::ContentSource source, - SdpType type, - size_t mline_index, - const cricket::ContentInfo& content, - const cricket::ContentInfo* old_local_content, - const cricket::ContentInfo* old_remote_content) - RTC_RUN_ON(signaling_thread()); - - // Returns the RtpTransceiver, if found, that is associated to the given MID. - rtc::scoped_refptr> - GetAssociatedTransceiver(const std::string& mid) const - RTC_RUN_ON(signaling_thread()); - - // Returns the RtpTransceiver, if found, that was assigned to the given mline - // index in CreateOffer. - rtc::scoped_refptr> - GetTransceiverByMLineIndex(size_t mline_index) const - RTC_RUN_ON(signaling_thread()); - - // Returns an RtpTransciever, if available, that can be used to receive the - // given media type according to JSEP rules. - rtc::scoped_refptr> - FindAvailableTransceiverToReceive(cricket::MediaType media_type) const - RTC_RUN_ON(signaling_thread()); - - // Returns the media section in the given session description that is - // associated with the RtpTransceiver. Returns null if none found or this - // RtpTransceiver is not associated. Logic varies depending on the - // SdpSemantics specified in the configuration. - const cricket::ContentInfo* FindMediaSectionForTransceiver( - rtc::scoped_refptr> - transceiver, - const SessionDescriptionInterface* sdesc) const - RTC_RUN_ON(signaling_thread()); - - // Runs the algorithm **set the associated remote streams** specified in - // https://w3c.github.io/webrtc-pc/#set-associated-remote-streams. 
- void SetAssociatedRemoteStreams( - rtc::scoped_refptr receiver, - const std::vector& stream_ids, - std::vector>* added_streams, - std::vector>* removed_streams) - RTC_RUN_ON(signaling_thread()); - - // Runs the algorithm **process the removal of a remote track** specified in - // the WebRTC specification. - // This method will update the following lists: - // |remove_list| is the list of transceivers for which the receiving track is - // being removed. - // |removed_streams| is the list of streams which no longer have a receiving - // track so should be removed. - // https://w3c.github.io/webrtc-pc/#process-remote-track-removal - void ProcessRemovalOfRemoteTrack( - rtc::scoped_refptr> - transceiver, - std::vector>* remove_list, - std::vector>* removed_streams) - RTC_RUN_ON(signaling_thread()); - - void RemoveRemoteStreamsIfEmpty( - const std::vector>& - remote_streams, - std::vector>* removed_streams) - RTC_RUN_ON(signaling_thread()); void OnNegotiationNeeded(); - // Returns a MediaSessionOptions struct with options decided by |options|, - // the local MediaStreams and DataChannels. 
- void GetOptionsForOffer(const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForPlanBOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForUnifiedPlanOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - - RTCError HandleLegacyOfferOptions(const RTCOfferAnswerOptions& options) - RTC_RUN_ON(signaling_thread()); - void RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MediaType media_type) RTC_RUN_ON(signaling_thread()); - void AddUpToOneReceivingTransceiverOfType(cricket::MediaType media_type); - std::vector< - rtc::scoped_refptr>> - GetReceivingTransceiversOfType(cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Returns a MediaSessionOptions struct with options decided by - // |constraints|, the local MediaStreams and DataChannels. - void GetOptionsForAnswer(const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForPlanBAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForUnifiedPlanAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - - // Generates MediaDescriptionOptions for the |session_opts| based on existing - // local description or remote description. 
- void GenerateMediaDescriptionOptions( - const SessionDescriptionInterface* session_desc, - RtpTransceiverDirection audio_direction, - RtpTransceiverDirection video_direction, - absl::optional* audio_index, - absl::optional* video_index, - absl::optional* data_index, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - - // Generates the active MediaDescriptionOptions for the local data channel - // given the specified MID. - cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForActiveData( - const std::string& mid) const RTC_RUN_ON(signaling_thread()); - - // Generates the rejected MediaDescriptionOptions for the local data channel - // given the specified MID. - cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData( - const std::string& mid) const RTC_RUN_ON(signaling_thread()); - - // Returns the MID for the data section associated with either the - // RtpDataChannel or SCTP data channel, if it has been set. If no data - // channels are configured this will return nullopt. - absl::optional GetDataMid() const RTC_RUN_ON(signaling_thread()); - - // Remove all local and remote senders of type |media_type|. - // Called when a media type is rejected (m-line set to port 0). - void RemoveSenders(cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Makes sure a MediaStreamTrack is created for each StreamParam in |streams|, - // and existing MediaStreamTracks are removed if there is no corresponding - // StreamParam. If |default_track_needed| is true, a default MediaStreamTrack - // is created if it doesn't exist; if false, it's removed if it exists. - // |media_type| is the type of the |streams| and can be either audio or video. - // If a new MediaStream is created it is added to |new_streams|. 
- void UpdateRemoteSendersList( - const std::vector& streams, - bool default_track_needed, - cricket::MediaType media_type, - StreamCollection* new_streams) RTC_RUN_ON(signaling_thread()); - - // Triggered when a remote sender has been seen for the first time in a remote - // session description. It creates a remote MediaStreamTrackInterface - // implementation and triggers CreateAudioReceiver or CreateVideoReceiver. - void OnRemoteSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Triggered when a remote sender has been removed from a remote session - // description. It removes the remote sender with id |sender_id| from a remote - // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver. - void OnRemoteSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Finds remote MediaStreams without any tracks and removes them from - // |remote_streams_| and notifies the observer that the MediaStreams no longer - // exist. - void UpdateEndedRemoteMediaStreams() RTC_RUN_ON(signaling_thread()); - - // Loops through the vector of |streams| and finds added and removed - // StreamParams since last time this method was called. - // For each new or removed StreamParam, OnLocalSenderSeen or - // OnLocalSenderRemoved is invoked. - void UpdateLocalSenders(const std::vector& streams, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Triggered when a local sender has been seen for the first time in a local - // session description. 
- // This method triggers CreateAudioSender or CreateVideoSender if the rtp - // streams in the local SessionDescription can be mapped to a MediaStreamTrack - // in a MediaStream in |local_streams_| - void OnLocalSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Triggered when a local sender has been removed from a local session - // description. - // This method triggers DestroyAudioSender or DestroyVideoSender if a stream - // has been removed from the local SessionDescription and the stream can be - // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|. - void OnLocalSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Returns true if the PeerConnection is configured to use Unified Plan - // semantics for creating offers/answers and setting local/remote - // descriptions. If this is true the RtpTransceiver API will also be available - // to the user. If this is false, Plan B semantics are assumed. - // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once - // sufficient time has passed. - bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread()) { - return configuration_.sdp_semantics == SdpSemantics::kUnifiedPlan; - } - - // The offer/answer machinery assumes the media section MID is present and - // unique. To support legacy end points that do not supply a=mid lines, this - // method will modify the session description to add MIDs generated according - // to the SDP semantics. - void FillInMissingRemoteMids(cricket::SessionDescription* remote_description) - RTC_RUN_ON(signaling_thread()); - - // Return the RtpSender with the given track attached. - rtc::scoped_refptr> - FindSenderForTrack(MediaStreamTrackInterface* track) const - RTC_RUN_ON(signaling_thread()); - - // Return the RtpSender with the given id, or null if none exists. 
- rtc::scoped_refptr> - FindSenderById(const std::string& sender_id) const - RTC_RUN_ON(signaling_thread()); - - // Return the RtpReceiver with the given id, or null if none exists. - rtc::scoped_refptr> - FindReceiverById(const std::string& receiver_id) const - RTC_RUN_ON(signaling_thread()); - - std::vector* GetRemoteSenderInfos( - cricket::MediaType media_type); - std::vector* GetLocalSenderInfos( - cricket::MediaType media_type); - const RtpSenderInfo* FindSenderInfo(const std::vector& infos, - const std::string& stream_id, - const std::string sender_id) const; // Returns the specified SCTP DataChannel in sctp_data_channels_, // or nullptr if not found. @@ -882,66 +534,6 @@ class PeerConnection : public PeerConnectionInternal, // This function should only be called from the worker thread. void StopRtcEventLog_w(); - // Ensures the configuration doesn't have any parameters with invalid values, - // or values that conflict with other parameters. - // - // Returns RTCError::OK() if there are no issues. - RTCError ValidateConfiguration(const RTCConfiguration& config) const; - - cricket::ChannelManager* channel_manager() const; - - enum class SessionError { - kNone, // No error. - kContent, // Error in BaseChannel SetLocalContent/SetRemoteContent. - kTransport, // Error from the underlying transport. - }; - - // Returns the last error in the session. See the enum above for details. - SessionError session_error() const RTC_RUN_ON(signaling_thread()) { - return session_error_; - } - const std::string& session_error_desc() const { return session_error_desc_; } - - cricket::ChannelInterface* GetChannel(const std::string& content_name); - - cricket::IceConfig ParseIceConfig( - const PeerConnectionInterface::RTCConfiguration& config) const; - - cricket::DataChannelType data_channel_type() const; - - // Called when an RTCCertificate is generated or retrieved by - // WebRTCSessionDescriptionFactory. Should happen before setLocalDescription. 
- void OnCertificateReady( - const rtc::scoped_refptr& certificate); - void OnDtlsSrtpSetupFailure(cricket::BaseChannel*, bool rtcp); - - // Non-const versions of local_description()/remote_description(), for use - // internally. - SessionDescriptionInterface* mutable_local_description() - RTC_RUN_ON(signaling_thread()) { - return pending_local_description_ ? pending_local_description_.get() - : current_local_description_.get(); - } - SessionDescriptionInterface* mutable_remote_description() - RTC_RUN_ON(signaling_thread()) { - return pending_remote_description_ ? pending_remote_description_.get() - : current_remote_description_.get(); - } - - // Updates the error state, signaling if necessary. - void SetSessionError(SessionError error, const std::string& error_desc); - - RTCError UpdateSessionState(SdpType type, - cricket::ContentSource source, - const cricket::SessionDescription* description); - // Push the media parts of the local or remote session description - // down to all of the channels. - RTCError PushdownMediaDescription(SdpType type, cricket::ContentSource source) - RTC_RUN_ON(signaling_thread()); - - RTCError PushdownTransportDescription(cricket::ContentSource source, - SdpType type); - // Returns true and the TransportInfo of the given |content_name| // from |description|. Returns false if it's not available. static bool GetTransportDescription( @@ -949,89 +541,12 @@ class PeerConnection : public PeerConnectionInternal, const std::string& content_name, cricket::TransportDescription* info); - // Enables media channels to allow sending of media. - // This enables media to flow on all configured audio/video channels and the - // RtpDataChannel. - void EnableSending() RTC_RUN_ON(signaling_thread()); - - // Destroys all BaseChannels and destroys the SCTP data channel, if present. - void DestroyAllChannels() RTC_RUN_ON(signaling_thread()); - // Returns the media index for a local ice candidate given the content name. 
// Returns false if the local session description does not have a media // content called |content_name|. bool GetLocalCandidateMediaIndex(const std::string& content_name, int* sdp_mline_index) RTC_RUN_ON(signaling_thread()); - // Uses all remote candidates in |remote_desc| in this session. - bool UseCandidatesInSessionDescription( - const SessionDescriptionInterface* remote_desc) - RTC_RUN_ON(signaling_thread()); - // Uses |candidate| in this session. - bool UseCandidate(const IceCandidateInterface* candidate) - RTC_RUN_ON(signaling_thread()); - RTCErrorOr FindContentInfo( - const SessionDescriptionInterface* description, - const IceCandidateInterface* candidate) RTC_RUN_ON(signaling_thread()); - // Deletes the corresponding channel of contents that don't exist in |desc|. - // |desc| can be null. This means that all channels are deleted. - void RemoveUnusedChannels(const cricket::SessionDescription* desc) - RTC_RUN_ON(signaling_thread()); - - // Allocates media channels based on the |desc|. If |desc| doesn't have - // the BUNDLE option, this method will disable BUNDLE in PortAllocator. - // This method will also delete any existing media channels before creating. - RTCError CreateChannels(const cricket::SessionDescription& desc) - RTC_RUN_ON(signaling_thread()); - - // If the BUNDLE policy is max-bundle, then we know for sure that all - // transports will be bundled from the start. This method returns the BUNDLE - // group if that's the case, or null if BUNDLE will be negotiated later. An - // error is returned if max-bundle is specified but the session description - // does not have a BUNDLE group. - RTCErrorOr GetEarlyBundleGroup( - const cricket::SessionDescription& desc) const - RTC_RUN_ON(signaling_thread()); - - // Helper methods to create media channels. 
- cricket::VoiceChannel* CreateVoiceChannel(const std::string& mid) - RTC_RUN_ON(signaling_thread()); - cricket::VideoChannel* CreateVideoChannel(const std::string& mid) - RTC_RUN_ON(signaling_thread()); - bool CreateDataChannel(const std::string& mid) RTC_RUN_ON(signaling_thread()); - - bool SetupDataChannelTransport_n(const std::string& mid) - RTC_RUN_ON(network_thread()); - void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread()); - - bool ValidateBundleSettings(const cricket::SessionDescription* desc); - bool HasRtcpMuxEnabled(const cricket::ContentInfo* content); - // Below methods are helper methods which verifies SDP. - RTCError ValidateSessionDescription(const SessionDescriptionInterface* sdesc, - cricket::ContentSource source) - RTC_RUN_ON(signaling_thread()); - - // Check if a call to SetLocalDescription is acceptable with a session - // description of the given type. - bool ExpectSetLocalDescription(SdpType type); - // Check if a call to SetRemoteDescription is acceptable with a session - // description of the given type. - bool ExpectSetRemoteDescription(SdpType type); - // Verifies a=setup attribute as per RFC 5763. - bool ValidateDtlsSetupAttribute(const cricket::SessionDescription* desc, - SdpType type); - - // Returns true if we are ready to push down the remote candidate. - // |remote_desc| is the new remote description, or NULL if the current remote - // description should be used. Output |valid| is true if the candidate media - // index is valid. - bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate, - const SessionDescriptionInterface* remote_desc, - bool* valid) RTC_RUN_ON(signaling_thread()); - - // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by - // this session. - bool SrtpRequired() const RTC_RUN_ON(signaling_thread()); // JsepTransportController signal handlers. 
void OnTransportControllerConnectionState(cricket::IceConnectionState state) @@ -1053,15 +568,7 @@ class PeerConnection : public PeerConnectionInternal, RTC_RUN_ON(signaling_thread()); void OnTransportControllerDtlsHandshakeError(rtc::SSLHandshakeError error); - const char* SessionErrorToString(SessionError error) const; - std::string GetSessionErrorMsg() RTC_RUN_ON(signaling_thread()); - - // Report the UMA metric SdpFormatReceived for the given remote offer. - void ReportSdpFormatReceived(const SessionDescriptionInterface& remote_offer); - - // Report inferred negotiated SDP semantics from a local/remote answer to the - // UMA observer. - void ReportNegotiatedSdpSemantics(const SessionDescriptionInterface& answer); + void OnErrorDemuxingPacket(uint32_t ssrc); // Invoked when TransportController connection completion is signaled. // Reports stats for all transports in use. @@ -1075,35 +582,9 @@ class PeerConnection : public PeerConnectionInternal, RTC_RUN_ON(signaling_thread()); void ReportIceCandidateCollected(const cricket::Candidate& candidate) RTC_RUN_ON(signaling_thread()); - void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate) - RTC_RUN_ON(signaling_thread()); - void NoteUsageEvent(UsageEvent event); void ReportUsagePattern() const RTC_RUN_ON(signaling_thread()); - void OnSentPacket_w(const rtc::SentPacket& sent_packet); - - const std::string GetTransportName(const std::string& content_name) - RTC_RUN_ON(signaling_thread()); - - // Functions for dealing with transports. - // Note that cricket code uses the term "channel" for what other code - // refers to as "transport". - - // Destroys and clears the BaseChannel associated with the given transceiver, - // if such channel is set. - void DestroyTransceiverChannel( - rtc::scoped_refptr> - transceiver); - - // Destroys the RTP data channel transport and/or the SCTP data channel - // transport and clears it. 
- void DestroyDataChannelTransport() RTC_RUN_ON(signaling_thread()); - - // Destroys the given ChannelInterface. - // The channel cannot be accessed after this method is called. - void DestroyChannelInterface(cricket::ChannelInterface* channel); - // JsepTransportController::Observer override. // // Called by |transport_controller_| when processing transport information @@ -1116,38 +597,17 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) override; - // RtpSenderBase::SetStreamsObserver override. - void OnSetStreams() override; + std::function + InitializeRtcpCallback(); - // Returns the CryptoOptions for this PeerConnection. This will always - // return the RTCConfiguration.crypto_options if set and will only default - // back to the PeerConnectionFactory settings if nothing was set. - CryptoOptions GetCryptoOptions() RTC_RUN_ON(signaling_thread()); - - // Returns rtp transport, result can not be nullptr. - RtpTransportInternal* GetRtpTransport(const std::string& mid) - RTC_RUN_ON(signaling_thread()) { - auto rtp_transport = transport_controller_->GetRtpTransport(mid); - RTC_DCHECK(rtp_transport); - return rtp_transport; - } - - void UpdateNegotiationNeeded(); - bool CheckIfNegotiationIsNeeded(); - - // | sdp_type | is the type of the SDP that caused the rollback. - RTCError Rollback(SdpType sdp_type); - - // Storing the factory as a scoped reference pointer ensures that the memory - // in the PeerConnectionFactoryImpl remains available as long as the - // PeerConnection is running. It is passed to PeerConnection as a raw pointer. - // However, since the reference counting is done in the - // PeerConnectionFactoryInterface all instances created using the raw pointer - // will refer to the same reference count. 
- const rtc::scoped_refptr factory_; + const rtc::scoped_refptr context_; + const PeerConnectionFactoryInterface::Options options_; PeerConnectionObserver* observer_ RTC_GUARDED_BY(signaling_thread()) = nullptr; + const bool is_unified_plan_; + // The EventLog needs to outlive |call_| (and any other object that uses it). std::unique_ptr event_log_ RTC_GUARDED_BY(worker_thread()); @@ -1155,15 +615,6 @@ class PeerConnection : public PeerConnectionInternal, // pointer (but not touch the object) from any thread. RtcEventLog* const event_log_ptr_ RTC_PT_GUARDED_BY(worker_thread()); - // The operations chain is used by the offer/answer exchange methods to ensure - // they are executed in the right order. For example, if - // SetRemoteDescription() is invoked while CreateOffer() is still pending, the - // SRD operation will not start until CreateOffer() has completed. See - // https://w3c.github.io/webrtc-pc/#dfn-operations-chain. - rtc::scoped_refptr operations_chain_ - RTC_GUARDED_BY(signaling_thread()); - - SignalingState signaling_state_ RTC_GUARDED_BY(signaling_thread()) = kStable; IceConnectionState ice_connection_state_ RTC_GUARDED_BY(signaling_thread()) = kIceConnectionNew; PeerConnectionInterface::IceConnectionState standardized_ice_connection_state_ @@ -1178,101 +629,45 @@ class PeerConnection : public PeerConnectionInternal, // TODO(zstein): |async_resolver_factory_| can currently be nullptr if it // is not injected. It should be required once chromium supplies it. - std::unique_ptr async_resolver_factory_ + const std::unique_ptr async_resolver_factory_ RTC_GUARDED_BY(signaling_thread()); std::unique_ptr port_allocator_; // TODO(bugs.webrtc.org/9987): Accessed on both // signaling and network thread. 
- std::unique_ptr packet_socket_factory_; - std::unique_ptr + const std::unique_ptr ice_transport_factory_; // TODO(bugs.webrtc.org/9987): Accessed on the // signaling thread but the underlying raw // pointer is given to // |jsep_transport_controller_| and used on the // network thread. - std::unique_ptr - tls_cert_verifier_; // TODO(bugs.webrtc.org/9987): Accessed on both - // signaling and network thread. - - // One PeerConnection has only one RTCP CNAME. - // https://tools.ietf.org/html/draft-ietf-rtcweb-rtp-usage-26#section-4.9 - const std::string rtcp_cname_; - - // Streams added via AddStream. - const rtc::scoped_refptr local_streams_ - RTC_GUARDED_BY(signaling_thread()); - // Streams created as a result of SetRemoteDescription. - const rtc::scoped_refptr remote_streams_ - RTC_GUARDED_BY(signaling_thread()); - - std::vector> stream_observers_ - RTC_GUARDED_BY(signaling_thread()); - - // These lists store sender info seen in local/remote descriptions. - std::vector remote_audio_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - std::vector remote_video_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - std::vector local_audio_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - std::vector local_video_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - - bool remote_peer_supports_msid_ RTC_GUARDED_BY(signaling_thread()) = false; + const std::unique_ptr tls_cert_verifier_ + RTC_GUARDED_BY(network_thread()); // The unique_ptr belongs to the worker thread, but the Call object manages // its own thread safety. std::unique_ptr call_ RTC_GUARDED_BY(worker_thread()); - - rtc::AsyncInvoker rtcp_invoker_ RTC_GUARDED_BY(network_thread()); + std::unique_ptr call_safety_ + RTC_GUARDED_BY(worker_thread()); // Points to the same thing as `call_`. Since it's const, we may read the // pointer from any thread. + // TODO(bugs.webrtc.org/11992): Remove this workaround (and potential dangling + // pointer). 
Call* const call_ptr_; std::unique_ptr stats_ RTC_GUARDED_BY(signaling_thread()); // A pointer is passed to senders_ rtc::scoped_refptr stats_collector_ RTC_GUARDED_BY(signaling_thread()); - // Holds changes made to transceivers during applying descriptors for - // potential rollback. Gets cleared once signaling state goes to stable. - std::map>, - TransceiverStableState> - transceiver_stable_states_by_transceivers_; - // Used when rolling back RTP data channels. - bool have_pending_rtp_data_channel_ RTC_GUARDED_BY(signaling_thread()) = - false; - // Holds remote stream ids for transceivers from stable state. - std::map>, - std::vector> - remote_stream_ids_by_transceivers_; - std::vector< - rtc::scoped_refptr>> - transceivers_; // TODO(bugs.webrtc.org/9987): Accessed on both signaling - // and network thread. - // In Unified Plan, if we encounter remote SDP that does not contain an a=msid - // line we create and use a stream with a random ID for our receivers. This is - // to support legacy endpoints that do not support the a=msid attribute (as - // opposed to streamless tracks with "a=msid:-"). - rtc::scoped_refptr missing_msid_default_stream_ + rtc::scoped_refptr demuxing_observer_ RTC_GUARDED_BY(signaling_thread()); - // MIDs will be generated using this generator which will keep track of - // all the MIDs that have been seen over the life of the PeerConnection. - rtc::UniqueStringGenerator mid_generator_ RTC_GUARDED_BY(signaling_thread()); - - SessionError session_error_ RTC_GUARDED_BY(signaling_thread()) = - SessionError::kNone; - std::string session_error_desc_ RTC_GUARDED_BY(signaling_thread()); std::string session_id_ RTC_GUARDED_BY(signaling_thread()); std::unique_ptr transport_controller_; // TODO(bugs.webrtc.org/9987): Accessed on both // signaling and network thread. - std::unique_ptr - sctp_factory_; // TODO(bugs.webrtc.org/9987): Accessed on both - // signaling and network thread. // |sctp_mid_| is the content name (MID) in SDP. 
// Note: this is used as the data channel MID by both SCTP and data channel @@ -1284,58 +679,24 @@ class PeerConnection : public PeerConnectionInternal, absl::optional sctp_mid_s_ RTC_GUARDED_BY(signaling_thread()); absl::optional sctp_mid_n_ RTC_GUARDED_BY(network_thread()); - // Whether this peer is the caller. Set when the local description is applied. - absl::optional is_caller_ RTC_GUARDED_BY(signaling_thread()); + // The machinery for handling offers and answers. Const after initialization. + std::unique_ptr sdp_handler_ + RTC_GUARDED_BY(signaling_thread()); - - - std::unique_ptr current_local_description_ - RTC_GUARDED_BY(signaling_thread()); - std::unique_ptr pending_local_description_ - RTC_GUARDED_BY(signaling_thread()); - std::unique_ptr current_remote_description_ - RTC_GUARDED_BY(signaling_thread()); - std::unique_ptr pending_remote_description_ - RTC_GUARDED_BY(signaling_thread()); bool dtls_enabled_ RTC_GUARDED_BY(signaling_thread()) = false; - // List of content names for which the remote side triggered an ICE restart. - std::set pending_ice_restarts_ - RTC_GUARDED_BY(signaling_thread()); - - std::unique_ptr webrtc_session_desc_factory_ - RTC_GUARDED_BY(signaling_thread()); - - // Member variables for caching global options. - cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); - cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); - - int usage_event_accumulator_ RTC_GUARDED_BY(signaling_thread()) = 0; + UsagePattern usage_pattern_ RTC_GUARDED_BY(signaling_thread()); bool return_histogram_very_quickly_ RTC_GUARDED_BY(signaling_thread()) = false; - // This object should be used to generate any SSRC that is not explicitly - // specified by the user (or by the remote party). - // The generator is not used directly, instead it is passed on to the - // channel manager and the session description factory. 
- rtc::UniqueRandomIdGenerator ssrc_generator_ - RTC_GUARDED_BY(signaling_thread()); - - // A video bitrate allocator factory. - // This can injected using the PeerConnectionDependencies, - // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called. - // Note that one can still choose to override this in a MediaEngine - // if one wants too. - std::unique_ptr - video_bitrate_allocator_factory_; - - std::unique_ptr - local_ice_credentials_to_replace_ RTC_GUARDED_BY(signaling_thread()); - bool is_negotiation_needed_ RTC_GUARDED_BY(signaling_thread()) = false; - DataChannelController data_channel_controller_; - rtc::WeakPtrFactory weak_ptr_factory_ - RTC_GUARDED_BY(signaling_thread()); + + // Machinery for handling messages posted to oneself + PeerConnectionMessageHandler message_handler_; + + // Administration of senders, receivers and transceivers + // Accessed on both signaling and network thread. Const after Initialize(). + std::unique_ptr rtp_manager_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc new file mode 100644 index 000000000..71d054eb9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc @@ -0,0 +1,161 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include + +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "call/adaptation/test/fake_resource.h" +#include "pc/test/fake_periodic_video_source.h" +#include "pc/test/fake_periodic_video_track_source.h" +#include "pc/test/peer_connection_test_wrapper.h" +#include "rtc_base/checks.h" +#include "rtc_base/gunit.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/thread.h" +#include "rtc_base/virtual_socket_server.h" +#include "test/gtest.h" + +namespace webrtc { + +const int64_t kDefaultTimeoutMs = 5000; + +struct TrackWithPeriodicSource { + rtc::scoped_refptr track; + rtc::scoped_refptr periodic_track_source; +}; + +// Performs an O/A exchange and waits until the signaling state is stable again. +void Negotiate(rtc::scoped_refptr caller, + rtc::scoped_refptr callee) { + // Wire up callbacks and listeners such that a full O/A is performed in + // response to CreateOffer(). + PeerConnectionTestWrapper::Connect(caller.get(), callee.get()); + caller->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions()); + caller->WaitForNegotiation(); +} + +TrackWithPeriodicSource CreateTrackWithPeriodicSource( + rtc::scoped_refptr factory) { + FakePeriodicVideoSource::Config periodic_track_source_config; + periodic_track_source_config.frame_interval_ms = 100; + periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis(); + rtc::scoped_refptr periodic_track_source = + new rtc::RefCountedObject( + periodic_track_source_config, /* remote */ false); + TrackWithPeriodicSource track_with_source; + track_with_source.track = + factory->CreateVideoTrack("PeriodicTrack", periodic_track_source); + track_with_source.periodic_track_source = periodic_track_source; + return track_with_source; +} + +// Triggers overuse and obtains VideoSinkWants. 
Adaptation processing happens in +// parallel and this function makes no guarantee that the returnd VideoSinkWants +// have yet to reflect the overuse signal. Used together with EXPECT_TRUE_WAIT +// to "spam overuse until a change is observed". +rtc::VideoSinkWants TriggerOveruseAndGetSinkWants( + rtc::scoped_refptr fake_resource, + const FakePeriodicVideoSource& source) { + fake_resource->SetUsageState(ResourceUsageState::kOveruse); + return source.wants(); +} + +class PeerConnectionAdaptationIntegrationTest : public ::testing::Test { + public: + PeerConnectionAdaptationIntegrationTest() + : virtual_socket_server_(), + network_thread_(new rtc::Thread(&virtual_socket_server_)), + worker_thread_(rtc::Thread::Create()) { + RTC_CHECK(network_thread_->Start()); + RTC_CHECK(worker_thread_->Start()); + } + + rtc::scoped_refptr CreatePcWrapper( + const char* name) { + rtc::scoped_refptr pc_wrapper = + new rtc::RefCountedObject( + name, network_thread_.get(), worker_thread_.get()); + PeerConnectionInterface::RTCConfiguration config; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + EXPECT_TRUE(pc_wrapper->CreatePc(config, CreateBuiltinAudioEncoderFactory(), + CreateBuiltinAudioDecoderFactory())); + return pc_wrapper; + } + + protected: + rtc::VirtualSocketServer virtual_socket_server_; + std::unique_ptr network_thread_; + std::unique_ptr worker_thread_; +}; + +TEST_F(PeerConnectionAdaptationIntegrationTest, + ResouceInjectedAfterNegotiationCausesReductionInResolution) { + auto caller_wrapper = CreatePcWrapper("caller"); + auto caller = caller_wrapper->pc(); + auto callee_wrapper = CreatePcWrapper("callee"); + + // Adding a track and negotiating ensures that a VideoSendStream exists. + TrackWithPeriodicSource track_with_source = + CreateTrackWithPeriodicSource(caller_wrapper->pc_factory()); + auto sender = caller->AddTrack(track_with_source.track, {}).value(); + Negotiate(caller_wrapper, callee_wrapper); + // Prefer degrading resolution. 
+ auto parameters = sender->GetParameters(); + parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; + sender->SetParameters(parameters); + + const auto& source = + track_with_source.periodic_track_source->fake_periodic_source(); + int pixel_count_before_overuse = source.wants().max_pixel_count; + + // Inject a fake resource and spam kOveruse until resolution becomes limited. + auto fake_resource = FakeResource::Create("FakeResource"); + caller->AddAdaptationResource(fake_resource); + EXPECT_TRUE_WAIT( + TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < + pixel_count_before_overuse, + kDefaultTimeoutMs); +} + +TEST_F(PeerConnectionAdaptationIntegrationTest, + ResouceInjectedBeforeNegotiationCausesReductionInResolution) { + auto caller_wrapper = CreatePcWrapper("caller"); + auto caller = caller_wrapper->pc(); + auto callee_wrapper = CreatePcWrapper("callee"); + + // Inject a fake resource before adding any tracks or negotiating. + auto fake_resource = FakeResource::Create("FakeResource"); + caller->AddAdaptationResource(fake_resource); + + // Adding a track and negotiating ensures that a VideoSendStream exists. + TrackWithPeriodicSource track_with_source = + CreateTrackWithPeriodicSource(caller_wrapper->pc_factory()); + auto sender = caller->AddTrack(track_with_source.track, {}).value(); + Negotiate(caller_wrapper, callee_wrapper); + // Prefer degrading resolution. + auto parameters = sender->GetParameters(); + parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; + sender->SetParameters(parameters); + + const auto& source = + track_with_source.periodic_track_source->fake_periodic_source(); + int pixel_count_before_overuse = source.wants().max_pixel_count; + + // Spam kOveruse until resolution becomes limited. 
+ EXPECT_TRUE_WAIT( + TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < + pixel_count_before_overuse, + kDefaultTimeoutMs); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc index d79e43815..da42e5a09 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc @@ -12,23 +12,25 @@ #include #include +#include #include -#include #include "absl/strings/match.h" +#include "api/async_resolver_factory.h" +#include "api/call/call_factory_interface.h" #include "api/fec_controller.h" +#include "api/ice_transport_interface.h" #include "api/media_stream_proxy.h" #include "api/media_stream_track_proxy.h" #include "api/network_state_predictor.h" +#include "api/packet_socket_factory.h" #include "api/peer_connection_factory_proxy.h" #include "api/peer_connection_proxy.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/transport/field_trial_based_config.h" -#include "api/turn_customizer.h" +#include "api/transport/bitrate_settings.h" #include "api/units/data_rate.h" -#include "api/video_track_source_proxy.h" -#include "media/base/rtp_data_engine.h" -#include "media/sctp/sctp_transport.h" +#include "call/audio_state.h" +#include "media/base/media_engine.h" #include "p2p/base/basic_async_resolver_factory.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/default_ice_transport_factory.h" @@ -38,12 +40,17 @@ #include "pc/media_stream.h" #include "pc/peer_connection.h" #include "pc/rtp_parameters_conversion.h" +#include "pc/session_description.h" #include "pc/video_track.h" #include "rtc_base/bind.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" 
+#include "rtc_base/ref_counted_object.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/system/file_wrapper.h" namespace webrtc { @@ -51,183 +58,134 @@ namespace webrtc { rtc::scoped_refptr CreateModularPeerConnectionFactory( PeerConnectionFactoryDependencies dependencies) { - rtc::scoped_refptr pc_factory( - new rtc::RefCountedObject( - std::move(dependencies))); - // Call Initialize synchronously but make sure it is executed on - // |signaling_thread|. - MethodCall call( - pc_factory.get(), &PeerConnectionFactory::Initialize); - bool result = call.Marshal(RTC_FROM_HERE, pc_factory->signaling_thread()); + // The PeerConnectionFactory must be created on the signaling thread. + if (dependencies.signaling_thread && + !dependencies.signaling_thread->IsCurrent()) { + return dependencies.signaling_thread + ->Invoke>( + RTC_FROM_HERE, [&dependencies] { + return CreateModularPeerConnectionFactory( + std::move(dependencies)); + }); + } - if (!result) { + auto pc_factory = PeerConnectionFactory::Create(std::move(dependencies)); + if (!pc_factory) { return nullptr; } + // Verify that the invocation and the initialization ended up agreeing on the + // thread. 
+ RTC_DCHECK_RUN_ON(pc_factory->signaling_thread()); return PeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(), pc_factory); } +// Static +rtc::scoped_refptr PeerConnectionFactory::Create( + PeerConnectionFactoryDependencies dependencies) { + auto context = ConnectionContext::Create(&dependencies); + if (!context) { + return nullptr; + } + return new rtc::RefCountedObject(context, + &dependencies); +} + +PeerConnectionFactory::PeerConnectionFactory( + rtc::scoped_refptr context, + PeerConnectionFactoryDependencies* dependencies) + : context_(context), + task_queue_factory_(std::move(dependencies->task_queue_factory)), + event_log_factory_(std::move(dependencies->event_log_factory)), + fec_controller_factory_(std::move(dependencies->fec_controller_factory)), + network_state_predictor_factory_( + std::move(dependencies->network_state_predictor_factory)), + injected_network_controller_factory_( + std::move(dependencies->network_controller_factory)), + neteq_factory_(std::move(dependencies->neteq_factory)) {} + PeerConnectionFactory::PeerConnectionFactory( PeerConnectionFactoryDependencies dependencies) - : wraps_current_thread_(false), - network_thread_(dependencies.network_thread), - worker_thread_(dependencies.worker_thread), - signaling_thread_(dependencies.signaling_thread), - task_queue_factory_(std::move(dependencies.task_queue_factory)), - network_monitor_factory_(std::move(dependencies.network_monitor_factory)), - media_engine_(std::move(dependencies.media_engine)), - call_factory_(std::move(dependencies.call_factory)), - event_log_factory_(std::move(dependencies.event_log_factory)), - fec_controller_factory_(std::move(dependencies.fec_controller_factory)), - network_state_predictor_factory_( - std::move(dependencies.network_state_predictor_factory)), - injected_network_controller_factory_( - std::move(dependencies.network_controller_factory)), - neteq_factory_(std::move(dependencies.neteq_factory)), - trials_(dependencies.trials ? 
std::move(dependencies.trials) - : std::make_unique()) { - if (!network_thread_) { - owned_network_thread_ = rtc::Thread::CreateWithSocketServer(); - owned_network_thread_->SetName("pc_network_thread", nullptr); - owned_network_thread_->Start(); - network_thread_ = owned_network_thread_.get(); - } - - if (!worker_thread_) { - owned_worker_thread_ = rtc::Thread::Create(); - owned_worker_thread_->SetName("pc_worker_thread", nullptr); - owned_worker_thread_->Start(); - worker_thread_ = owned_worker_thread_.get(); - } - - if (!signaling_thread_) { - signaling_thread_ = rtc::Thread::Current(); - if (!signaling_thread_) { - // If this thread isn't already wrapped by an rtc::Thread, create a - // wrapper and own it in this class. - signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread(); - wraps_current_thread_ = true; - } - } - signaling_thread_->AllowInvokesToThread(worker_thread_); - signaling_thread_->AllowInvokesToThread(network_thread_); - worker_thread_->AllowInvokesToThread(network_thread_); - network_thread_->DisallowAllInvokes(); -} + : PeerConnectionFactory(ConnectionContext::Create(&dependencies), + &dependencies) {} PeerConnectionFactory::~PeerConnectionFactory() { - RTC_DCHECK(signaling_thread_->IsCurrent()); - channel_manager_.reset(nullptr); - - // Make sure |worker_thread_| and |signaling_thread_| outlive - // |default_socket_factory_| and |default_network_manager_|. - default_socket_factory_ = nullptr; - default_network_manager_ = nullptr; - - if (wraps_current_thread_) - rtc::ThreadManager::Instance()->UnwrapCurrentThread(); -} - -bool PeerConnectionFactory::Initialize() { - RTC_DCHECK(signaling_thread_->IsCurrent()); - rtc::InitRandom(rtc::Time32()); - - // If network_monitor_factory_ is non-null, it will be used to create a - // network monitor while on the network thread. 
- default_network_manager_.reset( - new rtc::BasicNetworkManager(network_monitor_factory_.get())); - if (!default_network_manager_) { - return false; - } - - default_socket_factory_.reset( - new rtc::BasicPacketSocketFactory(network_thread_)); - if (!default_socket_factory_) { - return false; - } - - channel_manager_ = std::make_unique( - std::move(media_engine_), std::make_unique(), - worker_thread_, network_thread_); - - channel_manager_->SetVideoRtxEnabled(true); - if (!channel_manager_->Init()) { - return false; - } - - return true; + RTC_DCHECK_RUN_ON(signaling_thread()); } void PeerConnectionFactory::SetOptions(const Options& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); options_ = options; } RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities( cricket::MediaType kind) const { - RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK_RUN_ON(signaling_thread()); switch (kind) { case cricket::MEDIA_TYPE_AUDIO: { cricket::AudioCodecs cricket_codecs; - channel_manager_->GetSupportedAudioSendCodecs(&cricket_codecs); + channel_manager()->GetSupportedAudioSendCodecs(&cricket_codecs); return ToRtpCapabilities( cricket_codecs, - channel_manager_->GetDefaultEnabledAudioRtpHeaderExtensions()); + channel_manager()->GetDefaultEnabledAudioRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_VIDEO: { cricket::VideoCodecs cricket_codecs; - channel_manager_->GetSupportedVideoSendCodecs(&cricket_codecs); + channel_manager()->GetSupportedVideoSendCodecs(&cricket_codecs); return ToRtpCapabilities( cricket_codecs, - channel_manager_->GetDefaultEnabledVideoRtpHeaderExtensions()); + channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_DATA: return RtpCapabilities(); + case cricket::MEDIA_TYPE_UNSUPPORTED: + return RtpCapabilities(); } - // Not reached; avoids compile warning. 
- FATAL(); + RTC_CHECK_NOTREACHED(); } RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities( cricket::MediaType kind) const { - RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK_RUN_ON(signaling_thread()); switch (kind) { case cricket::MEDIA_TYPE_AUDIO: { cricket::AudioCodecs cricket_codecs; - channel_manager_->GetSupportedAudioReceiveCodecs(&cricket_codecs); + channel_manager()->GetSupportedAudioReceiveCodecs(&cricket_codecs); return ToRtpCapabilities( cricket_codecs, - channel_manager_->GetDefaultEnabledAudioRtpHeaderExtensions()); + channel_manager()->GetDefaultEnabledAudioRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_VIDEO: { cricket::VideoCodecs cricket_codecs; - channel_manager_->GetSupportedVideoReceiveCodecs(&cricket_codecs); + channel_manager()->GetSupportedVideoReceiveCodecs(&cricket_codecs); return ToRtpCapabilities( cricket_codecs, - channel_manager_->GetDefaultEnabledVideoRtpHeaderExtensions()); + channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_DATA: return RtpCapabilities(); + case cricket::MEDIA_TYPE_UNSUPPORTED: + return RtpCapabilities(); } - // Not reached; avoids compile warning. 
- FATAL(); + RTC_CHECK_NOTREACHED(); } rtc::scoped_refptr PeerConnectionFactory::CreateAudioSource(const cricket::AudioOptions& options) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK(signaling_thread()->IsCurrent()); rtc::scoped_refptr source( LocalAudioSource::Create(&options)); return source; } bool PeerConnectionFactory::StartAecDump(FILE* file, int64_t max_size_bytes) { - RTC_DCHECK(signaling_thread_->IsCurrent()); - return channel_manager_->StartAecDump(FileWrapper(file), max_size_bytes); + RTC_DCHECK(signaling_thread()->IsCurrent()); + return channel_manager()->StartAecDump(FileWrapper(file), max_size_bytes); } void PeerConnectionFactory::StopAecDump() { - RTC_DCHECK(signaling_thread_->IsCurrent()); - channel_manager_->StopAecDump(); + RTC_DCHECK(signaling_thread()->IsCurrent()); + channel_manager()->StopAecDump(); } rtc::scoped_refptr @@ -248,7 +206,7 @@ rtc::scoped_refptr PeerConnectionFactory::CreatePeerConnection( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!(dependencies.allocator && dependencies.packet_socket_factory)) << "You can't set both allocator and packet_socket_factory; " "the former is going away (see bugs.webrtc.org/7447"; @@ -256,18 +214,18 @@ PeerConnectionFactory::CreatePeerConnection( // Set internal defaults if optional dependencies are not set. 
if (!dependencies.cert_generator) { dependencies.cert_generator = - std::make_unique(signaling_thread_, - network_thread_); + std::make_unique(signaling_thread(), + network_thread()); } if (!dependencies.allocator) { rtc::PacketSocketFactory* packet_socket_factory; if (dependencies.packet_socket_factory) packet_socket_factory = dependencies.packet_socket_factory.get(); else - packet_socket_factory = default_socket_factory_.get(); + packet_socket_factory = context_->default_socket_factory(); dependencies.allocator = std::make_unique( - default_network_manager_.get(), packet_socket_factory, + context_->default_network_manager(), packet_socket_factory, configuration.turn_customizer); } @@ -281,22 +239,21 @@ PeerConnectionFactory::CreatePeerConnection( std::make_unique(); } - dependencies.allocator->SetNetworkIgnoreMask(options_.network_ignore_mask); + dependencies.allocator->SetNetworkIgnoreMask(options().network_ignore_mask); std::unique_ptr event_log = - worker_thread_->Invoke>( + worker_thread()->Invoke>( RTC_FROM_HERE, rtc::Bind(&PeerConnectionFactory::CreateRtcEventLog_w, this)); - std::unique_ptr call = worker_thread_->Invoke>( + std::unique_ptr call = worker_thread()->Invoke>( RTC_FROM_HERE, rtc::Bind(&PeerConnectionFactory::CreateCall_w, this, event_log.get())); - rtc::scoped_refptr pc( - new rtc::RefCountedObject(this, std::move(event_log), - std::move(call))); - ActionsBeforeInitializeForTesting(pc); - if (!pc->Initialize(configuration, std::move(dependencies))) { + rtc::scoped_refptr pc = PeerConnection::Create( + context_, options_, std::move(event_log), std::move(call), configuration, + std::move(dependencies)); + if (!pc) { return nullptr; } return PeerConnectionProxy::Create(signaling_thread(), pc); @@ -304,43 +261,34 @@ PeerConnectionFactory::CreatePeerConnection( rtc::scoped_refptr PeerConnectionFactory::CreateLocalMediaStream(const std::string& stream_id) { - RTC_DCHECK(signaling_thread_->IsCurrent()); - return 
MediaStreamProxy::Create(signaling_thread_, + RTC_DCHECK(signaling_thread()->IsCurrent()); + return MediaStreamProxy::Create(signaling_thread(), MediaStream::Create(stream_id)); } rtc::scoped_refptr PeerConnectionFactory::CreateVideoTrack( const std::string& id, VideoTrackSourceInterface* source) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK(signaling_thread()->IsCurrent()); rtc::scoped_refptr track( - VideoTrack::Create(id, source, worker_thread_)); - return VideoTrackProxy::Create(signaling_thread_, worker_thread_, track); + VideoTrack::Create(id, source, worker_thread())); + return VideoTrackProxy::Create(signaling_thread(), worker_thread(), track); } rtc::scoped_refptr PeerConnectionFactory::CreateAudioTrack( const std::string& id, AudioSourceInterface* source) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK(signaling_thread()->IsCurrent()); rtc::scoped_refptr track(AudioTrack::Create(id, source)); - return AudioTrackProxy::Create(signaling_thread_, track); -} - -std::unique_ptr -PeerConnectionFactory::CreateSctpTransportInternalFactory() { -#ifdef HAVE_SCTP - return std::make_unique(network_thread()); -#else - return nullptr; -#endif + return AudioTrackProxy::Create(signaling_thread(), track); } cricket::ChannelManager* PeerConnectionFactory::channel_manager() { - return channel_manager_.get(); + return context_->channel_manager(); } std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(worker_thread()); auto encoding_type = RtcEventLog::EncodingType::Legacy; if (IsTrialEnabled("WebRTC-RtcEventLogNewFormat")) @@ -352,14 +300,14 @@ std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { std::unique_ptr PeerConnectionFactory::CreateCall_w( RtcEventLog* event_log) { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(worker_thread()); webrtc::Call::Config call_config(event_log); - if (!channel_manager_->media_engine() || !call_factory_) { + if 
(!channel_manager()->media_engine() || !context_->call_factory()) { return nullptr; } call_config.audio_state = - channel_manager_->media_engine()->voice().GetAudioState(); + channel_manager()->media_engine()->voice().GetAudioState(); FieldTrialParameter min_bandwidth("min", DataRate::KilobitsPerSec(30)); @@ -368,7 +316,7 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( FieldTrialParameter max_bandwidth("max", DataRate::KilobitsPerSec(2000)); ParseFieldTrial({&min_bandwidth, &start_bandwidth, &max_bandwidth}, - trials_->Lookup("WebRTC-PcFactoryDefaultBitrates")); + trials().Lookup("WebRTC-PcFactoryDefaultBitrates")); call_config.bitrate_config.min_bitrate_bps = rtc::saturated_cast(min_bandwidth->bps()); @@ -391,14 +339,14 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( RTC_LOG(LS_INFO) << "Using default network controller factory"; } - call_config.trials = trials_.get(); + call_config.trials = &trials(); - return std::unique_ptr(call_factory_->CreateCall(call_config)); + return std::unique_ptr( + context_->call_factory()->CreateCall(call_config)); } bool PeerConnectionFactory::IsTrialEnabled(absl::string_view key) const { - RTC_DCHECK(trials_); - return absl::StartsWith(trials_->Lookup(key), "Enabled"); + return absl::StartsWith(trials().Lookup(key), "Enabled"); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h index 3932562d2..427207f9c 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h @@ -12,14 +12,32 @@ #ifndef PC_PEER_CONNECTION_FACTORY_H_ #define PC_PEER_CONNECTION_FACTORY_H_ +#include +#include #include #include +#include "absl/strings/string_view.h" +#include "api/audio_options.h" +#include "api/fec_controller.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/neteq/neteq_factory.h" +#include 
"api/network_state_predictor.h" #include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/rtp_parameters.h" #include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/transport/network_control.h" +#include "api/transport/sctp_transport_factory_interface.h" +#include "api/transport/webrtc_key_value_config.h" +#include "call/call.h" #include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/port_allocator.h" #include "pc/channel_manager.h" +#include "pc/connection_context.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" @@ -34,6 +52,14 @@ class RtcEventLog; class PeerConnectionFactory : public PeerConnectionFactoryInterface { public: + // Creates a PeerConnectionFactory. It returns nullptr on initialization + // error. + // + // The Dependencies structure allows simple management of all new + // dependencies being added to the PeerConnectionFactory. 
+ static rtc::scoped_refptr Create( + PeerConnectionFactoryDependencies dependencies); + void SetOptions(const Options& options) override; rtc::scoped_refptr CreatePeerConnection( @@ -46,8 +72,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) override; - bool Initialize(); - RtpCapabilities GetRtpSenderCapabilities( cricket::MediaType kind) const override; @@ -71,53 +95,53 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { bool StartAecDump(FILE* file, int64_t max_size_bytes) override; void StopAecDump() override; - virtual std::unique_ptr - CreateSctpTransportInternalFactory(); + SctpTransportFactoryInterface* sctp_transport_factory() { + return context_->sctp_transport_factory(); + } virtual cricket::ChannelManager* channel_manager(); - rtc::Thread* signaling_thread() { + rtc::Thread* signaling_thread() const { // This method can be called on a different thread when the factory is // created in CreatePeerConnectionFactory(). - return signaling_thread_; + return context_->signaling_thread(); } - rtc::Thread* worker_thread() { return worker_thread_; } - rtc::Thread* network_thread() { return network_thread_; } - const Options& options() const { return options_; } + const Options& options() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return options_; + } + + const WebRtcKeyValueConfig& trials() const { return context_->trials(); } protected: - // This structure allows simple management of all new dependencies being added - // to the PeerConnectionFactory. + // Constructor used by the static Create() method. Modifies the dependencies. + PeerConnectionFactory(rtc::scoped_refptr context, + PeerConnectionFactoryDependencies* dependencies); + + // Constructor for use in testing. Ignores the possibility of initialization + // failure. The dependencies are passed in by std::move(). 
explicit PeerConnectionFactory( PeerConnectionFactoryDependencies dependencies); - // Hook to let testing framework insert actions between - // "new RTCPeerConnection" and "pc.Initialize" - virtual void ActionsBeforeInitializeForTesting(PeerConnectionInterface*) {} - virtual ~PeerConnectionFactory(); private: + rtc::Thread* worker_thread() const { return context_->worker_thread(); } + rtc::Thread* network_thread() const { return context_->network_thread(); } + bool IsTrialEnabled(absl::string_view key) const; + const cricket::ChannelManager* channel_manager() const { + return context_->channel_manager(); + } std::unique_ptr CreateRtcEventLog_w(); std::unique_ptr CreateCall_w(RtcEventLog* event_log); - bool wraps_current_thread_; - rtc::Thread* network_thread_; - rtc::Thread* worker_thread_; - rtc::Thread* signaling_thread_; - std::unique_ptr owned_network_thread_; - std::unique_ptr owned_worker_thread_; - const std::unique_ptr task_queue_factory_; - Options options_; - std::unique_ptr channel_manager_; - const std::unique_ptr network_monitor_factory_; - std::unique_ptr default_network_manager_; - std::unique_ptr default_socket_factory_; - std::unique_ptr media_engine_; - std::unique_ptr call_factory_; + rtc::scoped_refptr context_; + PeerConnectionFactoryInterface::Options options_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr task_queue_factory_; std::unique_ptr event_log_factory_; std::unique_ptr fec_controller_factory_; std::unique_ptr @@ -125,7 +149,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { std::unique_ptr injected_network_controller_factory_; std::unique_ptr neteq_factory_; - const std::unique_ptr trials_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_integrationtest.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_integrationtest.cc deleted file mode 100644 index dd24163f3..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_integrationtest.cc +++ /dev/null 
@@ -1,5650 +0,0 @@ -/* - * Copyright 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Disable for TSan v2, see -// https://code.google.com/p/webrtc/issues/detail?id=1205 for details. -#if !defined(THREAD_SANITIZER) - -#include - -#include -#include -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" -#include "api/peer_connection_proxy.h" -#include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/rtp_receiver_interface.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/uma_metrics.h" -#include "api/video_codecs/sdp_video_format.h" -#include "call/call.h" -#include "logging/rtc_event_log/fake_rtc_event_log_factory.h" -#include "media/engine/fake_webrtc_video_engine.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" -#include "p2p/base/fake_ice_transport.h" -#include "p2p/base/mock_async_resolver.h" -#include "p2p/base/p2p_constants.h" -#include "p2p/base/port_interface.h" -#include "p2p/base/test_stun_server.h" -#include "p2p/base/test_turn_customizer.h" -#include "p2p/base/test_turn_server.h" -#include "p2p/client/basic_port_allocator.h" -#include "pc/dtmf_sender.h" -#include "pc/local_audio_source.h" -#include "pc/media_session.h" -#include "pc/peer_connection.h" -#include "pc/peer_connection_factory.h" -#include "pc/rtp_media_utils.h" -#include "pc/session_description.h" -#include "pc/test/fake_audio_capture_module.h" -#include 
"pc/test/fake_periodic_video_track_source.h" -#include "pc/test/fake_rtc_certificate_generator.h" -#include "pc/test/fake_video_track_renderer.h" -#include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/fake_clock.h" -#include "rtc_base/fake_mdns_responder.h" -#include "rtc_base/fake_network.h" -#include "rtc_base/firewall_socket_server.h" -#include "rtc_base/gunit.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/test_certificate_verifier.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/virtual_socket_server.h" -#include "system_wrappers/include/metrics.h" -#include "test/field_trial.h" -#include "test/gmock.h" - -namespace webrtc { -namespace { - -using ::cricket::ContentInfo; -using ::cricket::StreamParams; -using ::rtc::SocketAddress; -using ::testing::_; -using ::testing::Combine; -using ::testing::Contains; -using ::testing::DoAll; -using ::testing::ElementsAre; -using ::testing::NiceMock; -using ::testing::Return; -using ::testing::SetArgPointee; -using ::testing::UnorderedElementsAreArray; -using ::testing::Values; -using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; - -static const int kDefaultTimeout = 10000; -static const int kMaxWaitForStatsMs = 3000; -static const int kMaxWaitForActivationMs = 5000; -static const int kMaxWaitForFramesMs = 10000; -// Default number of audio/video frames to wait for before considering a test -// successful. -static const int kDefaultExpectedAudioFrameCount = 3; -static const int kDefaultExpectedVideoFrameCount = 3; - -static const char kDataChannelLabel[] = "data_channel"; - -// SRTP cipher name negotiated by the tests. This must be updated if the -// default changes. 
-static const int kDefaultSrtpCryptoSuite = rtc::SRTP_AES128_CM_SHA1_80; -static const int kDefaultSrtpCryptoSuiteGcm = rtc::SRTP_AEAD_AES_256_GCM; - -static const SocketAddress kDefaultLocalAddress("192.168.1.1", 0); - -// Helper function for constructing offer/answer options to initiate an ICE -// restart. -PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions() { - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.ice_restart = true; - return options; -} - -// Remove all stream information (SSRCs, track IDs, etc.) and "msid-semantic" -// attribute from received SDP, simulating a legacy endpoint. -void RemoveSsrcsAndMsids(cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { - content.media_description()->mutable_streams().clear(); - } - desc->set_msid_supported(false); - desc->set_msid_signaling(0); -} - -// Removes all stream information besides the stream ids, simulating an -// endpoint that only signals a=msid lines to convey stream_ids. 
-void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { - std::string track_id; - std::vector stream_ids; - if (!content.media_description()->streams().empty()) { - const StreamParams& first_stream = - content.media_description()->streams()[0]; - track_id = first_stream.id; - stream_ids = first_stream.stream_ids(); - } - content.media_description()->mutable_streams().clear(); - StreamParams new_stream; - new_stream.id = track_id; - new_stream.set_stream_ids(stream_ids); - content.media_description()->AddStream(new_stream); - } -} - -int FindFirstMediaStatsIndexByKind( - const std::string& kind, - const std::vector& - media_stats_vec) { - for (size_t i = 0; i < media_stats_vec.size(); i++) { - if (media_stats_vec[i]->kind.ValueToString() == kind) { - return i; - } - } - return -1; -} - -class SignalingMessageReceiver { - public: - virtual void ReceiveSdpMessage(SdpType type, const std::string& msg) = 0; - virtual void ReceiveIceMessage(const std::string& sdp_mid, - int sdp_mline_index, - const std::string& msg) = 0; - - protected: - SignalingMessageReceiver() {} - virtual ~SignalingMessageReceiver() {} -}; - -class MockRtpReceiverObserver : public webrtc::RtpReceiverObserverInterface { - public: - explicit MockRtpReceiverObserver(cricket::MediaType media_type) - : expected_media_type_(media_type) {} - - void OnFirstPacketReceived(cricket::MediaType media_type) override { - ASSERT_EQ(expected_media_type_, media_type); - first_packet_received_ = true; - } - - bool first_packet_received() const { return first_packet_received_; } - - virtual ~MockRtpReceiverObserver() {} - - private: - bool first_packet_received_ = false; - cricket::MediaType expected_media_type_; -}; - -// Helper class that wraps a peer connection, observes it, and can accept -// signaling messages from another wrapper. 
-// -// Uses a fake network, fake A/V capture, and optionally fake -// encoders/decoders, though they aren't used by default since they don't -// advertise support of any codecs. -// TODO(steveanton): See how this could become a subclass of -// PeerConnectionWrapper defined in peerconnectionwrapper.h. -class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, - public SignalingMessageReceiver { - public: - // Different factory methods for convenience. - // TODO(deadbeef): Could use the pattern of: - // - // PeerConnectionWrapper = - // WrapperBuilder.WithConfig(...).WithOptions(...).build(); - // - // To reduce some code duplication. - static PeerConnectionWrapper* CreateWithDtlsIdentityStore( - const std::string& debug_name, - std::unique_ptr cert_generator, - rtc::Thread* network_thread, - rtc::Thread* worker_thread) { - PeerConnectionWrapper* client(new PeerConnectionWrapper(debug_name)); - webrtc::PeerConnectionDependencies dependencies(nullptr); - dependencies.cert_generator = std::move(cert_generator); - if (!client->Init(nullptr, nullptr, std::move(dependencies), network_thread, - worker_thread, nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false)) { - delete client; - return nullptr; - } - return client; - } - - webrtc::PeerConnectionFactoryInterface* pc_factory() const { - return peer_connection_factory_.get(); - } - - webrtc::PeerConnectionInterface* pc() const { return peer_connection_.get(); } - - // If a signaling message receiver is set (via ConnectFakeSignaling), this - // will set the whole offer/answer exchange in motion. Just need to wait for - // the signaling state to reach "stable". 
- void CreateAndSetAndSignalOffer() { - auto offer = CreateOfferAndWait(); - ASSERT_NE(nullptr, offer); - EXPECT_TRUE(SetLocalDescriptionAndSendSdpMessage(std::move(offer))); - } - - // Sets the options to be used when CreateAndSetAndSignalOffer is called, or - // when a remote offer is received (via fake signaling) and an answer is - // generated. By default, uses default options. - void SetOfferAnswerOptions( - const PeerConnectionInterface::RTCOfferAnswerOptions& options) { - offer_answer_options_ = options; - } - - // Set a callback to be invoked when SDP is received via the fake signaling - // channel, which provides an opportunity to munge (modify) the SDP. This is - // used to test SDP being applied that a PeerConnection would normally not - // generate, but a non-JSEP endpoint might. - void SetReceivedSdpMunger( - std::function munger) { - received_sdp_munger_ = std::move(munger); - } - - // Similar to the above, but this is run on SDP immediately after it's - // generated. - void SetGeneratedSdpMunger( - std::function munger) { - generated_sdp_munger_ = std::move(munger); - } - - // Set a callback to be invoked when a remote offer is received via the fake - // signaling channel. This provides an opportunity to change the - // PeerConnection state before an answer is created and sent to the caller. - void SetRemoteOfferHandler(std::function handler) { - remote_offer_handler_ = std::move(handler); - } - - void SetRemoteAsyncResolver(rtc::MockAsyncResolver* resolver) { - remote_async_resolver_ = resolver; - } - - // Every ICE connection state in order that has been seen by the observer. - std::vector - ice_connection_state_history() const { - return ice_connection_state_history_; - } - void clear_ice_connection_state_history() { - ice_connection_state_history_.clear(); - } - - // Every standardized ICE connection state in order that has been seen by the - // observer. 
- std::vector - standardized_ice_connection_state_history() const { - return standardized_ice_connection_state_history_; - } - - // Every PeerConnection state in order that has been seen by the observer. - std::vector - peer_connection_state_history() const { - return peer_connection_state_history_; - } - - // Every ICE gathering state in order that has been seen by the observer. - std::vector - ice_gathering_state_history() const { - return ice_gathering_state_history_; - } - std::vector - ice_candidate_pair_change_history() const { - return ice_candidate_pair_change_history_; - } - - // Every PeerConnection signaling state in order that has been seen by the - // observer. - std::vector - peer_connection_signaling_state_history() const { - return peer_connection_signaling_state_history_; - } - - void AddAudioVideoTracks() { - AddAudioTrack(); - AddVideoTrack(); - } - - rtc::scoped_refptr AddAudioTrack() { - return AddTrack(CreateLocalAudioTrack()); - } - - rtc::scoped_refptr AddVideoTrack() { - return AddTrack(CreateLocalVideoTrack()); - } - - rtc::scoped_refptr CreateLocalAudioTrack() { - cricket::AudioOptions options; - // Disable highpass filter so that we can get all the test audio frames. - options.highpass_filter = false; - rtc::scoped_refptr source = - peer_connection_factory_->CreateAudioSource(options); - // TODO(perkj): Test audio source when it is implemented. Currently audio - // always use the default input. 
- return peer_connection_factory_->CreateAudioTrack(rtc::CreateRandomUuid(), - source); - } - - rtc::scoped_refptr CreateLocalVideoTrack() { - webrtc::FakePeriodicVideoSource::Config config; - config.timestamp_offset_ms = rtc::TimeMillis(); - return CreateLocalVideoTrackInternal(config); - } - - rtc::scoped_refptr - CreateLocalVideoTrackWithConfig( - webrtc::FakePeriodicVideoSource::Config config) { - return CreateLocalVideoTrackInternal(config); - } - - rtc::scoped_refptr - CreateLocalVideoTrackWithRotation(webrtc::VideoRotation rotation) { - webrtc::FakePeriodicVideoSource::Config config; - config.rotation = rotation; - config.timestamp_offset_ms = rtc::TimeMillis(); - return CreateLocalVideoTrackInternal(config); - } - - rtc::scoped_refptr AddTrack( - rtc::scoped_refptr track, - const std::vector& stream_ids = {}) { - auto result = pc()->AddTrack(track, stream_ids); - EXPECT_EQ(RTCErrorType::NONE, result.error().type()); - return result.MoveValue(); - } - - std::vector> GetReceiversOfType( - cricket::MediaType media_type) { - std::vector> receivers; - for (const auto& receiver : pc()->GetReceivers()) { - if (receiver->media_type() == media_type) { - receivers.push_back(receiver); - } - } - return receivers; - } - - rtc::scoped_refptr GetFirstTransceiverOfType( - cricket::MediaType media_type) { - for (auto transceiver : pc()->GetTransceivers()) { - if (transceiver->receiver()->media_type() == media_type) { - return transceiver; - } - } - return nullptr; - } - - bool SignalingStateStable() { - return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable; - } - - void CreateDataChannel() { CreateDataChannel(nullptr); } - - void CreateDataChannel(const webrtc::DataChannelInit* init) { - CreateDataChannel(kDataChannelLabel, init); - } - - void CreateDataChannel(const std::string& label, - const webrtc::DataChannelInit* init) { - data_channel_ = pc()->CreateDataChannel(label, init); - ASSERT_TRUE(data_channel_.get() != nullptr); - 
data_observer_.reset(new MockDataChannelObserver(data_channel_)); - } - - DataChannelInterface* data_channel() { return data_channel_; } - const MockDataChannelObserver* data_observer() const { - return data_observer_.get(); - } - - int audio_frames_received() const { - return fake_audio_capture_module_->frames_received(); - } - - // Takes minimum of video frames received for each track. - // - // Can be used like: - // EXPECT_GE(expected_frames, min_video_frames_received_per_track()); - // - // To ensure that all video tracks received at least a certain number of - // frames. - int min_video_frames_received_per_track() const { - int min_frames = INT_MAX; - if (fake_video_renderers_.empty()) { - return 0; - } - - for (const auto& pair : fake_video_renderers_) { - min_frames = std::min(min_frames, pair.second->num_rendered_frames()); - } - return min_frames; - } - - // Returns a MockStatsObserver in a state after stats gathering finished, - // which can be used to access the gathered stats. - rtc::scoped_refptr OldGetStatsForTrack( - webrtc::MediaStreamTrackInterface* track) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - EXPECT_TRUE(peer_connection_->GetStats( - observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard)); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); - return observer; - } - - // Version that doesn't take a track "filter", and gathers all stats. - rtc::scoped_refptr OldGetStats() { - return OldGetStatsForTrack(nullptr); - } - - // Synchronously gets stats and returns them. If it times out, fails the test - // and returns null. - rtc::scoped_refptr NewGetStats() { - rtc::scoped_refptr callback( - new rtc::RefCountedObject()); - peer_connection_->GetStats(callback); - EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout); - return callback->report(); - } - - int rendered_width() { - EXPECT_FALSE(fake_video_renderers_.empty()); - return fake_video_renderers_.empty() - ? 
0 - : fake_video_renderers_.begin()->second->width(); - } - - int rendered_height() { - EXPECT_FALSE(fake_video_renderers_.empty()); - return fake_video_renderers_.empty() - ? 0 - : fake_video_renderers_.begin()->second->height(); - } - - double rendered_aspect_ratio() { - if (rendered_height() == 0) { - return 0.0; - } - return static_cast(rendered_width()) / rendered_height(); - } - - webrtc::VideoRotation rendered_rotation() { - EXPECT_FALSE(fake_video_renderers_.empty()); - return fake_video_renderers_.empty() - ? webrtc::kVideoRotation_0 - : fake_video_renderers_.begin()->second->rotation(); - } - - int local_rendered_width() { - return local_video_renderer_ ? local_video_renderer_->width() : 0; - } - - int local_rendered_height() { - return local_video_renderer_ ? local_video_renderer_->height() : 0; - } - - double local_rendered_aspect_ratio() { - if (local_rendered_height() == 0) { - return 0.0; - } - return static_cast(local_rendered_width()) / - local_rendered_height(); - } - - size_t number_of_remote_streams() { - if (!pc()) { - return 0; - } - return pc()->remote_streams()->count(); - } - - StreamCollectionInterface* remote_streams() const { - if (!pc()) { - ADD_FAILURE(); - return nullptr; - } - return pc()->remote_streams(); - } - - StreamCollectionInterface* local_streams() { - if (!pc()) { - ADD_FAILURE(); - return nullptr; - } - return pc()->local_streams(); - } - - webrtc::PeerConnectionInterface::SignalingState signaling_state() { - return pc()->signaling_state(); - } - - webrtc::PeerConnectionInterface::IceConnectionState ice_connection_state() { - return pc()->ice_connection_state(); - } - - webrtc::PeerConnectionInterface::IceConnectionState - standardized_ice_connection_state() { - return pc()->standardized_ice_connection_state(); - } - - webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() { - return pc()->ice_gathering_state(); - } - - // Returns a MockRtpReceiverObserver for each RtpReceiver returned by - // 
GetReceivers. They're updated automatically when a remote offer/answer - // from the fake signaling channel is applied, or when - // ResetRtpReceiverObservers below is called. - const std::vector>& - rtp_receiver_observers() { - return rtp_receiver_observers_; - } - - void ResetRtpReceiverObservers() { - rtp_receiver_observers_.clear(); - for (const rtc::scoped_refptr& receiver : - pc()->GetReceivers()) { - std::unique_ptr observer( - new MockRtpReceiverObserver(receiver->media_type())); - receiver->SetObserver(observer.get()); - rtp_receiver_observers_.push_back(std::move(observer)); - } - } - - rtc::FakeNetworkManager* network_manager() const { - return fake_network_manager_.get(); - } - cricket::PortAllocator* port_allocator() const { return port_allocator_; } - - webrtc::FakeRtcEventLogFactory* event_log_factory() const { - return event_log_factory_; - } - - const cricket::Candidate& last_candidate_gathered() const { - return last_candidate_gathered_; - } - const cricket::IceCandidateErrorEvent& error_event() const { - return error_event_; - } - - // Sets the mDNS responder for the owned fake network manager and keeps a - // reference to the responder. - void SetMdnsResponder( - std::unique_ptr mdns_responder) { - RTC_DCHECK(mdns_responder != nullptr); - mdns_responder_ = mdns_responder.get(); - network_manager()->set_mdns_responder(std::move(mdns_responder)); - } - - // Returns null on failure. 
- std::unique_ptr CreateOfferAndWait() { - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - pc()->CreateOffer(observer, offer_answer_options_); - return WaitForDescriptionFromObserver(observer); - } - bool Rollback() { - return SetRemoteDescription( - webrtc::CreateSessionDescription(SdpType::kRollback, "")); - } - - private: - explicit PeerConnectionWrapper(const std::string& debug_name) - : debug_name_(debug_name) {} - - bool Init( - const PeerConnectionFactory::Options* options, - const PeerConnectionInterface::RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies, - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - std::unique_ptr event_log_factory, - bool reset_encoder_factory, - bool reset_decoder_factory) { - // There's an error in this test code if Init ends up being called twice. - RTC_DCHECK(!peer_connection_); - RTC_DCHECK(!peer_connection_factory_); - - fake_network_manager_.reset(new rtc::FakeNetworkManager()); - fake_network_manager_->AddInterface(kDefaultLocalAddress); - - std::unique_ptr port_allocator( - new cricket::BasicPortAllocator(fake_network_manager_.get())); - port_allocator_ = port_allocator.get(); - fake_audio_capture_module_ = FakeAudioCaptureModule::Create(); - if (!fake_audio_capture_module_) { - return false; - } - rtc::Thread* const signaling_thread = rtc::Thread::Current(); - - webrtc::PeerConnectionFactoryDependencies pc_factory_dependencies; - pc_factory_dependencies.network_thread = network_thread; - pc_factory_dependencies.worker_thread = worker_thread; - pc_factory_dependencies.signaling_thread = signaling_thread; - pc_factory_dependencies.task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = - pc_factory_dependencies.task_queue_factory.get(); - media_deps.adm = fake_audio_capture_module_; - webrtc::SetMediaEngineDefaults(&media_deps); - - if (reset_encoder_factory) { - 
media_deps.video_encoder_factory.reset(); - } - if (reset_decoder_factory) { - media_deps.video_decoder_factory.reset(); - } - - if (!media_deps.audio_processing) { - // If the standard Creation method for APM returns a null pointer, instead - // use the builder for testing to create an APM object. - media_deps.audio_processing = AudioProcessingBuilderForTesting().Create(); - } - - pc_factory_dependencies.media_engine = - cricket::CreateMediaEngine(std::move(media_deps)); - pc_factory_dependencies.call_factory = webrtc::CreateCallFactory(); - if (event_log_factory) { - event_log_factory_ = event_log_factory.get(); - pc_factory_dependencies.event_log_factory = std::move(event_log_factory); - } else { - pc_factory_dependencies.event_log_factory = - std::make_unique( - pc_factory_dependencies.task_queue_factory.get()); - } - peer_connection_factory_ = webrtc::CreateModularPeerConnectionFactory( - std::move(pc_factory_dependencies)); - - if (!peer_connection_factory_) { - return false; - } - if (options) { - peer_connection_factory_->SetOptions(*options); - } - if (config) { - sdp_semantics_ = config->sdp_semantics; - } - - dependencies.allocator = std::move(port_allocator); - peer_connection_ = CreatePeerConnection(config, std::move(dependencies)); - return peer_connection_.get() != nullptr; - } - - rtc::scoped_refptr CreatePeerConnection( - const PeerConnectionInterface::RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies) { - PeerConnectionInterface::RTCConfiguration modified_config; - // If |config| is null, this will result in a default configuration being - // used. - if (config) { - modified_config = *config; - } - // Disable resolution adaptation; we don't want it interfering with the - // test results. - // TODO(deadbeef): Do something more robust. Since we're testing for aspect - // ratios and not specific resolutions, is this even necessary? 
- modified_config.set_cpu_adaptation(false); - - dependencies.observer = this; - return peer_connection_factory_->CreatePeerConnection( - modified_config, std::move(dependencies)); - } - - void set_signaling_message_receiver( - SignalingMessageReceiver* signaling_message_receiver) { - signaling_message_receiver_ = signaling_message_receiver; - } - - void set_signaling_delay_ms(int delay_ms) { signaling_delay_ms_ = delay_ms; } - - void set_signal_ice_candidates(bool signal) { - signal_ice_candidates_ = signal; - } - - rtc::scoped_refptr CreateLocalVideoTrackInternal( - webrtc::FakePeriodicVideoSource::Config config) { - // Set max frame rate to 10fps to reduce the risk of test flakiness. - // TODO(deadbeef): Do something more robust. - config.frame_interval_ms = 100; - - video_track_sources_.emplace_back( - new rtc::RefCountedObject( - config, false /* remote */)); - rtc::scoped_refptr track( - peer_connection_factory_->CreateVideoTrack( - rtc::CreateRandomUuid(), video_track_sources_.back())); - if (!local_video_renderer_) { - local_video_renderer_.reset(new webrtc::FakeVideoTrackRenderer(track)); - } - return track; - } - - void HandleIncomingOffer(const std::string& msg) { - RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingOffer"; - std::unique_ptr desc = - webrtc::CreateSessionDescription(SdpType::kOffer, msg); - if (received_sdp_munger_) { - received_sdp_munger_(desc->description()); - } - - EXPECT_TRUE(SetRemoteDescription(std::move(desc))); - // Setting a remote description may have changed the number of receivers, - // so reset the receiver observers. 
- ResetRtpReceiverObservers(); - if (remote_offer_handler_) { - remote_offer_handler_(); - } - auto answer = CreateAnswer(); - ASSERT_NE(nullptr, answer); - EXPECT_TRUE(SetLocalDescriptionAndSendSdpMessage(std::move(answer))); - } - - void HandleIncomingAnswer(const std::string& msg) { - RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingAnswer"; - std::unique_ptr desc = - webrtc::CreateSessionDescription(SdpType::kAnswer, msg); - if (received_sdp_munger_) { - received_sdp_munger_(desc->description()); - } - - EXPECT_TRUE(SetRemoteDescription(std::move(desc))); - // Set the RtpReceiverObserver after receivers are created. - ResetRtpReceiverObservers(); - } - - // Returns null on failure. - std::unique_ptr CreateAnswer() { - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - pc()->CreateAnswer(observer, offer_answer_options_); - return WaitForDescriptionFromObserver(observer); - } - - std::unique_ptr WaitForDescriptionFromObserver( - MockCreateSessionDescriptionObserver* observer) { - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); - if (!observer->result()) { - return nullptr; - } - auto description = observer->MoveDescription(); - if (generated_sdp_munger_) { - generated_sdp_munger_(description->description()); - } - return description; - } - - // Setting the local description and sending the SDP message over the fake - // signaling channel are combined into the same method because the SDP - // message needs to be sent as soon as SetLocalDescription finishes, without - // waiting for the observer to be called. This ensures that ICE candidates - // don't outrace the description. 
- bool SetLocalDescriptionAndSendSdpMessage( - std::unique_ptr desc) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - RTC_LOG(LS_INFO) << debug_name_ << ": SetLocalDescriptionAndSendSdpMessage"; - SdpType type = desc->GetType(); - std::string sdp; - EXPECT_TRUE(desc->ToString(&sdp)); - RTC_LOG(LS_INFO) << debug_name_ << ": local SDP contents=\n" << sdp; - pc()->SetLocalDescription(observer, desc.release()); - RemoveUnusedVideoRenderers(); - // As mentioned above, we need to send the message immediately after - // SetLocalDescription. - SendSdpMessage(type, sdp); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); - return true; - } - - bool SetRemoteDescription(std::unique_ptr desc) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - RTC_LOG(LS_INFO) << debug_name_ << ": SetRemoteDescription"; - pc()->SetRemoteDescription(observer, desc.release()); - RemoveUnusedVideoRenderers(); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); - return observer->result(); - } - - // This is a work around to remove unused fake_video_renderers from - // transceivers that have either stopped or are no longer receiving. - void RemoveUnusedVideoRenderers() { - if (sdp_semantics_ != SdpSemantics::kUnifiedPlan) { - return; - } - auto transceivers = pc()->GetTransceivers(); - std::set active_renderers; - for (auto& transceiver : transceivers) { - // Note - we don't check for direction here. This function is called - // before direction is set, and in that case, we should not remove - // the renderer. - if (transceiver->receiver()->media_type() == cricket::MEDIA_TYPE_VIDEO) { - active_renderers.insert(transceiver->receiver()->track()->id()); - } - } - for (auto it = fake_video_renderers_.begin(); - it != fake_video_renderers_.end();) { - // Remove fake video renderers belonging to any non-active transceivers. 
- if (!active_renderers.count(it->first)) { - it = fake_video_renderers_.erase(it); - } else { - it++; - } - } - } - - // Simulate sending a blob of SDP with delay |signaling_delay_ms_| (0 by - // default). - void SendSdpMessage(SdpType type, const std::string& msg) { - if (signaling_delay_ms_ == 0) { - RelaySdpMessageIfReceiverExists(type, msg); - } else { - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, rtc::Thread::Current(), - rtc::Bind(&PeerConnectionWrapper::RelaySdpMessageIfReceiverExists, - this, type, msg), - signaling_delay_ms_); - } - } - - void RelaySdpMessageIfReceiverExists(SdpType type, const std::string& msg) { - if (signaling_message_receiver_) { - signaling_message_receiver_->ReceiveSdpMessage(type, msg); - } - } - - // Simulate trickling an ICE candidate with delay |signaling_delay_ms_| (0 by - // default). - void SendIceMessage(const std::string& sdp_mid, - int sdp_mline_index, - const std::string& msg) { - if (signaling_delay_ms_ == 0) { - RelayIceMessageIfReceiverExists(sdp_mid, sdp_mline_index, msg); - } else { - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, rtc::Thread::Current(), - rtc::Bind(&PeerConnectionWrapper::RelayIceMessageIfReceiverExists, - this, sdp_mid, sdp_mline_index, msg), - signaling_delay_ms_); - } - } - - void RelayIceMessageIfReceiverExists(const std::string& sdp_mid, - int sdp_mline_index, - const std::string& msg) { - if (signaling_message_receiver_) { - signaling_message_receiver_->ReceiveIceMessage(sdp_mid, sdp_mline_index, - msg); - } - } - - // SignalingMessageReceiver callbacks. 
- void ReceiveSdpMessage(SdpType type, const std::string& msg) override { - if (type == SdpType::kOffer) { - HandleIncomingOffer(msg); - } else { - HandleIncomingAnswer(msg); - } - } - - void ReceiveIceMessage(const std::string& sdp_mid, - int sdp_mline_index, - const std::string& msg) override { - RTC_LOG(LS_INFO) << debug_name_ << ": ReceiveIceMessage"; - std::unique_ptr candidate( - webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, msg, nullptr)); - EXPECT_TRUE(pc()->AddIceCandidate(candidate.get())); - } - - // PeerConnectionObserver callbacks. - void OnSignalingChange( - webrtc::PeerConnectionInterface::SignalingState new_state) override { - EXPECT_EQ(pc()->signaling_state(), new_state); - peer_connection_signaling_state_history_.push_back(new_state); - } - void OnAddTrack(rtc::scoped_refptr receiver, - const std::vector>& - streams) override { - if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - rtc::scoped_refptr video_track( - static_cast(receiver->track().get())); - ASSERT_TRUE(fake_video_renderers_.find(video_track->id()) == - fake_video_renderers_.end()); - fake_video_renderers_[video_track->id()] = - std::make_unique(video_track); - } - } - void OnRemoveTrack( - rtc::scoped_refptr receiver) override { - if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - auto it = fake_video_renderers_.find(receiver->track()->id()); - if (it != fake_video_renderers_.end()) { - fake_video_renderers_.erase(it); - } else { - RTC_LOG(LS_ERROR) << "OnRemoveTrack called for non-active renderer"; - } - } - } - void OnRenegotiationNeeded() override {} - void OnIceConnectionChange( - webrtc::PeerConnectionInterface::IceConnectionState new_state) override { - EXPECT_EQ(pc()->ice_connection_state(), new_state); - ice_connection_state_history_.push_back(new_state); - } - void OnStandardizedIceConnectionChange( - webrtc::PeerConnectionInterface::IceConnectionState new_state) override { - standardized_ice_connection_state_history_.push_back(new_state); - } - 
void OnConnectionChange( - webrtc::PeerConnectionInterface::PeerConnectionState new_state) override { - peer_connection_state_history_.push_back(new_state); - } - - void OnIceGatheringChange( - webrtc::PeerConnectionInterface::IceGatheringState new_state) override { - EXPECT_EQ(pc()->ice_gathering_state(), new_state); - ice_gathering_state_history_.push_back(new_state); - } - - void OnIceSelectedCandidatePairChanged( - const cricket::CandidatePairChangeEvent& event) { - ice_candidate_pair_change_history_.push_back(event); - } - - void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override { - RTC_LOG(LS_INFO) << debug_name_ << ": OnIceCandidate"; - - if (remote_async_resolver_) { - const auto& local_candidate = candidate->candidate(); - if (local_candidate.address().IsUnresolvedIP()) { - RTC_DCHECK(local_candidate.type() == cricket::LOCAL_PORT_TYPE); - rtc::SocketAddress resolved_addr(local_candidate.address()); - const auto resolved_ip = mdns_responder_->GetMappedAddressForName( - local_candidate.address().hostname()); - RTC_DCHECK(!resolved_ip.IsNil()); - resolved_addr.SetResolvedIP(resolved_ip); - EXPECT_CALL(*remote_async_resolver_, GetResolvedAddress(_, _)) - .WillOnce(DoAll(SetArgPointee<1>(resolved_addr), Return(true))); - EXPECT_CALL(*remote_async_resolver_, Destroy(_)); - } - } - - std::string ice_sdp; - EXPECT_TRUE(candidate->ToString(&ice_sdp)); - if (signaling_message_receiver_ == nullptr || !signal_ice_candidates_) { - // Remote party may be deleted. 
- return; - } - SendIceMessage(candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp); - last_candidate_gathered_ = candidate->candidate(); - } - void OnIceCandidateError(const std::string& address, - int port, - const std::string& url, - int error_code, - const std::string& error_text) override { - error_event_ = cricket::IceCandidateErrorEvent(address, port, url, - error_code, error_text); - } - void OnDataChannel( - rtc::scoped_refptr data_channel) override { - RTC_LOG(LS_INFO) << debug_name_ << ": OnDataChannel"; - data_channel_ = data_channel; - data_observer_.reset(new MockDataChannelObserver(data_channel)); - } - - std::string debug_name_; - - std::unique_ptr fake_network_manager_; - // Reference to the mDNS responder owned by |fake_network_manager_| after set. - webrtc::FakeMdnsResponder* mdns_responder_ = nullptr; - - rtc::scoped_refptr peer_connection_; - rtc::scoped_refptr - peer_connection_factory_; - - cricket::PortAllocator* port_allocator_; - // Needed to keep track of number of frames sent. - rtc::scoped_refptr fake_audio_capture_module_; - // Needed to keep track of number of frames received. - std::map> - fake_video_renderers_; - // Needed to ensure frames aren't received for removed tracks. - std::vector> - removed_fake_video_renderers_; - - // For remote peer communication. - SignalingMessageReceiver* signaling_message_receiver_ = nullptr; - int signaling_delay_ms_ = 0; - bool signal_ice_candidates_ = true; - cricket::Candidate last_candidate_gathered_; - cricket::IceCandidateErrorEvent error_event_; - - // Store references to the video sources we've created, so that we can stop - // them, if required. - std::vector> - video_track_sources_; - // |local_video_renderer_| attached to the first created local video track. 
- std::unique_ptr local_video_renderer_; - - SdpSemantics sdp_semantics_; - PeerConnectionInterface::RTCOfferAnswerOptions offer_answer_options_; - std::function received_sdp_munger_; - std::function generated_sdp_munger_; - std::function remote_offer_handler_; - rtc::MockAsyncResolver* remote_async_resolver_ = nullptr; - rtc::scoped_refptr data_channel_; - std::unique_ptr data_observer_; - - std::vector> rtp_receiver_observers_; - - std::vector - ice_connection_state_history_; - std::vector - standardized_ice_connection_state_history_; - std::vector - peer_connection_state_history_; - std::vector - ice_gathering_state_history_; - std::vector - ice_candidate_pair_change_history_; - std::vector - peer_connection_signaling_state_history_; - webrtc::FakeRtcEventLogFactory* event_log_factory_; - - rtc::AsyncInvoker invoker_; - - friend class PeerConnectionIntegrationBaseTest; -}; - -class MockRtcEventLogOutput : public webrtc::RtcEventLogOutput { - public: - virtual ~MockRtcEventLogOutput() = default; - MOCK_METHOD(bool, IsActive, (), (const, override)); - MOCK_METHOD(bool, Write, (const std::string&), (override)); -}; - -// This helper object is used for both specifying how many audio/video frames -// are expected to be received for a caller/callee. It provides helper functions -// to specify these expectations. The object initially starts in a state of no -// expectations. 
-class MediaExpectations { - public: - enum ExpectFrames { - kExpectSomeFrames, - kExpectNoFrames, - kNoExpectation, - }; - - void ExpectBidirectionalAudioAndVideo() { - ExpectBidirectionalAudio(); - ExpectBidirectionalVideo(); - } - - void ExpectBidirectionalAudio() { - CallerExpectsSomeAudio(); - CalleeExpectsSomeAudio(); - } - - void ExpectNoAudio() { - CallerExpectsNoAudio(); - CalleeExpectsNoAudio(); - } - - void ExpectBidirectionalVideo() { - CallerExpectsSomeVideo(); - CalleeExpectsSomeVideo(); - } - - void ExpectNoVideo() { - CallerExpectsNoVideo(); - CalleeExpectsNoVideo(); - } - - void CallerExpectsSomeAudioAndVideo() { - CallerExpectsSomeAudio(); - CallerExpectsSomeVideo(); - } - - void CalleeExpectsSomeAudioAndVideo() { - CalleeExpectsSomeAudio(); - CalleeExpectsSomeVideo(); - } - - // Caller's audio functions. - void CallerExpectsSomeAudio( - int expected_audio_frames = kDefaultExpectedAudioFrameCount) { - caller_audio_expectation_ = kExpectSomeFrames; - caller_audio_frames_expected_ = expected_audio_frames; - } - - void CallerExpectsNoAudio() { - caller_audio_expectation_ = kExpectNoFrames; - caller_audio_frames_expected_ = 0; - } - - // Caller's video functions. - void CallerExpectsSomeVideo( - int expected_video_frames = kDefaultExpectedVideoFrameCount) { - caller_video_expectation_ = kExpectSomeFrames; - caller_video_frames_expected_ = expected_video_frames; - } - - void CallerExpectsNoVideo() { - caller_video_expectation_ = kExpectNoFrames; - caller_video_frames_expected_ = 0; - } - - // Callee's audio functions. - void CalleeExpectsSomeAudio( - int expected_audio_frames = kDefaultExpectedAudioFrameCount) { - callee_audio_expectation_ = kExpectSomeFrames; - callee_audio_frames_expected_ = expected_audio_frames; - } - - void CalleeExpectsNoAudio() { - callee_audio_expectation_ = kExpectNoFrames; - callee_audio_frames_expected_ = 0; - } - - // Callee's video functions. 
- void CalleeExpectsSomeVideo( - int expected_video_frames = kDefaultExpectedVideoFrameCount) { - callee_video_expectation_ = kExpectSomeFrames; - callee_video_frames_expected_ = expected_video_frames; - } - - void CalleeExpectsNoVideo() { - callee_video_expectation_ = kExpectNoFrames; - callee_video_frames_expected_ = 0; - } - - ExpectFrames caller_audio_expectation_ = kNoExpectation; - ExpectFrames caller_video_expectation_ = kNoExpectation; - ExpectFrames callee_audio_expectation_ = kNoExpectation; - ExpectFrames callee_video_expectation_ = kNoExpectation; - int caller_audio_frames_expected_ = 0; - int caller_video_frames_expected_ = 0; - int callee_audio_frames_expected_ = 0; - int callee_video_frames_expected_ = 0; -}; - -class MockIceTransport : public webrtc::IceTransportInterface { - public: - MockIceTransport(const std::string& name, int component) - : internal_(std::make_unique( - name, - component, - nullptr /* network_thread */)) {} - ~MockIceTransport() = default; - cricket::IceTransportInternal* internal() { return internal_.get(); } - - private: - std::unique_ptr internal_; -}; - -class MockIceTransportFactory : public IceTransportFactory { - public: - ~MockIceTransportFactory() override = default; - rtc::scoped_refptr CreateIceTransport( - const std::string& transport_name, - int component, - IceTransportInit init) { - RecordIceTransportCreated(); - return new rtc::RefCountedObject(transport_name, - component); - } - MOCK_METHOD(void, RecordIceTransportCreated, ()); -}; - -// Tests two PeerConnections connecting to each other end-to-end, using a -// virtual network, fake A/V capture and fake encoder/decoders. The -// PeerConnections share the threads/socket servers, but use separate versions -// of everything else (including "PeerConnectionFactory"s). 
-class PeerConnectionIntegrationBaseTest : public ::testing::Test { - public: - explicit PeerConnectionIntegrationBaseTest(SdpSemantics sdp_semantics) - : sdp_semantics_(sdp_semantics), - ss_(new rtc::VirtualSocketServer()), - fss_(new rtc::FirewallSocketServer(ss_.get())), - network_thread_(new rtc::Thread(fss_.get())), - worker_thread_(rtc::Thread::Create()) { - network_thread_->SetName("PCNetworkThread", this); - worker_thread_->SetName("PCWorkerThread", this); - RTC_CHECK(network_thread_->Start()); - RTC_CHECK(worker_thread_->Start()); - webrtc::metrics::Reset(); - } - - ~PeerConnectionIntegrationBaseTest() { - // The PeerConnections should deleted before the TurnCustomizers. - // A TurnPort is created with a raw pointer to a TurnCustomizer. The - // TurnPort has the same lifetime as the PeerConnection, so it's expected - // that the TurnCustomizer outlives the life of the PeerConnection or else - // when Send() is called it will hit a seg fault. - if (caller_) { - caller_->set_signaling_message_receiver(nullptr); - delete SetCallerPcWrapperAndReturnCurrent(nullptr); - } - if (callee_) { - callee_->set_signaling_message_receiver(nullptr); - delete SetCalleePcWrapperAndReturnCurrent(nullptr); - } - - // If turn servers were created for the test they need to be destroyed on - // the network thread. - network_thread()->Invoke(RTC_FROM_HERE, [this] { - turn_servers_.clear(); - turn_customizers_.clear(); - }); - } - - bool SignalingStateStable() { - return caller_->SignalingStateStable() && callee_->SignalingStateStable(); - } - - bool DtlsConnected() { - // TODO(deadbeef): kIceConnectionConnected currently means both ICE and DTLS - // are connected. This is an important distinction. Once we have separate - // ICE and DTLS state, this check needs to use the DTLS state. 
- return (callee()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionConnected || - callee()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionCompleted) && - (caller()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionConnected || - caller()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionCompleted); - } - - // When |event_log_factory| is null, the default implementation of the event - // log factory will be used. - std::unique_ptr CreatePeerConnectionWrapper( - const std::string& debug_name, - const PeerConnectionFactory::Options* options, - const RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies, - std::unique_ptr event_log_factory, - bool reset_encoder_factory, - bool reset_decoder_factory) { - RTCConfiguration modified_config; - if (config) { - modified_config = *config; - } - modified_config.sdp_semantics = sdp_semantics_; - if (!dependencies.cert_generator) { - dependencies.cert_generator = - std::make_unique(); - } - std::unique_ptr client( - new PeerConnectionWrapper(debug_name)); - - if (!client->Init(options, &modified_config, std::move(dependencies), - network_thread_.get(), worker_thread_.get(), - std::move(event_log_factory), reset_encoder_factory, - reset_decoder_factory)) { - return nullptr; - } - return client; - } - - std::unique_ptr - CreatePeerConnectionWrapperWithFakeRtcEventLog( - const std::string& debug_name, - const PeerConnectionFactory::Options* options, - const RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies) { - std::unique_ptr event_log_factory( - new webrtc::FakeRtcEventLogFactory(rtc::Thread::Current())); - return CreatePeerConnectionWrapper(debug_name, options, config, - std::move(dependencies), - std::move(event_log_factory), - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - } - - bool CreatePeerConnectionWrappers() { - return 
CreatePeerConnectionWrappersWithConfig( - PeerConnectionInterface::RTCConfiguration(), - PeerConnectionInterface::RTCConfiguration()); - } - - bool CreatePeerConnectionWrappersWithSdpSemantics( - SdpSemantics caller_semantics, - SdpSemantics callee_semantics) { - // Can't specify the sdp_semantics in the passed-in configuration since it - // will be overwritten by CreatePeerConnectionWrapper with whatever is - // stored in sdp_semantics_. So get around this by modifying the instance - // variable before calling CreatePeerConnectionWrapper for the caller and - // callee PeerConnections. - SdpSemantics original_semantics = sdp_semantics_; - sdp_semantics_ = caller_semantics; - caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - sdp_semantics_ = callee_semantics; - callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - sdp_semantics_ = original_semantics; - return caller_ && callee_; - } - - bool CreatePeerConnectionWrappersWithConfig( - const PeerConnectionInterface::RTCConfiguration& caller_config, - const PeerConnectionInterface::RTCConfiguration& callee_config) { - caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, &caller_config, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, &callee_config, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - return caller_ && callee_; - } - - bool CreatePeerConnectionWrappersWithConfigAndDeps( - const PeerConnectionInterface::RTCConfiguration& caller_config, - webrtc::PeerConnectionDependencies caller_dependencies, - 
const PeerConnectionInterface::RTCConfiguration& callee_config, - webrtc::PeerConnectionDependencies callee_dependencies) { - caller_ = - CreatePeerConnectionWrapper("Caller", nullptr, &caller_config, - std::move(caller_dependencies), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - callee_ = - CreatePeerConnectionWrapper("Callee", nullptr, &callee_config, - std::move(callee_dependencies), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - return caller_ && callee_; - } - - bool CreatePeerConnectionWrappersWithOptions( - const PeerConnectionFactory::Options& caller_options, - const PeerConnectionFactory::Options& callee_options) { - caller_ = CreatePeerConnectionWrapper( - "Caller", &caller_options, nullptr, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - callee_ = CreatePeerConnectionWrapper( - "Callee", &callee_options, nullptr, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - return caller_ && callee_; - } - - bool CreatePeerConnectionWrappersWithFakeRtcEventLog() { - PeerConnectionInterface::RTCConfiguration default_config; - caller_ = CreatePeerConnectionWrapperWithFakeRtcEventLog( - "Caller", nullptr, &default_config, - webrtc::PeerConnectionDependencies(nullptr)); - callee_ = CreatePeerConnectionWrapperWithFakeRtcEventLog( - "Callee", nullptr, &default_config, - webrtc::PeerConnectionDependencies(nullptr)); - return caller_ && callee_; - } - - std::unique_ptr - CreatePeerConnectionWrapperWithAlternateKey() { - std::unique_ptr cert_generator( - new FakeRTCCertificateGenerator()); - cert_generator->use_alternate_key(); - - webrtc::PeerConnectionDependencies dependencies(nullptr); - dependencies.cert_generator = std::move(cert_generator); - return CreatePeerConnectionWrapper("New Peer", nullptr, nullptr, - std::move(dependencies), nullptr, 
- /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - } - - bool CreateOneDirectionalPeerConnectionWrappers(bool caller_to_callee) { - caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/!caller_to_callee, - /*reset_decoder_factory=*/caller_to_callee); - callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/caller_to_callee, - /*reset_decoder_factory=*/!caller_to_callee); - return caller_ && callee_; - } - - cricket::TestTurnServer* CreateTurnServer( - rtc::SocketAddress internal_address, - rtc::SocketAddress external_address, - cricket::ProtocolType type = cricket::ProtocolType::PROTO_UDP, - const std::string& common_name = "test turn server") { - rtc::Thread* thread = network_thread(); - std::unique_ptr turn_server = - network_thread()->Invoke>( - RTC_FROM_HERE, - [thread, internal_address, external_address, type, common_name] { - return std::make_unique( - thread, internal_address, external_address, type, - /*ignore_bad_certs=*/true, common_name); - }); - turn_servers_.push_back(std::move(turn_server)); - // Interactions with the turn server should be done on the network thread. - return turn_servers_.back().get(); - } - - cricket::TestTurnCustomizer* CreateTurnCustomizer() { - std::unique_ptr turn_customizer = - network_thread()->Invoke>( - RTC_FROM_HERE, - [] { return std::make_unique(); }); - turn_customizers_.push_back(std::move(turn_customizer)); - // Interactions with the turn customizer should be done on the network - // thread. - return turn_customizers_.back().get(); - } - - // Checks that the function counters for a TestTurnCustomizer are greater than - // 0. 
- void ExpectTurnCustomizerCountersIncremented( - cricket::TestTurnCustomizer* turn_customizer) { - unsigned int allow_channel_data_counter = - network_thread()->Invoke( - RTC_FROM_HERE, [turn_customizer] { - return turn_customizer->allow_channel_data_cnt_; - }); - EXPECT_GT(allow_channel_data_counter, 0u); - unsigned int modify_counter = network_thread()->Invoke( - RTC_FROM_HERE, - [turn_customizer] { return turn_customizer->modify_cnt_; }); - EXPECT_GT(modify_counter, 0u); - } - - // Once called, SDP blobs and ICE candidates will be automatically signaled - // between PeerConnections. - void ConnectFakeSignaling() { - caller_->set_signaling_message_receiver(callee_.get()); - callee_->set_signaling_message_receiver(caller_.get()); - } - - // Once called, SDP blobs will be automatically signaled between - // PeerConnections. Note that ICE candidates will not be signaled unless they - // are in the exchanged SDP blobs. - void ConnectFakeSignalingForSdpOnly() { - ConnectFakeSignaling(); - SetSignalIceCandidates(false); - } - - void SetSignalingDelayMs(int delay_ms) { - caller_->set_signaling_delay_ms(delay_ms); - callee_->set_signaling_delay_ms(delay_ms); - } - - void SetSignalIceCandidates(bool signal) { - caller_->set_signal_ice_candidates(signal); - callee_->set_signal_ice_candidates(signal); - } - - // Messages may get lost on the unreliable DataChannel, so we send multiple - // times to avoid test flakiness. - void SendRtpDataWithRetries(webrtc::DataChannelInterface* dc, - const std::string& data, - int retries) { - for (int i = 0; i < retries; ++i) { - dc->Send(DataBuffer(data)); - } - } - - rtc::Thread* network_thread() { return network_thread_.get(); } - - rtc::VirtualSocketServer* virtual_socket_server() { return ss_.get(); } - - PeerConnectionWrapper* caller() { return caller_.get(); } - - // Set the |caller_| to the |wrapper| passed in and return the - // original |caller_|. 
- PeerConnectionWrapper* SetCallerPcWrapperAndReturnCurrent( - PeerConnectionWrapper* wrapper) { - PeerConnectionWrapper* old = caller_.release(); - caller_.reset(wrapper); - return old; - } - - PeerConnectionWrapper* callee() { return callee_.get(); } - - // Set the |callee_| to the |wrapper| passed in and return the - // original |callee_|. - PeerConnectionWrapper* SetCalleePcWrapperAndReturnCurrent( - PeerConnectionWrapper* wrapper) { - PeerConnectionWrapper* old = callee_.release(); - callee_.reset(wrapper); - return old; - } - - void SetPortAllocatorFlags(uint32_t caller_flags, uint32_t callee_flags) { - network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::set_flags, - caller()->port_allocator(), caller_flags)); - network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::set_flags, - callee()->port_allocator(), callee_flags)); - } - - rtc::FirewallSocketServer* firewall() const { return fss_.get(); } - - // Expects the provided number of new frames to be received within - // kMaxWaitForFramesMs. The new expected frames are specified in - // |media_expectations|. Returns false if any of the expectations were - // not met. - bool ExpectNewFrames(const MediaExpectations& media_expectations) { - // Make sure there are no bogus tracks confusing the issue. - caller()->RemoveUnusedVideoRenderers(); - callee()->RemoveUnusedVideoRenderers(); - // First initialize the expected frame counts based upon the current - // frame count. 
- int total_caller_audio_frames_expected = caller()->audio_frames_received(); - if (media_expectations.caller_audio_expectation_ == - MediaExpectations::kExpectSomeFrames) { - total_caller_audio_frames_expected += - media_expectations.caller_audio_frames_expected_; - } - int total_caller_video_frames_expected = - caller()->min_video_frames_received_per_track(); - if (media_expectations.caller_video_expectation_ == - MediaExpectations::kExpectSomeFrames) { - total_caller_video_frames_expected += - media_expectations.caller_video_frames_expected_; - } - int total_callee_audio_frames_expected = callee()->audio_frames_received(); - if (media_expectations.callee_audio_expectation_ == - MediaExpectations::kExpectSomeFrames) { - total_callee_audio_frames_expected += - media_expectations.callee_audio_frames_expected_; - } - int total_callee_video_frames_expected = - callee()->min_video_frames_received_per_track(); - if (media_expectations.callee_video_expectation_ == - MediaExpectations::kExpectSomeFrames) { - total_callee_video_frames_expected += - media_expectations.callee_video_frames_expected_; - } - - // Wait for the expected frames. - EXPECT_TRUE_WAIT(caller()->audio_frames_received() >= - total_caller_audio_frames_expected && - caller()->min_video_frames_received_per_track() >= - total_caller_video_frames_expected && - callee()->audio_frames_received() >= - total_callee_audio_frames_expected && - callee()->min_video_frames_received_per_track() >= - total_callee_video_frames_expected, - kMaxWaitForFramesMs); - bool expectations_correct = - caller()->audio_frames_received() >= - total_caller_audio_frames_expected && - caller()->min_video_frames_received_per_track() >= - total_caller_video_frames_expected && - callee()->audio_frames_received() >= - total_callee_audio_frames_expected && - callee()->min_video_frames_received_per_track() >= - total_callee_video_frames_expected; - - // After the combined wait, print out a more detailed message upon - // failure. 
- EXPECT_GE(caller()->audio_frames_received(), - total_caller_audio_frames_expected); - EXPECT_GE(caller()->min_video_frames_received_per_track(), - total_caller_video_frames_expected); - EXPECT_GE(callee()->audio_frames_received(), - total_callee_audio_frames_expected); - EXPECT_GE(callee()->min_video_frames_received_per_track(), - total_callee_video_frames_expected); - - // We want to make sure nothing unexpected was received. - if (media_expectations.caller_audio_expectation_ == - MediaExpectations::kExpectNoFrames) { - EXPECT_EQ(caller()->audio_frames_received(), - total_caller_audio_frames_expected); - if (caller()->audio_frames_received() != - total_caller_audio_frames_expected) { - expectations_correct = false; - } - } - if (media_expectations.caller_video_expectation_ == - MediaExpectations::kExpectNoFrames) { - EXPECT_EQ(caller()->min_video_frames_received_per_track(), - total_caller_video_frames_expected); - if (caller()->min_video_frames_received_per_track() != - total_caller_video_frames_expected) { - expectations_correct = false; - } - } - if (media_expectations.callee_audio_expectation_ == - MediaExpectations::kExpectNoFrames) { - EXPECT_EQ(callee()->audio_frames_received(), - total_callee_audio_frames_expected); - if (callee()->audio_frames_received() != - total_callee_audio_frames_expected) { - expectations_correct = false; - } - } - if (media_expectations.callee_video_expectation_ == - MediaExpectations::kExpectNoFrames) { - EXPECT_EQ(callee()->min_video_frames_received_per_track(), - total_callee_video_frames_expected); - if (callee()->min_video_frames_received_per_track() != - total_callee_video_frames_expected) { - expectations_correct = false; - } - } - return expectations_correct; - } - - void ClosePeerConnections() { - caller()->pc()->Close(); - callee()->pc()->Close(); - } - - void TestNegotiatedCipherSuite( - const PeerConnectionFactory::Options& caller_options, - const PeerConnectionFactory::Options& callee_options, - int 
expected_cipher_suite) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(caller_options, - callee_options)); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(expected_cipher_suite), - caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout); - // TODO(bugs.webrtc.org/9456): Fix it. - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.SrtpCryptoSuite.Audio", - expected_cipher_suite)); - } - - void TestGcmNegotiationUsesCipherSuite(bool local_gcm_enabled, - bool remote_gcm_enabled, - bool aes_ctr_enabled, - int expected_cipher_suite) { - PeerConnectionFactory::Options caller_options; - caller_options.crypto_options.srtp.enable_gcm_crypto_suites = - local_gcm_enabled; - caller_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher = - aes_ctr_enabled; - PeerConnectionFactory::Options callee_options; - callee_options.crypto_options.srtp.enable_gcm_crypto_suites = - remote_gcm_enabled; - callee_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher = - aes_ctr_enabled; - TestNegotiatedCipherSuite(caller_options, callee_options, - expected_cipher_suite); - } - - protected: - SdpSemantics sdp_semantics_; - - private: - // |ss_| is used by |network_thread_| so it must be destroyed later. - std::unique_ptr ss_; - std::unique_ptr fss_; - // |network_thread_| and |worker_thread_| are used by both - // |caller_| and |callee_| so they must be destroyed - // later. - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; - // The turn servers and turn customizers should be accessed & deleted on the - // network thread to avoid a race with the socket read/write that occurs - // on the network thread. 
- std::vector> turn_servers_; - std::vector> turn_customizers_; - std::unique_ptr caller_; - std::unique_ptr callee_; -}; - -class PeerConnectionIntegrationTest - : public PeerConnectionIntegrationBaseTest, - public ::testing::WithParamInterface { - protected: - PeerConnectionIntegrationTest() - : PeerConnectionIntegrationBaseTest(GetParam()) {} -}; - -// Fake clock must be set before threads are started to prevent race on -// Set/GetClockForTesting(). -// To achieve that, multiple inheritance is used as a mixin pattern -// where order of construction is finely controlled. -// This also ensures peerconnection is closed before switching back to non-fake -// clock, avoiding other races and DCHECK failures such as in rtp_sender.cc. -class FakeClockForTest : public rtc::ScopedFakeClock { - protected: - FakeClockForTest() { - // Some things use a time of "0" as a special value, so we need to start out - // the fake clock at a nonzero time. - // TODO(deadbeef): Fix this. - AdvanceTime(webrtc::TimeDelta::Seconds(1)); - } - - // Explicit handle. - ScopedFakeClock& FakeClock() { return *this; } -}; - -// Ensure FakeClockForTest is constructed first (see class for rationale). -class PeerConnectionIntegrationTestWithFakeClock - : public FakeClockForTest, - public PeerConnectionIntegrationTest {}; - -class PeerConnectionIntegrationTestPlanB - : public PeerConnectionIntegrationBaseTest { - protected: - PeerConnectionIntegrationTestPlanB() - : PeerConnectionIntegrationBaseTest(SdpSemantics::kPlanB) {} -}; - -class PeerConnectionIntegrationTestUnifiedPlan - : public PeerConnectionIntegrationBaseTest { - protected: - PeerConnectionIntegrationTestUnifiedPlan() - : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {} -}; - -// Test the OnFirstPacketReceived callback from audio/video RtpReceivers. This -// includes testing that the callback is invoked if an observer is connected -// after the first packet has already been received. 
-TEST_P(PeerConnectionIntegrationTest, - RtpReceiverObserverOnFirstPacketReceived) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - // Start offer/answer exchange and wait for it to complete. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Should be one receiver each for audio/video. - EXPECT_EQ(2U, caller()->rtp_receiver_observers().size()); - EXPECT_EQ(2U, callee()->rtp_receiver_observers().size()); - // Wait for all "first packet received" callbacks to be fired. - EXPECT_TRUE_WAIT( - absl::c_all_of(caller()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - }), - kMaxWaitForFramesMs); - EXPECT_TRUE_WAIT( - absl::c_all_of(callee()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - }), - kMaxWaitForFramesMs); - // If new observers are set after the first packet was already received, the - // callback should still be invoked. - caller()->ResetRtpReceiverObservers(); - callee()->ResetRtpReceiverObservers(); - EXPECT_EQ(2U, caller()->rtp_receiver_observers().size()); - EXPECT_EQ(2U, callee()->rtp_receiver_observers().size()); - EXPECT_TRUE( - absl::c_all_of(caller()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - })); - EXPECT_TRUE( - absl::c_all_of(callee()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - })); -} - -class DummyDtmfObserver : public DtmfSenderObserverInterface { - public: - DummyDtmfObserver() : completed_(false) {} - - // Implements DtmfSenderObserverInterface. 
- void OnToneChange(const std::string& tone) override { - tones_.push_back(tone); - if (tone.empty()) { - completed_ = true; - } - } - - const std::vector& tones() const { return tones_; } - bool completed() const { return completed_; } - - private: - bool completed_; - std::vector tones_; -}; - -// Assumes |sender| already has an audio track added and the offer/answer -// exchange is done. -void TestDtmfFromSenderToReceiver(PeerConnectionWrapper* sender, - PeerConnectionWrapper* receiver) { - // We should be able to get a DTMF sender from the local sender. - rtc::scoped_refptr dtmf_sender = - sender->pc()->GetSenders().at(0)->GetDtmfSender(); - ASSERT_TRUE(dtmf_sender); - DummyDtmfObserver observer; - dtmf_sender->RegisterObserver(&observer); - - // Test the DtmfSender object just created. - EXPECT_TRUE(dtmf_sender->CanInsertDtmf()); - EXPECT_TRUE(dtmf_sender->InsertDtmf("1a", 100, 50)); - - EXPECT_TRUE_WAIT(observer.completed(), kDefaultTimeout); - std::vector tones = {"1", "a", ""}; - EXPECT_EQ(tones, observer.tones()); - dtmf_sender->UnregisterObserver(); - // TODO(deadbeef): Verify the tones were actually received end-to-end. -} - -// Verifies the DtmfSenderObserver callbacks for a DtmfSender (one in each -// direction). -TEST_P(PeerConnectionIntegrationTest, DtmfSenderObserver) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Only need audio for DTMF. - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // DTLS must finish before the DTMF sender can be used reliably. - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - TestDtmfFromSenderToReceiver(caller(), callee()); - TestDtmfFromSenderToReceiver(callee(), caller()); -} - -// Basic end-to-end test, verifying media can be encoded/transmitted/decoded -// between two connections, using DTLS-SRTP. 
-TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Do normal offer/answer and wait for some frames to be received in each - // direction. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - EXPECT_METRIC_LE( - 2, webrtc::metrics::NumEvents("WebRTC.PeerConnection.KeyProtocol", - webrtc::kEnumCounterKeyProtocolDtls)); - EXPECT_METRIC_EQ( - 0, webrtc::metrics::NumEvents("WebRTC.PeerConnection.KeyProtocol", - webrtc::kEnumCounterKeyProtocolSdes)); -} - -// Uses SDES instead of DTLS for key agreement. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithSdes) { - PeerConnectionInterface::RTCConfiguration sdes_config; - sdes_config.enable_dtls_srtp.emplace(false); - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(sdes_config, sdes_config)); - ConnectFakeSignaling(); - - // Do normal offer/answer and wait for some frames to be received in each - // direction. 
- caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - EXPECT_METRIC_LE( - 2, webrtc::metrics::NumEvents("WebRTC.PeerConnection.KeyProtocol", - webrtc::kEnumCounterKeyProtocolSdes)); - EXPECT_METRIC_EQ( - 0, webrtc::metrics::NumEvents("WebRTC.PeerConnection.KeyProtocol", - webrtc::kEnumCounterKeyProtocolDtls)); -} - -// Basic end-to-end test specifying the |enable_encrypted_rtp_header_extensions| -// option to offer encrypted versions of all header extensions alongside the -// unencrypted versions. -TEST_P(PeerConnectionIntegrationTest, - EndToEndCallWithEncryptedRtpHeaderExtensions) { - CryptoOptions crypto_options; - crypto_options.srtp.enable_encrypted_rtp_header_extensions = true; - PeerConnectionInterface::RTCConfiguration config; - config.crypto_options = crypto_options; - // Note: This allows offering >14 RTP header extensions. - config.offer_extmap_allow_mixed = true; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - ConnectFakeSignaling(); - - // Do normal offer/answer and wait for some frames to be received in each - // direction. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Tests that the GetRemoteAudioSSLCertificate method returns the remote DTLS -// certificate once the DTLS handshake has finished. 
-TEST_P(PeerConnectionIntegrationTest, - GetRemoteAudioSSLCertificateReturnsExchangedCertificate) { - auto GetRemoteAudioSSLCertificate = [](PeerConnectionWrapper* wrapper) { - auto pci = reinterpret_cast(wrapper->pc()); - auto pc = reinterpret_cast(pci->internal()); - return pc->GetRemoteAudioSSLCertificate(); - }; - auto GetRemoteAudioSSLCertChain = [](PeerConnectionWrapper* wrapper) { - auto pci = reinterpret_cast(wrapper->pc()); - auto pc = reinterpret_cast(pci->internal()); - return pc->GetRemoteAudioSSLCertChain(); - }; - - auto caller_cert = rtc::RTCCertificate::FromPEM(kRsaPems[0]); - auto callee_cert = rtc::RTCCertificate::FromPEM(kRsaPems[1]); - - // Configure each side with a known certificate so they can be compared later. - PeerConnectionInterface::RTCConfiguration caller_config; - caller_config.enable_dtls_srtp.emplace(true); - caller_config.certificates.push_back(caller_cert); - PeerConnectionInterface::RTCConfiguration callee_config; - callee_config.enable_dtls_srtp.emplace(true); - callee_config.certificates.push_back(callee_cert); - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(caller_config, callee_config)); - ConnectFakeSignaling(); - - // When first initialized, there should not be a remote SSL certificate (and - // calling this method should not crash). - EXPECT_EQ(nullptr, GetRemoteAudioSSLCertificate(caller())); - EXPECT_EQ(nullptr, GetRemoteAudioSSLCertificate(callee())); - EXPECT_EQ(nullptr, GetRemoteAudioSSLCertChain(caller())); - EXPECT_EQ(nullptr, GetRemoteAudioSSLCertChain(callee())); - - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - - // Once DTLS has been connected, each side should return the other's SSL - // certificate when calling GetRemoteAudioSSLCertificate. 
- - auto caller_remote_cert = GetRemoteAudioSSLCertificate(caller()); - ASSERT_TRUE(caller_remote_cert); - EXPECT_EQ(callee_cert->GetSSLCertificate().ToPEMString(), - caller_remote_cert->ToPEMString()); - - auto callee_remote_cert = GetRemoteAudioSSLCertificate(callee()); - ASSERT_TRUE(callee_remote_cert); - EXPECT_EQ(caller_cert->GetSSLCertificate().ToPEMString(), - callee_remote_cert->ToPEMString()); - - auto caller_remote_cert_chain = GetRemoteAudioSSLCertChain(caller()); - ASSERT_TRUE(caller_remote_cert_chain); - ASSERT_EQ(1U, caller_remote_cert_chain->GetSize()); - auto remote_cert = &caller_remote_cert_chain->Get(0); - EXPECT_EQ(callee_cert->GetSSLCertificate().ToPEMString(), - remote_cert->ToPEMString()); - - auto callee_remote_cert_chain = GetRemoteAudioSSLCertChain(callee()); - ASSERT_TRUE(callee_remote_cert_chain); - ASSERT_EQ(1U, callee_remote_cert_chain->GetSize()); - remote_cert = &callee_remote_cert_chain->Get(0); - EXPECT_EQ(caller_cert->GetSSLCertificate().ToPEMString(), - remote_cert->ToPEMString()); -} - -// This test sets up a call between two parties with a source resolution of -// 1280x720 and verifies that a 16:9 aspect ratio is received. -TEST_P(PeerConnectionIntegrationTest, - Send1280By720ResolutionAndReceive16To9AspectRatio) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Add video tracks with 16:9 aspect ratio, size 1280 x 720. - webrtc::FakePeriodicVideoSource::Config config; - config.width = 1280; - config.height = 720; - config.timestamp_offset_ms = rtc::TimeMillis(); - caller()->AddTrack(caller()->CreateLocalVideoTrackWithConfig(config)); - callee()->AddTrack(callee()->CreateLocalVideoTrackWithConfig(config)); - - // Do normal offer/answer and wait for at least one frame to be received in - // each direction. 
- caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 && - callee()->min_video_frames_received_per_track() > 0, - kMaxWaitForFramesMs); - - // Check rendered aspect ratio. - EXPECT_EQ(16.0 / 9, caller()->local_rendered_aspect_ratio()); - EXPECT_EQ(16.0 / 9, caller()->rendered_aspect_ratio()); - EXPECT_EQ(16.0 / 9, callee()->local_rendered_aspect_ratio()); - EXPECT_EQ(16.0 / 9, callee()->rendered_aspect_ratio()); -} - -// This test sets up an one-way call, with media only from caller to -// callee. -TEST_P(PeerConnectionIntegrationTest, OneWayMediaCall) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - media_expectations.CallerExpectsNoAudio(); - media_expectations.CallerExpectsNoVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Tests that send only works without the caller having a decoder factory and -// the callee having an encoder factory. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithSendOnlyVideo) { - ASSERT_TRUE( - CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/true)); - ConnectFakeSignaling(); - // Add one-directional video, from caller to callee. - rtc::scoped_refptr caller_track = - caller()->CreateLocalVideoTrack(); - caller()->AddTrack(caller_track); - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_video = 0; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); - - // Expect video to be received in one direction. 
- MediaExpectations media_expectations; - media_expectations.CallerExpectsNoVideo(); - media_expectations.CalleeExpectsSomeVideo(); - - EXPECT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Tests that receive only works without the caller having an encoder factory -// and the callee having a decoder factory. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithReceiveOnlyVideo) { - ASSERT_TRUE( - CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/false)); - ConnectFakeSignaling(); - // Add one-directional video, from callee to caller. - rtc::scoped_refptr callee_track = - callee()->CreateLocalVideoTrack(); - callee()->AddTrack(callee_track); - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_video = 1; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_EQ(caller()->pc()->GetReceivers().size(), 1u); - - // Expect video to be received in one direction. - MediaExpectations media_expectations; - media_expectations.CallerExpectsSomeVideo(); - media_expectations.CalleeExpectsNoVideo(); - - EXPECT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - EndToEndCallAddReceiveVideoToSendOnlyCall) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add one-directional video, from caller to callee. - rtc::scoped_refptr caller_track = - caller()->CreateLocalVideoTrack(); - caller()->AddTrack(caller_track); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Add receive video. - rtc::scoped_refptr callee_track = - callee()->CreateLocalVideoTrack(); - callee()->AddTrack(callee_track); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that video frames are received end-to-end. 
- MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - EndToEndCallAddSendVideoToReceiveOnlyCall) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add one-directional video, from callee to caller. - rtc::scoped_refptr callee_track = - callee()->CreateLocalVideoTrack(); - callee()->AddTrack(callee_track); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Add send video. - rtc::scoped_refptr caller_track = - caller()->CreateLocalVideoTrack(); - caller()->AddTrack(caller_track); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Expect video to be received in one direction. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - EndToEndCallRemoveReceiveVideoFromSendReceiveCall) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add send video, from caller to callee. - rtc::scoped_refptr caller_track = - caller()->CreateLocalVideoTrack(); - rtc::scoped_refptr caller_sender = - caller()->AddTrack(caller_track); - // Add receive video, from callee to caller. - rtc::scoped_refptr callee_track = - callee()->CreateLocalVideoTrack(); - - rtc::scoped_refptr callee_sender = - callee()->AddTrack(callee_track); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Remove receive video (i.e., callee sender track). - callee()->pc()->RemoveTrack(callee_sender); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Expect one-directional video. 
- MediaExpectations media_expectations; - media_expectations.CallerExpectsNoVideo(); - media_expectations.CalleeExpectsSomeVideo(); - - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - EndToEndCallRemoveSendVideoFromSendReceiveCall) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add send video, from caller to callee. - rtc::scoped_refptr caller_track = - caller()->CreateLocalVideoTrack(); - rtc::scoped_refptr caller_sender = - caller()->AddTrack(caller_track); - // Add receive video, from callee to caller. - rtc::scoped_refptr callee_track = - callee()->CreateLocalVideoTrack(); - - rtc::scoped_refptr callee_sender = - callee()->AddTrack(callee_track); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Remove send video (i.e., caller sender track). - caller()->pc()->RemoveTrack(caller_sender); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Expect one-directional video. - MediaExpectations media_expectations; - media_expectations.CalleeExpectsNoVideo(); - media_expectations.CallerExpectsSomeVideo(); - - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This test sets up a audio call initially, with the callee rejecting video -// initially. Then later the callee decides to upgrade to audio/video, and -// initiates a new offer/answer exchange. -TEST_P(PeerConnectionIntegrationTest, AudioToVideoUpgrade) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Initially, offer an audio/video stream from the caller, but refuse to - // send/receive video on the callee side. 
- caller()->AddAudioVideoTracks(); - callee()->AddAudioTrack(); - if (sdp_semantics_ == SdpSemantics::kPlanB) { - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_video = 0; - callee()->SetOfferAnswerOptions(options); - } else { - callee()->SetRemoteOfferHandler([this] { - callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) - ->StopInternal(); - }); - } - // Do offer/answer and make sure audio is still received end-to-end. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - { - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudio(); - media_expectations.ExpectNoVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } - // Sanity check that the callee's description has a rejected video section. - ASSERT_NE(nullptr, callee()->pc()->local_description()); - const ContentInfo* callee_video_content = - GetFirstVideoContent(callee()->pc()->local_description()->description()); - ASSERT_NE(nullptr, callee_video_content); - EXPECT_TRUE(callee_video_content->rejected); - - // Now negotiate with video and ensure negotiation succeeds, with video - // frames and additional audio frames being received. - callee()->AddVideoTrack(); - if (sdp_semantics_ == SdpSemantics::kPlanB) { - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_video = 1; - callee()->SetOfferAnswerOptions(options); - } else { - callee()->SetRemoteOfferHandler(nullptr); - caller()->SetRemoteOfferHandler([this] { - // The caller creates a new transceiver to receive video on when receiving - // the offer, but by default it is send only. 
- auto transceivers = caller()->pc()->GetTransceivers(); - ASSERT_EQ(2U, transceivers.size()); - ASSERT_EQ(cricket::MEDIA_TYPE_VIDEO, - transceivers[1]->receiver()->media_type()); - transceivers[1]->sender()->SetTrack(caller()->CreateLocalVideoTrack()); - transceivers[1]->SetDirectionWithError( - RtpTransceiverDirection::kSendRecv); - }); - } - callee()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - { - // Expect additional audio frames to be received after the upgrade. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } -} - -// Simpler than the above test; just add an audio track to an established -// video-only connection. -TEST_P(PeerConnectionIntegrationTest, AddAudioToVideoOnlyCall) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Do initial offer/answer with just a video track. - caller()->AddVideoTrack(); - callee()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Now add an audio track and do another offer/answer. - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Ensure both audio and video frames are received end-to-end. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This test sets up a call that's transferred to a new caller with a different -// DTLS fingerprint. 
-TEST_P(PeerConnectionIntegrationTest, CallTransferredForCallee) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Keep the original peer around which will still send packets to the - // receiving client. These SRTP packets will be dropped. - std::unique_ptr original_peer( - SetCallerPcWrapperAndReturnCurrent( - CreatePeerConnectionWrapperWithAlternateKey().release())); - // TODO(deadbeef): Why do we call Close here? That goes against the comment - // directly above. - original_peer->pc()->Close(); - - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for some additional frames to be transmitted end-to-end. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This test sets up a call that's transferred to a new callee with a different -// DTLS fingerprint. -TEST_P(PeerConnectionIntegrationTest, CallTransferredForCaller) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Keep the original peer around which will still send packets to the - // receiving client. These SRTP packets will be dropped. - std::unique_ptr original_peer( - SetCalleePcWrapperAndReturnCurrent( - CreatePeerConnectionWrapperWithAlternateKey().release())); - // TODO(deadbeef): Why do we call Close here? That goes against the comment - // directly above. 
- original_peer->pc()->Close(); - - ConnectFakeSignaling(); - callee()->AddAudioVideoTracks(); - caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions()); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for some additional frames to be transmitted end-to-end. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This test sets up a non-bundled call and negotiates bundling at the same -// time as starting an ICE restart. When bundling is in effect in the restart, -// the DTLS-SRTP context should be successfully reset. -TEST_P(PeerConnectionIntegrationTest, BundlingEnabledWhileIceRestartOccurs) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - // Remove the bundle group from the SDP received by the callee. - callee()->SetReceivedSdpMunger([](cricket::SessionDescription* desc) { - desc->RemoveGroupByName("BUNDLE"); - }); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - { - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } - // Now stop removing the BUNDLE group, and trigger an ICE restart. - callee()->SetReceivedSdpMunger(nullptr); - caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions()); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Expect additional frames to be received after the ICE restart. - { - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } -} - -// Test CVO (Coordination of Video Orientation). 
If a video source is rotated -// and both peers support the CVO RTP header extension, the actual video frames -// don't need to be encoded in different resolutions, since the rotation is -// communicated through the RTP header extension. -TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithCVOExtension) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add rotated video tracks. - caller()->AddTrack( - caller()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_90)); - callee()->AddTrack( - callee()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_270)); - - // Wait for video frames to be received by both sides. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 && - callee()->min_video_frames_received_per_track() > 0, - kMaxWaitForFramesMs); - - // Ensure that the aspect ratio is unmodified. - // TODO(deadbeef): Where does 4:3 come from? Should be explicit in the test, - // not just assumed. - EXPECT_EQ(4.0 / 3, caller()->local_rendered_aspect_ratio()); - EXPECT_EQ(4.0 / 3, caller()->rendered_aspect_ratio()); - EXPECT_EQ(4.0 / 3, callee()->local_rendered_aspect_ratio()); - EXPECT_EQ(4.0 / 3, callee()->rendered_aspect_ratio()); - // Ensure that the CVO bits were surfaced to the renderer. - EXPECT_EQ(webrtc::kVideoRotation_270, caller()->rendered_rotation()); - EXPECT_EQ(webrtc::kVideoRotation_90, callee()->rendered_rotation()); -} - -// Test that when the CVO extension isn't supported, video is rotated the -// old-fashioned way, by encoding rotated frames. -TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithoutCVOExtension) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add rotated video tracks. 
- caller()->AddTrack( - caller()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_90)); - callee()->AddTrack( - callee()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_270)); - - // Remove the CVO extension from the offered SDP. - callee()->SetReceivedSdpMunger([](cricket::SessionDescription* desc) { - cricket::VideoContentDescription* video = - GetFirstVideoContentDescription(desc); - video->ClearRtpHeaderExtensions(); - }); - // Wait for video frames to be received by both sides. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 && - callee()->min_video_frames_received_per_track() > 0, - kMaxWaitForFramesMs); - - // Expect that the aspect ratio is inversed to account for the 90/270 degree - // rotation. - // TODO(deadbeef): Where does 4:3 come from? Should be explicit in the test, - // not just assumed. - EXPECT_EQ(3.0 / 4, caller()->local_rendered_aspect_ratio()); - EXPECT_EQ(3.0 / 4, caller()->rendered_aspect_ratio()); - EXPECT_EQ(3.0 / 4, callee()->local_rendered_aspect_ratio()); - EXPECT_EQ(3.0 / 4, callee()->rendered_aspect_ratio()); - // Expect that each endpoint is unaware of the rotation of the other endpoint. - EXPECT_EQ(webrtc::kVideoRotation_0, caller()->rendered_rotation()); - EXPECT_EQ(webrtc::kVideoRotation_0, callee()->rendered_rotation()); -} - -// Test that if the answerer rejects the audio m= section, no audio is sent or -// received, but video still can be. -TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioSection) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - if (sdp_semantics_ == SdpSemantics::kPlanB) { - // Only add video track for callee, and set offer_to_receive_audio to 0, so - // it will reject the audio m= section completely. 
- PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 0; - callee()->SetOfferAnswerOptions(options); - } else { - // Stopping the audio RtpTransceiver will cause the media section to be - // rejected in the answer. - callee()->SetRemoteOfferHandler([this] { - callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO) - ->StopInternal(); - }); - } - callee()->AddTrack(callee()->CreateLocalVideoTrack()); - // Do offer/answer and wait for successful end-to-end video frames. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalVideo(); - media_expectations.ExpectNoAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - - // Sanity check that the callee's description has a rejected audio section. - ASSERT_NE(nullptr, callee()->pc()->local_description()); - const ContentInfo* callee_audio_content = - GetFirstAudioContent(callee()->pc()->local_description()->description()); - ASSERT_NE(nullptr, callee_audio_content); - EXPECT_TRUE(callee_audio_content->rejected); - if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) { - // The caller's transceiver should have stopped after receiving the answer, - // and thus no longer listed in transceivers. - EXPECT_EQ(nullptr, - caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)); - } -} - -// Test that if the answerer rejects the video m= section, no video is sent or -// received, but audio still can be. -TEST_P(PeerConnectionIntegrationTest, AnswererRejectsVideoSection) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - if (sdp_semantics_ == SdpSemantics::kPlanB) { - // Only add audio track for callee, and set offer_to_receive_video to 0, so - // it will reject the video m= section completely. 
- PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_video = 0; - callee()->SetOfferAnswerOptions(options); - } else { - // Stopping the video RtpTransceiver will cause the media section to be - // rejected in the answer. - callee()->SetRemoteOfferHandler([this] { - callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) - ->StopInternal(); - }); - } - callee()->AddTrack(callee()->CreateLocalAudioTrack()); - // Do offer/answer and wait for successful end-to-end audio frames. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudio(); - media_expectations.ExpectNoVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - - // Sanity check that the callee's description has a rejected video section. - ASSERT_NE(nullptr, callee()->pc()->local_description()); - const ContentInfo* callee_video_content = - GetFirstVideoContent(callee()->pc()->local_description()->description()); - ASSERT_NE(nullptr, callee_video_content); - EXPECT_TRUE(callee_video_content->rejected); - if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) { - // The caller's transceiver should have stopped after receiving the answer, - // and thus is no longer present. - EXPECT_EQ(nullptr, - caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)); - } -} - -// Test that if the answerer rejects both audio and video m= sections, nothing -// bad happens. -// TODO(deadbeef): Test that a data channel still works. Currently this doesn't -// test anything but the fact that negotiation succeeds, which doesn't mean -// much. 
-TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioAndVideoSections) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - if (sdp_semantics_ == SdpSemantics::kPlanB) { - // Don't give the callee any tracks, and set offer_to_receive_X to 0, so it - // will reject both audio and video m= sections. - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 0; - options.offer_to_receive_video = 0; - callee()->SetOfferAnswerOptions(options); - } else { - callee()->SetRemoteOfferHandler([this] { - // Stopping all transceivers will cause all media sections to be rejected. - for (const auto& transceiver : callee()->pc()->GetTransceivers()) { - transceiver->StopInternal(); - } - }); - } - // Do offer/answer and wait for stable signaling state. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Sanity check that the callee's description has rejected m= sections. - ASSERT_NE(nullptr, callee()->pc()->local_description()); - const ContentInfo* callee_audio_content = - GetFirstAudioContent(callee()->pc()->local_description()->description()); - ASSERT_NE(nullptr, callee_audio_content); - EXPECT_TRUE(callee_audio_content->rejected); - const ContentInfo* callee_video_content = - GetFirstVideoContent(callee()->pc()->local_description()->description()); - ASSERT_NE(nullptr, callee_video_content); - EXPECT_TRUE(callee_video_content->rejected); -} - -// This test sets up an audio and video call between two parties. After the -// call runs for a while, the caller sends an updated offer with video being -// rejected. Once the re-negotiation is done, the video flow should stop and -// the audio flow should continue. 
-TEST_P(PeerConnectionIntegrationTest, VideoRejectedInSubsequentOffer) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - { - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } - // Renegotiate, rejecting the video m= section. - if (sdp_semantics_ == SdpSemantics::kPlanB) { - caller()->SetGeneratedSdpMunger( - [](cricket::SessionDescription* description) { - for (cricket::ContentInfo& content : description->contents()) { - if (cricket::IsVideoContent(&content)) { - content.rejected = true; - } - } - }); - } else { - caller() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) - ->StopInternal(); - } - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); - - // Sanity check that the caller's description has a rejected video section. - ASSERT_NE(nullptr, caller()->pc()->local_description()); - const ContentInfo* caller_video_content = - GetFirstVideoContent(caller()->pc()->local_description()->description()); - ASSERT_NE(nullptr, caller_video_content); - EXPECT_TRUE(caller_video_content->rejected); - // Wait for some additional audio frames to be received. - { - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudio(); - media_expectations.ExpectNoVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } -} - -// Do one offer/answer with audio, another that disables it (rejecting the m= -// section), and another that re-enables it. Regression test for: -// bugs.webrtc.org/6023 -TEST_F(PeerConnectionIntegrationTestPlanB, EnableAudioAfterRejecting) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Add audio track, do normal offer/answer. 
- rtc::scoped_refptr track = - caller()->CreateLocalAudioTrack(); - rtc::scoped_refptr sender = - caller()->pc()->AddTrack(track, {"stream"}).MoveValue(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Remove audio track, and set offer_to_receive_audio to false to cause the - // m= section to be completely disabled, not just "recvonly". - caller()->pc()->RemoveTrack(sender); - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 0; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Add the audio track again, expecting negotiation to succeed and frames to - // flow. - sender = caller()->pc()->AddTrack(track, {"stream"}).MoveValue(); - options.offer_to_receive_audio = 1; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(); - EXPECT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Basic end-to-end test, but without SSRC/MSID signaling. This functionality -// is needed to support legacy endpoints. -// TODO(deadbeef): When we support the MID extension and demuxing on MID, also -// add a test for an end-to-end test without MID signaling either (basically, -// the minimum acceptable SDP). -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithoutSsrcOrMsidSignaling) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add audio and video, testing that packets can be demuxed on payload type. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - // Remove SSRCs and MSIDs from the received offer SDP. 
- callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Basic end-to-end test, without SSRC signaling. This means that the track -// was created properly and frames are delivered when the MSIDs are communicated -// with a=msid lines and no a=ssrc lines. -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - EndToEndCallWithoutSsrcSignaling) { - const char kStreamId[] = "streamId"; - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add just audio tracks. - caller()->AddTrack(caller()->CreateLocalAudioTrack(), {kStreamId}); - callee()->AddAudioTrack(); - - // Remove SSRCs from the received offer SDP. - callee()->SetReceivedSdpMunger(RemoveSsrcsAndKeepMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - EndToEndCallAddReceiveVideoToSendOnlyCall) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add one-directional video, from caller to callee. - rtc::scoped_refptr track = - caller()->CreateLocalVideoTrack(); - - RtpTransceiverInit video_transceiver_init; - video_transceiver_init.stream_ids = {"video1"}; - video_transceiver_init.direction = RtpTransceiverDirection::kSendOnly; - auto video_sender = - caller()->pc()->AddTransceiver(track, video_transceiver_init).MoveValue(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Add receive direction. 
- video_sender->SetDirectionWithError(RtpTransceiverDirection::kSendRecv); - - rtc::scoped_refptr callee_track = - callee()->CreateLocalVideoTrack(); - - callee()->AddTrack(callee_track); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Ensure that video frames are received end-to-end. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Tests that video flows between multiple video tracks when SSRCs are not -// signaled. This exercises the MID RTP header extension which is needed to -// demux the incoming video tracks. -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - EndToEndCallWithTwoVideoTracksAndNoSignaledSsrc) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddVideoTrack(); - caller()->AddVideoTrack(); - callee()->AddVideoTrack(); - callee()->AddVideoTrack(); - - caller()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); - callee()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_EQ(2u, caller()->pc()->GetReceivers().size()); - ASSERT_EQ(2u, callee()->pc()->GetReceivers().size()); - - // Expect video to be received in both directions on both tracks. 
- MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalVideo(); - EXPECT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, NoStreamsMsidLinePresent) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - caller()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - auto callee_receivers = callee()->pc()->GetReceivers(); - ASSERT_EQ(2u, callee_receivers.size()); - EXPECT_TRUE(callee_receivers[0]->stream_ids().empty()); - EXPECT_TRUE(callee_receivers[1]->stream_ids().empty()); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, NoStreamsMsidLineMissing) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - caller()->AddVideoTrack(); - callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - auto callee_receivers = callee()->pc()->GetReceivers(); - ASSERT_EQ(2u, callee_receivers.size()); - ASSERT_EQ(1u, callee_receivers[0]->stream_ids().size()); - ASSERT_EQ(1u, callee_receivers[1]->stream_ids().size()); - EXPECT_EQ(callee_receivers[0]->stream_ids()[0], - callee_receivers[1]->stream_ids()[0]); - EXPECT_EQ(callee_receivers[0]->streams()[0], - callee_receivers[1]->streams()[0]); -} - -// Test that if two video tracks are sent (from caller to callee, in this test), -// they're transmitted correctly end-to-end. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithTwoVideoTracks) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Add one audio/video stream, and one video-only stream. 
- caller()->AddAudioVideoTracks(); - caller()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_EQ(3u, callee()->pc()->GetReceivers().size()); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -static void MakeSpecCompliantMaxBundleOffer(cricket::SessionDescription* desc) { - bool first = true; - for (cricket::ContentInfo& content : desc->contents()) { - if (first) { - first = false; - continue; - } - content.bundle_only = true; - } - first = true; - for (cricket::TransportInfo& transport : desc->transport_infos()) { - if (first) { - first = false; - continue; - } - transport.description.ice_ufrag.clear(); - transport.description.ice_pwd.clear(); - transport.description.connection_role = cricket::CONNECTIONROLE_NONE; - transport.description.identity_fingerprint.reset(nullptr); - } -} - -// Test that if applying a true "max bundle" offer, which uses ports of 0, -// "a=bundle-only", omitting "a=fingerprint", "a=setup", "a=ice-ufrag" and -// "a=ice-pwd" for all but the audio "m=" section, negotiation still completes -// successfully and media flows. -// TODO(deadbeef): Update this test to also omit "a=rtcp-mux", once that works. -// TODO(deadbeef): Won't need this test once we start generating actual -// standards-compliant SDP. -TEST_P(PeerConnectionIntegrationTest, - EndToEndCallWithSpecCompliantMaxBundleOffer) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - // Do the equivalent of setting the port to 0, adding a=bundle-only, and - // removing a=ice-ufrag, a=ice-pwd, a=fingerprint and a=setup from all - // but the first m= section. 
- callee()->SetReceivedSdpMunger(MakeSpecCompliantMaxBundleOffer); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test that we can receive the audio output level from a remote audio track. -// TODO(deadbeef): Use a fake audio source and verify that the output level is -// exactly what the source on the other side was configured with. -TEST_P(PeerConnectionIntegrationTest, GetAudioOutputLevelStatsWithOldStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Just add an audio track. - caller()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Get the audio output level stats. Note that the level is not available - // until an RTCP packet has been received. - EXPECT_TRUE_WAIT(callee()->OldGetStats()->AudioOutputLevel() > 0, - kMaxWaitForFramesMs); -} - -// Test that an audio input level is reported. -// TODO(deadbeef): Use a fake audio source and verify that the input level is -// exactly what the source was configured with. -TEST_P(PeerConnectionIntegrationTest, GetAudioInputLevelStatsWithOldStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Just add an audio track. - caller()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Get the audio input level stats. The level should be available very - // soon after the test starts. - EXPECT_TRUE_WAIT(caller()->OldGetStats()->AudioInputLevel() > 0, - kMaxWaitForStatsMs); -} - -// Test that we can get incoming byte counts from both audio and video tracks. 
-TEST_P(PeerConnectionIntegrationTest, GetBytesReceivedStatsWithOldStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - // Do offer/answer, wait for the callee to receive some frames. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - - // Get a handle to the remote tracks created, so they can be used as GetStats - // filters. - for (const auto& receiver : callee()->pc()->GetReceivers()) { - // We received frames, so we definitely should have nonzero "received bytes" - // stats at this point. - EXPECT_GT(callee()->OldGetStatsForTrack(receiver->track())->BytesReceived(), - 0); - } -} - -// Test that we can get outgoing byte counts from both audio and video tracks. -TEST_P(PeerConnectionIntegrationTest, GetBytesSentStatsWithOldStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - auto audio_track = caller()->CreateLocalAudioTrack(); - auto video_track = caller()->CreateLocalVideoTrack(); - caller()->AddTrack(audio_track); - caller()->AddTrack(video_track); - // Do offer/answer, wait for the callee to receive some frames. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - - // The callee received frames, so we definitely should have nonzero "sent - // bytes" stats at this point. - EXPECT_GT(caller()->OldGetStatsForTrack(audio_track)->BytesSent(), 0); - EXPECT_GT(caller()->OldGetStatsForTrack(video_track)->BytesSent(), 0); -} - -// Test that we can get capture start ntp time. 
-TEST_P(PeerConnectionIntegrationTest, GetCaptureStartNtpTimeWithOldStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - - callee()->AddAudioTrack(); - - // Do offer/answer, wait for the callee to receive some frames. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Get the remote audio track created on the receiver, so they can be used as - // GetStats filters. - auto receivers = callee()->pc()->GetReceivers(); - ASSERT_EQ(1u, receivers.size()); - auto remote_audio_track = receivers[0]->track(); - - // Get the audio output level stats. Note that the level is not available - // until an RTCP packet has been received. - EXPECT_TRUE_WAIT( - callee()->OldGetStatsForTrack(remote_audio_track)->CaptureStartNtpTime() > - 0, - 2 * kMaxWaitForFramesMs); -} - -// Test that the track ID is associated with all local and remote SSRC stats -// using the old GetStats() and more than 1 audio and more than 1 video track. 
-// This is a regression test for crbug.com/906988 -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - OldGetStatsAssociatesTrackIdForManyMediaSections) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - auto audio_sender_1 = caller()->AddAudioTrack(); - auto video_sender_1 = caller()->AddVideoTrack(); - auto audio_sender_2 = caller()->AddAudioTrack(); - auto video_sender_2 = caller()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE_WAIT(ExpectNewFrames(media_expectations), kDefaultTimeout); - - std::vector track_ids = { - audio_sender_1->track()->id(), video_sender_1->track()->id(), - audio_sender_2->track()->id(), video_sender_2->track()->id()}; - - auto caller_stats = caller()->OldGetStats(); - EXPECT_THAT(caller_stats->TrackIds(), UnorderedElementsAreArray(track_ids)); - auto callee_stats = callee()->OldGetStats(); - EXPECT_THAT(callee_stats->TrackIds(), UnorderedElementsAreArray(track_ids)); -} - -// Test that the new GetStats() returns stats for all outgoing/incoming streams -// with the correct track IDs if there are more than one audio and more than one -// video senders/receivers. 
-TEST_P(PeerConnectionIntegrationTest, NewGetStatsManyAudioAndManyVideoStreams) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - auto audio_sender_1 = caller()->AddAudioTrack(); - auto video_sender_1 = caller()->AddVideoTrack(); - auto audio_sender_2 = caller()->AddAudioTrack(); - auto video_sender_2 = caller()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE_WAIT(ExpectNewFrames(media_expectations), kDefaultTimeout); - - std::vector track_ids = { - audio_sender_1->track()->id(), video_sender_1->track()->id(), - audio_sender_2->track()->id(), video_sender_2->track()->id()}; - - rtc::scoped_refptr caller_report = - caller()->NewGetStats(); - ASSERT_TRUE(caller_report); - auto outbound_stream_stats = - caller_report->GetStatsOfType(); - ASSERT_EQ(outbound_stream_stats.size(), 4u); - std::vector outbound_track_ids; - for (const auto& stat : outbound_stream_stats) { - ASSERT_TRUE(stat->bytes_sent.is_defined()); - EXPECT_LT(0u, *stat->bytes_sent); - if (*stat->kind == "video") { - ASSERT_TRUE(stat->key_frames_encoded.is_defined()); - EXPECT_GT(*stat->key_frames_encoded, 0u); - ASSERT_TRUE(stat->frames_encoded.is_defined()); - EXPECT_GE(*stat->frames_encoded, *stat->key_frames_encoded); - } - ASSERT_TRUE(stat->track_id.is_defined()); - const auto* track_stat = - caller_report->GetAs(*stat->track_id); - ASSERT_TRUE(track_stat); - outbound_track_ids.push_back(*track_stat->track_identifier); - } - EXPECT_THAT(outbound_track_ids, UnorderedElementsAreArray(track_ids)); - - rtc::scoped_refptr callee_report = - callee()->NewGetStats(); - ASSERT_TRUE(callee_report); - auto inbound_stream_stats = - callee_report->GetStatsOfType(); - ASSERT_EQ(4u, inbound_stream_stats.size()); - std::vector inbound_track_ids; - for (const auto& stat : inbound_stream_stats) { - 
ASSERT_TRUE(stat->bytes_received.is_defined()); - EXPECT_LT(0u, *stat->bytes_received); - if (*stat->kind == "video") { - ASSERT_TRUE(stat->key_frames_decoded.is_defined()); - EXPECT_GT(*stat->key_frames_decoded, 0u); - ASSERT_TRUE(stat->frames_decoded.is_defined()); - EXPECT_GE(*stat->frames_decoded, *stat->key_frames_decoded); - } - ASSERT_TRUE(stat->track_id.is_defined()); - const auto* track_stat = - callee_report->GetAs(*stat->track_id); - ASSERT_TRUE(track_stat); - inbound_track_ids.push_back(*track_stat->track_identifier); - } - EXPECT_THAT(inbound_track_ids, UnorderedElementsAreArray(track_ids)); -} - -// Test that we can get stats (using the new stats implementation) for -// unsignaled streams. Meaning when SSRCs/MSIDs aren't signaled explicitly in -// SDP. -TEST_P(PeerConnectionIntegrationTest, - GetStatsForUnsignaledStreamWithNewStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - // Remove SSRCs and MSIDs from the received offer SDP. - callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(1); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - - // We received a frame, so we should have nonzero "bytes received" stats for - // the unsignaled stream, if stats are working for it. - rtc::scoped_refptr report = - callee()->NewGetStats(); - ASSERT_NE(nullptr, report); - auto inbound_stream_stats = - report->GetStatsOfType(); - ASSERT_EQ(1U, inbound_stream_stats.size()); - ASSERT_TRUE(inbound_stream_stats[0]->bytes_received.is_defined()); - ASSERT_GT(*inbound_stream_stats[0]->bytes_received, 0U); - ASSERT_TRUE(inbound_stream_stats[0]->track_id.is_defined()); -} - -// Same as above but for the legacy stats implementation. 
-TEST_P(PeerConnectionIntegrationTest, - GetStatsForUnsignaledStreamWithOldStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - // Remove SSRCs and MSIDs from the received offer SDP. - callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Note that, since the old stats implementation associates SSRCs with tracks - // using SDP, when SSRCs aren't signaled in SDP these stats won't have an - // associated track ID. So we can't use the track "selector" argument. - // - // Also, we use "EXPECT_TRUE_WAIT" because the stats collector may decide to - // return cached stats if not enough time has passed since the last update. - EXPECT_TRUE_WAIT(callee()->OldGetStats()->BytesReceived() > 0, - kDefaultTimeout); -} - -// Test that we can successfully get the media related stats (audio level -// etc.) for the unsignaled stream. -TEST_P(PeerConnectionIntegrationTest, - GetMediaStatsForUnsignaledStreamWithNewStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - // Remove SSRCs and MSIDs from the received offer SDP. - callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(1); - media_expectations.CalleeExpectsSomeVideo(1); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - - rtc::scoped_refptr report = - callee()->NewGetStats(); - ASSERT_NE(nullptr, report); - - auto media_stats = report->GetStatsOfType(); - auto audio_index = FindFirstMediaStatsIndexByKind("audio", media_stats); - ASSERT_GE(audio_index, 0); - EXPECT_TRUE(media_stats[audio_index]->audio_level.is_defined()); -} - -// Helper for test below. 
-void ModifySsrcs(cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { - for (StreamParams& stream : - content.media_description()->mutable_streams()) { - for (uint32_t& ssrc : stream.ssrcs) { - ssrc = rtc::CreateRandomId(); - } - } - } -} - -// Test that the "RTCMediaSteamTrackStats" object is updated correctly when -// SSRCs are unsignaled, and the SSRC of the received (audio) stream changes. -// This should result in two "RTCInboundRTPStreamStats", but only one -// "RTCMediaStreamTrackStats", whose counters go up continuously rather than -// being reset to 0 once the SSRC change occurs. -// -// Regression test for this bug: -// https://bugs.chromium.org/p/webrtc/issues/detail?id=8158 -// -// The bug causes the track stats to only represent one of the two streams: -// whichever one has the higher SSRC. So with this bug, there was a 50% chance -// that the track stat counters would reset to 0 when the new stream is -// received, and a 50% chance that they'll stop updating (while -// "concealed_samples" continues increasing, due to silence being generated for -// the inactive stream). -TEST_P(PeerConnectionIntegrationTest, - TrackStatsUpdatedCorrectlyWhenUnsignaledSsrcChanges) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - // Remove SSRCs and MSIDs from the received offer SDP, simulating an endpoint - // that doesn't signal SSRCs (from the callee's perspective). - callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for 50 audio frames (500ms of audio) to be received by the callee. - { - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(50); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } - // Some audio frames were received, so we should have nonzero "samples - // received" for the track. 
- rtc::scoped_refptr report = - callee()->NewGetStats(); - ASSERT_NE(nullptr, report); - auto track_stats = report->GetStatsOfType(); - ASSERT_EQ(1U, track_stats.size()); - ASSERT_TRUE(track_stats[0]->total_samples_received.is_defined()); - ASSERT_GT(*track_stats[0]->total_samples_received, 0U); - // uint64_t prev_samples_received = *track_stats[0]->total_samples_received; - - // Create a new offer and munge it to cause the caller to use a new SSRC. - caller()->SetGeneratedSdpMunger(ModifySsrcs); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for 25 more audio frames (250ms of audio) to be received, from the new - // SSRC. - { - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(25); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } - - report = callee()->NewGetStats(); - ASSERT_NE(nullptr, report); - track_stats = report->GetStatsOfType(); - ASSERT_EQ(1U, track_stats.size()); - ASSERT_TRUE(track_stats[0]->total_samples_received.is_defined()); - // The "total samples received" stat should only be greater than it was - // before. - // TODO(deadbeef): Uncomment this assertion once the bug is completely fixed. - // Right now, the new SSRC will cause the counters to reset to 0. - // EXPECT_GT(*track_stats[0]->total_samples_received, prev_samples_received); - - // Additionally, the percentage of concealed samples (samples generated to - // conceal packet loss) should be less than 50%. If it's greater, that's a - // good sign that we're seeing stats from the old stream that's no longer - // receiving packets, and is generating concealed samples of silence. 
- constexpr double kAcceptableConcealedSamplesPercentage = 0.50; - ASSERT_TRUE(track_stats[0]->concealed_samples.is_defined()); - EXPECT_LT(*track_stats[0]->concealed_samples, - *track_stats[0]->total_samples_received * - kAcceptableConcealedSamplesPercentage); - - // Also ensure that we have two "RTCInboundRTPStreamStats" as expected, as a - // sanity check that the SSRC really changed. - // TODO(deadbeef): This isn't working right now, because we're not returning - // *any* stats for the inactive stream. Uncomment when the bug is completely - // fixed. - // auto inbound_stream_stats = - // report->GetStatsOfType(); - // ASSERT_EQ(2U, inbound_stream_stats.size()); -} - -// Test that DTLS 1.0 is used if both sides only support DTLS 1.0. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls10) { - PeerConnectionFactory::Options dtls_10_options; - dtls_10_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; - ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_10_options, - dtls_10_options)); - ConnectFakeSignaling(); - // Do normal offer/answer and wait for some frames to be received in each - // direction. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test getting cipher stats and UMA metrics when DTLS 1.0 is negotiated. 
-TEST_P(PeerConnectionIntegrationTest, Dtls10CipherStatsAndUmaMetrics) { - PeerConnectionFactory::Options dtls_10_options; - dtls_10_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; - ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_10_options, - dtls_10_options)); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - EXPECT_TRUE_WAIT(rtc::SSLStreamAdapter::IsAcceptableCipher( - caller()->OldGetStats()->DtlsCipher(), rtc::KT_DEFAULT), - kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite), - caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout); - // TODO(bugs.webrtc.org/9456): Fix it. - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.SrtpCryptoSuite.Audio", - kDefaultSrtpCryptoSuite)); -} - -// Test getting cipher stats and UMA metrics when DTLS 1.2 is negotiated. -TEST_P(PeerConnectionIntegrationTest, Dtls12CipherStatsAndUmaMetrics) { - PeerConnectionFactory::Options dtls_12_options; - dtls_12_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; - ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_12_options, - dtls_12_options)); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - EXPECT_TRUE_WAIT(rtc::SSLStreamAdapter::IsAcceptableCipher( - caller()->OldGetStats()->DtlsCipher(), rtc::KT_DEFAULT), - kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite), - caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout); - // TODO(bugs.webrtc.org/9456): Fix it. 
- EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.SrtpCryptoSuite.Audio", - kDefaultSrtpCryptoSuite)); -} - -// Test that DTLS 1.0 can be used if the caller supports DTLS 1.2 and the -// callee only supports 1.0. -TEST_P(PeerConnectionIntegrationTest, CallerDtls12ToCalleeDtls10) { - PeerConnectionFactory::Options caller_options; - caller_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; - PeerConnectionFactory::Options callee_options; - callee_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; - ASSERT_TRUE( - CreatePeerConnectionWrappersWithOptions(caller_options, callee_options)); - ConnectFakeSignaling(); - // Do normal offer/answer and wait for some frames to be received in each - // direction. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test that DTLS 1.0 can be used if the caller only supports DTLS 1.0 and the -// callee supports 1.2. -TEST_P(PeerConnectionIntegrationTest, CallerDtls10ToCalleeDtls12) { - PeerConnectionFactory::Options caller_options; - caller_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; - PeerConnectionFactory::Options callee_options; - callee_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; - ASSERT_TRUE( - CreatePeerConnectionWrappersWithOptions(caller_options, callee_options)); - ConnectFakeSignaling(); - // Do normal offer/answer and wait for some frames to be received in each - // direction. 
- caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// The three tests below verify that "enable_aes128_sha1_32_crypto_cipher" -// works as expected; the cipher should only be used if enabled by both sides. -TEST_P(PeerConnectionIntegrationTest, - Aes128Sha1_32_CipherNotUsedWhenOnlyCallerSupported) { - PeerConnectionFactory::Options caller_options; - caller_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = true; - PeerConnectionFactory::Options callee_options; - callee_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = - false; - int expected_cipher_suite = rtc::SRTP_AES128_CM_SHA1_80; - TestNegotiatedCipherSuite(caller_options, callee_options, - expected_cipher_suite); -} - -TEST_P(PeerConnectionIntegrationTest, - Aes128Sha1_32_CipherNotUsedWhenOnlyCalleeSupported) { - PeerConnectionFactory::Options caller_options; - caller_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = - false; - PeerConnectionFactory::Options callee_options; - callee_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = true; - int expected_cipher_suite = rtc::SRTP_AES128_CM_SHA1_80; - TestNegotiatedCipherSuite(caller_options, callee_options, - expected_cipher_suite); -} - -TEST_P(PeerConnectionIntegrationTest, Aes128Sha1_32_CipherUsedWhenSupported) { - PeerConnectionFactory::Options caller_options; - caller_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = true; - PeerConnectionFactory::Options callee_options; - callee_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = true; - int expected_cipher_suite = rtc::SRTP_AES128_CM_SHA1_32; - TestNegotiatedCipherSuite(caller_options, callee_options, - expected_cipher_suite); -} - -// Test 
that a non-GCM cipher is used if both sides only support non-GCM. -TEST_P(PeerConnectionIntegrationTest, NonGcmCipherUsedWhenGcmNotSupported) { - bool local_gcm_enabled = false; - bool remote_gcm_enabled = false; - bool aes_ctr_enabled = true; - int expected_cipher_suite = kDefaultSrtpCryptoSuite; - TestGcmNegotiationUsesCipherSuite(local_gcm_enabled, remote_gcm_enabled, - aes_ctr_enabled, expected_cipher_suite); -} - -// Test that a GCM cipher is used if both ends support it and non-GCM is -// disabled. -TEST_P(PeerConnectionIntegrationTest, GcmCipherUsedWhenOnlyGcmSupported) { - bool local_gcm_enabled = true; - bool remote_gcm_enabled = true; - bool aes_ctr_enabled = false; - int expected_cipher_suite = kDefaultSrtpCryptoSuiteGcm; - TestGcmNegotiationUsesCipherSuite(local_gcm_enabled, remote_gcm_enabled, - aes_ctr_enabled, expected_cipher_suite); -} - -// Verify that media can be transmitted end-to-end when GCM crypto suites are -// enabled. Note that the above tests, such as GcmCipherUsedWhenGcmSupported, -// only verify that a GCM cipher is negotiated, and not necessarily that SRTP -// works with it. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithGcmCipher) { - PeerConnectionFactory::Options gcm_options; - gcm_options.crypto_options.srtp.enable_gcm_crypto_suites = true; - gcm_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher = false; - ASSERT_TRUE( - CreatePeerConnectionWrappersWithOptions(gcm_options, gcm_options)); - ConnectFakeSignaling(); - // Do normal offer/answer and wait for some frames to be received in each - // direction. 
- caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This test sets up a call between two parties with audio, video and an RTP -// data channel. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithRtpDataChannel) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.enable_rtp_data_channel = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(rtc_config, rtc_config)); - ConnectFakeSignaling(); - // Expect that data channel created on caller side will show up for callee as - // well. - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Ensure the existence of the RTP data channel didn't impede audio/video. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_NE(nullptr, callee()->data_channel()); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Ensure data can be sent in both directions. 
- std::string data = "hello world"; - SendRtpDataWithRetries(caller()->data_channel(), data, 5); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - SendRtpDataWithRetries(callee()->data_channel(), data, 5); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -TEST_P(PeerConnectionIntegrationTest, RtpDataChannelWorksAfterRollback) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.enable_rtp_data_channel = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(rtc_config, rtc_config)); - ConnectFakeSignaling(); - auto data_channel = caller()->pc()->CreateDataChannel("label_1", nullptr); - ASSERT_TRUE(data_channel.get() != nullptr); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - caller()->CreateDataChannel("label_2", nullptr); - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - caller()->pc()->SetLocalDescription(observer, - caller()->CreateOfferAndWait().release()); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); - caller()->Rollback(); - - std::string data = "hello world"; - SendRtpDataWithRetries(data_channel, data, 5); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); -} - -// Ensure that an RTP data channel is signaled as closed for the caller when -// the callee rejects it in a subsequent offer. -TEST_P(PeerConnectionIntegrationTest, - RtpDataChannelSignaledClosedInCalleeOffer) { - // Same procedure as above test. 
- PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.enable_rtp_data_channel = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(rtc_config, rtc_config)); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_NE(nullptr, callee()->data_channel()); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Close the data channel on the callee, and do an updated offer/answer. - callee()->data_channel()->Close(); - callee()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_FALSE(caller()->data_observer()->IsOpen()); - EXPECT_FALSE(callee()->data_observer()->IsOpen()); -} - -// Tests that data is buffered in an RTP data channel until an observer is -// registered for it. -// -// NOTE: RTP data channels can receive data before the underlying -// transport has detected that a channel is writable and thus data can be -// received before the data channel state changes to open. That is hard to test -// but the same buffering is expected to be used in that case. -// -// Use fake clock and simulated network delay so that we predictably can wait -// until an SCTP message has been delivered without "sleep()"ing. -TEST_P(PeerConnectionIntegrationTestWithFakeClock, - DataBufferedUntilRtpDataChannelObserverRegistered) { - virtual_socket_server()->set_delay_mean(5); // 5 ms per hop. 
- virtual_socket_server()->UpdateDelayDistribution(); - - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.enable_rtp_data_channel = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(rtc_config, rtc_config)); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE(caller()->data_channel() != nullptr); - ASSERT_TRUE_SIMULATED_WAIT(callee()->data_channel() != nullptr, - kDefaultTimeout, FakeClock()); - ASSERT_TRUE_SIMULATED_WAIT(caller()->data_observer()->IsOpen(), - kDefaultTimeout, FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(DataChannelInterface::kOpen, - callee()->data_channel()->state(), kDefaultTimeout, - FakeClock()); - - // Unregister the observer which is normally automatically registered. - callee()->data_channel()->UnregisterObserver(); - // Send data and advance fake clock until it should have been received. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - SIMULATED_WAIT(false, 50, FakeClock()); - - // Attach data channel and expect data to be received immediately. Note that - // EXPECT_EQ_WAIT is used, such that the simulated clock is not advanced any - // further, but data can be received even if the callback is asynchronous. - MockDataChannelObserver new_observer(callee()->data_channel()); - EXPECT_EQ_SIMULATED_WAIT(data, new_observer.last_message(), kDefaultTimeout, - FakeClock()); -} - -// This test sets up a call between two parties with audio, video and but only -// the caller client supports RTP data channels. -TEST_P(PeerConnectionIntegrationTest, RtpDataChannelsRejectedByCallee) { - PeerConnectionInterface::RTCConfiguration rtc_config_1; - rtc_config_1.enable_rtp_data_channel = true; - // Must disable DTLS to make negotiation succeed. 
- rtc_config_1.enable_dtls_srtp = false; - PeerConnectionInterface::RTCConfiguration rtc_config_2; - rtc_config_2.enable_dtls_srtp = false; - rtc_config_2.enable_dtls_srtp = false; - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(rtc_config_1, rtc_config_2)); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - ASSERT_TRUE(caller()->data_channel() != nullptr); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // The caller should still have a data channel, but it should be closed, and - // one should ever have been created for the callee. - EXPECT_TRUE(caller()->data_channel() != nullptr); - EXPECT_FALSE(caller()->data_observer()->IsOpen()); - EXPECT_EQ(nullptr, callee()->data_channel()); -} - -// This test sets up a call between two parties with audio, and video. When -// audio and video is setup and flowing, an RTP data channel is negotiated. -TEST_P(PeerConnectionIntegrationTest, AddRtpDataChannelInSubsequentOffer) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.enable_rtp_data_channel = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(rtc_config, rtc_config)); - ConnectFakeSignaling(); - // Do initial offer/answer with audio/video. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Create data channel and do new offer and answer. 
- caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_NE(nullptr, callee()->data_channel()); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - // Ensure data can be sent in both directions. - std::string data = "hello world"; - SendRtpDataWithRetries(caller()->data_channel(), data, 5); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - SendRtpDataWithRetries(callee()->data_channel(), data, 5); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -#ifdef HAVE_SCTP - -// This test sets up a call between two parties with audio, video and an SCTP -// data channel. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithSctpDataChannel) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Expect that data channel created on caller side will show up for callee as - // well. - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Ensure the existence of the SCTP data channel didn't impede audio/video. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - // Caller data channel should already exist (it created one). Callee data - // channel may not exist yet, since negotiation happens in-band, not in SDP. 
- ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -// Ensure that when the callee closes an SCTP data channel, the closing -// procedure results in the data channel being closed for the caller as well. -TEST_P(PeerConnectionIntegrationTest, CalleeClosesSctpDataChannel) { - // Same procedure as above test. - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Close the data channel on the callee side, and wait for it to reach the - // "closed" state on both sides. 
- callee()->data_channel()->Close(); - EXPECT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout); -} - -TEST_P(PeerConnectionIntegrationTest, SctpDataChannelConfigSentToOtherSide) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - webrtc::DataChannelInit init; - init.id = 53; - init.maxRetransmits = 52; - caller()->CreateDataChannel("data-channel", &init); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - // Since "negotiated" is false, the "id" parameter should be ignored. - EXPECT_NE(init.id, callee()->data_channel()->id()); - EXPECT_EQ("data-channel", callee()->data_channel()->label()); - EXPECT_EQ(init.maxRetransmits, callee()->data_channel()->maxRetransmits()); - EXPECT_FALSE(callee()->data_channel()->negotiated()); -} - -// Test usrsctp's ability to process unordered data stream, where data actually -// arrives out of order using simulated delays. Previously there have been some -// bugs in this area. -TEST_P(PeerConnectionIntegrationTest, StressTestUnorderedSctpDataChannel) { - // Introduce random network delays. - // Otherwise it's not a true "unordered" test. - virtual_socket_server()->set_delay_mean(20); - virtual_socket_server()->set_delay_stddev(5); - virtual_socket_server()->UpdateDelayDistribution(); - // Normal procedure, but with unordered data channel config. 
- ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - webrtc::DataChannelInit init; - init.ordered = false; - caller()->CreateDataChannel(&init); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - static constexpr int kNumMessages = 100; - // Deliberately chosen to be larger than the MTU so messages get fragmented. - static constexpr size_t kMaxMessageSize = 4096; - // Create and send random messages. - std::vector sent_messages; - for (int i = 0; i < kNumMessages; ++i) { - size_t length = - (rand() % kMaxMessageSize) + 1; // NOLINT (rand_r instead of rand) - std::string message; - ASSERT_TRUE(rtc::CreateRandomString(length, &message)); - caller()->data_channel()->Send(DataBuffer(message)); - callee()->data_channel()->Send(DataBuffer(message)); - sent_messages.push_back(message); - } - - // Wait for all messages to be received. - EXPECT_EQ_WAIT(rtc::checked_cast(kNumMessages), - caller()->data_observer()->received_message_count(), - kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::checked_cast(kNumMessages), - callee()->data_observer()->received_message_count(), - kDefaultTimeout); - - // Sort and compare to make sure none of the messages were corrupted. - std::vector caller_received_messages = - caller()->data_observer()->messages(); - std::vector callee_received_messages = - callee()->data_observer()->messages(); - absl::c_sort(sent_messages); - absl::c_sort(caller_received_messages); - absl::c_sort(callee_received_messages); - EXPECT_EQ(sent_messages, caller_received_messages); - EXPECT_EQ(sent_messages, callee_received_messages); -} - -// This test sets up a call between two parties with audio, and video. 
When -// audio and video are setup and flowing, an SCTP data channel is negotiated. -TEST_P(PeerConnectionIntegrationTest, AddSctpDataChannelInSubsequentOffer) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Do initial offer/answer with audio/video. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Create data channel and do new offer and answer. - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Caller data channel should already exist (it created one). Callee data - // channel may not exist yet, since negotiation happens in-band, not in SDP. - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -// Set up a connection initially just using SCTP data channels, later upgrading -// to audio/video, ensuring frames are received end-to-end. Effectively the -// inverse of the test above. -// This was broken in M57; see https://crbug.com/711243 -TEST_P(PeerConnectionIntegrationTest, SctpDataChannelToAudioVideoUpgrade) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Do initial offer/answer with just data channel. 
- caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait until data can be sent over the data channel. - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Do subsequent offer/answer with two-way audio and video. Audio and video - // should end up bundled on the DTLS/ICE transport already used for data. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -static void MakeSpecCompliantSctpOffer(cricket::SessionDescription* desc) { - cricket::SctpDataContentDescription* dcd_offer = - GetFirstSctpDataContentDescription(desc); - // See https://crbug.com/webrtc/11211 - this function is a no-op - ASSERT_TRUE(dcd_offer); - dcd_offer->set_use_sctpmap(false); - dcd_offer->set_protocol("UDP/DTLS/SCTP"); -} - -// Test that the data channel works when a spec-compliant SCTP m= section is -// offered (using "a=sctp-port" instead of "a=sctpmap", and using -// "UDP/DTLS/SCTP" as the protocol). 
-TEST_P(PeerConnectionIntegrationTest, - DataChannelWorksWhenSpecCompliantSctpOfferReceived) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->SetGeneratedSdpMunger(MakeSpecCompliantSctpOffer); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -#endif // HAVE_SCTP - -// Test that the ICE connection and gathering states eventually reach -// "complete". -TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Do normal offer/answer. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, - caller()->ice_gathering_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, - callee()->ice_gathering_state(), kMaxWaitForFramesMs); - // After the best candidate pair is selected and all candidates are signaled, - // the ICE connection state should reach "complete". - // TODO(deadbeef): Currently, the ICE "controlled" agent (the - // answerer/"callee" by default) only reaches "connected". When this is - // fixed, this test should be updated. 
- EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); -} - -constexpr int kOnlyLocalPorts = cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP; - -// Use a mock resolver to resolve the hostname back to the original IP on both -// sides and check that the ICE connection connects. -TEST_P(PeerConnectionIntegrationTest, - IceStatesReachCompletionWithRemoteHostname) { - auto caller_resolver_factory = - std::make_unique>(); - auto callee_resolver_factory = - std::make_unique>(); - NiceMock callee_async_resolver; - NiceMock caller_async_resolver; - - // This also verifies that the injected AsyncResolverFactory is used by - // P2PTransportChannel. - EXPECT_CALL(*caller_resolver_factory, Create()) - .WillOnce(Return(&caller_async_resolver)); - webrtc::PeerConnectionDependencies caller_deps(nullptr); - caller_deps.async_resolver_factory = std::move(caller_resolver_factory); - - EXPECT_CALL(*callee_resolver_factory, Create()) - .WillOnce(Return(&callee_async_resolver)); - webrtc::PeerConnectionDependencies callee_deps(nullptr); - callee_deps.async_resolver_factory = std::move(callee_resolver_factory); - - PeerConnectionInterface::RTCConfiguration config; - config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( - config, std::move(caller_deps), config, std::move(callee_deps))); - - caller()->SetRemoteAsyncResolver(&callee_async_resolver); - callee()->SetRemoteAsyncResolver(&caller_async_resolver); - - // Enable hostname candidates with mDNS names. 
- caller()->SetMdnsResponder( - std::make_unique(network_thread())); - callee()->SetMdnsResponder( - std::make_unique(network_thread())); - - SetPortAllocatorFlags(kOnlyLocalPorts, kOnlyLocalPorts); - - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); - - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.CandidatePairType_UDP", - webrtc::kIceCandidatePairHostNameHostName)); -} - -// Test that firewalling the ICE connection causes the clients to identify the -// disconnected state and then removing the firewall causes them to reconnect. -class PeerConnectionIntegrationIceStatesTest - : public PeerConnectionIntegrationBaseTest, - public ::testing::WithParamInterface< - std::tuple>> { - protected: - PeerConnectionIntegrationIceStatesTest() - : PeerConnectionIntegrationBaseTest(std::get<0>(GetParam())) { - port_allocator_flags_ = std::get<1>(std::get<1>(GetParam())); - } - - void StartStunServer(const SocketAddress& server_address) { - stun_server_.reset( - cricket::TestStunServer::Create(network_thread(), server_address)); - } - - bool TestIPv6() { - return (port_allocator_flags_ & cricket::PORTALLOCATOR_ENABLE_IPV6); - } - - void SetPortAllocatorFlags() { - PeerConnectionIntegrationBaseTest::SetPortAllocatorFlags( - port_allocator_flags_, port_allocator_flags_); - } - - std::vector CallerAddresses() { - std::vector addresses; - addresses.push_back(SocketAddress("1.1.1.1", 0)); - if (TestIPv6()) { - addresses.push_back(SocketAddress("1111:0:a:b:c:d:e:f", 0)); - } - return addresses; - } - - std::vector CalleeAddresses() { - std::vector addresses; 
- addresses.push_back(SocketAddress("2.2.2.2", 0)); - if (TestIPv6()) { - addresses.push_back(SocketAddress("2222:0:a:b:c:d:e:f", 0)); - } - return addresses; - } - - void SetUpNetworkInterfaces() { - // Remove the default interfaces added by the test infrastructure. - caller()->network_manager()->RemoveInterface(kDefaultLocalAddress); - callee()->network_manager()->RemoveInterface(kDefaultLocalAddress); - - // Add network addresses for test. - for (const auto& caller_address : CallerAddresses()) { - caller()->network_manager()->AddInterface(caller_address); - } - for (const auto& callee_address : CalleeAddresses()) { - callee()->network_manager()->AddInterface(callee_address); - } - } - - private: - uint32_t port_allocator_flags_; - std::unique_ptr stun_server_; -}; - -// Ensure FakeClockForTest is constructed first (see class for rationale). -class PeerConnectionIntegrationIceStatesTestWithFakeClock - : public FakeClockForTest, - public PeerConnectionIntegrationIceStatesTest {}; - -// Tests that the PeerConnection goes through all the ICE gathering/connection -// states over the duration of the call. This includes Disconnected and Failed -// states, induced by putting a firewall between the peers and waiting for them -// to time out. 
-TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, VerifyIceStates) { - const SocketAddress kStunServerAddress = - SocketAddress("99.99.99.1", cricket::STUN_SERVER_PORT); - StartStunServer(kStunServerAddress); - - PeerConnectionInterface::RTCConfiguration config; - PeerConnectionInterface::IceServer ice_stun_server; - ice_stun_server.urls.push_back( - "stun:" + kStunServerAddress.HostAsURIString() + ":" + - kStunServerAddress.PortAsString()); - config.servers.push_back(ice_stun_server); - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - ConnectFakeSignaling(); - SetPortAllocatorFlags(); - SetUpNetworkInterfaces(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - - // Initial state before anything happens. - ASSERT_EQ(PeerConnectionInterface::kIceGatheringNew, - caller()->ice_gathering_state()); - ASSERT_EQ(PeerConnectionInterface::kIceConnectionNew, - caller()->ice_connection_state()); - ASSERT_EQ(PeerConnectionInterface::kIceConnectionNew, - caller()->standardized_ice_connection_state()); - - // Start the call by creating the offer, setting it as the local description, - // then sending it to the peer who will respond with an answer. This happens - // asynchronously so that we can watch the states as it runs in the - // background. - caller()->CreateAndSetAndSignalOffer(); - - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->standardized_ice_connection_state(), - kDefaultTimeout, FakeClock()); - - // Verify that the observer was notified of the intermediate transitions. 
- EXPECT_THAT(caller()->ice_connection_state_history(), - ElementsAre(PeerConnectionInterface::kIceConnectionChecking, - PeerConnectionInterface::kIceConnectionConnected, - PeerConnectionInterface::kIceConnectionCompleted)); - EXPECT_THAT(caller()->standardized_ice_connection_state_history(), - ElementsAre(PeerConnectionInterface::kIceConnectionChecking, - PeerConnectionInterface::kIceConnectionConnected, - PeerConnectionInterface::kIceConnectionCompleted)); - EXPECT_THAT( - caller()->peer_connection_state_history(), - ElementsAre(PeerConnectionInterface::PeerConnectionState::kConnecting, - PeerConnectionInterface::PeerConnectionState::kConnected)); - EXPECT_THAT(caller()->ice_gathering_state_history(), - ElementsAre(PeerConnectionInterface::kIceGatheringGathering, - PeerConnectionInterface::kIceGatheringComplete)); - - // Block connections to/from the caller and wait for ICE to become - // disconnected. - for (const auto& caller_address : CallerAddresses()) { - firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address); - } - RTC_LOG(LS_INFO) << "Firewall rules applied"; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionDisconnected, - caller()->ice_connection_state(), kDefaultTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionDisconnected, - caller()->standardized_ice_connection_state(), - kDefaultTimeout, FakeClock()); - - // Let ICE re-establish by removing the firewall rules. 
- firewall()->ClearRules(); - RTC_LOG(LS_INFO) << "Firewall rules cleared"; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->standardized_ice_connection_state(), - kDefaultTimeout, FakeClock()); - - // According to RFC7675, if there is no response within 30 seconds then the - // peer should consider the other side to have rejected the connection. This - // is signaled by the state transitioning to "failed". - constexpr int kConsentTimeout = 30000; - for (const auto& caller_address : CallerAddresses()) { - firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address); - } - RTC_LOG(LS_INFO) << "Firewall rules applied again"; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed, - caller()->ice_connection_state(), kConsentTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed, - caller()->standardized_ice_connection_state(), - kConsentTimeout, FakeClock()); -} - -// Tests that if the connection doesn't get set up properly we eventually reach -// the "failed" iceConnectionState. -TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, - IceStateSetupFailure) { - // Block connections to/from the caller and wait for ICE to become - // disconnected. - for (const auto& caller_address : CallerAddresses()) { - firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address); - } - - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - SetPortAllocatorFlags(); - SetUpNetworkInterfaces(); - caller()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - - // According to RFC7675, if there is no response within 30 seconds then the - // peer should consider the other side to have rejected the connection. This - // is signaled by the state transitioning to "failed". 
- constexpr int kConsentTimeout = 30000; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed, - caller()->standardized_ice_connection_state(), - kConsentTimeout, FakeClock()); -} - -// Tests that the best connection is set to the appropriate IPv4/IPv6 connection -// and that the statistics in the metric observers are updated correctly. -TEST_P(PeerConnectionIntegrationIceStatesTest, VerifyBestConnection) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - SetPortAllocatorFlags(); - SetUpNetworkInterfaces(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); - - // TODO(bugs.webrtc.org/9456): Fix it. - const int num_best_ipv4 = webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv4); - const int num_best_ipv6 = webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv6); - if (TestIPv6()) { - // When IPv6 is enabled, we should prefer an IPv6 connection over an IPv4 - // connection. 
- EXPECT_METRIC_EQ(0, num_best_ipv4); - EXPECT_METRIC_EQ(1, num_best_ipv6); - } else { - EXPECT_METRIC_EQ(1, num_best_ipv4); - EXPECT_METRIC_EQ(0, num_best_ipv6); - } - - EXPECT_METRIC_EQ(0, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.CandidatePairType_UDP", - webrtc::kIceCandidatePairHostHost)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.CandidatePairType_UDP", - webrtc::kIceCandidatePairHostPublicHostPublic)); -} - -constexpr uint32_t kFlagsIPv4NoStun = cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY; -constexpr uint32_t kFlagsIPv6NoStun = - cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_ENABLE_IPV6 | cricket::PORTALLOCATOR_DISABLE_RELAY; -constexpr uint32_t kFlagsIPv4Stun = - cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_RELAY; - -INSTANTIATE_TEST_SUITE_P( - PeerConnectionIntegrationTest, - PeerConnectionIntegrationIceStatesTest, - Combine(Values(SdpSemantics::kPlanB, SdpSemantics::kUnifiedPlan), - Values(std::make_pair("IPv4 no STUN", kFlagsIPv4NoStun), - std::make_pair("IPv6 no STUN", kFlagsIPv6NoStun), - std::make_pair("IPv4 with STUN", kFlagsIPv4Stun)))); - -INSTANTIATE_TEST_SUITE_P( - PeerConnectionIntegrationTest, - PeerConnectionIntegrationIceStatesTestWithFakeClock, - Combine(Values(SdpSemantics::kPlanB, SdpSemantics::kUnifiedPlan), - Values(std::make_pair("IPv4 no STUN", kFlagsIPv4NoStun), - std::make_pair("IPv6 no STUN", kFlagsIPv6NoStun), - std::make_pair("IPv4 with STUN", kFlagsIPv4Stun)))); - -// This test sets up a call between two parties with audio and video. -// During the call, the caller restarts ICE and the test verifies that -// new ICE candidates are generated and audio and video still can flow, and the -// ICE state reaches completed again. 
-TEST_P(PeerConnectionIntegrationTest, MediaContinuesFlowingAfterIceRestart) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Do normal offer/answer and wait for ICE to complete. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); - - // To verify that the ICE restart actually occurs, get - // ufrag/password/candidates before and after restart. - // Create an SDP string of the first audio candidate for both clients. - const webrtc::IceCandidateCollection* audio_candidates_caller = - caller()->pc()->local_description()->candidates(0); - const webrtc::IceCandidateCollection* audio_candidates_callee = - callee()->pc()->local_description()->candidates(0); - ASSERT_GT(audio_candidates_caller->count(), 0u); - ASSERT_GT(audio_candidates_callee->count(), 0u); - std::string caller_candidate_pre_restart; - ASSERT_TRUE( - audio_candidates_caller->at(0)->ToString(&caller_candidate_pre_restart)); - std::string callee_candidate_pre_restart; - ASSERT_TRUE( - audio_candidates_callee->at(0)->ToString(&callee_candidate_pre_restart)); - const cricket::SessionDescription* desc = - caller()->pc()->local_description()->description(); - std::string caller_ufrag_pre_restart = - desc->transport_infos()[0].description.ice_ufrag; - desc = callee()->pc()->local_description()->description(); - std::string callee_ufrag_pre_restart = - desc->transport_infos()[0].description.ice_ufrag; - - EXPECT_EQ(caller()->ice_candidate_pair_change_history().size(), 1u); - // Have the caller initiate an ICE restart. 
- caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions()); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); - - // Grab the ufrags/candidates again. - audio_candidates_caller = caller()->pc()->local_description()->candidates(0); - audio_candidates_callee = callee()->pc()->local_description()->candidates(0); - ASSERT_GT(audio_candidates_caller->count(), 0u); - ASSERT_GT(audio_candidates_callee->count(), 0u); - std::string caller_candidate_post_restart; - ASSERT_TRUE( - audio_candidates_caller->at(0)->ToString(&caller_candidate_post_restart)); - std::string callee_candidate_post_restart; - ASSERT_TRUE( - audio_candidates_callee->at(0)->ToString(&callee_candidate_post_restart)); - desc = caller()->pc()->local_description()->description(); - std::string caller_ufrag_post_restart = - desc->transport_infos()[0].description.ice_ufrag; - desc = callee()->pc()->local_description()->description(); - std::string callee_ufrag_post_restart = - desc->transport_infos()[0].description.ice_ufrag; - // Sanity check that an ICE restart was actually negotiated in SDP. - ASSERT_NE(caller_candidate_pre_restart, caller_candidate_post_restart); - ASSERT_NE(callee_candidate_pre_restart, callee_candidate_post_restart); - ASSERT_NE(caller_ufrag_pre_restart, caller_ufrag_post_restart); - ASSERT_NE(callee_ufrag_pre_restart, callee_ufrag_post_restart); - EXPECT_GT(caller()->ice_candidate_pair_change_history().size(), 1u); - - // Ensure that additional frames are received after the ICE restart. 
- MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Verify that audio/video can be received end-to-end when ICE renomination is -// enabled. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithIceRenomination) { - PeerConnectionInterface::RTCConfiguration config; - config.enable_ice_renomination = true; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - ConnectFakeSignaling(); - // Do normal offer/answer and wait for some frames to be received in each - // direction. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Sanity check that ICE renomination was actually negotiated. - const cricket::SessionDescription* desc = - caller()->pc()->local_description()->description(); - for (const cricket::TransportInfo& info : desc->transport_infos()) { - ASSERT_THAT(info.description.transport_options, Contains("renomination")); - } - desc = callee()->pc()->local_description()->description(); - for (const cricket::TransportInfo& info : desc->transport_infos()) { - ASSERT_THAT(info.description.transport_options, Contains("renomination")); - } - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// With a max bundle policy and RTCP muxing, adding a new media description to -// the connection should not affect ICE at all because the new media will use -// the existing connection. 
-TEST_P(PeerConnectionIntegrationTest, - AddMediaToConnectedBundleDoesNotRestartIce) { - PeerConnectionInterface::RTCConfiguration config; - config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig( - config, PeerConnectionInterface::RTCConfiguration())); - ConnectFakeSignaling(); - - caller()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - - caller()->clear_ice_connection_state_history(); - - caller()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - EXPECT_EQ(0u, caller()->ice_connection_state_history().size()); -} - -// This test sets up a call between two parties with audio and video. It then -// renegotiates setting the video m-line to "port 0", then later renegotiates -// again, enabling video. -TEST_P(PeerConnectionIntegrationTest, - VideoFlowsAfterMediaSectionIsRejectedAndRecycled) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Do initial negotiation, only sending media from the caller. Will result in - // video and audio recvonly "m=" sections. - caller()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Negotiate again, disabling the video "m=" section (the callee will set the - // port to 0 due to offer_to_receive_video = 0). 
- if (sdp_semantics_ == SdpSemantics::kPlanB) { - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_video = 0; - callee()->SetOfferAnswerOptions(options); - } else { - callee()->SetRemoteOfferHandler([this] { - callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) - ->StopInternal(); - }); - } - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Sanity check that video "m=" section was actually rejected. - const ContentInfo* answer_video_content = cricket::GetFirstVideoContent( - callee()->pc()->local_description()->description()); - ASSERT_NE(nullptr, answer_video_content); - ASSERT_TRUE(answer_video_content->rejected); - - // Enable video and do negotiation again, making sure video is received - // end-to-end, also adding media stream to callee. - if (sdp_semantics_ == SdpSemantics::kPlanB) { - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_video = 1; - callee()->SetOfferAnswerOptions(options); - } else { - // The caller's transceiver is stopped, so we need to add another track. - auto caller_transceiver = - caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); - EXPECT_EQ(nullptr, caller_transceiver.get()); - caller()->AddVideoTrack(); - } - callee()->AddVideoTrack(); - callee()->SetRemoteOfferHandler(nullptr); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Verify the caller receives frames from the newly added stream, and the - // callee receives additional frames from the re-enabled video m= section. 
- MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(); - media_expectations.ExpectBidirectionalVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This tests that if we negotiate after calling CreateSender but before we -// have a track, then set a track later, frames from the newly-set track are -// received end-to-end. -TEST_F(PeerConnectionIntegrationTestPlanB, - MediaFlowsAfterEarlyWarmupWithCreateSender) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - auto caller_audio_sender = - caller()->pc()->CreateSender("audio", "caller_stream"); - auto caller_video_sender = - caller()->pc()->CreateSender("video", "caller_stream"); - auto callee_audio_sender = - callee()->pc()->CreateSender("audio", "callee_stream"); - auto callee_video_sender = - callee()->pc()->CreateSender("video", "callee_stream"); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); - // Wait for ICE to complete, without any tracks being set. - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); - // Now set the tracks, and expect frames to immediately start flowing. 
- EXPECT_TRUE(caller_audio_sender->SetTrack(caller()->CreateLocalAudioTrack())); - EXPECT_TRUE(caller_video_sender->SetTrack(caller()->CreateLocalVideoTrack())); - EXPECT_TRUE(callee_audio_sender->SetTrack(callee()->CreateLocalAudioTrack())); - EXPECT_TRUE(callee_video_sender->SetTrack(callee()->CreateLocalVideoTrack())); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This tests that if we negotiate after calling AddTransceiver but before we -// have a track, then set a track later, frames from the newly-set tracks are -// received end-to-end. -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - MediaFlowsAfterEarlyWarmupWithAddTransceiver) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - auto audio_result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - ASSERT_EQ(RTCErrorType::NONE, audio_result.error().type()); - auto caller_audio_sender = audio_result.MoveValue()->sender(); - auto video_result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); - ASSERT_EQ(RTCErrorType::NONE, video_result.error().type()); - auto caller_video_sender = video_result.MoveValue()->sender(); - callee()->SetRemoteOfferHandler([this] { - ASSERT_EQ(2u, callee()->pc()->GetTransceivers().size()); - callee()->pc()->GetTransceivers()[0]->SetDirectionWithError( - RtpTransceiverDirection::kSendRecv); - callee()->pc()->GetTransceivers()[1]->SetDirectionWithError( - RtpTransceiverDirection::kSendRecv); - }); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); - // Wait for ICE to complete, without any tracks being set. 
- EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); - // Now set the tracks, and expect frames to immediately start flowing. - auto callee_audio_sender = callee()->pc()->GetSenders()[0]; - auto callee_video_sender = callee()->pc()->GetSenders()[1]; - ASSERT_TRUE(caller_audio_sender->SetTrack(caller()->CreateLocalAudioTrack())); - ASSERT_TRUE(caller_video_sender->SetTrack(caller()->CreateLocalVideoTrack())); - ASSERT_TRUE(callee_audio_sender->SetTrack(callee()->CreateLocalAudioTrack())); - ASSERT_TRUE(callee_video_sender->SetTrack(callee()->CreateLocalVideoTrack())); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// This test verifies that a remote video track can be added via AddStream, -// and sent end-to-end. For this particular test, it's simply echoed back -// from the caller to the callee, rather than being forwarded to a third -// PeerConnection. -TEST_F(PeerConnectionIntegrationTestPlanB, CanSendRemoteVideoTrack) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Just send a video track from the caller. - caller()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); - ASSERT_EQ(1U, callee()->remote_streams()->count()); - - // Echo the stream back, and do a new offer/anwer (initiated by callee this - // time). 
- callee()->pc()->AddStream(callee()->remote_streams()->at(0)); - callee()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); - - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test that we achieve the expected end-to-end connection time, using a -// fake clock and simulated latency on the media and signaling paths. -// We use a TURN<->TURN connection because this is usually the quickest to -// set up initially, especially when we're confident the connection will work -// and can start sending media before we get a STUN response. -// -// With various optimizations enabled, here are the network delays we expect to -// be on the critical path: -// 1. 2 signaling trips: Signaling offer and offerer's TURN candidate, then -// signaling answer (with DTLS fingerprint). -// 2. 9 media hops: Rest of the DTLS handshake. 3 hops in each direction when -// using TURN<->TURN pair, and DTLS exchange is 4 packets, -// the first of which should have arrived before the answer. -TEST_P(PeerConnectionIntegrationTestWithFakeClock, - EndToEndConnectionTimeWithTurnTurnPair) { - static constexpr int media_hop_delay_ms = 50; - static constexpr int signaling_trip_delay_ms = 500; - // For explanation of these values, see comment above. - static constexpr int required_media_hops = 9; - static constexpr int required_signaling_trips = 2; - // For internal delays (such as posting an event asychronously). 
- static constexpr int allowed_internal_delay_ms = 20; - static constexpr int total_connection_time_ms = - media_hop_delay_ms * required_media_hops + - signaling_trip_delay_ms * required_signaling_trips + - allowed_internal_delay_ms; - - static const rtc::SocketAddress turn_server_1_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_1_external_address{"88.88.88.1", - 0}; - static const rtc::SocketAddress turn_server_2_internal_address{"99.99.99.0", - 3478}; - static const rtc::SocketAddress turn_server_2_external_address{"99.99.99.1", - 0}; - cricket::TestTurnServer* turn_server_1 = CreateTurnServer( - turn_server_1_internal_address, turn_server_1_external_address); - - cricket::TestTurnServer* turn_server_2 = CreateTurnServer( - turn_server_2_internal_address, turn_server_2_external_address); - // Bypass permission check on received packets so media can be sent before - // the candidate is signaled. - network_thread()->Invoke(RTC_FROM_HERE, [turn_server_1] { - turn_server_1->set_enable_permission_checks(false); - }); - network_thread()->Invoke(RTC_FROM_HERE, [turn_server_2] { - turn_server_2->set_enable_permission_checks(false); - }); - - PeerConnectionInterface::RTCConfiguration client_1_config; - webrtc::PeerConnectionInterface::IceServer ice_server_1; - ice_server_1.urls.push_back("turn:88.88.88.0:3478"); - ice_server_1.username = "test"; - ice_server_1.password = "test"; - client_1_config.servers.push_back(ice_server_1); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; - client_1_config.presume_writable_when_fully_relayed = true; - - PeerConnectionInterface::RTCConfiguration client_2_config; - webrtc::PeerConnectionInterface::IceServer ice_server_2; - ice_server_2.urls.push_back("turn:99.99.99.0:3478"); - ice_server_2.username = "test"; - ice_server_2.password = "test"; - client_2_config.servers.push_back(ice_server_2); - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; - 
client_2_config.presume_writable_when_fully_relayed = true; - - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(client_1_config, client_2_config)); - // Set up the simulated delays. - SetSignalingDelayMs(signaling_trip_delay_ms); - ConnectFakeSignaling(); - virtual_socket_server()->set_delay_mean(media_hop_delay_ms); - virtual_socket_server()->UpdateDelayDistribution(); - - // Set "offer to receive audio/video" without adding any tracks, so we just - // set up ICE/DTLS with no media. - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 1; - options.offer_to_receive_video = 1; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - EXPECT_TRUE_SIMULATED_WAIT(DtlsConnected(), total_connection_time_ms, - FakeClock()); - // Closing the PeerConnections destroys the ports before the ScopedFakeClock. - // If this is not done a DCHECK can be hit in ports.cc, because a large - // negative number is calculated for the rtt due to the global clock changing. - ClosePeerConnections(); -} - -// Verify that a TurnCustomizer passed in through RTCConfiguration -// is actually used by the underlying TURN candidate pair. -// Note that turnport_unittest.cc contains more detailed, lower-level tests. 
-TEST_P(PeerConnectionIntegrationTest, TurnCustomizerUsedForTurnConnections) { - static const rtc::SocketAddress turn_server_1_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_1_external_address{"88.88.88.1", - 0}; - static const rtc::SocketAddress turn_server_2_internal_address{"99.99.99.0", - 3478}; - static const rtc::SocketAddress turn_server_2_external_address{"99.99.99.1", - 0}; - CreateTurnServer(turn_server_1_internal_address, - turn_server_1_external_address); - CreateTurnServer(turn_server_2_internal_address, - turn_server_2_external_address); - - PeerConnectionInterface::RTCConfiguration client_1_config; - webrtc::PeerConnectionInterface::IceServer ice_server_1; - ice_server_1.urls.push_back("turn:88.88.88.0:3478"); - ice_server_1.username = "test"; - ice_server_1.password = "test"; - client_1_config.servers.push_back(ice_server_1); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; - auto* customizer1 = CreateTurnCustomizer(); - client_1_config.turn_customizer = customizer1; - - PeerConnectionInterface::RTCConfiguration client_2_config; - webrtc::PeerConnectionInterface::IceServer ice_server_2; - ice_server_2.urls.push_back("turn:99.99.99.0:3478"); - ice_server_2.username = "test"; - ice_server_2.password = "test"; - client_2_config.servers.push_back(ice_server_2); - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; - auto* customizer2 = CreateTurnCustomizer(); - client_2_config.turn_customizer = customizer2; - - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(client_1_config, client_2_config)); - ConnectFakeSignaling(); - - // Set "offer to receive audio/video" without adding any tracks, so we just - // set up ICE/DTLS with no media. 
- PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 1; - options.offer_to_receive_video = 1; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - - ExpectTurnCustomizerCountersIncremented(customizer1); - ExpectTurnCustomizerCountersIncremented(customizer2); -} - -// Verifies that you can use TCP instead of UDP to connect to a TURN server and -// send media between the caller and the callee. -TEST_P(PeerConnectionIntegrationTest, TCPUsedForTurnConnections) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; - - // Enable TCP for the fake turn server. - CreateTurnServer(turn_server_internal_address, turn_server_external_address, - cricket::PROTO_TCP); - - webrtc::PeerConnectionInterface::IceServer ice_server; - ice_server.urls.push_back("turn:88.88.88.0:3478?transport=tcp"); - ice_server.username = "test"; - ice_server.password = "test"; - - PeerConnectionInterface::RTCConfiguration client_1_config; - client_1_config.servers.push_back(ice_server); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; - - PeerConnectionInterface::RTCConfiguration client_2_config; - client_2_config.servers.push_back(ice_server); - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; - - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(client_1_config, client_2_config)); - - // Do normal offer/answer and wait for ICE to complete. 
- ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); - - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - EXPECT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Verify that a SSLCertificateVerifier passed in through -// PeerConnectionDependencies is actually used by the underlying SSL -// implementation to determine whether a certificate presented by the TURN -// server is accepted by the client. Note that openssladapter_unittest.cc -// contains more detailed, lower-level tests. -TEST_P(PeerConnectionIntegrationTest, - SSLCertificateVerifierUsedForTurnConnections) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; - - // Enable TCP-TLS for the fake turn server. We need to pass in 88.88.88.0 so - // that host name verification passes on the fake certificate. - CreateTurnServer(turn_server_internal_address, turn_server_external_address, - cricket::PROTO_TLS, "88.88.88.0"); - - webrtc::PeerConnectionInterface::IceServer ice_server; - ice_server.urls.push_back("turns:88.88.88.0:3478?transport=tcp"); - ice_server.username = "test"; - ice_server.password = "test"; - - PeerConnectionInterface::RTCConfiguration client_1_config; - client_1_config.servers.push_back(ice_server); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; - - PeerConnectionInterface::RTCConfiguration client_2_config; - client_2_config.servers.push_back(ice_server); - // Setting the type to kRelay forces the connection to go through a TURN - // server. 
- client_2_config.type = webrtc::PeerConnectionInterface::kRelay; - - // Get a copy to the pointer so we can verify calls later. - rtc::TestCertificateVerifier* client_1_cert_verifier = - new rtc::TestCertificateVerifier(); - client_1_cert_verifier->verify_certificate_ = true; - rtc::TestCertificateVerifier* client_2_cert_verifier = - new rtc::TestCertificateVerifier(); - client_2_cert_verifier->verify_certificate_ = true; - - // Create the dependencies with the test certificate verifier. - webrtc::PeerConnectionDependencies client_1_deps(nullptr); - client_1_deps.tls_cert_verifier = - std::unique_ptr(client_1_cert_verifier); - webrtc::PeerConnectionDependencies client_2_deps(nullptr); - client_2_deps.tls_cert_verifier = - std::unique_ptr(client_2_cert_verifier); - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( - client_1_config, std::move(client_1_deps), client_2_config, - std::move(client_2_deps))); - ConnectFakeSignaling(); - - // Set "offer to receive audio/video" without adding any tracks, so we just - // set up ICE/DTLS with no media. - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 1; - options.offer_to_receive_video = 1; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - - EXPECT_GT(client_1_cert_verifier->call_count_, 0u); - EXPECT_GT(client_2_cert_verifier->call_count_, 0u); -} - -TEST_P(PeerConnectionIntegrationTest, - SSLCertificateVerifierFailureUsedForTurnConnectionsFailsConnection) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; - - // Enable TCP-TLS for the fake turn server. We need to pass in 88.88.88.0 so - // that host name verification passes on the fake certificate. 
- CreateTurnServer(turn_server_internal_address, turn_server_external_address, - cricket::PROTO_TLS, "88.88.88.0"); - - webrtc::PeerConnectionInterface::IceServer ice_server; - ice_server.urls.push_back("turns:88.88.88.0:3478?transport=tcp"); - ice_server.username = "test"; - ice_server.password = "test"; - - PeerConnectionInterface::RTCConfiguration client_1_config; - client_1_config.servers.push_back(ice_server); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; - - PeerConnectionInterface::RTCConfiguration client_2_config; - client_2_config.servers.push_back(ice_server); - // Setting the type to kRelay forces the connection to go through a TURN - // server. - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; - - // Get a copy to the pointer so we can verify calls later. - rtc::TestCertificateVerifier* client_1_cert_verifier = - new rtc::TestCertificateVerifier(); - client_1_cert_verifier->verify_certificate_ = false; - rtc::TestCertificateVerifier* client_2_cert_verifier = - new rtc::TestCertificateVerifier(); - client_2_cert_verifier->verify_certificate_ = false; - - // Create the dependencies with the test certificate verifier. - webrtc::PeerConnectionDependencies client_1_deps(nullptr); - client_1_deps.tls_cert_verifier = - std::unique_ptr(client_1_cert_verifier); - webrtc::PeerConnectionDependencies client_2_deps(nullptr); - client_2_deps.tls_cert_verifier = - std::unique_ptr(client_2_cert_verifier); - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( - client_1_config, std::move(client_1_deps), client_2_config, - std::move(client_2_deps))); - ConnectFakeSignaling(); - - // Set "offer to receive audio/video" without adding any tracks, so we just - // set up ICE/DTLS with no media. 
- PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 1; - options.offer_to_receive_video = 1; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - bool wait_res = true; - // TODO(bugs.webrtc.org/9219): When IceConnectionState is implemented - // properly, should be able to just wait for a state of "failed" instead of - // waiting a fixed 10 seconds. - WAIT_(DtlsConnected(), kDefaultTimeout, wait_res); - ASSERT_FALSE(wait_res); - - EXPECT_GT(client_1_cert_verifier->call_count_, 0u); - EXPECT_GT(client_2_cert_verifier->call_count_, 0u); -} - -// Test that the injected ICE transport factory is used to create ICE transports -// for WebRTC connections. -TEST_P(PeerConnectionIntegrationTest, IceTransportFactoryUsedForConnections) { - PeerConnectionInterface::RTCConfiguration default_config; - PeerConnectionDependencies dependencies(nullptr); - auto ice_transport_factory = std::make_unique(); - EXPECT_CALL(*ice_transport_factory, RecordIceTransportCreated()).Times(1); - dependencies.ice_transport_factory = std::move(ice_transport_factory); - auto wrapper = CreatePeerConnectionWrapper("Caller", nullptr, &default_config, - std::move(dependencies), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - ASSERT_TRUE(wrapper); - wrapper->CreateDataChannel(); - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - wrapper->pc()->SetLocalDescription(observer, - wrapper->CreateOfferAndWait().release()); -} - -// Test that audio and video flow end-to-end when codec names don't use the -// expected casing, given that they're supposed to be case insensitive. To test -// this, all but one codec is removed from each media description, and its -// casing is changed. -// -// In the past, this has regressed and caused crashes/black video, due to the -// fact that code at some layers was doing case-insensitive comparisons and -// code at other layers was not. 
-TEST_P(PeerConnectionIntegrationTest, CodecNamesAreCaseInsensitive) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - - // Remove all but one audio/video codec (opus and VP8), and change the - // casing of the caller's generated offer. - caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* description) { - cricket::AudioContentDescription* audio = - GetFirstAudioContentDescription(description); - ASSERT_NE(nullptr, audio); - auto audio_codecs = audio->codecs(); - audio_codecs.erase(std::remove_if(audio_codecs.begin(), audio_codecs.end(), - [](const cricket::AudioCodec& codec) { - return codec.name != "opus"; - }), - audio_codecs.end()); - ASSERT_EQ(1u, audio_codecs.size()); - audio_codecs[0].name = "OpUs"; - audio->set_codecs(audio_codecs); - - cricket::VideoContentDescription* video = - GetFirstVideoContentDescription(description); - ASSERT_NE(nullptr, video); - auto video_codecs = video->codecs(); - video_codecs.erase(std::remove_if(video_codecs.begin(), video_codecs.end(), - [](const cricket::VideoCodec& codec) { - return codec.name != "VP8"; - }), - video_codecs.end()); - ASSERT_EQ(1u, video_codecs.size()); - video_codecs[0].name = "vP8"; - video->set_codecs(video_codecs); - }); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Verify frames are still received end-to-end. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, GetSourcesAudio) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for one audio frame to be received by the callee. 
- MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(1); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); - auto receiver = callee()->pc()->GetReceivers()[0]; - ASSERT_EQ(receiver->media_type(), cricket::MEDIA_TYPE_AUDIO); - auto sources = receiver->GetSources(); - ASSERT_GT(receiver->GetParameters().encodings.size(), 0u); - EXPECT_EQ(receiver->GetParameters().encodings[0].ssrc, - sources[0].source_id()); - EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type()); -} - -TEST_P(PeerConnectionIntegrationTest, GetSourcesVideo) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddVideoTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for one video frame to be received by the callee. - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeVideo(1); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); - auto receiver = callee()->pc()->GetReceivers()[0]; - ASSERT_EQ(receiver->media_type(), cricket::MEDIA_TYPE_VIDEO); - auto sources = receiver->GetSources(); - ASSERT_GT(receiver->GetParameters().encodings.size(), 0u); - ASSERT_GT(sources.size(), 0u); - EXPECT_EQ(receiver->GetParameters().encodings[0].ssrc, - sources[0].source_id()); - EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type()); -} - -// Test that if a track is removed and added again with a different stream ID, -// the new stream ID is successfully communicated in SDP and media continues to -// flow end-to-end. -// TODO(webrtc.bugs.org/8734): This test does not work for Unified Plan because -// it will not reuse a transceiver that has already been sending. After creating -// a new transceiver it tries to create an offer with two senders of the same -// track ids and it fails. 
-TEST_F(PeerConnectionIntegrationTestPlanB, RemoveAndAddTrackWithNewStreamId) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Add track using stream 1, do offer/answer. - rtc::scoped_refptr track = - caller()->CreateLocalAudioTrack(); - rtc::scoped_refptr sender = - caller()->AddTrack(track, {"stream_1"}); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - { - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(1); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } - // Remove the sender, and create a new one with the new stream. - caller()->pc()->RemoveTrack(sender); - sender = caller()->AddTrack(track, {"stream_2"}); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for additional audio frames to be received by the callee. - { - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } -} - -TEST_P(PeerConnectionIntegrationTest, RtcEventLogOutputWriteCalled) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - auto output = std::make_unique>(); - ON_CALL(*output, IsActive()).WillByDefault(::testing::Return(true)); - ON_CALL(*output, Write(::testing::_)).WillByDefault(::testing::Return(true)); - EXPECT_CALL(*output, Write(::testing::_)).Times(::testing::AtLeast(1)); - EXPECT_TRUE(caller()->pc()->StartRtcEventLog( - std::move(output), webrtc::RtcEventLog::kImmediateOutput)); - - caller()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); -} - -// Test that if candidates are only signaled by applying full session -// descriptions (instead of using AddIceCandidate), the peers can connect to -// each other and exchange media. 
-TEST_P(PeerConnectionIntegrationTest, MediaFlowsWhenCandidatesSetOnlyInSdp) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - // Each side will signal the session descriptions but not candidates. - ConnectFakeSignalingForSdpOnly(); - - // Add audio video track and exchange the initial offer/answer with media - // information only. This will start ICE gathering on each side. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - - // Wait for all candidates to be gathered on both the caller and callee. - ASSERT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete, - caller()->ice_gathering_state(), kDefaultTimeout); - ASSERT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete, - callee()->ice_gathering_state(), kDefaultTimeout); - - // The candidates will now be included in the session description, so - // signaling them will start the ICE connection. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that media flows in both directions. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test that SetAudioPlayout can be used to disable audio playout from the -// start, then later enable it. This may be useful, for example, if the caller -// needs to play a local ringtone until some event occurs, after which it -// switches to playing the received audio. -TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioPlayout) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Set up audio-only call where audio playout is disabled on caller's side. - caller()->pc()->SetAudioPlayout(false); - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Pump messages for a second. 
- WAIT(false, 1000); - // Since audio playout is disabled, the caller shouldn't have received - // anything (at the playout level, at least). - EXPECT_EQ(0, caller()->audio_frames_received()); - // As a sanity check, make sure the callee (for which playout isn't disabled) - // did still see frames on its audio level. - ASSERT_GT(callee()->audio_frames_received(), 0); - - // Enable playout again, and ensure audio starts flowing. - caller()->pc()->SetAudioPlayout(true); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -double GetAudioEnergyStat(PeerConnectionWrapper* pc) { - auto report = pc->NewGetStats(); - auto track_stats_list = - report->GetStatsOfType(); - const webrtc::RTCMediaStreamTrackStats* remote_track_stats = nullptr; - for (const auto* track_stats : track_stats_list) { - if (track_stats->remote_source.is_defined() && - *track_stats->remote_source) { - remote_track_stats = track_stats; - break; - } - } - - if (!remote_track_stats->total_audio_energy.is_defined()) { - return 0.0; - } - return *remote_track_stats->total_audio_energy; -} - -// Test that if audio playout is disabled via the SetAudioPlayout() method, then -// incoming audio is still processed and statistics are generated. -TEST_P(PeerConnectionIntegrationTest, - DisableAudioPlayoutStillGeneratesAudioStats) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Set up audio-only call where playout is disabled but audio-processing is - // still active. - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->pc()->SetAudioPlayout(false); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Wait for the callee to receive audio stats. 
- EXPECT_TRUE_WAIT(GetAudioEnergyStat(caller()) > 0, kMaxWaitForFramesMs); -} - -// Test that SetAudioRecording can be used to disable audio recording from the -// start, then later enable it. This may be useful, for example, if the caller -// wants to ensure that no audio resources are active before a certain state -// is reached. -TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioRecording) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - // Set up audio-only call where audio recording is disabled on caller's side. - caller()->pc()->SetAudioRecording(false); - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Pump messages for a second. - WAIT(false, 1000); - // Since caller has disabled audio recording, the callee shouldn't have - // received anything. - EXPECT_EQ(0, callee()->audio_frames_received()); - // As a sanity check, make sure the caller did still see frames on its - // audio level since audio recording is enabled on the calle side. - ASSERT_GT(caller()->audio_frames_received(), 0); - - // Enable audio recording again, and ensure audio starts flowing. - caller()->pc()->SetAudioRecording(true); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test that after closing PeerConnections, they stop sending any packets (ICE, -// DTLS, RTP...). -TEST_P(PeerConnectionIntegrationTest, ClosingConnectionStopsPacketFlow) { - // Set up audio/video/data, wait for some frames to be received. 
- ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); -#ifdef HAVE_SCTP - caller()->CreateDataChannel(); -#endif - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - // Close PeerConnections. - ClosePeerConnections(); - // Pump messages for a second, and ensure no new packets end up sent. - uint32_t sent_packets_a = virtual_socket_server()->sent_packets(); - WAIT(false, 1000); - uint32_t sent_packets_b = virtual_socket_server()->sent_packets(); - EXPECT_EQ(sent_packets_a, sent_packets_b); -} - -// Test that transport stats are generated by the RTCStatsCollector for a -// connection that only involves data channels. This is a regression test for -// crbug.com/826972. -#ifdef HAVE_SCTP -TEST_P(PeerConnectionIntegrationTest, - TransportStatsReportedForDataChannelOnlyConnection) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); - - auto caller_report = caller()->NewGetStats(); - EXPECT_EQ(1u, caller_report->GetStatsOfType().size()); - auto callee_report = callee()->NewGetStats(); - EXPECT_EQ(1u, callee_report->GetStatsOfType().size()); -} -#endif // HAVE_SCTP - -TEST_P(PeerConnectionIntegrationTest, - IceEventsGeneratedAndLoggedInRtcEventLog) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithFakeRtcEventLog()); - ConnectFakeSignaling(); - PeerConnectionInterface::RTCOfferAnswerOptions options; - options.offer_to_receive_audio = 1; - caller()->SetOfferAnswerOptions(options); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - ASSERT_NE(nullptr, 
caller()->event_log_factory()); - ASSERT_NE(nullptr, callee()->event_log_factory()); - webrtc::FakeRtcEventLog* caller_event_log = - static_cast( - caller()->event_log_factory()->last_log_created()); - webrtc::FakeRtcEventLog* callee_event_log = - static_cast( - callee()->event_log_factory()->last_log_created()); - ASSERT_NE(nullptr, caller_event_log); - ASSERT_NE(nullptr, callee_event_log); - int caller_ice_config_count = caller_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairConfig); - int caller_ice_event_count = caller_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairEvent); - int callee_ice_config_count = callee_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairConfig); - int callee_ice_event_count = callee_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairEvent); - EXPECT_LT(0, caller_ice_config_count); - EXPECT_LT(0, caller_ice_event_count); - EXPECT_LT(0, callee_ice_config_count); - EXPECT_LT(0, callee_ice_event_count); -} - -TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; - - CreateTurnServer(turn_server_internal_address, turn_server_external_address); - - webrtc::PeerConnectionInterface::IceServer ice_server; - ice_server.urls.push_back("turn:88.88.88.0:3478"); - ice_server.username = "test"; - ice_server.password = "test"; - - PeerConnectionInterface::RTCConfiguration caller_config; - caller_config.servers.push_back(ice_server); - caller_config.type = webrtc::PeerConnectionInterface::kRelay; - caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; - caller_config.surface_ice_candidates_on_ice_transport_type_changed = true; - - PeerConnectionInterface::RTCConfiguration callee_config; - callee_config.servers.push_back(ice_server); - callee_config.type = 
webrtc::PeerConnectionInterface::kRelay; - callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; - callee_config.surface_ice_candidates_on_ice_transport_type_changed = true; - - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(caller_config, callee_config)); - - // Do normal offer/answer and wait for ICE to complete. - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Since we are doing continual gathering, the ICE transport does not reach - // kIceGatheringComplete (see - // P2PTransportChannel::OnCandidatesAllocationDone), and consequently not - // kIceConnectionComplete. - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); - // Note that we cannot use the metric - // |WebRTC.PeerConnection.CandidatePairType_UDP| in this test since this - // metric is only populated when we reach kIceConnectionComplete in the - // current implementation. - EXPECT_EQ(cricket::RELAY_PORT_TYPE, - caller()->last_candidate_gathered().type()); - EXPECT_EQ(cricket::RELAY_PORT_TYPE, - callee()->last_candidate_gathered().type()); - - // Loosen the caller's candidate filter. - caller_config = caller()->pc()->GetConfiguration(); - caller_config.type = webrtc::PeerConnectionInterface::kAll; - caller()->pc()->SetConfiguration(caller_config); - // We should have gathered a new host candidate. - EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE, - caller()->last_candidate_gathered().type(), kDefaultTimeout); - - // Loosen the callee's candidate filter. 
- callee_config = callee()->pc()->GetConfiguration(); - callee_config.type = webrtc::PeerConnectionInterface::kAll; - callee()->pc()->SetConfiguration(callee_config); - EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE, - callee()->last_candidate_gathered().type(), kDefaultTimeout); - - // Create an offer and verify that it does not contain an ICE restart (i.e new - // ice credentials). - std::string caller_ufrag_pre_offer = caller() - ->pc() - ->local_description() - ->description() - ->transport_infos()[0] - .description.ice_ufrag; - caller()->CreateAndSetAndSignalOffer(); - std::string caller_ufrag_post_offer = caller() - ->pc() - ->local_description() - ->description() - ->transport_infos()[0] - .description.ice_ufrag; - EXPECT_EQ(caller_ufrag_pre_offer, caller_ufrag_post_offer); -} - -TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; - - CreateTurnServer(turn_server_internal_address, turn_server_external_address); - - webrtc::PeerConnectionInterface::IceServer ice_server; - ice_server.urls.push_back("turn:88.88.88.0:3478"); - ice_server.username = "test"; - ice_server.password = "123"; - - PeerConnectionInterface::RTCConfiguration caller_config; - caller_config.servers.push_back(ice_server); - caller_config.type = webrtc::PeerConnectionInterface::kRelay; - caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; - - PeerConnectionInterface::RTCConfiguration callee_config; - callee_config.servers.push_back(ice_server); - callee_config.type = webrtc::PeerConnectionInterface::kRelay; - callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; - - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(caller_config, callee_config)); - - // Do normal offer/answer and wait for ICE to complete. 
- ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(401, caller()->error_event().error_code, kDefaultTimeout); - EXPECT_EQ("Unauthorized", caller()->error_event().error_text); - EXPECT_EQ("turn:88.88.88.0:3478?transport=udp", caller()->error_event().url); - EXPECT_NE(caller()->error_event().address, ""); -} - -TEST_P(PeerConnectionIntegrationTest, OnIceCandidateErrorWithEmptyAddress) { - webrtc::PeerConnectionInterface::IceServer ice_server; - ice_server.urls.push_back("turn:127.0.0.1:3478?transport=tcp"); - ice_server.username = "test"; - ice_server.password = "test"; - - PeerConnectionInterface::RTCConfiguration caller_config; - caller_config.servers.push_back(ice_server); - caller_config.type = webrtc::PeerConnectionInterface::kRelay; - caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; - - PeerConnectionInterface::RTCConfiguration callee_config; - callee_config.servers.push_back(ice_server); - callee_config.type = webrtc::PeerConnectionInterface::kRelay; - callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; - - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(caller_config, callee_config)); - - // Do normal offer/answer and wait for ICE to complete. 
- ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(701, caller()->error_event().error_code, kDefaultTimeout); - EXPECT_EQ(caller()->error_event().address, ""); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - AudioKeepsFlowingAfterImplicitRollback) { - PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = SdpSemantics::kUnifiedPlan; - config.enable_implicit_rollback = true; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - SetSignalIceCandidates(false); // Workaround candidate outrace sdp. - caller()->AddVideoTrack(); - callee()->AddVideoTrack(); - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); - callee()->pc()->SetLocalDescription(observer, - callee()->CreateOfferAndWait().release()); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); - caller()->CreateAndSetAndSignalOffer(); // Implicit rollback. 
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - ImplicitRollbackVisitsStableState) { - RTCConfiguration config; - config.sdp_semantics = SdpSemantics::kUnifiedPlan; - config.enable_implicit_rollback = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - - rtc::scoped_refptr sld_observer( - new rtc::RefCountedObject()); - callee()->pc()->SetLocalDescription(sld_observer, - callee()->CreateOfferAndWait().release()); - EXPECT_TRUE_WAIT(sld_observer->called(), kDefaultTimeout); - EXPECT_EQ(sld_observer->error(), ""); - - rtc::scoped_refptr srd_observer( - new rtc::RefCountedObject()); - callee()->pc()->SetRemoteDescription( - srd_observer, caller()->CreateOfferAndWait().release()); - EXPECT_TRUE_WAIT(srd_observer->called(), kDefaultTimeout); - EXPECT_EQ(srd_observer->error(), ""); - - EXPECT_THAT(callee()->peer_connection_signaling_state_history(), - ElementsAre(PeerConnectionInterface::kHaveLocalOffer, - PeerConnectionInterface::kStable, - PeerConnectionInterface::kHaveRemoteOffer)); -} - -INSTANTIATE_TEST_SUITE_P(PeerConnectionIntegrationTest, - PeerConnectionIntegrationTest, - Values(SdpSemantics::kPlanB, - SdpSemantics::kUnifiedPlan)); - -INSTANTIATE_TEST_SUITE_P(PeerConnectionIntegrationTest, - PeerConnectionIntegrationTestWithFakeClock, - Values(SdpSemantics::kPlanB, - SdpSemantics::kUnifiedPlan)); - -// Tests that verify interoperability between Plan B and Unified Plan -// PeerConnections. -class PeerConnectionIntegrationInteropTest - : public PeerConnectionIntegrationBaseTest, - public ::testing::WithParamInterface< - std::tuple> { - protected: - // Setting the SdpSemantics for the base test to kDefault does not matter - // because we specify not to use the test semantics when creating - // PeerConnectionWrappers. 
- PeerConnectionIntegrationInteropTest() - : PeerConnectionIntegrationBaseTest(SdpSemantics::kPlanB), - caller_semantics_(std::get<0>(GetParam())), - callee_semantics_(std::get<1>(GetParam())) {} - - bool CreatePeerConnectionWrappersWithSemantics() { - return CreatePeerConnectionWrappersWithSdpSemantics(caller_semantics_, - callee_semantics_); - } - - const SdpSemantics caller_semantics_; - const SdpSemantics callee_semantics_; -}; - -TEST_P(PeerConnectionIntegrationInteropTest, NoMediaLocalToNoMediaRemote) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics()); - ConnectFakeSignaling(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); -} - -TEST_P(PeerConnectionIntegrationInteropTest, OneAudioLocalToNoMediaRemote) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics()); - ConnectFakeSignaling(); - auto audio_sender = caller()->AddAudioTrack(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Verify that one audio receiver has been created on the remote and that it - // has the same track ID as the sending track. 
- auto receivers = callee()->pc()->GetReceivers(); - ASSERT_EQ(1u, receivers.size()); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, receivers[0]->media_type()); - EXPECT_EQ(receivers[0]->track()->id(), audio_sender->track()->id()); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationInteropTest, OneAudioOneVideoToNoMediaRemote) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics()); - ConnectFakeSignaling(); - auto video_sender = caller()->AddVideoTrack(); - auto audio_sender = caller()->AddAudioTrack(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Verify that one audio and one video receiver have been created on the - // remote and that they have the same track IDs as the sending tracks. - auto audio_receivers = - callee()->GetReceiversOfType(cricket::MEDIA_TYPE_AUDIO); - ASSERT_EQ(1u, audio_receivers.size()); - EXPECT_EQ(audio_receivers[0]->track()->id(), audio_sender->track()->id()); - auto video_receivers = - callee()->GetReceiversOfType(cricket::MEDIA_TYPE_VIDEO); - ASSERT_EQ(1u, video_receivers.size()); - EXPECT_EQ(video_receivers[0]->track()->id(), video_sender->track()->id()); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationInteropTest, - OneAudioOneVideoLocalToOneAudioOneVideoRemote) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics()); - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - 
-TEST_P(PeerConnectionIntegrationInteropTest, - ReverseRolesOneAudioLocalToOneVideoRemote) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - callee()->AddVideoTrack(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Verify that only the audio track has been negotiated. - EXPECT_EQ(0u, caller()->GetReceiversOfType(cricket::MEDIA_TYPE_VIDEO).size()); - // Might also check that the callee's NegotiationNeeded flag is set. - - // Reverse roles. - callee()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - MediaExpectations media_expectations; - media_expectations.CallerExpectsSomeVideo(); - media_expectations.CalleeExpectsSomeAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -INSTANTIATE_TEST_SUITE_P( - PeerConnectionIntegrationTest, - PeerConnectionIntegrationInteropTest, - Values(std::make_tuple(SdpSemantics::kPlanB, SdpSemantics::kUnifiedPlan), - std::make_tuple(SdpSemantics::kUnifiedPlan, SdpSemantics::kPlanB))); - -// Test that if the Unified Plan side offers two video tracks then the Plan B -// side will only see the first one and ignore the second. -TEST_F(PeerConnectionIntegrationTestPlanB, TwoVideoUnifiedPlanToNoMediaPlanB) { - ASSERT_TRUE(CreatePeerConnectionWrappersWithSdpSemantics( - SdpSemantics::kUnifiedPlan, SdpSemantics::kPlanB)); - ConnectFakeSignaling(); - auto first_sender = caller()->AddVideoTrack(); - caller()->AddVideoTrack(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Verify that there is only one receiver and it corresponds to the first - // added track. 
- auto receivers = callee()->pc()->GetReceivers(); - ASSERT_EQ(1u, receivers.size()); - EXPECT_TRUE(receivers[0]->track()->enabled()); - EXPECT_EQ(first_sender->track()->id(), receivers[0]->track()->id()); - - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test that if the initial offer tagged BUNDLE section is rejected due to its -// associated RtpTransceiver being stopped and another transceiver is added, -// then renegotiation causes the callee to receive the new video track without -// error. -// This is a regression test for bugs.webrtc.org/9954 -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - ReOfferWithStoppedBundleTaggedTransceiver) { - RTCConfiguration config; - config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - ConnectFakeSignaling(); - auto audio_transceiver_or_error = - caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack()); - ASSERT_TRUE(audio_transceiver_or_error.ok()); - auto audio_transceiver = audio_transceiver_or_error.MoveValue(); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - { - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } - - audio_transceiver->StopInternal(); - caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack()); - - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - { - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); - } -} - -#ifdef HAVE_SCTP - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - EndToEndCallWithBundledSctpDataChannel) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - 
caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_EQ_WAIT(SctpTransportState::kConnected, - caller()->pc()->GetSctpTransport()->Information().state(), - kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - EndToEndCallWithDataChannelOnlyConnects) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE(caller()->data_observer()->IsOpen()); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, DataChannelClosesWhenClosed) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - caller()->data_channel()->Close(); - ASSERT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - DataChannelClosesWhenClosedReverse) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - callee()->data_channel()->Close(); - 
ASSERT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout); -} - -TEST_F(PeerConnectionIntegrationTestUnifiedPlan, - DataChannelClosesWhenPeerConnectionClosed) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - caller()->pc()->Close(); - ASSERT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout); -} - -#endif // HAVE_SCTP - -} // namespace -} // namespace webrtc - -#endif // if !defined(THREAD_SANITIZER) diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc new file mode 100644 index 000000000..7988339c5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc @@ -0,0 +1,200 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/peer_connection_message_handler.h" + +#include + +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "pc/stats_collector_interface.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +namespace { + +enum { + MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0, + MSG_SET_SESSIONDESCRIPTION_FAILED, + MSG_CREATE_SESSIONDESCRIPTION_FAILED, + MSG_GETSTATS, + MSG_REPORT_USAGE_PATTERN, + MSG_ON_ERROR_DEMUXING_PACKET, +}; + +struct SetSessionDescriptionMsg : public rtc::MessageData { + explicit SetSessionDescriptionMsg( + webrtc::SetSessionDescriptionObserver* observer) + : observer(observer) {} + + rtc::scoped_refptr observer; + RTCError error; +}; + +struct CreateSessionDescriptionMsg : public rtc::MessageData { + explicit CreateSessionDescriptionMsg( + webrtc::CreateSessionDescriptionObserver* observer) + : observer(observer) {} + + rtc::scoped_refptr observer; + RTCError error; +}; + +struct GetStatsMsg : public rtc::MessageData { + GetStatsMsg(webrtc::StatsObserver* observer, + StatsCollectorInterface* stats, + webrtc::MediaStreamTrackInterface* track) + : observer(observer), stats(stats), track(track) {} + rtc::scoped_refptr observer; + StatsCollectorInterface* stats; + rtc::scoped_refptr track; +}; + +struct RequestUsagePatternMsg : public rtc::MessageData { + explicit RequestUsagePatternMsg(std::function func) + : function(func) {} + std::function function; +}; + +struct OnErrorDemuxingPacketMsg : public rtc::MessageData { + explicit OnErrorDemuxingPacketMsg(webrtc::ErrorDemuxingPacketObserver* observer, + uint32_t ssrc) + : observer(observer), ssrc(ssrc) {} + + rtc::scoped_refptr observer; + uint32_t ssrc; +}; + +} // namespace + +PeerConnectionMessageHandler::~PeerConnectionMessageHandler() { + // Process all pending notifications in the message queue. If we don't do + // this, requests will linger and not know they succeeded or failed. 
+ rtc::MessageList list; + signaling_thread()->Clear(this, rtc::MQID_ANY, &list); + for (auto& msg : list) { + if (msg.message_id == MSG_CREATE_SESSIONDESCRIPTION_FAILED) { + // Processing CreateOffer() and CreateAnswer() messages ensures their + // observers are invoked even if the PeerConnection is destroyed early. + OnMessage(&msg); + } else { + // TODO(hbos): Consider processing all pending messages. This would mean + // that SetLocalDescription() and SetRemoteDescription() observers are + // informed of successes and failures; this is currently NOT the case. + delete msg.pdata; + } + } +} + +void PeerConnectionMessageHandler::OnMessage(rtc::Message* msg) { + RTC_DCHECK_RUN_ON(signaling_thread()); + switch (msg->message_id) { + case MSG_SET_SESSIONDESCRIPTION_SUCCESS: { + SetSessionDescriptionMsg* param = + static_cast(msg->pdata); + param->observer->OnSuccess(); + delete param; + break; + } + case MSG_SET_SESSIONDESCRIPTION_FAILED: { + SetSessionDescriptionMsg* param = + static_cast(msg->pdata); + param->observer->OnFailure(std::move(param->error)); + delete param; + break; + } + case MSG_CREATE_SESSIONDESCRIPTION_FAILED: { + CreateSessionDescriptionMsg* param = + static_cast(msg->pdata); + param->observer->OnFailure(std::move(param->error)); + delete param; + break; + } + case MSG_GETSTATS: { + GetStatsMsg* param = static_cast(msg->pdata); + StatsReports reports; + param->stats->GetStats(param->track, &reports); + param->observer->OnComplete(reports); + delete param; + break; + } + case MSG_REPORT_USAGE_PATTERN: { + RequestUsagePatternMsg* param = + static_cast(msg->pdata); + param->function(); + delete param; + break; + } + case MSG_ON_ERROR_DEMUXING_PACKET: { + OnErrorDemuxingPacketMsg* param = static_cast(msg->pdata); + param->observer->OnErrorDemuxingPacket(param->ssrc); + delete param; + break; + } + default: + RTC_NOTREACHED() << "Not implemented"; + break; + } +} + +void PeerConnectionMessageHandler::PostSetSessionDescriptionSuccess( + 
SetSessionDescriptionObserver* observer) { + SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); + signaling_thread()->Post(RTC_FROM_HERE, this, + MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg); +} + +void PeerConnectionMessageHandler::PostSetSessionDescriptionFailure( + SetSessionDescriptionObserver* observer, + RTCError&& error) { + RTC_DCHECK(!error.ok()); + SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); + msg->error = std::move(error); + signaling_thread()->Post(RTC_FROM_HERE, this, + MSG_SET_SESSIONDESCRIPTION_FAILED, msg); +} + +void PeerConnectionMessageHandler::PostCreateSessionDescriptionFailure( + CreateSessionDescriptionObserver* observer, + RTCError error) { + RTC_DCHECK(!error.ok()); + CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer); + msg->error = std::move(error); + signaling_thread()->Post(RTC_FROM_HERE, this, + MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg); +} + +void PeerConnectionMessageHandler::PostGetStats( + StatsObserver* observer, + StatsCollectorInterface* stats, + MediaStreamTrackInterface* track) { + signaling_thread()->Post(RTC_FROM_HERE, this, MSG_GETSTATS, + new GetStatsMsg(observer, stats, track)); +} + +void PeerConnectionMessageHandler::RequestUsagePatternReport( + std::function func, + int delay_ms) { + signaling_thread()->PostDelayed(RTC_FROM_HERE, delay_ms, this, + MSG_REPORT_USAGE_PATTERN, + new RequestUsagePatternMsg(func)); +} + +void PeerConnectionMessageHandler::PostErrorDemuxingPacket( + ErrorDemuxingPacketObserver* observer, + uint32_t ssrc) { + signaling_thread()->Post(RTC_FROM_HERE, this, + MSG_ON_ERROR_DEMUXING_PACKET, + new OnErrorDemuxingPacketMsg(observer, ssrc)); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h new file mode 100644 index 000000000..858426d6f --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h @@ -0,0 +1,59 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ +#define PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ + +#include +#include "api/rtc_error.h" +#include "api/stats_types.h" +#include "rtc_base/message_handler.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +class CreateSessionDescriptionObserver; +class SetSessionDescriptionObserver; +class StatsCollectorInterface; +class StatsObserver; +class MediaStreamTrackInterface; +class ErrorDemuxingPacketObserver; + +class PeerConnectionMessageHandler : public rtc::MessageHandler { + public: + explicit PeerConnectionMessageHandler(rtc::Thread* signaling_thread) + : signaling_thread_(signaling_thread) {} + ~PeerConnectionMessageHandler(); + + // Implements MessageHandler. 
+ void OnMessage(rtc::Message* msg) override; + void PostSetSessionDescriptionSuccess( + SetSessionDescriptionObserver* observer); + void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer, + RTCError&& error); + void PostCreateSessionDescriptionFailure( + CreateSessionDescriptionObserver* observer, + RTCError error); + void PostGetStats(StatsObserver* observer, + StatsCollectorInterface* stats, + MediaStreamTrackInterface* track); + void RequestUsagePatternReport(std::function, int delay_ms); + void PostErrorDemuxingPacket(ErrorDemuxingPacketObserver* observer, + uint32_t ssrc); + + private: + rtc::Thread* signaling_thread() const { return signaling_thread_; } + + rtc::Thread* const signaling_thread_; +}; + +} // namespace webrtc + +#endif // PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc index 18a4ed25c..8ae061254 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc @@ -18,7 +18,6 @@ #include "absl/algorithm/container.h" #include "api/scoped_refptr.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -36,6 +35,11 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface { explicit AudioDataProxy(RemoteAudioSource* source) : source_(source) { RTC_DCHECK(source); } + + AudioDataProxy() = delete; + AudioDataProxy(const AudioDataProxy&) = delete; + AudioDataProxy& operator=(const AudioDataProxy&) = delete; + ~AudioDataProxy() override { source_->OnAudioChannelGone(); } // AudioSinkInterface implementation. 
@@ -45,8 +49,6 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface { private: const rtc::scoped_refptr source_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioDataProxy); }; RemoteAudioSource::RemoteAudioSource(rtc::Thread* worker_thread) diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc index 5851b0680..529200894 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc @@ -216,6 +216,7 @@ const char* QualityLimitationReasonToRTCQualityLimitationReason( case QualityLimitationReason::kOther: return RTCQualityLimitationReason::kOther; } + RTC_CHECK_NOTREACHED(); } double DoubleAudioLevelFromIntAudioLevel(int audio_level) { @@ -227,6 +228,7 @@ double DoubleAudioLevelFromIntAudioLevel(int audio_level) { std::unique_ptr CodecStatsFromRtpCodecParameters( uint64_t timestamp_us, const std::string& mid, + const std::string& transport_id, bool inbound, const RtpCodecParameters& codec_params) { RTC_DCHECK_GE(codec_params.payload_type, 0); @@ -249,6 +251,7 @@ std::unique_ptr CodecStatsFromRtpCodecParameters( if (WriteFmtpParameters(codec_params.parameters, &fmtp)) { codec_stats->sdp_fmtp_line = fmtp.Release(); } + codec_stats->transport_id = transport_id; return codec_stats; } @@ -305,7 +308,10 @@ void SetInboundRTPStreamStatsFromVoiceReceiverInfo( voice_receiver_info.inserted_samples_for_deceleration; inbound_audio->removed_samples_for_acceleration = voice_receiver_info.removed_samples_for_acceleration; - inbound_audio->audio_level = voice_receiver_info.audio_level; + if (voice_receiver_info.audio_level >= 0) { + inbound_audio->audio_level = + DoubleAudioLevelFromIntAudioLevel(voice_receiver_info.audio_level); + } inbound_audio->total_audio_energy = voice_receiver_info.total_output_energy; inbound_audio->total_samples_duration = voice_receiver_info.total_output_duration; @@ -1070,24 +1076,14 @@ void 
RTCStatsCollector::GetStatsReportInternal( num_pending_partial_reports_ = 2; partial_report_timestamp_us_ = cache_now_us; - // Prepare |transceiver_stats_infos_| for use in + // Prepare |transceiver_stats_infos_| and |call_stats_| for use in // |ProducePartialResultsOnNetworkThread| and // |ProducePartialResultsOnSignalingThread|. - transceiver_stats_infos_ = PrepareTransceiverStatsInfos_s_w(); + PrepareTransceiverStatsInfosAndCallStats_s_w(); // Prepare |transport_names_| for use in // |ProducePartialResultsOnNetworkThread|. transport_names_ = PrepareTransportNames_s(); - // Prepare |call_stats_| here since GetCallStats() will hop to the worker - // thread. - // TODO(holmer): To avoid the hop we could move BWE and BWE stats to the - // network thread, where it more naturally belongs. - // TODO(https://crbug.com/webrtc/11767): In the meantime we can piggyback on - // the blocking-invoke that is already performed in - // PrepareTransceiverStatsInfos_s_w() so that we can call GetCallStats() - // without additional blocking-invokes. - call_stats_ = pc_->GetCallStats(); - // Don't touch |network_report_| on the signaling thread until // ProducePartialResultsOnNetworkThread() has signaled the // |network_report_event_|. 
@@ -1288,6 +1284,9 @@ void RTCStatsCollector::ProduceCodecStats_n( if (!stats.mid) { continue; } + std::string transport_id = RTCTransportStatsIDFromTransportChannel( + *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); + const cricket::VoiceMediaInfo* voice_media_info = stats.track_media_info_map->voice_media_info(); const cricket::VideoMediaInfo* video_media_info = @@ -1297,12 +1296,12 @@ void RTCStatsCollector::ProduceCodecStats_n( // Inbound for (const auto& pair : voice_media_info->receive_codecs) { report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, true, pair.second)); + timestamp_us, *stats.mid, transport_id, true, pair.second)); } // Outbound for (const auto& pair : voice_media_info->send_codecs) { report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, false, pair.second)); + timestamp_us, *stats.mid, transport_id, false, pair.second)); } } // Video @@ -1310,12 +1309,12 @@ void RTCStatsCollector::ProduceCodecStats_n( // Inbound for (const auto& pair : video_media_info->receive_codecs) { report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, true, pair.second)); + timestamp_us, *stats.mid, transport_id, true, pair.second)); } // Outbound for (const auto& pair : video_media_info->send_codecs) { report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, false, pair.second)); + timestamp_us, *stats.mid, transport_id, false, pair.second)); } } } @@ -1894,11 +1893,10 @@ RTCStatsCollector::PrepareTransportCertificateStats_n( return transport_cert_stats; } -std::vector -RTCStatsCollector::PrepareTransceiverStatsInfos_s_w() const { +void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w() { RTC_DCHECK(signaling_thread_->IsCurrent()); - std::vector transceiver_stats_infos; + transceiver_stats_infos_.clear(); // These are used to invoke GetStats for all the media channels together in // one worker thread hop. 
std::mapinternal(); stats.media_type = media_type; @@ -1948,14 +1946,16 @@ RTCStatsCollector::PrepareTransceiverStatsInfos_s_w() const { } } - // We jump to the worker thread and call GetStats() on each media channel. At - // the same time we construct the TrackMediaInfoMaps, which also needs info - // from the worker thread. This minimizes the number of thread jumps. + // We jump to the worker thread and call GetStats() on each media channel as + // well as GetCallStats(). At the same time we construct the + // TrackMediaInfoMaps, which also needs info from the worker thread. This + // minimizes the number of thread jumps. worker_thread_->Invoke(RTC_FROM_HERE, [&] { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& entry : voice_stats) { - if (!entry.first->GetStats(entry.second.get())) { + if (!entry.first->GetStats(entry.second.get(), + /*get_and_clear_legacy_stats=*/false)) { RTC_LOG(LS_WARNING) << "Failed to get voice stats."; } } @@ -1966,7 +1966,7 @@ RTCStatsCollector::PrepareTransceiverStatsInfos_s_w() const { } // Create the TrackMediaInfoMap for each transceiver stats object. 
- for (auto& stats : transceiver_stats_infos) { + for (auto& stats : transceiver_stats_infos_) { auto transceiver = stats.transceiver; std::unique_ptr voice_media_info; std::unique_ptr video_media_info; @@ -1998,9 +1998,9 @@ RTCStatsCollector::PrepareTransceiverStatsInfos_s_w() const { std::move(voice_media_info), std::move(video_media_info), senders, receivers); } - }); - return transceiver_stats_infos; + call_stats_ = pc_->GetCallStats(); + }); } std::set RTCStatsCollector::PrepareTransportNames_s() const { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h index e1bc27d9e..35576e91d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h @@ -215,7 +215,8 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface, PrepareTransportCertificateStats_n( const std::map& transport_stats_by_name) const; - std::vector PrepareTransceiverStatsInfos_s_w() const; + // The results are stored in |transceiver_stats_infos_| and |call_stats_|. + void PrepareTransceiverStatsInfosAndCallStats_s_w(); std::set PrepareTransportNames_s() const; // Stats gathering on a particular thread. 
diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc index e627d45e2..ee68ec9a0 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc @@ -442,6 +442,8 @@ class RTCStatsReportVerifier { bool VerifyRTCCodecStats(const RTCCodecStats& codec) { RTCStatsVerifier verifier(report_, &codec); + verifier.TestMemberIsIDReference(codec.transport_id, + RTCTransportStats::kType); verifier.TestMemberIsDefined(codec.payload_type); verifier.TestMemberIsDefined(codec.mime_type); verifier.TestMemberIsPositive(codec.clock_rate); diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc index c08643eba..aa53dde18 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc @@ -76,7 +76,8 @@ std::vector GetStatsReferencedIds(const RTCStats& stats) { const auto& certificate = static_cast(stats); AddIdIfDefined(certificate.issuer_certificate_id, &neighbor_ids); } else if (type == RTCCodecStats::kType) { - // RTCCodecStats does not have any neighbor references. + const auto& codec = static_cast(stats); + AddIdIfDefined(codec.transport_id, &neighbor_ids); } else if (type == RTCDataChannelStats::kType) { // RTCDataChannelStats does not have any neighbor references. 
} else if (type == RTCIceCandidatePairStats::kType) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.h index adc724d64..ea2de49b5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.h @@ -191,7 +191,6 @@ class RtpDataChannel : public DataChannelInterface, uint32_t send_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0; uint32_t receive_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0; PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_); - rtc::AsyncInvoker invoker_ RTC_GUARDED_BY(signaling_thread_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc index 9c7a337ab..68a948ea8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc @@ -76,8 +76,7 @@ RTCErrorOr ToCricketFeedbackParam( } return cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc); } - // Not reached; avoids compile warning. - FATAL(); + RTC_CHECK_NOTREACHED(); } template diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h index 84c2ff723..2cfccd4e6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h @@ -41,7 +41,11 @@ namespace webrtc { // Internal class used by PeerConnection. class RtpReceiverInternal : public RtpReceiverInterface { public: + // Stops receiving. The track may be reactivated. virtual void Stop() = 0; + // Stops the receiver permanently. + // Causes the associated track to enter kEnded state. Cannot be reversed. + virtual void StopAndEndTrack() = 0; // Sets the underlying MediaEngine channel associated with this RtpSender. 
// A VoiceMediaChannel should be used for audio RtpSenders and diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc index 1430e299c..0da6dfca8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc @@ -17,8 +17,7 @@ #include "api/audio_options.h" #include "api/media_stream_interface.h" #include "media/base/media_engine.h" -#include "pc/peer_connection.h" -#include "pc/stats_collector.h" +#include "pc/stats_collector_interface.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" #include "rtc_base/location.h" @@ -418,7 +417,7 @@ void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) { rtc::scoped_refptr AudioRtpSender::Create( rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer) { return rtc::scoped_refptr( new rtc::RefCountedObject(worker_thread, id, stats, @@ -427,7 +426,7 @@ rtc::scoped_refptr AudioRtpSender::Create( AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer) : RtpSenderBase(worker_thread, id, set_streams_observer), stats_(stats), diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h index c343ff085..c2fe91f01 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h @@ -28,7 +28,7 @@ namespace webrtc { -class StatsCollector; +class StatsCollectorInterface; bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters); @@ -257,7 +257,7 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { static rtc::scoped_refptr Create( rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* 
set_streams_observer); virtual ~AudioRtpSender(); @@ -281,7 +281,7 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { protected: AudioRtpSender(rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer); void SetSend() override; @@ -303,7 +303,7 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { } sigslot::signal0<> SignalDestroyed; - StatsCollector* stats_ = nullptr; + StatsCollectorInterface* stats_ = nullptr; rtc::scoped_refptr dtmf_sender_proxy_; bool cached_track_enabled_ = false; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc index 701b83ffc..6b3032e27 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc @@ -119,12 +119,14 @@ RtpTransceiver::RtpTransceiver( rtc::scoped_refptr> receiver, cricket::ChannelManager* channel_manager, - std::vector header_extensions_offered) + std::vector header_extensions_offered, + std::function on_negotiation_needed) : thread_(GetCurrentTaskQueueOrThread()), unified_plan_(true), media_type_(sender->media_type()), channel_manager_(channel_manager), - header_extensions_to_offer_(std::move(header_extensions_offered)) { + header_extensions_to_offer_(std::move(header_extensions_offered)), + on_negotiation_needed_(std::move(on_negotiation_needed)) { RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO || media_type_ == cricket::MEDIA_TYPE_VIDEO); RTC_DCHECK_EQ(sender->media_type(), receiver->media_type()); @@ -299,10 +301,6 @@ RtpTransceiverDirection RtpTransceiver::direction() const { return direction_; } -void RtpTransceiver::SetDirection(RtpTransceiverDirection new_direction) { - SetDirectionWithError(new_direction); -} - RTCError RtpTransceiver::SetDirectionWithError( RtpTransceiverDirection new_direction) { if (unified_plan_ && stopping()) { @@ 
-318,14 +316,14 @@ RTCError RtpTransceiver::SetDirectionWithError( } direction_ = new_direction; - SignalNegotiationNeeded(); + on_negotiation_needed_(); return RTCError::OK(); } absl::optional RtpTransceiver::current_direction() const { - if (unified_plan_ && stopping()) + if (unified_plan_ && stopped()) return webrtc::RtpTransceiverDirection::kStopped; return current_direction_; @@ -350,7 +348,7 @@ void RtpTransceiver::StopSendingAndReceiving() { // 5. Stop receiving media with receiver. for (const auto& receiver : receivers_) - receiver->internal()->Stop(); + receiver->internal()->StopAndEndTrack(); stopping_ = true; direction_ = webrtc::RtpTransceiverDirection::kInactive; @@ -358,7 +356,11 @@ void RtpTransceiver::StopSendingAndReceiving() { RTCError RtpTransceiver::StopStandard() { RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(unified_plan_); + // If we're on Plan B, do what Stop() used to do there. + if (!unified_plan_) { + StopInternal(); + return RTCError::OK(); + } // 1. Let transceiver be the RTCRtpTransceiver object on which the method is // invoked. // @@ -378,13 +380,18 @@ RTCError RtpTransceiver::StopStandard() { // 5. Stop sending and receiving given transceiver, and update the // negotiation-needed flag for connection. StopSendingAndReceiving(); - SignalNegotiationNeeded(); + on_negotiation_needed_(); return RTCError::OK(); } void RtpTransceiver::StopInternal() { + StopTransceiverProcedure(); +} + +void RtpTransceiver::StopTransceiverProcedure() { RTC_DCHECK_RUN_ON(thread_); + // As specified in the "Stop the RTCRtpTransceiver" procedure // 1. If transceiver.[[Stopping]] is false, stop sending and receiving given // transceiver. 
if (!stopping_) diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h index 980d64ca7..4d9716c89 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h @@ -71,7 +71,8 @@ class RtpTransceiver final rtc::scoped_refptr> receiver, cricket::ChannelManager* channel_manager, - std::vector HeaderExtensionsToOffer); + std::vector HeaderExtensionsToOffer, + std::function on_negotiation_needed); ~RtpTransceiver() override; // Returns the Voice/VideoChannel set for this transceiver. May be null if @@ -177,9 +178,13 @@ class RtpTransceiver final // PeerConnection is closed. void SetPeerConnectionClosed(); + // Executes the "stop the RTCRtpTransceiver" procedure from + // the webrtc-pc specification, described under the stop() method. + void StopTransceiverProcedure(); + // Fired when the RtpTransceiver state changes such that negotiation is now // needed (e.g., in response to a direction change). - sigslot::signal0<> SignalNegotiationNeeded; + // sigslot::signal0<> SignalNegotiationNeeded; // RtpTransceiverInterface implementation. 
cricket::MediaType media_type() const override; @@ -189,7 +194,6 @@ class RtpTransceiver final bool stopped() const override; bool stopping() const override; RtpTransceiverDirection direction() const override; - void SetDirection(RtpTransceiverDirection new_direction) override; RTCError SetDirectionWithError( RtpTransceiverDirection new_direction) override; absl::optional current_direction() const override; @@ -237,18 +241,18 @@ class RtpTransceiver final cricket::ChannelManager* channel_manager_ = nullptr; std::vector codec_preferences_; std::vector header_extensions_to_offer_; + const std::function on_negotiation_needed_; }; BEGIN_SIGNALING_PROXY_MAP(RtpTransceiver) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) PROXY_CONSTMETHOD0(absl::optional, mid) PROXY_CONSTMETHOD0(rtc::scoped_refptr, sender) PROXY_CONSTMETHOD0(rtc::scoped_refptr, receiver) PROXY_CONSTMETHOD0(bool, stopped) PROXY_CONSTMETHOD0(bool, stopping) PROXY_CONSTMETHOD0(RtpTransceiverDirection, direction) -PROXY_METHOD1(void, SetDirection, RtpTransceiverDirection) PROXY_METHOD1(webrtc::RTCError, SetDirectionWithError, RtpTransceiverDirection) PROXY_CONSTMETHOD0(absl::optional, current_direction) PROXY_CONSTMETHOD0(absl::optional, fired_direction) diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc new file mode 100644 index 000000000..e796f9b1b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc @@ -0,0 +1,685 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/rtp_transmission_manager.h" + +#include + +#include "absl/types/optional.h" +#include "api/peer_connection_interface.h" +#include "api/rtp_transceiver_direction.h" +#include "pc/audio_rtp_receiver.h" +#include "pc/channel.h" +#include "pc/stats_collector_interface.h" +#include "pc/video_rtp_receiver.h" +#include "rtc_base/checks.h" +#include "rtc_base/helpers.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { + +static const char kDefaultAudioSenderId[] = "defaulta0"; +static const char kDefaultVideoSenderId[] = "defaultv0"; + +} // namespace + +RtpTransmissionManager::RtpTransmissionManager( + bool is_unified_plan, + rtc::Thread* signaling_thread, + rtc::Thread* worker_thread, + cricket::ChannelManager* channel_manager, + UsagePattern* usage_pattern, + PeerConnectionObserver* observer, + StatsCollectorInterface* stats, + std::function on_negotiation_needed) + : is_unified_plan_(is_unified_plan), + signaling_thread_(signaling_thread), + worker_thread_(worker_thread), + channel_manager_(channel_manager), + usage_pattern_(usage_pattern), + observer_(observer), + stats_(stats), + on_negotiation_needed_(on_negotiation_needed), + weak_ptr_factory_(this) {} + +void RtpTransmissionManager::Close() { + closed_ = true; + observer_ = nullptr; +} + +// Implementation of SetStreamsObserver +void RtpTransmissionManager::OnSetStreams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (IsUnifiedPlan()) + OnNegotiationNeeded(); +} + +// Function to call back to the PeerConnection when negotiation is needed +void RtpTransmissionManager::OnNegotiationNeeded() { + on_negotiation_needed_(); +} + +// Function that returns the currently valid observer +PeerConnectionObserver* RtpTransmissionManager::Observer() const { + RTC_DCHECK(!closed_); + RTC_DCHECK(observer_); + return observer_; +} + +cricket::VoiceMediaChannel* 
RtpTransmissionManager::voice_media_channel() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto* voice_channel = static_cast( + GetAudioTransceiver()->internal()->channel()); + if (voice_channel) { + return voice_channel->media_channel(); + } else { + return nullptr; + } +} + +cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto* video_channel = static_cast( + GetVideoTransceiver()->internal()->channel()); + if (video_channel) { + return video_channel->media_channel(); + } else { + return nullptr; + } +} + +RTCErrorOr> +RtpTransmissionManager::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + return (IsUnifiedPlan() ? AddTrackUnifiedPlan(track, stream_ids) + : AddTrackPlanB(track, stream_ids)); +} + +RTCErrorOr> +RtpTransmissionManager::AddTrackPlanB( + rtc::scoped_refptr track, + const std::vector& stream_ids) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (stream_ids.size() > 1u) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "AddTrack with more than one stream is not " + "supported with Plan B semantics."); + } + std::vector adjusted_stream_ids = stream_ids; + if (adjusted_stream_ids.empty()) { + adjusted_stream_ids.push_back(rtc::CreateRandomUuid()); + } + cricket::MediaType media_type = + (track->kind() == MediaStreamTrackInterface::kAudioKind + ? 
cricket::MEDIA_TYPE_AUDIO + : cricket::MEDIA_TYPE_VIDEO); + auto new_sender = + CreateSender(media_type, track->id(), track, adjusted_stream_ids, {}); + if (track->kind() == MediaStreamTrackInterface::kAudioKind) { + new_sender->internal()->SetMediaChannel(voice_media_channel()); + GetAudioTransceiver()->internal()->AddSender(new_sender); + const RtpSenderInfo* sender_info = + FindSenderInfo(local_audio_sender_infos_, + new_sender->internal()->stream_ids()[0], track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } + } else { + RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind()); + new_sender->internal()->SetMediaChannel(video_media_channel()); + GetVideoTransceiver()->internal()->AddSender(new_sender); + const RtpSenderInfo* sender_info = + FindSenderInfo(local_video_sender_infos_, + new_sender->internal()->stream_ids()[0], track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } + } + return rtc::scoped_refptr(new_sender); +} + +RTCErrorOr> +RtpTransmissionManager::AddTrackUnifiedPlan( + rtc::scoped_refptr track, + const std::vector& stream_ids) { + auto transceiver = FindFirstTransceiverForAddedTrack(track); + if (transceiver) { + RTC_LOG(LS_INFO) << "Reusing an existing " + << cricket::MediaTypeToString(transceiver->media_type()) + << " transceiver for AddTrack."; + if (transceiver->stopping()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "The existing transceiver is stopping."); + } + + if (transceiver->direction() == RtpTransceiverDirection::kRecvOnly) { + transceiver->internal()->set_direction( + RtpTransceiverDirection::kSendRecv); + } else if (transceiver->direction() == RtpTransceiverDirection::kInactive) { + transceiver->internal()->set_direction( + RtpTransceiverDirection::kSendOnly); + } + transceiver->sender()->SetTrack(track); + transceiver->internal()->sender_internal()->set_stream_ids(stream_ids); + 
transceiver->internal()->set_reused_for_addtrack(true); + } else { + cricket::MediaType media_type = + (track->kind() == MediaStreamTrackInterface::kAudioKind + ? cricket::MEDIA_TYPE_AUDIO + : cricket::MEDIA_TYPE_VIDEO); + RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type) + << " transceiver in response to a call to AddTrack."; + std::string sender_id = track->id(); + // Avoid creating a sender with an existing ID by generating a random ID. + // This can happen if this is the second time AddTrack has created a sender + // for this track. + if (FindSenderById(sender_id)) { + sender_id = rtc::CreateRandomUuid(); + } + auto sender = CreateSender(media_type, sender_id, track, stream_ids, {}); + auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); + transceiver = CreateAndAddTransceiver(sender, receiver); + transceiver->internal()->set_created_by_addtrack(true); + transceiver->internal()->set_direction(RtpTransceiverDirection::kSendRecv); + } + return transceiver->sender(); +} + +rtc::scoped_refptr> +RtpTransmissionManager::CreateSender( + cricket::MediaType media_type, + const std::string& id, + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& send_encodings) { + RTC_DCHECK_RUN_ON(signaling_thread()); + rtc::scoped_refptr> sender; + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + RTC_DCHECK(!track || + (track->kind() == MediaStreamTrackInterface::kAudioKind)); + sender = RtpSenderProxyWithInternal::Create( + signaling_thread(), + AudioRtpSender::Create(worker_thread(), id, stats_, this)); + NoteUsageEvent(UsageEvent::AUDIO_ADDED); + } else { + RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK(!track || + (track->kind() == MediaStreamTrackInterface::kVideoKind)); + sender = RtpSenderProxyWithInternal::Create( + signaling_thread(), VideoRtpSender::Create(worker_thread(), id, this)); + NoteUsageEvent(UsageEvent::VIDEO_ADDED); + } + bool set_track_succeeded = sender->SetTrack(track); + 
RTC_DCHECK(set_track_succeeded); + sender->internal()->set_stream_ids(stream_ids); + sender->internal()->set_init_send_encodings(send_encodings); + return sender; +} + +rtc::scoped_refptr> +RtpTransmissionManager::CreateReceiver(cricket::MediaType media_type, + const std::string& receiver_id) { + RTC_DCHECK_RUN_ON(signaling_thread()); + rtc::scoped_refptr> + receiver; + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), new AudioRtpReceiver(worker_thread(), receiver_id, + std::vector({}))); + NoteUsageEvent(UsageEvent::AUDIO_ADDED); + } else { + RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); + receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), new VideoRtpReceiver(worker_thread(), receiver_id, + std::vector({}))); + NoteUsageEvent(UsageEvent::VIDEO_ADDED); + } + return receiver; +} + +rtc::scoped_refptr> +RtpTransmissionManager::CreateAndAddTransceiver( + rtc::scoped_refptr> sender, + rtc::scoped_refptr> + receiver) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Ensure that the new sender does not have an ID that is already in use by + // another sender. + // Allow receiver IDs to conflict since those come from remote SDP (which + // could be invalid, but should not cause a crash). + RTC_DCHECK(!FindSenderById(sender->id())); + auto transceiver = RtpTransceiverProxyWithInternal::Create( + signaling_thread(), + new RtpTransceiver( + sender, receiver, channel_manager(), + sender->media_type() == cricket::MEDIA_TYPE_AUDIO + ? 
channel_manager()->GetSupportedAudioRtpHeaderExtensions() + : channel_manager()->GetSupportedVideoRtpHeaderExtensions(), + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() { + if (this_weak_ptr) { + this_weak_ptr->OnNegotiationNeeded(); + } + })); + transceivers()->Add(transceiver); + return transceiver; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindFirstTransceiverForAddedTrack( + rtc::scoped_refptr track) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(track); + for (auto transceiver : transceivers()->List()) { + if (!transceiver->sender()->track() && + cricket::MediaTypeToString(transceiver->media_type()) == + track->kind() && + !transceiver->internal()->has_ever_been_used_to_send() && + !transceiver->stopped()) { + return transceiver; + } + } + return nullptr; +} + +std::vector>> +RtpTransmissionManager::GetSendersInternal() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector>> + all_senders; + for (const auto& transceiver : transceivers_.List()) { + if (IsUnifiedPlan() && transceiver->internal()->stopped()) + continue; + + auto senders = transceiver->internal()->senders(); + all_senders.insert(all_senders.end(), senders.begin(), senders.end()); + } + return all_senders; +} + +std::vector< + rtc::scoped_refptr>> +RtpTransmissionManager::GetReceiversInternal() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector< + rtc::scoped_refptr>> + all_receivers; + for (const auto& transceiver : transceivers_.List()) { + if (IsUnifiedPlan() && transceiver->internal()->stopped()) + continue; + + auto receivers = transceiver->internal()->receivers(); + all_receivers.insert(all_receivers.end(), receivers.begin(), + receivers.end()); + } + return all_receivers; +} + +rtc::scoped_refptr> +RtpTransmissionManager::GetAudioTransceiver() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + // This method only works with Plan B SDP, where there is a single + // audio/video transceiver. 
+ RTC_DCHECK(!IsUnifiedPlan()); + for (auto transceiver : transceivers_.List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + return transceiver; + } + } + RTC_NOTREACHED(); + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::GetVideoTransceiver() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + // This method only works with Plan B SDP, where there is a single + // audio/video transceiver. + RTC_DCHECK(!IsUnifiedPlan()); + for (auto transceiver : transceivers_.List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { + return transceiver; + } + } + RTC_NOTREACHED(); + return nullptr; +} + +void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(track); + RTC_DCHECK(stream); + auto sender = FindSenderForTrack(track); + if (sender) { + // We already have a sender for this track, so just change the stream_id + // so that it's correct in the next call to CreateOffer. + sender->internal()->set_stream_ids({stream->id()}); + return; + } + + // Normal case; we've never seen this track before. + auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), track, + {stream->id()}, {}); + new_sender->internal()->SetMediaChannel(voice_media_channel()); + GetAudioTransceiver()->internal()->AddSender(new_sender); + // If the sender has already been configured in SDP, we call SetSsrc, + // which will connect the sender to the underlying transport. This can + // occur if a local session description that contains the ID of the sender + // is set before AddStream is called. It can also occur if the local + // session description is not changed and RemoveStream is called, and + // later AddStream is called again with the same stream. 
+ const RtpSenderInfo* sender_info = + FindSenderInfo(local_audio_sender_infos_, stream->id(), track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } +} + +// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around +// indefinitely, when we have unified plan SDP. +void RtpTransmissionManager::RemoveAudioTrack(AudioTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto sender = FindSenderForTrack(track); + if (!sender) { + RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() + << " doesn't exist."; + return; + } + GetAudioTransceiver()->internal()->RemoveSender(sender); +} + +void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(track); + RTC_DCHECK(stream); + auto sender = FindSenderForTrack(track); + if (sender) { + // We already have a sender for this track, so just change the stream_id + // so that it's correct in the next call to CreateOffer. + sender->internal()->set_stream_ids({stream->id()}); + return; + } + + // Normal case; we've never seen this track before. 
+ auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), track, + {stream->id()}, {}); + new_sender->internal()->SetMediaChannel(video_media_channel()); + GetVideoTransceiver()->internal()->AddSender(new_sender); + const RtpSenderInfo* sender_info = + FindSenderInfo(local_video_sender_infos_, stream->id(), track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } +} + +void RtpTransmissionManager::RemoveVideoTrack(VideoTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto sender = FindSenderForTrack(track); + if (!sender) { + RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() + << " doesn't exist."; + return; + } + GetVideoTransceiver()->internal()->RemoveSender(sender); +} + +void RtpTransmissionManager::CreateAudioReceiver( + MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) { + RTC_DCHECK(!closed_); + std::vector> streams; + streams.push_back(rtc::scoped_refptr(stream)); + // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use + // the constructor taking stream IDs instead. 
+ auto* audio_receiver = new AudioRtpReceiver( + worker_thread(), remote_sender_info.sender_id, streams); + audio_receiver->SetMediaChannel(voice_media_channel()); + if (remote_sender_info.sender_id == kDefaultAudioSenderId) { + audio_receiver->SetupUnsignaledMediaChannel(); + } else { + audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); + } + auto receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), audio_receiver); + GetAudioTransceiver()->internal()->AddReceiver(receiver); + Observer()->OnAddTrack(receiver, streams); + NoteUsageEvent(UsageEvent::AUDIO_ADDED); +} + +void RtpTransmissionManager::CreateVideoReceiver( + MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) { + RTC_DCHECK(!closed_); + std::vector> streams; + streams.push_back(rtc::scoped_refptr(stream)); + // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use + // the constructor taking stream IDs instead. + auto* video_receiver = new VideoRtpReceiver( + worker_thread(), remote_sender_info.sender_id, streams); + video_receiver->SetMediaChannel(video_media_channel()); + if (remote_sender_info.sender_id == kDefaultVideoSenderId) { + video_receiver->SetupUnsignaledMediaChannel(); + } else { + video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); + } + auto receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), video_receiver); + GetVideoTransceiver()->internal()->AddReceiver(receiver); + Observer()->OnAddTrack(receiver, streams); + NoteUsageEvent(UsageEvent::VIDEO_ADDED); +} + +// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote +// description. 
+rtc::scoped_refptr +RtpTransmissionManager::RemoveAndStopReceiver( + const RtpSenderInfo& remote_sender_info) { + auto receiver = FindReceiverById(remote_sender_info.sender_id); + if (!receiver) { + RTC_LOG(LS_WARNING) << "RtpReceiver for track with id " + << remote_sender_info.sender_id << " doesn't exist."; + return nullptr; + } + if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + GetAudioTransceiver()->internal()->RemoveReceiver(receiver); + } else { + GetVideoTransceiver()->internal()->RemoveReceiver(receiver); + } + return receiver; +} + +void RtpTransmissionManager::OnRemoteSenderAdded( + const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_LOG(LS_INFO) << "Creating " << cricket::MediaTypeToString(media_type) + << " receiver for track_id=" << sender_info.sender_id + << " and stream_id=" << sender_info.stream_id; + + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + CreateAudioReceiver(stream, sender_info); + } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + CreateVideoReceiver(stream, sender_info); + } else { + RTC_NOTREACHED() << "Invalid media type"; + } +} + +void RtpTransmissionManager::OnRemoteSenderRemoved( + const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_LOG(LS_INFO) << "Removing " << cricket::MediaTypeToString(media_type) + << " receiver for track_id=" << sender_info.sender_id + << " and stream_id=" << sender_info.stream_id; + + rtc::scoped_refptr receiver; + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + // When the MediaEngine audio channel is destroyed, the RemoteAudioSource + // will be notified which will end the AudioRtpReceiver::track(). 
+ receiver = RemoveAndStopReceiver(sender_info); + rtc::scoped_refptr audio_track = + stream->FindAudioTrack(sender_info.sender_id); + if (audio_track) { + stream->RemoveTrack(audio_track); + } + } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + // Stopping or destroying a VideoRtpReceiver will end the + // VideoRtpReceiver::track(). + receiver = RemoveAndStopReceiver(sender_info); + rtc::scoped_refptr video_track = + stream->FindVideoTrack(sender_info.sender_id); + if (video_track) { + // There's no guarantee the track is still available, e.g. the track may + // have been removed from the stream by an application. + stream->RemoveTrack(video_track); + } + } else { + RTC_NOTREACHED() << "Invalid media type"; + } + if (receiver) { + RTC_DCHECK(!closed_); + Observer()->OnRemoveTrack(receiver); + } +} + +void RtpTransmissionManager::OnLocalSenderAdded( + const RtpSenderInfo& sender_info, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto sender = FindSenderById(sender_info.sender_id); + if (!sender) { + RTC_LOG(LS_WARNING) << "An unknown RtpSender with id " + << sender_info.sender_id + << " has been configured in the local description."; + return; + } + + if (sender->media_type() != media_type) { + RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" + " description with an unexpected media type."; + return; + } + + sender->internal()->set_stream_ids({sender_info.stream_id}); + sender->internal()->SetSsrc(sender_info.first_ssrc); +} + +void RtpTransmissionManager::OnLocalSenderRemoved( + const RtpSenderInfo& sender_info, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + auto sender = FindSenderById(sender_info.sender_id); + if (!sender) { + // This is the normal case. I.e., RemoveStream has been called and the + // SessionDescriptions has been renegotiated. 
+ return; + } + + // A sender has been removed from the SessionDescription but it's still + // associated with the PeerConnection. This only occurs if the SDP doesn't + // match with the calls to CreateSender, AddStream and RemoveStream. + if (sender->media_type() != media_type) { + RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" + " description with an unexpected media type."; + return; + } + + sender->internal()->SetSsrc(0); +} + +std::vector* RtpTransmissionManager::GetRemoteSenderInfos( + cricket::MediaType media_type) { + RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO); + return (media_type == cricket::MEDIA_TYPE_AUDIO) + ? &remote_audio_sender_infos_ + : &remote_video_sender_infos_; +} + +std::vector* RtpTransmissionManager::GetLocalSenderInfos( + cricket::MediaType media_type) { + RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO); + return (media_type == cricket::MEDIA_TYPE_AUDIO) ? 
&local_audio_sender_infos_ + : &local_video_sender_infos_; +} + +const RtpSenderInfo* RtpTransmissionManager::FindSenderInfo( + const std::vector& infos, + const std::string& stream_id, + const std::string sender_id) const { + for (const RtpSenderInfo& sender_info : infos) { + if (sender_info.stream_id == stream_id && + sender_info.sender_id == sender_id) { + return &sender_info; + } + } + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindSenderForTrack( + MediaStreamTrackInterface* track) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers_.List()) { + for (auto sender : transceiver->internal()->senders()) { + if (sender->track() == track) { + return sender; + } + } + } + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindSenderById(const std::string& sender_id) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers_.List()) { + for (auto sender : transceiver->internal()->senders()) { + if (sender->id() == sender_id) { + return sender; + } + } + } + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindReceiverById(const std::string& receiver_id) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers_.List()) { + for (auto receiver : transceiver->internal()->receivers()) { + if (receiver->id() == receiver_id) { + return receiver; + } + } + } + return nullptr; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h new file mode 100644 index 000000000..731c3b74d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h @@ -0,0 +1,267 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_RTP_TRANSMISSION_MANAGER_H_ +#define PC_RTP_TRANSMISSION_MANAGER_H_ + +#include +#include +#include +#include + +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "media/base/media_channel.h" +#include "pc/channel_manager.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_transceiver.h" +#include "pc/stats_collector_interface.h" +#include "pc/transceiver_list.h" +#include "pc/usage_pattern.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace rtc { +class Thread; +} + +namespace webrtc { + +// This class contains information about +// an RTPSender, used for things like looking it up by SSRC. +struct RtpSenderInfo { + RtpSenderInfo() : first_ssrc(0) {} + RtpSenderInfo(const std::string& stream_id, + const std::string sender_id, + uint32_t ssrc) + : stream_id(stream_id), sender_id(sender_id), first_ssrc(ssrc) {} + bool operator==(const RtpSenderInfo& other) { + return this->stream_id == other.stream_id && + this->sender_id == other.sender_id && + this->first_ssrc == other.first_ssrc; + } + std::string stream_id; + std::string sender_id; + // An RtpSender can have many SSRCs. The first one is used as a sort of ID + // for communicating with the lower layers. + uint32_t first_ssrc; +}; + +// The RtpTransmissionManager class is responsible for managing the lifetime +// and relationships between objects of type RtpSender, RtpReceiver and +// RtpTransceiver. 
+class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { + public: + RtpTransmissionManager(bool is_unified_plan, + rtc::Thread* signaling_thread, + rtc::Thread* worker_thread, + cricket::ChannelManager* channel_manager, + UsagePattern* usage_pattern, + PeerConnectionObserver* observer, + StatsCollectorInterface* stats_, + std::function on_negotiation_needed); + + // No move or copy permitted. + RtpTransmissionManager(const RtpTransmissionManager&) = delete; + RtpTransmissionManager& operator=(const RtpTransmissionManager&) = delete; + + // Stop activity. In particular, don't call observer_ any more. + void Close(); + + // RtpSenderBase::SetStreamsObserver override. + void OnSetStreams() override; + + // Add a new track, creating transceiver if required. + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids); + + // Create a new RTP sender. Does not associate with a transceiver. + rtc::scoped_refptr> + CreateSender(cricket::MediaType media_type, + const std::string& id, + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& send_encodings); + + // Create a new RTP receiver. Does not associate with a transceiver. + rtc::scoped_refptr> + CreateReceiver(cricket::MediaType media_type, const std::string& receiver_id); + + // Create a new RtpTransceiver of the given type and add it to the list of + // registered transceivers. + rtc::scoped_refptr> + CreateAndAddTransceiver( + rtc::scoped_refptr> sender, + rtc::scoped_refptr> + receiver); + + // Returns the first RtpTransceiver suitable for a newly added track, if such + // transceiver is available. 
+ rtc::scoped_refptr> + FindFirstTransceiverForAddedTrack( + rtc::scoped_refptr track); + + // Returns the list of senders currently associated with some + // registered transceiver + std::vector>> + GetSendersInternal() const; + + // Returns the list of receivers currently associated with a transceiver + std::vector< + rtc::scoped_refptr>> + GetReceiversInternal() const; + + // Plan B: Get the transceiver containing all audio senders and receivers + rtc::scoped_refptr> + GetAudioTransceiver() const; + // Plan B: Get the transceiver containing all video senders and receivers + rtc::scoped_refptr> + GetVideoTransceiver() const; + + // Add an audio track, reusing or creating the sender. + void AddAudioTrack(AudioTrackInterface* track, MediaStreamInterface* stream); + // Plan B: Remove an audio track, removing the sender. + void RemoveAudioTrack(AudioTrackInterface* track, + MediaStreamInterface* stream); + // Add a video track, reusing or creating the sender. + void AddVideoTrack(VideoTrackInterface* track, MediaStreamInterface* stream); + // Plan B: Remove a video track, removing the sender. + void RemoveVideoTrack(VideoTrackInterface* track, + MediaStreamInterface* stream); + + // Triggered when a remote sender has been seen for the first time in a remote + // session description. It creates a remote MediaStreamTrackInterface + // implementation and triggers CreateAudioReceiver or CreateVideoReceiver. + void OnRemoteSenderAdded(const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type); + + // Triggered when a remote sender has been removed from a remote session + // description. It removes the remote sender with id |sender_id| from a remote + // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver. 
+ void OnRemoteSenderRemoved(const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type); + + // Triggered when a local sender has been seen for the first time in a local + // session description. + // This method triggers CreateAudioSender or CreateVideoSender if the rtp + // streams in the local SessionDescription can be mapped to a MediaStreamTrack + // in a MediaStream in |local_streams_| + void OnLocalSenderAdded(const RtpSenderInfo& sender_info, + cricket::MediaType media_type); + + // Triggered when a local sender has been removed from a local session + // description. + // This method triggers DestroyAudioSender or DestroyVideoSender if a stream + // has been removed from the local SessionDescription and the stream can be + // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|. + void OnLocalSenderRemoved(const RtpSenderInfo& sender_info, + cricket::MediaType media_type); + + std::vector* GetRemoteSenderInfos( + cricket::MediaType media_type); + std::vector* GetLocalSenderInfos( + cricket::MediaType media_type); + const RtpSenderInfo* FindSenderInfo(const std::vector& infos, + const std::string& stream_id, + const std::string sender_id) const; + + // Return the RtpSender with the given track attached. + rtc::scoped_refptr> + FindSenderForTrack(MediaStreamTrackInterface* track) const; + + // Return the RtpSender with the given id, or null if none exists. + rtc::scoped_refptr> + FindSenderById(const std::string& sender_id) const; + + // Return the RtpReceiver with the given id, or null if none exists. + rtc::scoped_refptr> + FindReceiverById(const std::string& receiver_id) const; + + TransceiverList* transceivers() { return &transceivers_; } + const TransceiverList* transceivers() const { return &transceivers_; } + + // Plan B helpers for getting the voice/video media channels for the single + // audio/video transceiver, if it exists. 
+ cricket::VoiceMediaChannel* voice_media_channel() const; + cricket::VideoMediaChannel* video_media_channel() const; + + private: + rtc::Thread* signaling_thread() const { return signaling_thread_; } + rtc::Thread* worker_thread() const { return worker_thread_; } + cricket::ChannelManager* channel_manager() const { return channel_manager_; } + bool IsUnifiedPlan() const { return is_unified_plan_; } + void NoteUsageEvent(UsageEvent event) { + usage_pattern_->NoteUsageEvent(event); + } + + // AddTrack implementation when Unified Plan is specified. + RTCErrorOr> AddTrackUnifiedPlan( + rtc::scoped_refptr track, + const std::vector& stream_ids); + // AddTrack implementation when Plan B is specified. + RTCErrorOr> AddTrackPlanB( + rtc::scoped_refptr track, + const std::vector& stream_ids); + + // Create an RtpReceiver that sources an audio track. + void CreateAudioReceiver(MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) + RTC_RUN_ON(signaling_thread()); + + // Create an RtpReceiver that sources a video track. + void CreateVideoReceiver(MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) + RTC_RUN_ON(signaling_thread()); + rtc::scoped_refptr RemoveAndStopReceiver( + const RtpSenderInfo& remote_sender_info) RTC_RUN_ON(signaling_thread()); + + PeerConnectionObserver* Observer() const; + void OnNegotiationNeeded(); + + TransceiverList transceivers_; + + // These lists store sender info seen in local/remote descriptions. 
+ std::vector remote_audio_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + std::vector remote_video_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + std::vector local_audio_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + std::vector local_video_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + + bool closed_ = false; + bool const is_unified_plan_; + rtc::Thread* signaling_thread_; + rtc::Thread* worker_thread_; + cricket::ChannelManager* channel_manager_; + UsagePattern* usage_pattern_; + PeerConnectionObserver* observer_; + StatsCollectorInterface* const stats_; + std::function on_negotiation_needed_; + rtc::WeakPtrFactory weak_ptr_factory_ + RTC_GUARDED_BY(signaling_thread()); +}; + +} // namespace webrtc + +#endif // PC_RTP_TRANSMISSION_MANAGER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc index fe7357fc9..0f4a820d8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc @@ -195,9 +195,14 @@ void RtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet, if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) { RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: " << RtpDemuxer::DescribePacket(parsed_packet); + uint32_t ssrc = parsed_packet.Ssrc(); + OnErrorDemuxingPacket(ssrc); } } +void RtpTransport::OnErrorDemuxingPacket(uint32_t ssrc) { +} + bool RtpTransport::IsTransportWritable() { auto rtcp_packet_transport = rtcp_mux_enabled_ ? 
nullptr : rtcp_packet_transport_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h index 57ad9e5fd..3191e852b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h @@ -76,6 +76,8 @@ class RtpTransport : public RtpTransportInternal { bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override; + virtual void OnErrorDemuxingPacket(uint32_t ssrc); + protected: // These methods will be used in the subclasses. void DemuxPacket(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us); diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc index e603dddd0..c4357a8da 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc @@ -21,6 +21,7 @@ #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { @@ -206,8 +207,14 @@ bool SctpDataChannel::Init() { // Chrome glue and WebKit) are not wired up properly until after this // function returns. 
if (provider_->ReadyToSendData()) { - invoker_.AsyncInvoke(RTC_FROM_HERE, rtc::Thread::Current(), - [this] { OnTransportReady(true); }); + AddRef(); + rtc::Thread::Current()->PostTask(ToQueuedTask( + [this] { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (state_ != kClosed) + OnTransportReady(true); + }, + [this] { Release(); })); } return true; diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h index 871f18af5..6d121e6f8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h @@ -21,7 +21,6 @@ #include "api/transport/data_channel_transport_interface.h" #include "media/base/media_channel.h" #include "pc/data_channel_utils.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/ssl_stream_adapter.h" // For SSLRole #include "rtc_base/third_party/sigslot/sigslot.h" @@ -277,7 +276,6 @@ class SctpDataChannel : public DataChannelInterface, PacketQueue queued_control_data_ RTC_GUARDED_BY(signaling_thread_); PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_); PacketQueue queued_send_data_ RTC_GUARDED_BY(signaling_thread_); - rtc::AsyncInvoker invoker_ RTC_GUARDED_BY(signaling_thread_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc new file mode 100644 index 000000000..fd697ce8b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc @@ -0,0 +1,5027 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/sdp_offer_answer.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/media_stream_proxy.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/uma_metrics.h" +#include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "media/base/codec.h" +#include "media/base/media_engine.h" +#include "media/base/rid_description.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/port.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_description_factory.h" +#include "p2p/base/transport_info.h" +#include "pc/connection_context.h" +#include "pc/data_channel_utils.h" +#include "pc/media_protocol_names.h" +#include "pc/media_stream.h" +#include "pc/peer_connection.h" +#include "pc/peer_connection_message_handler.h" +#include "pc/rtp_data_channel.h" +#include "pc/rtp_media_utils.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_transport_internal.h" +#include "pc/sctp_transport.h" +#include "pc/simulcast_description.h" +#include "pc/stats_collector.h" +#include "pc/usage_pattern.h" +#include "pc/webrtc_session_description_factory.h" +#include "rtc_base/bind.h" +#include "rtc_base/helpers.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/trace_event.h" +#include 
"system_wrappers/include/metrics.h" + +using cricket::ContentInfo; +using cricket::ContentInfos; +using cricket::MediaContentDescription; +using cricket::MediaProtocolType; +using cricket::RidDescription; +using cricket::RidDirection; +using cricket::SessionDescription; +using cricket::SimulcastDescription; +using cricket::SimulcastLayer; +using cricket::SimulcastLayerList; +using cricket::StreamParams; +using cricket::TransportInfo; + +using cricket::LOCAL_PORT_TYPE; +using cricket::PRFLX_PORT_TYPE; +using cricket::RELAY_PORT_TYPE; +using cricket::STUN_PORT_TYPE; + +namespace webrtc { + +namespace { + +typedef webrtc::PeerConnectionInterface::RTCOfferAnswerOptions + RTCOfferAnswerOptions; + +// Error messages +const char kInvalidSdp[] = "Invalid session description."; +const char kInvalidCandidates[] = "Description contains invalid candidates."; +const char kBundleWithoutRtcpMux[] = + "rtcp-mux must be enabled when BUNDLE " + "is enabled."; +const char kMlineMismatchInAnswer[] = + "The order of m-lines in answer doesn't match order in offer. Rejecting " + "answer."; +const char kMlineMismatchInSubsequentOffer[] = + "The order of m-lines in subsequent offer doesn't match order from " + "previous offer/answer."; +const char kSdpWithoutIceUfragPwd[] = + "Called with SDP without ice-ufrag and ice-pwd."; +const char kSdpWithoutDtlsFingerprint[] = + "Called with SDP without DTLS fingerprint."; +const char kSdpWithoutSdesCrypto[] = "Called with SDP without SDES crypto."; + +const char kSessionError[] = "Session error code: "; +const char kSessionErrorDesc[] = "Session error description: "; + +// UMA metric names. +const char kSimulcastVersionApplyLocalDescription[] = + "WebRTC.PeerConnection.Simulcast.ApplyLocalDescription"; +const char kSimulcastVersionApplyRemoteDescription[] = + "WebRTC.PeerConnection.Simulcast.ApplyRemoteDescription"; +const char kSimulcastDisabled[] = "WebRTC.PeerConnection.Simulcast.Disabled"; + +// The length of RTCP CNAMEs. 
+static const int kRtcpCnameLength = 16; + +const char kDefaultStreamId[] = "default"; +// NOTE: Duplicated in peer_connection.cc: +static const char kDefaultAudioSenderId[] = "defaulta0"; +static const char kDefaultVideoSenderId[] = "defaultv0"; + +void NoteAddIceCandidateResult(int result) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.AddIceCandidate", result, + kAddIceCandidateMax); +} + +void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type, + cricket::MediaType media_type) { + // Array of structs needed to map {KeyExchangeProtocolType, + // cricket::MediaType} to KeyExchangeProtocolMedia without using std::map in + // order to avoid -Wglobal-constructors and -Wexit-time-destructors. + static constexpr struct { + KeyExchangeProtocolType protocol_type; + cricket::MediaType media_type; + KeyExchangeProtocolMedia protocol_media; + } kEnumCounterKeyProtocolMediaMap[] = { + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_AUDIO, + kEnumCounterKeyProtocolMediaTypeDtlsAudio}, + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_VIDEO, + kEnumCounterKeyProtocolMediaTypeDtlsVideo}, + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_DATA, + kEnumCounterKeyProtocolMediaTypeDtlsData}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_AUDIO, + kEnumCounterKeyProtocolMediaTypeSdesAudio}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_VIDEO, + kEnumCounterKeyProtocolMediaTypeSdesVideo}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_DATA, + kEnumCounterKeyProtocolMediaTypeSdesData}, + }; + + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocol", protocol_type, + kEnumCounterKeyProtocolMax); + + for (const auto& i : kEnumCounterKeyProtocolMediaMap) { + if (i.protocol_type == protocol_type && i.media_type == media_type) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocolByMedia", + i.protocol_media, + kEnumCounterKeyProtocolMediaTypeMax); + } + } +} + +// Returns true if |new_desc| requests an ICE restart (i.e., new 
ufrag/pwd). +bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc, + const SessionDescriptionInterface* new_desc, + const std::string& content_name) { + if (!old_desc) { + return false; + } + const SessionDescription* new_sd = new_desc->description(); + const SessionDescription* old_sd = old_desc->description(); + const ContentInfo* cinfo = new_sd->GetContentByName(content_name); + if (!cinfo || cinfo->rejected) { + return false; + } + // If the content isn't rejected, check if ufrag and password has changed. + const cricket::TransportDescription* new_transport_desc = + new_sd->GetTransportDescriptionByName(content_name); + const cricket::TransportDescription* old_transport_desc = + old_sd->GetTransportDescriptionByName(content_name); + if (!new_transport_desc || !old_transport_desc) { + // No transport description exists. This is not an ICE restart. + return false; + } + if (cricket::IceCredentialsChanged( + old_transport_desc->ice_ufrag, old_transport_desc->ice_pwd, + new_transport_desc->ice_ufrag, new_transport_desc->ice_pwd)) { + RTC_LOG(LS_INFO) << "Remote peer requests ICE restart for " << content_name + << "."; + return true; + } + return false; +} + +// Generates a string error message for SetLocalDescription/SetRemoteDescription +// from an RTCError. +std::string GetSetDescriptionErrorMessage(cricket::ContentSource source, + SdpType type, + const RTCError& error) { + rtc::StringBuilder oss; + oss << "Failed to set " << (source == cricket::CS_LOCAL ? 
"local" : "remote") + << " " << SdpTypeToString(type) << " sdp: " << error.message(); + return oss.Release(); +} + +std::string GetStreamIdsString(rtc::ArrayView stream_ids) { + std::string output = "streams=["; + const char* separator = ""; + for (const auto& stream_id : stream_ids) { + output.append(separator).append(stream_id); + separator = ", "; + } + output.append("]"); + return output; +} + +void ReportSimulcastApiVersion(const char* name, + const SessionDescription& session) { + bool has_legacy = false; + bool has_spec_compliant = false; + for (const ContentInfo& content : session.contents()) { + if (!content.media_description()) { + continue; + } + has_spec_compliant |= content.media_description()->HasSimulcast(); + for (const StreamParams& sp : content.media_description()->streams()) { + has_legacy |= sp.has_ssrc_group(cricket::kSimSsrcGroupSemantics); + } + } + + if (has_legacy) { + RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionLegacy, + kSimulcastApiVersionMax); + } + if (has_spec_compliant) { + RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionSpecCompliant, + kSimulcastApiVersionMax); + } + if (!has_legacy && !has_spec_compliant) { + RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionNone, + kSimulcastApiVersionMax); + } +} + +const ContentInfo* FindTransceiverMSection( + RtpTransceiverProxyWithInternal* transceiver, + const SessionDescriptionInterface* session_description) { + return transceiver->mid() + ? session_description->description()->GetContentByName( + *transceiver->mid()) + : nullptr; +} + +// If the direction is "recvonly" or "inactive", treat the description +// as containing no streams. +// See: https://code.google.com/p/webrtc/issues/detail?id=5054 +std::vector GetActiveStreams( + const cricket::MediaContentDescription* desc) { + return RtpTransceiverDirectionHasSend(desc->direction()) + ? desc->streams() + : std::vector(); +} + +// Logic to decide if an m= section can be recycled. 
This means that the new +// m= section is not rejected, but the old local or remote m= section is +// rejected. |old_content_one| and |old_content_two| refer to the m= section +// of the old remote and old local descriptions in no particular order. +// We need to check both the old local and remote because either +// could be the most current from the latest negotation. +bool IsMediaSectionBeingRecycled(SdpType type, + const ContentInfo& content, + const ContentInfo* old_content_one, + const ContentInfo* old_content_two) { + return type == SdpType::kOffer && !content.rejected && + ((old_content_one && old_content_one->rejected) || + (old_content_two && old_content_two->rejected)); +} + +// Verify that the order of media sections in |new_desc| matches +// |current_desc|. The number of m= sections in |new_desc| should be no +// less than |current_desc|. In the case of checking an answer's +// |new_desc|, the |current_desc| is the last offer that was set as the +// local or remote. In the case of checking an offer's |new_desc| we +// check against the local and remote descriptions stored from the last +// negotiation, because either of these could be the most up to date for +// possible rejected m sections. These are the |current_desc| and +// |secondary_current_desc|. 
+bool MediaSectionsInSameOrder(const SessionDescription& current_desc, + const SessionDescription* secondary_current_desc, + const SessionDescription& new_desc, + const SdpType type) { + if (current_desc.contents().size() > new_desc.contents().size()) { + return false; + } + + for (size_t i = 0; i < current_desc.contents().size(); ++i) { + const cricket::ContentInfo* secondary_content_info = nullptr; + if (secondary_current_desc && + i < secondary_current_desc->contents().size()) { + secondary_content_info = &secondary_current_desc->contents()[i]; + } + if (IsMediaSectionBeingRecycled(type, new_desc.contents()[i], + ¤t_desc.contents()[i], + secondary_content_info)) { + // For new offer descriptions, if the media section can be recycled, it's + // valid for the MID and media type to change. + continue; + } + if (new_desc.contents()[i].name != current_desc.contents()[i].name) { + return false; + } + const MediaContentDescription* new_desc_mdesc = + new_desc.contents()[i].media_description(); + const MediaContentDescription* current_desc_mdesc = + current_desc.contents()[i].media_description(); + if (new_desc_mdesc->type() != current_desc_mdesc->type()) { + return false; + } + } + return true; +} + +bool MediaSectionsHaveSameCount(const SessionDescription& desc1, + const SessionDescription& desc2) { + return desc1.contents().size() == desc2.contents().size(); +} +// Checks that each non-rejected content has SDES crypto keys or a DTLS +// fingerprint, unless it's in a BUNDLE group, in which case only the +// BUNDLE-tag section (first media section/description in the BUNDLE group) +// needs a ufrag and pwd. Mismatches, such as replying with a DTLS fingerprint +// to SDES keys, will be caught in JsepTransport negotiation, and backstopped +// by Channel's |srtp_required| check. 
+RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) { + const cricket::ContentGroup* bundle = + desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + for (const cricket::ContentInfo& content_info : desc->contents()) { + if (content_info.rejected) { + continue; + } + // Note what media is used with each crypto protocol, for all sections. + NoteKeyProtocolAndMedia(dtls_enabled ? webrtc::kEnumCounterKeyProtocolDtls + : webrtc::kEnumCounterKeyProtocolSdes, + content_info.media_description()->type()); + const std::string& mid = content_info.name; + if (bundle && bundle->HasContentName(mid) && + mid != *(bundle->FirstContentName())) { + // This isn't the first media section in the BUNDLE group, so it's not + // required to have crypto attributes, since only the crypto attributes + // from the first section actually get used. + continue; + } + + // If the content isn't rejected or bundled into another m= section, crypto + // must be present. + const MediaContentDescription* media = content_info.media_description(); + const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); + if (!media || !tinfo) { + // Something is not right. + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); + } + if (dtls_enabled) { + if (!tinfo->description.identity_fingerprint) { + RTC_LOG(LS_WARNING) + << "Session description must have DTLS fingerprint if " + "DTLS enabled."; + return RTCError(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutDtlsFingerprint); + } + } else { + if (media->cryptos().empty()) { + RTC_LOG(LS_WARNING) + << "Session description must have SDES when DTLS disabled."; + return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutSdesCrypto); + } + } + } + return RTCError::OK(); +} + +// Checks that each non-rejected content has ice-ufrag and ice-pwd set, unless +// it's in a BUNDLE group, in which case only the BUNDLE-tag section (first +// media section/description in the BUNDLE group) needs a ufrag and pwd. 
+bool VerifyIceUfragPwdPresent(const SessionDescription* desc) { + const cricket::ContentGroup* bundle = + desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + for (const cricket::ContentInfo& content_info : desc->contents()) { + if (content_info.rejected) { + continue; + } + const std::string& mid = content_info.name; + if (bundle && bundle->HasContentName(mid) && + mid != *(bundle->FirstContentName())) { + // This isn't the first media section in the BUNDLE group, so it's not + // required to have ufrag/password, since only the ufrag/password from + // the first section actually get used. + continue; + } + + // If the content isn't rejected or bundled into another m= section, + // ice-ufrag and ice-pwd must be present. + const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); + if (!tinfo) { + // Something is not right. + RTC_LOG(LS_ERROR) << kInvalidSdp; + return false; + } + if (tinfo->description.ice_ufrag.empty() || + tinfo->description.ice_pwd.empty()) { + RTC_LOG(LS_ERROR) << "Session description must have ice ufrag and pwd."; + return false; + } + } + return true; +} + +static RTCError ValidateMids(const cricket::SessionDescription& description) { + std::set mids; + for (const cricket::ContentInfo& content : description.contents()) { + if (content.name.empty()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "A media section is missing a MID attribute."); + } + if (!mids.insert(content.name).second) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Duplicate a=mid value '" + content.name + "'."); + } + } + return RTCError::OK(); +} + +bool IsValidOfferToReceiveMedia(int value) { + typedef PeerConnectionInterface::RTCOfferAnswerOptions Options; + return (value >= Options::kUndefined) && + (value <= Options::kMaxOfferToReceiveMedia); +} + +bool ValidateOfferAnswerOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options) { + return IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) && + 
IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video); +} + +// Map internal signaling state name to spec name: +// https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum +std::string GetSignalingStateString( + PeerConnectionInterface::SignalingState state) { + switch (state) { + case PeerConnectionInterface::kStable: + return "stable"; + case PeerConnectionInterface::kHaveLocalOffer: + return "have-local-offer"; + case PeerConnectionInterface::kHaveLocalPrAnswer: + return "have-local-pranswer"; + case PeerConnectionInterface::kHaveRemoteOffer: + return "have-remote-offer"; + case PeerConnectionInterface::kHaveRemotePrAnswer: + return "have-remote-pranswer"; + case PeerConnectionInterface::kClosed: + return "closed"; + } + RTC_NOTREACHED(); + return ""; +} + +// This method will extract any send encodings that were sent by the remote +// connection. This is currently only relevant for Simulcast scenario (where +// the number of layers may be communicated by the server). +static std::vector GetSendEncodingsFromRemoteDescription( + const MediaContentDescription& desc) { + if (!desc.HasSimulcast()) { + return {}; + } + std::vector result; + const SimulcastDescription& simulcast = desc.simulcast_description(); + + // This is a remote description, the parameters we are after should appear + // as receive streams. + for (const auto& alternatives : simulcast.receive_layers()) { + RTC_DCHECK(!alternatives.empty()); + // There is currently no way to specify or choose from alternatives. + // We will always use the first alternative, which is the most preferred. 
+ const SimulcastLayer& layer = alternatives[0]; + RtpEncodingParameters parameters; + parameters.rid = layer.rid; + parameters.active = !layer.is_paused; + result.push_back(parameters); + } + + return result; +} + +static RTCError UpdateSimulcastLayerStatusInSender( + const std::vector& layers, + rtc::scoped_refptr sender) { + RTC_DCHECK(sender); + RtpParameters parameters = sender->GetParametersInternal(); + std::vector disabled_layers; + + // The simulcast envelope cannot be changed, only the status of the streams. + // So we will iterate over the send encodings rather than the layers. + for (RtpEncodingParameters& encoding : parameters.encodings) { + auto iter = std::find_if(layers.begin(), layers.end(), + [&encoding](const SimulcastLayer& layer) { + return layer.rid == encoding.rid; + }); + // A layer that cannot be found may have been removed by the remote party. + if (iter == layers.end()) { + disabled_layers.push_back(encoding.rid); + continue; + } + + encoding.active = !iter->is_paused; + } + + RTCError result = sender->SetParametersInternal(parameters); + if (result.ok()) { + result = sender->DisableEncodingLayers(disabled_layers); + } + + return result; +} + +static bool SimulcastIsRejected( + const ContentInfo* local_content, + const MediaContentDescription& answer_media_desc) { + bool simulcast_offered = local_content && + local_content->media_description() && + local_content->media_description()->HasSimulcast(); + bool simulcast_answered = answer_media_desc.HasSimulcast(); + bool rids_supported = RtpExtension::FindHeaderExtensionByUri( + answer_media_desc.rtp_header_extensions(), RtpExtension::kRidUri); + return simulcast_offered && (!simulcast_answered || !rids_supported); +} + +static RTCError DisableSimulcastInSender( + rtc::scoped_refptr sender) { + RTC_DCHECK(sender); + RtpParameters parameters = sender->GetParametersInternal(); + if (parameters.encodings.size() <= 1) { + return RTCError::OK(); + } + + std::vector disabled_layers; + 
std::transform( + parameters.encodings.begin() + 1, parameters.encodings.end(), + std::back_inserter(disabled_layers), + [](const RtpEncodingParameters& encoding) { return encoding.rid; }); + return sender->DisableEncodingLayers(disabled_layers); +} + +// The SDP parser used to populate these values by default for the 'content +// name' if an a=mid line was absent. +static absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { + switch (media_type) { + case cricket::MEDIA_TYPE_AUDIO: + return cricket::CN_AUDIO; + case cricket::MEDIA_TYPE_VIDEO: + return cricket::CN_VIDEO; + case cricket::MEDIA_TYPE_DATA: + return cricket::CN_DATA; + case cricket::MEDIA_TYPE_UNSUPPORTED: + return "not supported"; + } + RTC_NOTREACHED(); + return ""; +} + +// Add options to |[audio/video]_media_description_options| from |senders|. +void AddPlanBRtpSenderOptions( + const std::vector>>& senders, + cricket::MediaDescriptionOptions* audio_media_description_options, + cricket::MediaDescriptionOptions* video_media_description_options, + int num_sim_layers) { + for (const auto& sender : senders) { + if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (audio_media_description_options) { + audio_media_description_options->AddAudioSender( + sender->id(), sender->internal()->stream_ids()); + } + } else { + RTC_DCHECK(sender->media_type() == cricket::MEDIA_TYPE_VIDEO); + if (video_media_description_options) { + video_media_description_options->AddVideoSender( + sender->id(), sender->internal()->stream_ids(), {}, + SimulcastLayerList(), num_sim_layers); + } + } + } +} + +static cricket::MediaDescriptionOptions +GetMediaDescriptionOptionsForTransceiver( + rtc::scoped_refptr> + transceiver, + const std::string& mid, + bool is_create_offer) { + // NOTE: a stopping transceiver should be treated as a stopped one in + // createOffer as specified in + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. + bool stopped = + is_create_offer ? 
transceiver->stopping() : transceiver->stopped(); + cricket::MediaDescriptionOptions media_description_options( + transceiver->media_type(), mid, transceiver->direction(), stopped); + media_description_options.codec_preferences = + transceiver->codec_preferences(); + media_description_options.header_extensions = + transceiver->HeaderExtensionsToOffer(); + // This behavior is specified in JSEP. The gist is that: + // 1. The MSID is included if the RtpTransceiver's direction is sendonly or + // sendrecv. + // 2. If the MSID is included, then it must be included in any subsequent + // offer/answer exactly the same until the RtpTransceiver is stopped. + if (stopped || (!RtpTransceiverDirectionHasSend(transceiver->direction()) && + !transceiver->internal()->has_ever_been_used_to_send())) { + return media_description_options; + } + + cricket::SenderOptions sender_options; + sender_options.track_id = transceiver->sender()->id(); + sender_options.stream_ids = transceiver->sender()->stream_ids(); + + // The following sets up RIDs and Simulcast. + // RIDs are included if Simulcast is requested or if any RID was specified. + RtpParameters send_parameters = + transceiver->internal()->sender_internal()->GetParametersInternal(); + bool has_rids = std::any_of(send_parameters.encodings.begin(), + send_parameters.encodings.end(), + [](const RtpEncodingParameters& encoding) { + return !encoding.rid.empty(); + }); + + std::vector send_rids; + SimulcastLayerList send_layers; + for (const RtpEncodingParameters& encoding : send_parameters.encodings) { + if (encoding.rid.empty()) { + continue; + } + send_rids.push_back(RidDescription(encoding.rid, RidDirection::kSend)); + send_layers.AddLayer(SimulcastLayer(encoding.rid, !encoding.active)); + } + + if (has_rids) { + sender_options.rids = send_rids; + } + + sender_options.simulcast_layers = send_layers; + // When RIDs are configured, we must set num_sim_layers to 0 to. 
+ // Otherwise, num_sim_layers must be 1 because either there is no + // simulcast, or simulcast is acheived by munging the SDP. + sender_options.num_sim_layers = has_rids ? 0 : 1; + media_description_options.sender_options.push_back(sender_options); + + return media_description_options; +} + +// Returns the ContentInfo at mline index |i|, or null if none exists. +static const ContentInfo* GetContentByIndex( + const SessionDescriptionInterface* sdesc, + size_t i) { + if (!sdesc) { + return nullptr; + } + const ContentInfos& contents = sdesc->description()->contents(); + return (i < contents.size() ? &contents[i] : nullptr); +} + +// From |rtc_options|, fill parts of |session_options| shared by all generated +// m= sectionss (in other words, nothing that involves a map/array). +void ExtractSharedMediaSessionOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options, + cricket::MediaSessionOptions* session_options) { + session_options->vad_enabled = rtc_options.voice_activity_detection; + session_options->bundle_enabled = rtc_options.use_rtp_mux; + session_options->raw_packetization_for_video = + rtc_options.raw_packetization_for_video; +} + +// Generate a RTCP CNAME when a PeerConnection is created. +std::string GenerateRtcpCname() { + std::string cname; + if (!rtc::CreateRandomString(kRtcpCnameLength, &cname)) { + RTC_LOG(LS_ERROR) << "Failed to generate CNAME."; + RTC_NOTREACHED(); + } + return cname; +} + +// Add options to |session_options| from |rtp_data_channels|. +void AddRtpDataChannelOptions( + const std::map>& + rtp_data_channels, + cricket::MediaDescriptionOptions* data_media_description_options) { + if (!data_media_description_options) { + return; + } + // Check for data channels. 
+ for (const auto& kv : rtp_data_channels) { + const RtpDataChannel* channel = kv.second; + if (channel->state() == RtpDataChannel::kConnecting || + channel->state() == RtpDataChannel::kOpen) { + // Legacy RTP data channels are signaled with the track/stream ID set to + // the data channel's label. + data_media_description_options->AddRtpDataChannel(channel->label(), + channel->label()); + } + } +} + +// Check if we can send |new_stream| on a PeerConnection. +bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, + webrtc::MediaStreamInterface* new_stream) { + if (!new_stream || !current_streams) { + return false; + } + if (current_streams->find(new_stream->id()) != nullptr) { + RTC_LOG(LS_ERROR) << "MediaStream with ID " << new_stream->id() + << " is already added."; + return false; + } + return true; +} + +} // namespace + +// Used by parameterless SetLocalDescription() to create an offer or answer. +// Upon completion of creating the session description, SetLocalDescription() is +// invoked with the result. +class SdpOfferAnswerHandler::ImplicitCreateSessionDescriptionObserver + : public CreateSessionDescriptionObserver { + public: + ImplicitCreateSessionDescriptionObserver( + rtc::WeakPtr sdp_handler, + rtc::scoped_refptr + set_local_description_observer) + : sdp_handler_(std::move(sdp_handler)), + set_local_description_observer_( + std::move(set_local_description_observer)) {} + ~ImplicitCreateSessionDescriptionObserver() override { + RTC_DCHECK(was_called_); + } + + void SetOperationCompleteCallback( + std::function operation_complete_callback) { + operation_complete_callback_ = std::move(operation_complete_callback); + } + + bool was_called() const { return was_called_; } + + void OnSuccess(SessionDescriptionInterface* desc_ptr) override { + RTC_DCHECK(!was_called_); + std::unique_ptr desc(desc_ptr); + was_called_ = true; + + // Abort early if |pc_| is no longer valid. 
+ if (!sdp_handler_) { + operation_complete_callback_(); + return; + } + // DoSetLocalDescription() is a synchronous operation that invokes + // |set_local_description_observer_| with the result. + sdp_handler_->DoSetLocalDescription( + std::move(desc), std::move(set_local_description_observer_)); + operation_complete_callback_(); + } + + void OnFailure(RTCError error) override { + RTC_DCHECK(!was_called_); + was_called_ = true; + set_local_description_observer_->OnSetLocalDescriptionComplete(RTCError( + error.type(), std::string("SetLocalDescription failed to create " + "session description - ") + + error.message())); + operation_complete_callback_(); + } + + private: + bool was_called_ = false; + rtc::WeakPtr sdp_handler_; + rtc::scoped_refptr + set_local_description_observer_; + std::function operation_complete_callback_; +}; + +// Wraps a CreateSessionDescriptionObserver and an OperationsChain operation +// complete callback. When the observer is invoked, the wrapped observer is +// invoked followed by invoking the completion callback. +class CreateSessionDescriptionObserverOperationWrapper + : public CreateSessionDescriptionObserver { + public: + CreateSessionDescriptionObserverOperationWrapper( + rtc::scoped_refptr observer, + std::function operation_complete_callback) + : observer_(std::move(observer)), + operation_complete_callback_(std::move(operation_complete_callback)) { + RTC_DCHECK(observer_); + } + ~CreateSessionDescriptionObserverOperationWrapper() override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(was_called_); +#endif + } + + void OnSuccess(SessionDescriptionInterface* desc) override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(!was_called_); + was_called_ = true; +#endif // RTC_DCHECK_IS_ON + // Completing the operation before invoking the observer allows the observer + // to execute SetLocalDescription() without delay. 
+ operation_complete_callback_(); + observer_->OnSuccess(desc); + } + + void OnFailure(RTCError error) override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(!was_called_); + was_called_ = true; +#endif // RTC_DCHECK_IS_ON + operation_complete_callback_(); + observer_->OnFailure(std::move(error)); + } + + private: +#if RTC_DCHECK_IS_ON + bool was_called_ = false; +#endif // RTC_DCHECK_IS_ON + rtc::scoped_refptr observer_; + std::function operation_complete_callback_; +}; + +// Wrapper for SetSessionDescriptionObserver that invokes the success or failure +// callback in a posted message handled by the peer connection. This introduces +// a delay that prevents recursive API calls by the observer, but this also +// means that the PeerConnection can be modified before the observer sees the +// result of the operation. This is ill-advised for synchronizing states. +// +// Implements both the SetLocalDescriptionObserverInterface and the +// SetRemoteDescriptionObserverInterface. +class SdpOfferAnswerHandler::SetSessionDescriptionObserverAdapter + : public SetLocalDescriptionObserverInterface, + public SetRemoteDescriptionObserverInterface { + public: + SetSessionDescriptionObserverAdapter( + rtc::WeakPtr handler, + rtc::scoped_refptr inner_observer) + : handler_(std::move(handler)), + inner_observer_(std::move(inner_observer)) {} + + // SetLocalDescriptionObserverInterface implementation. + void OnSetLocalDescriptionComplete(RTCError error) override { + OnSetDescriptionComplete(std::move(error)); + } + // SetRemoteDescriptionObserverInterface implementation. 
+ void OnSetRemoteDescriptionComplete(RTCError error) override { + OnSetDescriptionComplete(std::move(error)); + } + + private: + void OnSetDescriptionComplete(RTCError error) { + if (!handler_) + return; + if (error.ok()) { + handler_->pc_->message_handler()->PostSetSessionDescriptionSuccess( + inner_observer_); + } else { + handler_->pc_->message_handler()->PostSetSessionDescriptionFailure( + inner_observer_, std::move(error)); + } + } + + rtc::WeakPtr handler_; + rtc::scoped_refptr inner_observer_; +}; + +class SdpOfferAnswerHandler::LocalIceCredentialsToReplace { + public: + // Sets the ICE credentials that need restarting to the ICE credentials of + // the current and pending descriptions. + void SetIceCredentialsFromLocalDescriptions( + const SessionDescriptionInterface* current_local_description, + const SessionDescriptionInterface* pending_local_description) { + ice_credentials_.clear(); + if (current_local_description) { + AppendIceCredentialsFromSessionDescription(*current_local_description); + } + if (pending_local_description) { + AppendIceCredentialsFromSessionDescription(*pending_local_description); + } + } + + void ClearIceCredentials() { ice_credentials_.clear(); } + + // Returns true if we have ICE credentials that need restarting. + bool HasIceCredentials() const { return !ice_credentials_.empty(); } + + // Returns true if |local_description| shares no ICE credentials with the + // ICE credentials that need restarting. 
+ bool SatisfiesIceRestart( + const SessionDescriptionInterface& local_description) const { + for (const auto& transport_info : + local_description.description()->transport_infos()) { + if (ice_credentials_.find(std::make_pair( + transport_info.description.ice_ufrag, + transport_info.description.ice_pwd)) != ice_credentials_.end()) { + return false; + } + } + return true; + } + + private: + void AppendIceCredentialsFromSessionDescription( + const SessionDescriptionInterface& desc) { + for (const auto& transport_info : desc.description()->transport_infos()) { + ice_credentials_.insert( + std::make_pair(transport_info.description.ice_ufrag, + transport_info.description.ice_pwd)); + } + } + + std::set> ice_credentials_; +}; + +SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnection* pc) + : pc_(pc), + local_streams_(StreamCollection::Create()), + remote_streams_(StreamCollection::Create()), + operations_chain_(rtc::OperationsChain::Create()), + rtcp_cname_(GenerateRtcpCname()), + local_ice_credentials_to_replace_(new LocalIceCredentialsToReplace()), + weak_ptr_factory_(this) { + operations_chain_->SetOnChainEmptyCallback( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() { + if (!this_weak_ptr) + return; + this_weak_ptr->OnOperationsChainEmpty(); + }); +} + +SdpOfferAnswerHandler::~SdpOfferAnswerHandler() {} + +// Static +std::unique_ptr SdpOfferAnswerHandler::Create( + PeerConnection* pc, + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies) { + auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc)); + handler->Initialize(configuration, dependencies); + return handler; +} + +void SdpOfferAnswerHandler::Initialize( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies) { + RTC_DCHECK_RUN_ON(signaling_thread()); + video_options_.screencast_min_bitrate_kbps = + configuration.screencast_min_bitrate; + audio_options_.combined_audio_video_bwe = + 
configuration.combined_audio_video_bwe; + + audio_options_.audio_jitter_buffer_max_packets = + configuration.audio_jitter_buffer_max_packets; + + audio_options_.audio_jitter_buffer_fast_accelerate = + configuration.audio_jitter_buffer_fast_accelerate; + + audio_options_.audio_jitter_buffer_min_delay_ms = + configuration.audio_jitter_buffer_min_delay_ms; + + audio_options_.audio_jitter_buffer_enable_rtx_handling = + configuration.audio_jitter_buffer_enable_rtx_handling; + + // Obtain a certificate from RTCConfiguration if any were provided (optional). + rtc::scoped_refptr certificate; + if (!configuration.certificates.empty()) { + // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of + // just picking the first one. The decision should be made based on the DTLS + // handshake. The DTLS negotiations need to know about all certificates. + certificate = configuration.certificates[0]; + } + + webrtc_session_desc_factory_ = + std::make_unique( + signaling_thread(), channel_manager(), this, pc_->session_id(), + pc_->dtls_enabled(), std::move(dependencies.cert_generator), + certificate, &ssrc_generator_, + [this](const rtc::scoped_refptr& certificate) { + transport_controller()->SetLocalCertificate(certificate); + }); + + if (pc_->options()->disable_encryption) { + webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED); + } + + webrtc_session_desc_factory_->set_enable_encrypted_rtp_header_extensions( + pc_->GetCryptoOptions().srtp.enable_encrypted_rtp_header_extensions); + webrtc_session_desc_factory_->set_is_unified_plan(IsUnifiedPlan()); + + if (dependencies.video_bitrate_allocator_factory) { + video_bitrate_allocator_factory_ = + std::move(dependencies.video_bitrate_allocator_factory); + } else { + video_bitrate_allocator_factory_ = + CreateBuiltinVideoBitrateAllocatorFactory(); + } +} + +// ================================================================== +// Access to pc_ variables +cricket::ChannelManager* 
SdpOfferAnswerHandler::channel_manager() const { + return pc_->channel_manager(); +} +TransceiverList* SdpOfferAnswerHandler::transceivers() { + if (!pc_->rtp_manager()) { + return nullptr; + } + return pc_->rtp_manager()->transceivers(); +} +const TransceiverList* SdpOfferAnswerHandler::transceivers() const { + if (!pc_->rtp_manager()) { + return nullptr; + } + return pc_->rtp_manager()->transceivers(); +} +JsepTransportController* SdpOfferAnswerHandler::transport_controller() { + return pc_->transport_controller(); +} +const JsepTransportController* SdpOfferAnswerHandler::transport_controller() + const { + return pc_->transport_controller(); +} +DataChannelController* SdpOfferAnswerHandler::data_channel_controller() { + return pc_->data_channel_controller(); +} +const DataChannelController* SdpOfferAnswerHandler::data_channel_controller() + const { + return pc_->data_channel_controller(); +} +cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() { + return pc_->port_allocator(); +} +const cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() const { + return pc_->port_allocator(); +} +RtpTransmissionManager* SdpOfferAnswerHandler::rtp_manager() { + return pc_->rtp_manager(); +} +const RtpTransmissionManager* SdpOfferAnswerHandler::rtp_manager() const { + return pc_->rtp_manager(); +} + +// =================================================================== + +void SdpOfferAnswerHandler::PrepareForShutdown() { + RTC_DCHECK_RUN_ON(signaling_thread()); + weak_ptr_factory_.InvalidateWeakPtrs(); +} + +void SdpOfferAnswerHandler::Close() { + ChangeSignalingState(PeerConnectionInterface::kClosed); +} + +void SdpOfferAnswerHandler::RestartIce() { + RTC_DCHECK_RUN_ON(signaling_thread()); + local_ice_credentials_to_replace_->SetIceCredentialsFromLocalDescriptions( + current_local_description(), pending_local_description()); + UpdateNegotiationNeeded(); +} + +rtc::Thread* SdpOfferAnswerHandler::signaling_thread() const { + return pc_->signaling_thread(); 
+} + +void SdpOfferAnswerHandler::CreateOffer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + options](std::function operations_chain_callback) { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + observer_refptr->OnFailure( + RTCError(RTCErrorType::INTERNAL_ERROR, + "CreateOffer failed because the session was shut down")); + operations_chain_callback(); + return; + } + // The operation completes asynchronously when the wrapper is invoked. + rtc::scoped_refptr + observer_wrapper(new rtc::RefCountedObject< + CreateSessionDescriptionObserverOperationWrapper>( + std::move(observer_refptr), + std::move(operations_chain_callback))); + this_weak_ptr->DoCreateOffer(options, observer_wrapper); + }); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc_ptr) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + desc = std::unique_ptr(desc_ptr)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. 
+ if (!this_weak_ptr) { + // For consistency with SetSessionDescriptionObserverAdapter whose + // posted messages doesn't get processed when the PC is destroyed, we + // do not inform |observer_refptr| that the operation failed. + operations_chain_callback(); + return; + } + // SetSessionDescriptionObserverAdapter takes care of making sure the + // |observer_refptr| is invoked in a posted message. + this_weak_ptr->DoSetLocalDescription( + std::move(desc), + rtc::scoped_refptr( + new rtc::RefCountedObject( + this_weak_ptr, observer_refptr))); + // For backwards-compatability reasons, we declare the operation as + // completed here (rather than in a post), so that the operation chain + // is not blocked by this operation when the observer is invoked. This + // allows the observer to trigger subsequent offer/answer operations + // synchronously if the operation chain is now empty. + operations_chain_callback(); + }); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, + desc = std::move(desc)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + observer->OnSetLocalDescriptionComplete(RTCError( + RTCErrorType::INTERNAL_ERROR, + "SetLocalDescription failed because the session was shut down")); + operations_chain_callback(); + return; + } + this_weak_ptr->DoSetLocalDescription(std::move(desc), observer); + // DoSetLocalDescription() is implemented as a synchronous operation. 
+ // The |observer| will already have been informed that it completed, and + // we can mark this operation as complete without any loose ends. + operations_chain_callback(); + }); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + SetSessionDescriptionObserver* observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + SetLocalDescription( + new rtc::RefCountedObject( + weak_ptr_factory_.GetWeakPtr(), observer)); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // The |create_sdp_observer| handles performing DoSetLocalDescription() with + // the resulting description as well as completing the operation. + rtc::scoped_refptr + create_sdp_observer( + new rtc::RefCountedObject( + weak_ptr_factory_.GetWeakPtr(), observer)); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + create_sdp_observer](std::function operations_chain_callback) { + // The |create_sdp_observer| is responsible for completing the + // operation. + create_sdp_observer->SetOperationCompleteCallback( + std::move(operations_chain_callback)); + // Abort early if |this_weak_ptr| is no longer valid. This triggers the + // same code path as if DoCreateOffer() or DoCreateAnswer() failed. 
+ if (!this_weak_ptr) { + create_sdp_observer->OnFailure(RTCError( + RTCErrorType::INTERNAL_ERROR, + "SetLocalDescription failed because the session was shut down")); + return; + } + switch (this_weak_ptr->signaling_state()) { + case PeerConnectionInterface::kStable: + case PeerConnectionInterface::kHaveLocalOffer: + case PeerConnectionInterface::kHaveRemotePrAnswer: + // TODO(hbos): If [LastCreatedOffer] exists and still represents the + // current state of the system, use that instead of creating another + // offer. + this_weak_ptr->DoCreateOffer( + PeerConnectionInterface::RTCOfferAnswerOptions(), + create_sdp_observer); + break; + case PeerConnectionInterface::kHaveLocalPrAnswer: + case PeerConnectionInterface::kHaveRemoteOffer: + // TODO(hbos): If [LastCreatedAnswer] exists and still represents + // the current state of the system, use that instead of creating + // another answer. + this_weak_ptr->DoCreateAnswer( + PeerConnectionInterface::RTCOfferAnswerOptions(), + create_sdp_observer); + break; + case PeerConnectionInterface::kClosed: + create_sdp_observer->OnFailure(RTCError( + RTCErrorType::INVALID_STATE, + "SetLocalDescription called when PeerConnection is closed.")); + break; + } + }); +} + +RTCError SdpOfferAnswerHandler::ApplyLocalDescription( + std::unique_ptr desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(desc); + + // Update stats here so that we have the most recent stats for tracks and + // streams that might be removed by updating the session description. + pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); + + // Take a reference to the old local description since it's used below to + // compare against the new local description. When setting the new local + // description, grab ownership of the replaced session description in case it + // is the same as |old_local_description|, to keep it alive for the duration + // of the method. 
+ const SessionDescriptionInterface* old_local_description = + local_description(); + std::unique_ptr replaced_local_description; + SdpType type = desc->GetType(); + if (type == SdpType::kAnswer) { + replaced_local_description = pending_local_description_ + ? std::move(pending_local_description_) + : std::move(current_local_description_); + current_local_description_ = std::move(desc); + pending_local_description_ = nullptr; + current_remote_description_ = std::move(pending_remote_description_); + } else { + replaced_local_description = std::move(pending_local_description_); + pending_local_description_ = std::move(desc); + } + // The session description to apply now must be accessed by + // |local_description()|. + RTC_DCHECK(local_description()); + + // Report statistics about any use of simulcast. + ReportSimulcastApiVersion(kSimulcastVersionApplyLocalDescription, + *local_description()->description()); + + if (!is_caller_) { + if (remote_description()) { + // Remote description was applied first, so this PC is the callee. + is_caller_ = false; + } else { + // Local description is applied first, so this PC is the caller. + is_caller_ = true; + } + } + + RTCError error = PushdownTransportDescription(cricket::CS_LOCAL, type); + if (!error.ok()) { + return error; + } + + if (IsUnifiedPlan()) { + RTCError error = UpdateTransceiversAndDataChannels( + cricket::CS_LOCAL, *local_description(), old_local_description, + remote_description()); + if (!error.ok()) { + return error; + } + std::vector> remove_list; + std::vector> removed_streams; + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->stopped()) { + continue; + } + + // 2.2.7.1.1.(6-9): Set sender and receiver's transport slots. + // Note that code paths that don't set MID won't be able to use + // information about DTLS transports. 
+ if (transceiver->mid()) { + auto dtls_transport = transport_controller()->LookupDtlsTransportByMid( + *transceiver->mid()); + transceiver->internal()->sender_internal()->set_transport( + dtls_transport); + transceiver->internal()->receiver_internal()->set_transport( + dtls_transport); + } + + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, local_description()); + if (!content) { + continue; + } + const MediaContentDescription* media_desc = content->media_description(); + // 2.2.7.1.6: If description is of type "answer" or "pranswer", then run + // the following steps: + if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // 2.2.7.1.6.1: If direction is "sendonly" or "inactive", and + // transceiver's [[FiredDirection]] slot is either "sendrecv" or + // "recvonly", process the removal of a remote track for the media + // description, given transceiver, removeList, and muteTracks. + if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) && + (transceiver->internal()->fired_direction() && + RtpTransceiverDirectionHasRecv( + *transceiver->internal()->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver, &remove_list, + &removed_streams); + } + // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and + // [[FiredDirection]] slots to direction. + transceiver->internal()->set_current_direction(media_desc->direction()); + transceiver->internal()->set_fired_direction(media_desc->direction()); + } + } + auto observer = pc_->Observer(); + for (const auto& transceiver : remove_list) { + observer->OnRemoveTrack(transceiver->receiver()); + } + for (const auto& stream : removed_streams) { + observer->OnRemoveStream(stream); + } + } else { + // Media channels will be created only when offer is set. These may use new + // transports just created by PushdownTransportDescription. + if (type == SdpType::kOffer) { + // TODO(bugs.webrtc.org/4676) - Handle CreateChannel failure, as new local + // description is applied. 
Restore back to old description. + RTCError error = CreateChannels(*local_description()->description()); + if (!error.ok()) { + return error; + } + } + // Remove unused channels if MediaContentDescription is rejected. + RemoveUnusedChannels(local_description()->description()); + } + + error = UpdateSessionState(type, cricket::CS_LOCAL, + local_description()->description()); + if (!error.ok()) { + return error; + } + + if (remote_description()) { + // Now that we have a local description, we can push down remote candidates. + UseCandidatesInSessionDescription(remote_description()); + } + + pending_ice_restarts_.clear(); + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } + + // If setting the description decided our SSL role, allocate any necessary + // SCTP sids. + rtc::SSLRole role; + if (IsSctpLike(pc_->data_channel_type()) && pc_->GetSctpSslRole(&role)) { + data_channel_controller()->AllocateSctpSids(role); + } + + if (IsUnifiedPlan()) { + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->stopped()) { + continue; + } + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, local_description()); + if (!content) { + continue; + } + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (content->rejected || !channel || channel->local_streams().empty()) { + // 0 is a special value meaning "this sender has no associated send + // stream". Need to call this so the sender won't attempt to configure + // a no longer existing stream and run into DCHECKs in the lower + // layers. + transceiver->internal()->sender_internal()->SetSsrc(0); + } else { + // Get the StreamParams from the channel which could generate SSRCs. 
+ const std::vector& streams = channel->local_streams(); + transceiver->internal()->sender_internal()->set_stream_ids( + streams[0].stream_ids()); + transceiver->internal()->sender_internal()->SetSsrc( + streams[0].first_ssrc()); + } + } + } else { + // Plan B semantics. + + // Update state and SSRC of local MediaStreams and DataChannels based on the + // local session description. + const cricket::ContentInfo* audio_content = + GetFirstAudioContent(local_description()->description()); + if (audio_content) { + if (audio_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + } else { + const cricket::AudioContentDescription* audio_desc = + audio_content->media_description()->as_audio(); + UpdateLocalSenders(audio_desc->streams(), audio_desc->type()); + } + } + + const cricket::ContentInfo* video_content = + GetFirstVideoContent(local_description()->description()); + if (video_content) { + if (video_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + } else { + const cricket::VideoContentDescription* video_desc = + video_content->media_description()->as_video(); + UpdateLocalSenders(video_desc->streams(), video_desc->type()); + } + } + } + + const cricket::ContentInfo* data_content = + GetFirstDataContent(local_description()->description()); + if (data_content) { + const cricket::RtpDataContentDescription* rtp_data_desc = + data_content->media_description()->as_rtp_data(); + // rtp_data_desc will be null if this is an SCTP description. 
+ if (rtp_data_desc) { + data_channel_controller()->UpdateLocalRtpDataChannels( + rtp_data_desc->streams()); + } + } + + if (type == SdpType::kAnswer && + local_ice_credentials_to_replace_->SatisfiesIceRestart( + *current_local_description_)) { + local_ice_credentials_to_replace_->ClearIceCredentials(); + } + + return RTCError::OK(); +} + +void SdpOfferAnswerHandler::SetRemoteDescription( + SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc_ptr) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + desc = std::unique_ptr(desc_ptr)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + // For consistency with SetSessionDescriptionObserverAdapter whose + // posted messages doesn't get processed when the PC is destroyed, we + // do not inform |observer_refptr| that the operation failed. + operations_chain_callback(); + return; + } + // SetSessionDescriptionObserverAdapter takes care of making sure the + // |observer_refptr| is invoked in a posted message. + this_weak_ptr->DoSetRemoteDescription( + std::move(desc), + rtc::scoped_refptr( + new rtc::RefCountedObject( + this_weak_ptr, observer_refptr))); + // For backwards-compatability reasons, we declare the operation as + // completed here (rather than in a post), so that the operation chain + // is not blocked by this operation when the observer is invoked. This + // allows the observer to trigger subsequent offer/answer operations + // synchronously if the operation chain is now empty. 
+ operations_chain_callback(); + }); +} + +void SdpOfferAnswerHandler::SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, + desc = std::move(desc)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + observer->OnSetRemoteDescriptionComplete(RTCError( + RTCErrorType::INTERNAL_ERROR, + "SetRemoteDescription failed because the session was shut down")); + operations_chain_callback(); + return; + } + this_weak_ptr->DoSetRemoteDescription(std::move(desc), + std::move(observer)); + // DoSetRemoteDescription() is implemented as a synchronous operation. + // The |observer| will already have been informed that it completed, and + // we can mark this operation as complete without any loose ends. + operations_chain_callback(); + }); +} + +RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( + std::unique_ptr desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(desc); + + // Update stats here so that we have the most recent stats for tracks and + // streams that might be removed by updating the session description. + pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); + + // Take a reference to the old remote description since it's used below to + // compare against the new remote description. When setting the new remote + // description, grab ownership of the replaced session description in case it + // is the same as |old_remote_description|, to keep it alive for the duration + // of the method. 
+ const SessionDescriptionInterface* old_remote_description = + remote_description(); + std::unique_ptr replaced_remote_description; + SdpType type = desc->GetType(); + if (type == SdpType::kAnswer) { + replaced_remote_description = pending_remote_description_ + ? std::move(pending_remote_description_) + : std::move(current_remote_description_); + current_remote_description_ = std::move(desc); + pending_remote_description_ = nullptr; + current_local_description_ = std::move(pending_local_description_); + } else { + replaced_remote_description = std::move(pending_remote_description_); + pending_remote_description_ = std::move(desc); + } + // The session description to apply now must be accessed by + // |remote_description()|. + RTC_DCHECK(remote_description()); + + // Report statistics about any use of simulcast. + ReportSimulcastApiVersion(kSimulcastVersionApplyRemoteDescription, + *remote_description()->description()); + + RTCError error = PushdownTransportDescription(cricket::CS_REMOTE, type); + if (!error.ok()) { + return error; + } + // Transport and Media channels will be created only when offer is set. + if (IsUnifiedPlan()) { + RTCError error = UpdateTransceiversAndDataChannels( + cricket::CS_REMOTE, *remote_description(), local_description(), + old_remote_description); + if (!error.ok()) { + return error; + } + } else { + // Media channels will be created only when offer is set. These may use new + // transports just created by PushdownTransportDescription. + if (type == SdpType::kOffer) { + // TODO(mallinath) - Handle CreateChannel failure, as new local + // description is applied. Restore back to old description. + RTCError error = CreateChannels(*remote_description()->description()); + if (!error.ok()) { + return error; + } + } + // Remove unused channels if MediaContentDescription is rejected. 
+ RemoveUnusedChannels(remote_description()->description()); + } + + // NOTE: Candidates allocation will be initiated only when + // SetLocalDescription is called. + error = UpdateSessionState(type, cricket::CS_REMOTE, + remote_description()->description()); + if (!error.ok()) { + return error; + } + + if (local_description() && + !UseCandidatesInSessionDescription(remote_description())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidCandidates); + } + + if (old_remote_description) { + for (const cricket::ContentInfo& content : + old_remote_description->description()->contents()) { + // Check if this new SessionDescription contains new ICE ufrag and + // password that indicates the remote peer requests an ICE restart. + // TODO(deadbeef): When we start storing both the current and pending + // remote description, this should reset pending_ice_restarts and compare + // against the current description. + if (CheckForRemoteIceRestart(old_remote_description, remote_description(), + content.name)) { + if (type == SdpType::kOffer) { + pending_ice_restarts_.insert(content.name); + } + } else { + // We retain all received candidates only if ICE is not restarted. + // When ICE is restarted, all previous candidates belong to an old + // generation and should not be kept. + // TODO(deadbeef): This goes against the W3C spec which says the remote + // description should only contain candidates from the last set remote + // description plus any candidates added since then. We should remove + // this once we're sure it won't break anything. 
+ WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( + old_remote_description, content.name, mutable_remote_description()); + } + } + } + + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } + + // Set the the ICE connection state to connecting since the connection may + // become writable with peer reflexive candidates before any remote candidate + // is signaled. + // TODO(pthatcher): This is a short-term solution for crbug/446908. A real fix + // is to have a new signal the indicates a change in checking state from the + // transport and expose a new checking() member from transport that can be + // read to determine the current checking state. The existing SignalConnecting + // actually means "gathering candidates", so cannot be be used here. + if (remote_description()->GetType() != SdpType::kOffer && + remote_description()->number_of_mediasections() > 0u && + pc_->ice_connection_state() == + PeerConnectionInterface::kIceConnectionNew) { + pc_->SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); + } + + // If setting the description decided our SSL role, allocate any necessary + // SCTP sids. 
+ rtc::SSLRole role; + if (IsSctpLike(pc_->data_channel_type()) && pc_->GetSctpSslRole(&role)) { + data_channel_controller()->AllocateSctpSids(role); + } + + if (IsUnifiedPlan()) { + std::vector> + now_receiving_transceivers; + std::vector> remove_list; + std::vector> added_streams; + std::vector> removed_streams; + for (const auto& transceiver : transceivers()->List()) { + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, remote_description()); + if (!content) { + continue; + } + const MediaContentDescription* media_desc = content->media_description(); + RtpTransceiverDirection local_direction = + RtpTransceiverDirectionReversed(media_desc->direction()); + // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the + // RTCSessionDescription: Set the associated remote streams given + // transceiver.[[Receiver]], msids, addList, and removeList". + // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription + if (RtpTransceiverDirectionHasRecv(local_direction)) { + std::vector stream_ids; + if (!media_desc->streams().empty()) { + // The remote description has signaled the stream IDs. + stream_ids = media_desc->streams()[0].stream_ids(); + } + transceivers() + ->StableState(transceiver) + ->SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); + + RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name + << " (" << GetStreamIdsString(stream_ids) << ")."; + SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), + stream_ids, &added_streams, + &removed_streams); + // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 + // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, + // and transceiver's current direction is neither sendrecv nor recvonly, + // process the addition of a remote track for the media description. 
+ if (!transceiver->fired_direction() || + !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { + RTC_LOG(LS_INFO) + << "Processing the addition of a remote track for MID=" + << content->name << "."; + now_receiving_transceivers.push_back(transceiver); + } + } + // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's + // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the + // removal of a remote track for the media description, given transceiver, + // removeList, and muteTracks. + if (!RtpTransceiverDirectionHasRecv(local_direction) && + (transceiver->fired_direction() && + RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver, &remove_list, + &removed_streams); + } + // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. + transceiver->internal()->set_fired_direction(local_direction); + // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run + // the following steps: + if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to + // direction. + transceiver->internal()->set_current_direction(local_direction); + // 2.2.8.1.11.[3-6]: Set the transport internal slots. + if (transceiver->mid()) { + auto dtls_transport = + transport_controller()->LookupDtlsTransportByMid( + *transceiver->mid()); + transceiver->internal()->sender_internal()->set_transport( + dtls_transport); + transceiver->internal()->receiver_internal()->set_transport( + dtls_transport); + } + } + // 2.2.8.1.12: If the media description is rejected, and transceiver is + // not already stopped, stop the RTCRtpTransceiver transceiver. 
+ if (content->rejected && !transceiver->stopped()) { + RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name + << " since the media section was rejected."; + transceiver->internal()->StopTransceiverProcedure(); + } + if (!content->rejected && + RtpTransceiverDirectionHasRecv(local_direction)) { + if (!media_desc->streams().empty() && + media_desc->streams()[0].has_ssrcs()) { + uint32_t ssrc = media_desc->streams()[0].first_ssrc(); + transceiver->internal()->receiver_internal()->SetupMediaChannel(ssrc); + } else { + transceiver->internal() + ->receiver_internal() + ->SetupUnsignaledMediaChannel(); + } + } + } + // Once all processing has finished, fire off callbacks. + auto observer = pc_->Observer(); + for (const auto& transceiver : now_receiving_transceivers) { + pc_->stats()->AddTrack(transceiver->receiver()->track()); + observer->OnTrack(transceiver); + observer->OnAddTrack(transceiver->receiver(), + transceiver->receiver()->streams()); + } + for (const auto& stream : added_streams) { + observer->OnAddStream(stream); + } + for (const auto& transceiver : remove_list) { + observer->OnRemoveTrack(transceiver->receiver()); + } + for (const auto& stream : removed_streams) { + observer->OnRemoveStream(stream); + } + } + + const cricket::ContentInfo* audio_content = + GetFirstAudioContent(remote_description()->description()); + const cricket::ContentInfo* video_content = + GetFirstVideoContent(remote_description()->description()); + const cricket::AudioContentDescription* audio_desc = + GetFirstAudioContentDescription(remote_description()->description()); + const cricket::VideoContentDescription* video_desc = + GetFirstVideoContentDescription(remote_description()->description()); + const cricket::RtpDataContentDescription* rtp_data_desc = + GetFirstRtpDataContentDescription(remote_description()->description()); + + // Check if the descriptions include streams, just in case the peer supports + // MSID, but doesn't indicate so with "a=msid-semantic". 
+ if (remote_description()->description()->msid_supported() || + (audio_desc && !audio_desc->streams().empty()) || + (video_desc && !video_desc->streams().empty())) { + remote_peer_supports_msid_ = true; + } + + // We wait to signal new streams until we finish processing the description, + // since only at that point will new streams have all their tracks. + rtc::scoped_refptr new_streams(StreamCollection::Create()); + + if (!IsUnifiedPlan()) { + // TODO(steveanton): When removing RTP senders/receivers in response to a + // rejected media section, there is some cleanup logic that expects the + // voice/ video channel to still be set. But in this method the voice/video + // channel would have been destroyed by the SetRemoteDescription caller + // above so the cleanup that relies on them fails to run. The RemoveSenders + // calls should be moved to right before the DestroyChannel calls to fix + // this. + + // Find all audio rtp streams and create corresponding remote AudioTracks + // and MediaStreams. + if (audio_content) { + if (audio_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + } else { + bool default_audio_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(audio_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(audio_desc), + default_audio_track_needed, audio_desc->type(), + new_streams); + } + } + + // Find all video rtp streams and create corresponding remote VideoTracks + // and MediaStreams. + if (video_content) { + if (video_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + } else { + bool default_video_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(video_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(video_desc), + default_video_track_needed, video_desc->type(), + new_streams); + } + } + + // If this is an RTP data transport, update the DataChannels with the + // information from the remote peer. 
+ if (rtp_data_desc) { + data_channel_controller()->UpdateRemoteRtpDataChannels( + GetActiveStreams(rtp_data_desc)); + } + + // Iterate new_streams and notify the observer about new MediaStreams. + auto observer = pc_->Observer(); + for (size_t i = 0; i < new_streams->count(); ++i) { + MediaStreamInterface* new_stream = new_streams->at(i); + pc_->stats()->AddStream(new_stream); + observer->OnAddStream( + rtc::scoped_refptr(new_stream)); + } + + UpdateEndedRemoteMediaStreams(); + } + + if (type == SdpType::kAnswer && + local_ice_credentials_to_replace_->SatisfiesIceRestart( + *current_local_description_)) { + local_ice_credentials_to_replace_->ClearIceCredentials(); + } + + return RTCError::OK(); +} + +void SdpOfferAnswerHandler::DoSetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetLocalDescription"); + + if (!observer) { + RTC_LOG(LS_ERROR) << "SetLocalDescription - observer is NULL."; + return; + } + + if (!desc) { + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, "SessionDescription is NULL.")); + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "SetLocalDescription: " << error_message; + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + // For SLD we support only explicit rollback. 
+ if (desc->GetType() == SdpType::kRollback) { + if (IsUnifiedPlan()) { + observer->OnSetLocalDescriptionComplete(Rollback(desc->GetType())); + } else { + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Rollback not supported in Plan B")); + } + return; + } + + RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL); + if (!error.ok()) { + std::string error_message = GetSetDescriptionErrorMessage( + cricket::CS_LOCAL, desc->GetType(), error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + // Grab the description type before moving ownership to ApplyLocalDescription, + // which may destroy it before returning. + const SdpType type = desc->GetType(); + + error = ApplyLocalDescription(std::move(desc)); + // |desc| may be destroyed at this point. + + if (!error.ok()) { + // If ApplyLocalDescription fails, the PeerConnection could be in an + // inconsistent state, so act conservatively here and set the session error + // so that future calls to SetLocalDescription/SetRemoteDescription fail. + SetSessionError(SessionError::kContent, error.message()); + std::string error_message = + GetSetDescriptionErrorMessage(cricket::CS_LOCAL, type, error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + RTC_DCHECK(local_description()); + + if (local_description()->GetType() == SdpType::kAnswer) { + RemoveStoppedTransceivers(); + + // TODO(deadbeef): We already had to hop to the network thread for + // MaybeStartGathering... + pc_->network_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, + port_allocator())); + // Make UMA notes about what was agreed to. 
+ ReportNegotiatedSdpSemantics(*local_description()); + } + + observer->OnSetLocalDescriptionComplete(RTCError::OK()); + pc_->NoteUsageEvent(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED); + + // Check if negotiation is needed. We must do this after informing the + // observer that SetLocalDescription() has completed to ensure negotiation is + // not needed prior to the promise resolving. + if (IsUnifiedPlan()) { + bool was_negotiation_needed = is_negotiation_needed_; + UpdateNegotiationNeeded(); + if (signaling_state() == PeerConnectionInterface::kStable && + was_negotiation_needed && is_negotiation_needed_) { + // Legacy version. + pc_->Observer()->OnRenegotiationNeeded(); + // Spec-compliant version; the event may get invalidated before firing. + GenerateNegotiationNeededEvent(); + } + } + + // MaybeStartGathering needs to be called after informing the observer so that + // we don't signal any candidates before signaling that SetLocalDescription + // completed. + transport_controller()->MaybeStartGathering(); +} + +void SdpOfferAnswerHandler::DoCreateOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateOffer"); + + if (!observer) { + RTC_LOG(LS_ERROR) << "CreateOffer - observer is NULL."; + return; + } + + if (pc_->IsClosed()) { + std::string error = "CreateOffer called when PeerConnection is closed."; + RTC_LOG(LS_ERROR) << error; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. 
+ if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "CreateOffer: " << error_message; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + if (!ValidateOfferAnswerOptions(options)) { + std::string error = "CreateOffer called with invalid options."; + RTC_LOG(LS_ERROR) << error; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, RTCError(RTCErrorType::INVALID_PARAMETER, std::move(error))); + return; + } + + // Legacy handling for offer_to_receive_audio and offer_to_receive_video. + // Specified in WebRTC section 4.4.3.2 "Legacy configuration extensions". + if (IsUnifiedPlan()) { + RTCError error = HandleLegacyOfferOptions(options); + if (!error.ok()) { + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, std::move(error)); + return; + } + } + + cricket::MediaSessionOptions session_options; + GetOptionsForOffer(options, &session_options); + webrtc_session_desc_factory_->CreateOffer(observer, options, session_options); +} + +void SdpOfferAnswerHandler::CreateAnswer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + options](std::function operations_chain_callback) { + // Abort early if |this_weak_ptr| is no longer valid. 
+ if (!this_weak_ptr) { + observer_refptr->OnFailure(RTCError( + RTCErrorType::INTERNAL_ERROR, + "CreateAnswer failed because the session was shut down")); + operations_chain_callback(); + return; + } + // The operation completes asynchronously when the wrapper is invoked. + rtc::scoped_refptr + observer_wrapper(new rtc::RefCountedObject< + CreateSessionDescriptionObserverOperationWrapper>( + std::move(observer_refptr), + std::move(operations_chain_callback))); + this_weak_ptr->DoCreateAnswer(options, observer_wrapper); + }); +} + +void SdpOfferAnswerHandler::DoCreateAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateAnswer"); + if (!observer) { + RTC_LOG(LS_ERROR) << "CreateAnswer - observer is NULL."; + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "CreateAnswer: " << error_message; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + if (!(signaling_state_ == PeerConnectionInterface::kHaveRemoteOffer || + signaling_state_ == PeerConnectionInterface::kHaveLocalPrAnswer)) { + std::string error = + "PeerConnection cannot create an answer in a state other than " + "have-remote-offer or have-local-pranswer."; + RTC_LOG(LS_ERROR) << error; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); + return; + } + + // The remote description should be set if we're in the right state. 
+ RTC_DCHECK(remote_description()); + + if (IsUnifiedPlan()) { + if (options.offer_to_receive_audio != + PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { + RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_audio is not " + "supported with Unified Plan semantics. Use the " + "RtpTransceiver API instead."; + } + if (options.offer_to_receive_video != + PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { + RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_video is not " + "supported with Unified Plan semantics. Use the " + "RtpTransceiver API instead."; + } + } + + cricket::MediaSessionOptions session_options; + GetOptionsForAnswer(options, &session_options); + webrtc_session_desc_factory_->CreateAnswer(observer, session_options); +} + +void SdpOfferAnswerHandler::DoSetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetRemoteDescription"); + + if (!observer) { + RTC_LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; + return; + } + + if (!desc) { + observer->OnSetRemoteDescriptionComplete(RTCError( + RTCErrorType::INVALID_PARAMETER, "SessionDescription is NULL.")); + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "SetRemoteDescription: " << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + if (IsUnifiedPlan()) { + if (pc_->configuration()->enable_implicit_rollback) { + if (desc->GetType() == SdpType::kOffer && + signaling_state() == PeerConnectionInterface::kHaveLocalOffer) { + Rollback(desc->GetType()); + } + } + // Explicit rollback. 
+ if (desc->GetType() == SdpType::kRollback) { + observer->OnSetRemoteDescriptionComplete(Rollback(desc->GetType())); + return; + } + } else if (desc->GetType() == SdpType::kRollback) { + observer->OnSetRemoteDescriptionComplete( + RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Rollback not supported in Plan B")); + return; + } + if (desc->GetType() == SdpType::kOffer || + desc->GetType() == SdpType::kAnswer) { + // Report to UMA the format of the received offer or answer. + pc_->ReportSdpFormatReceived(*desc); + } + + // Handle remote descriptions missing a=mid lines for interop with legacy end + // points. + FillInMissingRemoteMids(desc->description()); + + RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE); + if (!error.ok()) { + std::string error_message = GetSetDescriptionErrorMessage( + cricket::CS_REMOTE, desc->GetType(), error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(error.type(), std::move(error_message))); + return; + } + + // Grab the description type before moving ownership to + // ApplyRemoteDescription, which may destroy it before returning. + const SdpType type = desc->GetType(); + + error = ApplyRemoteDescription(std::move(desc)); + // |desc| may be destroyed at this point. + + if (!error.ok()) { + // If ApplyRemoteDescription fails, the PeerConnection could be in an + // inconsistent state, so act conservatively here and set the session error + // so that future calls to SetLocalDescription/SetRemoteDescription fail. 
+ SetSessionError(SessionError::kContent, error.message()); + std::string error_message = + GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type, error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(error.type(), std::move(error_message))); + return; + } + RTC_DCHECK(remote_description()); + + if (type == SdpType::kAnswer) { + RemoveStoppedTransceivers(); + // TODO(deadbeef): We already had to hop to the network thread for + // MaybeStartGathering... + pc_->network_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, + port_allocator())); + // Make UMA notes about what was agreed to. + ReportNegotiatedSdpSemantics(*remote_description()); + } + + observer->OnSetRemoteDescriptionComplete(RTCError::OK()); + pc_->NoteUsageEvent(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED); + + // Check if negotiation is needed. We must do this after informing the + // observer that SetRemoteDescription() has completed to ensure negotiation is + // not needed prior to the promise resolving. + if (IsUnifiedPlan()) { + bool was_negotiation_needed = is_negotiation_needed_; + UpdateNegotiationNeeded(); + if (signaling_state() == PeerConnectionInterface::kStable && + was_negotiation_needed && is_negotiation_needed_) { + // Legacy version. + pc_->Observer()->OnRenegotiationNeeded(); + // Spec-compliant version; the event may get invalidated before firing. 
+ GenerateNegotiationNeededEvent(); + } + } +} + +void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( + rtc::scoped_refptr receiver, + const std::vector& stream_ids, + std::vector>* added_streams, + std::vector>* removed_streams) { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector> media_streams; + for (const std::string& stream_id : stream_ids) { + rtc::scoped_refptr stream = + remote_streams_->find(stream_id); + if (!stream) { + stream = MediaStreamProxy::Create(rtc::Thread::Current(), + MediaStream::Create(stream_id)); + remote_streams_->AddStream(stream); + added_streams->push_back(stream); + } + media_streams.push_back(stream); + } + // Special case: "a=msid" missing, use random stream ID. + if (media_streams.empty() && + !(remote_description()->description()->msid_signaling() & + cricket::kMsidSignalingMediaSection)) { + if (!missing_msid_default_stream_) { + missing_msid_default_stream_ = MediaStreamProxy::Create( + rtc::Thread::Current(), MediaStream::Create(rtc::CreateRandomUuid())); + added_streams->push_back(missing_msid_default_stream_); + } + media_streams.push_back(missing_msid_default_stream_); + } + std::vector> previous_streams = + receiver->streams(); + // SetStreams() will add/remove the receiver's track to/from the streams. This + // differs from the spec - the spec uses an "addList" and "removeList" to + // update the stream-track relationships in a later step. We do this earlier, + // changing the order of things, but the end-result is the same. + // TODO(hbos): When we remove remote_streams(), use set_stream_ids() + // instead. 
https://crbug.com/webrtc/9480 + receiver->SetStreams(media_streams); + RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); +} + +bool SdpOfferAnswerHandler::AddIceCandidate( + const IceCandidateInterface* ice_candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate"); + if (pc_->IsClosed()) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: PeerConnection is closed."; + NoteAddIceCandidateResult(kAddIceCandidateFailClosed); + return false; + } + + if (!remote_description()) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: ICE candidates can't be added " + "without any remote session description."; + NoteAddIceCandidateResult(kAddIceCandidateFailNoRemoteDescription); + return false; + } + + if (!ice_candidate) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate is null."; + NoteAddIceCandidateResult(kAddIceCandidateFailNullCandidate); + return false; + } + + bool valid = false; + bool ready = ReadyToUseRemoteCandidate(ice_candidate, nullptr, &valid); + if (!valid) { + NoteAddIceCandidateResult(kAddIceCandidateFailNotValid); + return false; + } + + // Add this candidate to the remote session description. 
+ if (!mutable_remote_description()->AddCandidate(ice_candidate)) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate cannot be used."; + NoteAddIceCandidateResult(kAddIceCandidateFailInAddition); + return false; + } + + if (ready) { + bool result = UseCandidate(ice_candidate); + if (result) { + pc_->NoteUsageEvent(UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED); + NoteAddIceCandidateResult(kAddIceCandidateSuccess); + } else { + NoteAddIceCandidateResult(kAddIceCandidateFailNotUsable); + } + return result; + } else { + RTC_LOG(LS_INFO) << "AddIceCandidate: Not ready to use candidate."; + NoteAddIceCandidateResult(kAddIceCandidateFailNotReady); + return true; + } +} + +void SdpOfferAnswerHandler::AddIceCandidate( + std::unique_ptr candidate, + std::function callback) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + candidate = std::move(candidate), callback = std::move(callback)]( + std::function operations_chain_callback) { + if (!this_weak_ptr) { + operations_chain_callback(); + callback(RTCError( + RTCErrorType::INVALID_STATE, + "AddIceCandidate failed because the session was shut down")); + return; + } + if (!this_weak_ptr->AddIceCandidate(candidate.get())) { + operations_chain_callback(); + // Fail with an error type and message consistent with Chromium. + // TODO(hbos): Fail with error types according to spec. 
+ callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Error processing ICE candidate")); + return; + } + operations_chain_callback(); + callback(RTCError::OK()); + }); +} + +bool SdpOfferAnswerHandler::RemoveIceCandidates( + const std::vector& candidates) { + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::RemoveIceCandidates"); + RTC_DCHECK_RUN_ON(signaling_thread()); + if (pc_->IsClosed()) { + RTC_LOG(LS_ERROR) << "RemoveIceCandidates: PeerConnection is closed."; + return false; + } + + if (!remote_description()) { + RTC_LOG(LS_ERROR) << "RemoveIceCandidates: ICE candidates can't be removed " + "without any remote session description."; + return false; + } + + if (candidates.empty()) { + RTC_LOG(LS_ERROR) << "RemoveIceCandidates: candidates are empty."; + return false; + } + + size_t number_removed = + mutable_remote_description()->RemoveCandidates(candidates); + if (number_removed != candidates.size()) { + RTC_LOG(LS_ERROR) + << "RemoveIceCandidates: Failed to remove candidates. Requested " + << candidates.size() << " but only " << number_removed + << " are removed."; + } + + // Remove the candidates from the transport controller. 
+ RTCError error = transport_controller()->RemoveRemoteCandidates(candidates); + if (!error.ok()) { + RTC_LOG(LS_ERROR) + << "RemoveIceCandidates: Error when removing remote candidates: " + << error.message(); + } + return true; +} + +void SdpOfferAnswerHandler::AddLocalIceCandidate( + const JsepIceCandidate* candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (local_description()) { + mutable_local_description()->AddCandidate(candidate); + } +} + +void SdpOfferAnswerHandler::RemoveLocalIceCandidates( + const std::vector& candidates) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (local_description()) { + mutable_local_description()->RemoveCandidates(candidates); + } +} + +const SessionDescriptionInterface* SdpOfferAnswerHandler::local_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_local_description_ ? pending_local_description_.get() + : current_local_description_.get(); +} + +const SessionDescriptionInterface* SdpOfferAnswerHandler::remote_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_remote_description_ ? 
pending_remote_description_.get() + : current_remote_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::current_local_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return current_local_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::current_remote_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return current_remote_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::pending_local_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_local_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::pending_remote_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_remote_description_.get(); +} + +PeerConnectionInterface::SignalingState SdpOfferAnswerHandler::signaling_state() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return signaling_state_; +} + +void SdpOfferAnswerHandler::ChangeSignalingState( + PeerConnectionInterface::SignalingState signaling_state) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (signaling_state_ == signaling_state) { + return; + } + RTC_LOG(LS_INFO) << "Session: " << pc_->session_id() << " Old state: " + << GetSignalingStateString(signaling_state_) + << " New state: " + << GetSignalingStateString(signaling_state); + signaling_state_ = signaling_state; + pc_->Observer()->OnSignalingChange(signaling_state_); +} + +RTCError SdpOfferAnswerHandler::UpdateSessionState( + SdpType type, + cricket::ContentSource source, + const cricket::SessionDescription* description) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + // If there's already a pending error then no state transition should happen. + // But all call-sites should be verifying this before calling us! + RTC_DCHECK(session_error() == SessionError::kNone); + + // If this is answer-ish we're ready to let media flow. 
+ if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + EnableSending(); + } + + // Update the signaling state according to the specified state machine (see + // https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum). + if (type == SdpType::kOffer) { + ChangeSignalingState(source == cricket::CS_LOCAL + ? PeerConnectionInterface::kHaveLocalOffer + : PeerConnectionInterface::kHaveRemoteOffer); + } else if (type == SdpType::kPrAnswer) { + ChangeSignalingState(source == cricket::CS_LOCAL + ? PeerConnectionInterface::kHaveLocalPrAnswer + : PeerConnectionInterface::kHaveRemotePrAnswer); + } else { + RTC_DCHECK(type == SdpType::kAnswer); + ChangeSignalingState(PeerConnectionInterface::kStable); + transceivers()->DiscardStableStates(); + have_pending_rtp_data_channel_ = false; + } + + // Update internal objects according to the session description's media + // descriptions. + RTCError error = PushdownMediaDescription(type, source); + if (!error.ok()) { + return error; + } + + return RTCError::OK(); +} + +bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent( + uint32_t event_id) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Plan B? Always fire to conform with useless legacy behavior. + if (!IsUnifiedPlan()) { + return true; + } + // The event ID has been invalidated. Either negotiation is no longer needed + // or a newer negotiation needed event has been generated. + if (event_id != negotiation_needed_event_id_) { + return false; + } + // The chain is no longer empty, update negotiation needed when it becomes + // empty. This should generate a newer negotiation needed event, making this + // one obsolete. + if (!operations_chain_->IsEmpty()) { + // Since we just suppressed an event that would have been fired, if + // negotiation is still needed by the time the chain becomes empty again, we + // must make sure to generate another event if negotiation is needed then. 
+ // This happens when |is_negotiation_needed_| goes from false to true, so we + // set it to false until UpdateNegotiationNeeded() is called. + is_negotiation_needed_ = false; + update_negotiation_needed_on_empty_chain_ = true; + return false; + } + // We must not fire if the signaling state is no longer "stable". If + // negotiation is still needed when we return to "stable", a new negotiation + // needed event will be generated, so this one can safely be suppressed. + if (signaling_state_ != PeerConnectionInterface::kStable) { + return false; + } + // All checks have passed - please fire "negotiationneeded" now! + return true; +} + +rtc::scoped_refptr +SdpOfferAnswerHandler::local_streams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified " + "Plan SdpSemantics. Please use GetSenders " + "instead."; + return local_streams_; +} + +rtc::scoped_refptr +SdpOfferAnswerHandler::remote_streams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified " + "Plan SdpSemantics. Please use GetReceivers " + "instead."; + return remote_streams_; +} + +bool SdpOfferAnswerHandler::AddStream(MediaStreamInterface* local_stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "AddStream is not available with Unified Plan " + "SdpSemantics. 
Please use AddTrack instead."; + if (pc_->IsClosed()) { + return false; + } + if (!CanAddLocalMediaStream(local_streams_, local_stream)) { + return false; + } + + local_streams_->AddStream(local_stream); + MediaStreamObserver* observer = new MediaStreamObserver(local_stream); + observer->SignalAudioTrackAdded.connect( + this, &SdpOfferAnswerHandler::OnAudioTrackAdded); + observer->SignalAudioTrackRemoved.connect( + this, &SdpOfferAnswerHandler::OnAudioTrackRemoved); + observer->SignalVideoTrackAdded.connect( + this, &SdpOfferAnswerHandler::OnVideoTrackAdded); + observer->SignalVideoTrackRemoved.connect( + this, &SdpOfferAnswerHandler::OnVideoTrackRemoved); + stream_observers_.push_back(std::unique_ptr(observer)); + + for (const auto& track : local_stream->GetAudioTracks()) { + rtp_manager()->AddAudioTrack(track.get(), local_stream); + } + for (const auto& track : local_stream->GetVideoTracks()) { + rtp_manager()->AddVideoTrack(track.get(), local_stream); + } + + pc_->stats()->AddStream(local_stream); + UpdateNegotiationNeeded(); + return true; +} + +void SdpOfferAnswerHandler::RemoveStream(MediaStreamInterface* local_stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "RemoveStream is not available with Unified " + "Plan SdpSemantics. 
Please use RemoveTrack " + "instead."; + TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream"); + if (!pc_->IsClosed()) { + for (const auto& track : local_stream->GetAudioTracks()) { + rtp_manager()->RemoveAudioTrack(track.get(), local_stream); + } + for (const auto& track : local_stream->GetVideoTracks()) { + rtp_manager()->RemoveVideoTrack(track.get(), local_stream); + } + } + local_streams_->RemoveStream(local_stream); + stream_observers_.erase( + std::remove_if( + stream_observers_.begin(), stream_observers_.end(), + [local_stream](const std::unique_ptr& observer) { + return observer->stream()->id().compare(local_stream->id()) == 0; + }), + stream_observers_.end()); + + if (pc_->IsClosed()) { + return; + } + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnAudioTrackAdded(AudioTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->AddAudioTrack(track, stream); + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnAudioTrackRemoved(AudioTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->RemoveAudioTrack(track, stream); + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnVideoTrackAdded(VideoTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->AddVideoTrack(track, stream); + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnVideoTrackRemoved(VideoTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->RemoveVideoTrack(track, stream); + UpdateNegotiationNeeded(); +} + +RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { + auto state = signaling_state(); + if (state != PeerConnectionInterface::kHaveLocalOffer && + state != PeerConnectionInterface::kHaveRemoteOffer) { + return RTCError(RTCErrorType::INVALID_STATE, + "Called in wrong signalingState: " + + 
GetSignalingStateString(signaling_state())); + } + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + std::vector> all_added_streams; + std::vector> all_removed_streams; + std::vector> removed_receivers; + + for (auto&& transceivers_stable_state_pair : transceivers()->StableStates()) { + auto transceiver = transceivers_stable_state_pair.first; + auto state = transceivers_stable_state_pair.second; + + if (state.remote_stream_ids()) { + std::vector> added_streams; + std::vector> removed_streams; + SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), + state.remote_stream_ids().value(), + &added_streams, &removed_streams); + all_added_streams.insert(all_added_streams.end(), added_streams.begin(), + added_streams.end()); + all_removed_streams.insert(all_removed_streams.end(), + removed_streams.begin(), + removed_streams.end()); + if (!state.has_m_section() && !state.newly_created()) { + continue; + } + } + + RTC_DCHECK(transceiver->internal()->mid().has_value()); + DestroyTransceiverChannel(transceiver); + + if (signaling_state() == PeerConnectionInterface::kHaveRemoteOffer && + transceiver->receiver()) { + removed_receivers.push_back(transceiver->receiver()); + } + if (state.newly_created()) { + if (transceiver->internal()->reused_for_addtrack()) { + transceiver->internal()->set_created_by_addtrack(true); + } else { + transceivers()->Remove(transceiver); + } + } + transceiver->internal()->sender_internal()->set_transport(nullptr); + transceiver->internal()->receiver_internal()->set_transport(nullptr); + transceiver->internal()->set_mid(state.mid()); + transceiver->internal()->set_mline_index(state.mline_index()); + } + transport_controller()->RollbackTransports(); + if (have_pending_rtp_data_channel_) { + DestroyDataChannelTransport(); + have_pending_rtp_data_channel_ = false; + } + transceivers()->DiscardStableStates(); + pending_local_description_.reset(); + pending_remote_description_.reset(); + 
ChangeSignalingState(PeerConnectionInterface::kStable); + + // Once all processing has finished, fire off callbacks. + for (const auto& receiver : removed_receivers) { + pc_->Observer()->OnRemoveTrack(receiver); + } + for (const auto& stream : all_added_streams) { + pc_->Observer()->OnAddStream(stream); + } + for (const auto& stream : all_removed_streams) { + pc_->Observer()->OnRemoveStream(stream); + } + + // The assumption is that in case of implicit rollback UpdateNegotiationNeeded + // gets called in SetRemoteDescription. + if (desc_type == SdpType::kRollback) { + UpdateNegotiationNeeded(); + if (is_negotiation_needed_) { + // Legacy version. + pc_->Observer()->OnRenegotiationNeeded(); + // Spec-compliant version; the event may get invalidated before firing. + GenerateNegotiationNeededEvent(); + } + } + return RTCError::OK(); +} + +bool SdpOfferAnswerHandler::IsUnifiedPlan() const { + return pc_->IsUnifiedPlan(); +} + +void SdpOfferAnswerHandler::OnOperationsChainEmpty() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (pc_->IsClosed() || !update_negotiation_needed_on_empty_chain_) + return; + update_negotiation_needed_on_empty_chain_ = false; + // Firing when chain is empty is only supported in Unified Plan to avoid Plan + // B regressions. (In Plan B, onnegotiationneeded is already broken anyway, so + // firing it even more might just be confusing.) 
+ if (IsUnifiedPlan()) { + UpdateNegotiationNeeded(); + } +} + +absl::optional SdpOfferAnswerHandler::is_caller() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return is_caller_; +} + +bool SdpOfferAnswerHandler::HasNewIceCredentials() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return local_ice_credentials_to_replace_->HasIceCredentials(); +} + +bool SdpOfferAnswerHandler::IceRestartPending( + const std::string& content_name) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_ice_restarts_.find(content_name) != + pending_ice_restarts_.end(); +} + +bool SdpOfferAnswerHandler::NeedsIceRestart( + const std::string& content_name) const { + return transport_controller()->NeedsIceRestart(content_name); +} + +absl::optional SdpOfferAnswerHandler::GetDtlsRole( + const std::string& mid) const { + return transport_controller()->GetDtlsRole(mid); +} + +void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!IsUnifiedPlan()) { + pc_->Observer()->OnRenegotiationNeeded(); + GenerateNegotiationNeededEvent(); + return; + } + + // In the spec, a task is queued here to run the following steps - this is + // meant to ensure we do not fire onnegotiationneeded prematurely if multiple + // changes are being made at once. In order to support Chromium's + // implementation where the JavaScript representation of the PeerConnection + // lives on a separate thread though, the queuing of a task is instead + // performed by the PeerConnectionObserver posting from the signaling thread + // to the JavaScript main thread that negotiation is needed. And because the + // Operations Chain lives on the WebRTC signaling thread, + // ShouldFireNegotiationNeededEvent() must be called before firing the event + // to ensure the Operations Chain is still empty and the event has not been + // invalidated. + + // If connection's [[IsClosed]] slot is true, abort these steps. 
+ if (pc_->IsClosed()) + return; + + // If connection's signaling state is not "stable", abort these steps. + if (signaling_state() != PeerConnectionInterface::kStable) + return; + + // NOTE + // The negotiation-needed flag will be updated once the state transitions to + // "stable", as part of the steps for setting an RTCSessionDescription. + + // If the result of checking if negotiation is needed is false, clear the + // negotiation-needed flag by setting connection's [[NegotiationNeeded]] slot + // to false, and abort these steps. + bool is_negotiation_needed = CheckIfNegotiationIsNeeded(); + if (!is_negotiation_needed) { + is_negotiation_needed_ = false; + // Invalidate any negotiation needed event that may previosuly have been + // generated. + ++negotiation_needed_event_id_; + return; + } + + // If connection's [[NegotiationNeeded]] slot is already true, abort these + // steps. + if (is_negotiation_needed_) + return; + + // Set connection's [[NegotiationNeeded]] slot to true. + is_negotiation_needed_ = true; + + // Queue a task that runs the following steps: + // If connection's [[IsClosed]] slot is true, abort these steps. + // If connection's [[NegotiationNeeded]] slot is false, abort these steps. + // Fire an event named negotiationneeded at connection. + pc_->Observer()->OnRenegotiationNeeded(); + // Fire the spec-compliant version; when ShouldFireNegotiationNeededEvent() is + // used in the task queued by the observer, this event will only fire when the + // chain is empty. + GenerateNegotiationNeededEvent(); +} + +bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { + RTC_DCHECK_RUN_ON(signaling_thread()); + // 1. If any implementation-specific negotiation is required, as described at + // the start of this section, return true. + + // 2. If connection.[[LocalIceCredentialsToReplace]] is not empty, return + // true. + if (local_ice_credentials_to_replace_->HasIceCredentials()) { + return true; + } + + // 3. 
Let description be connection.[[CurrentLocalDescription]]. + const SessionDescriptionInterface* description = current_local_description(); + if (!description) + return true; + + // 4. If connection has created any RTCDataChannels, and no m= section in + // description has been negotiated yet for data, return true. + if (data_channel_controller()->HasSctpDataChannels()) { + if (!cricket::GetFirstDataContent(description->description()->contents())) + return true; + } + + // 5. For each transceiver in connection's set of transceivers, perform the + // following checks: + for (const auto& transceiver : transceivers()->List()) { + const ContentInfo* current_local_msection = + FindTransceiverMSection(transceiver.get(), description); + + const ContentInfo* current_remote_msection = FindTransceiverMSection( + transceiver.get(), current_remote_description()); + + // 5.4 If transceiver is stopped and is associated with an m= section, + // but the associated m= section is not yet rejected in + // connection.[[CurrentLocalDescription]] or + // connection.[[CurrentRemoteDescription]], return true. + if (transceiver->stopped()) { + RTC_DCHECK(transceiver->stopping()); + if (current_local_msection && !current_local_msection->rejected && + ((current_remote_msection && !current_remote_msection->rejected) || + !current_remote_msection)) { + return true; + } + continue; + } + + // 5.1 If transceiver.[[Stopping]] is true and transceiver.[[Stopped]] is + // false, return true. + if (transceiver->stopping() && !transceiver->stopped()) + return true; + + // 5.2 If transceiver isn't stopped and isn't yet associated with an m= + // section in description, return true. 
+ if (!current_local_msection) + return true; + + const MediaContentDescription* current_local_media_description = + current_local_msection->media_description(); + // 5.3 If transceiver isn't stopped and is associated with an m= section + // in description then perform the following checks: + + // 5.3.1 If transceiver.[[Direction]] is "sendrecv" or "sendonly", and the + // associated m= section in description either doesn't contain a single + // "a=msid" line, or the number of MSIDs from the "a=msid" lines in this + // m= section, or the MSID values themselves, differ from what is in + // transceiver.sender.[[AssociatedMediaStreamIds]], return true. + if (RtpTransceiverDirectionHasSend(transceiver->direction())) { + if (current_local_media_description->streams().size() == 0) + return true; + + std::vector msection_msids; + for (const auto& stream : current_local_media_description->streams()) { + for (const std::string& msid : stream.stream_ids()) + msection_msids.push_back(msid); + } + + std::vector transceiver_msids = + transceiver->sender()->stream_ids(); + if (msection_msids.size() != transceiver_msids.size()) + return true; + + absl::c_sort(transceiver_msids); + absl::c_sort(msection_msids); + if (transceiver_msids != msection_msids) + return true; + } + + // 5.3.2 If description is of type "offer", and the direction of the + // associated m= section in neither connection.[[CurrentLocalDescription]] + // nor connection.[[CurrentRemoteDescription]] matches + // transceiver.[[Direction]], return true. 
+ if (description->GetType() == SdpType::kOffer) { + if (!current_remote_description()) + return true; + + if (!current_remote_msection) + return true; + + RtpTransceiverDirection current_local_direction = + current_local_media_description->direction(); + RtpTransceiverDirection current_remote_direction = + current_remote_msection->media_description()->direction(); + if (transceiver->direction() != current_local_direction && + transceiver->direction() != + RtpTransceiverDirectionReversed(current_remote_direction)) { + return true; + } + } + + // 5.3.3 If description is of type "answer", and the direction of the + // associated m= section in the description does not match + // transceiver.[[Direction]] intersected with the offered direction (as + // described in [JSEP] (section 5.3.1.)), return true. + if (description->GetType() == SdpType::kAnswer) { + if (!remote_description()) + return true; + + const ContentInfo* offered_remote_msection = + FindTransceiverMSection(transceiver.get(), remote_description()); + + RtpTransceiverDirection offered_direction = + offered_remote_msection + ? offered_remote_msection->media_description()->direction() + : RtpTransceiverDirection::kInactive; + + if (current_local_media_description->direction() != + (RtpTransceiverDirectionIntersection( + transceiver->direction(), + RtpTransceiverDirectionReversed(offered_direction)))) { + return true; + } + } + } + + // If all the preceding checks were performed and true was not returned, + // nothing remains to be negotiated; return false. 
+ return false; +} + +void SdpOfferAnswerHandler::GenerateNegotiationNeededEvent() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ++negotiation_needed_event_id_; + pc_->Observer()->OnNegotiationNeededEvent(negotiation_needed_event_id_); +} + +RTCError SdpOfferAnswerHandler::ValidateSessionDescription( + const SessionDescriptionInterface* sdesc, + cricket::ContentSource source) { + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } + + if (!sdesc || !sdesc->description()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); + } + + SdpType type = sdesc->GetType(); + if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || + (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_STATE, + "Called in wrong state: " + GetSignalingStateString(signaling_state())); + } + + RTCError error = ValidateMids(*sdesc->description()); + if (!error.ok()) { + return error; + } + + // Verify crypto settings. + std::string crypto_error; + if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED || + pc_->dtls_enabled()) { + RTCError crypto_error = + VerifyCrypto(sdesc->description(), pc_->dtls_enabled()); + if (!crypto_error.ok()) { + return crypto_error; + } + } + + // Verify ice-ufrag and ice-pwd. + if (!VerifyIceUfragPwdPresent(sdesc->description())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutIceUfragPwd); + } + + if (!pc_->ValidateBundleSettings(sdesc->description())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kBundleWithoutRtcpMux); + } + + // TODO(skvlad): When the local rtcp-mux policy is Require, reject any + // m-lines that do not rtcp-mux enabled. + + // Verify m-lines in Answer when compared against Offer. 
+ if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // With an answer we want to compare the new answer session description with + // the offer's session description from the current negotiation. + const cricket::SessionDescription* offer_desc = + (source == cricket::CS_LOCAL) ? remote_description()->description() + : local_description()->description(); + if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || + !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), + type)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInAnswer); + } + } else { + // The re-offers should respect the order of m= sections in current + // description. See RFC3264 Section 8 paragraph 4 for more details. + // With a re-offer, either the current local or current remote descriptions + // could be the most up to date, so we would like to check against both of + // them if they exist. It could be the case that one of them has a 0 port + // for a media section, but the other does not. This is important to check + // against in the case that we are recycling an m= section. + const cricket::SessionDescription* current_desc = nullptr; + const cricket::SessionDescription* secondary_current_desc = nullptr; + if (local_description()) { + current_desc = local_description()->description(); + if (remote_description()) { + secondary_current_desc = remote_description()->description(); + } + } else if (remote_description()) { + current_desc = remote_description()->description(); + } + if (current_desc && + !MediaSectionsInSameOrder(*current_desc, secondary_current_desc, + *sdesc->description(), type)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInSubsequentOffer); + } + } + + if (IsUnifiedPlan()) { + // Ensure that each audio and video media section has at most one + // "StreamParams". 
This will return an error if receiving a session + // description from a "Plan B" endpoint which adds multiple tracks of the + // same type. With Unified Plan, there can only be at most one track per + // media section. + for (const ContentInfo& content : sdesc->description()->contents()) { + const MediaContentDescription& desc = *content.media_description(); + if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || + desc.type() == cricket::MEDIA_TYPE_VIDEO) && + desc.streams().size() > 1u) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Media section has more than one track specified " + "with a=ssrc lines which is not supported with " + "Unified Plan."); + } + } + } + + return RTCError::OK(); +} + +RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( + cricket::ContentSource source, + const SessionDescriptionInterface& new_session, + const SessionDescriptionInterface* old_local_description, + const SessionDescriptionInterface* old_remote_description) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + + const cricket::ContentGroup* bundle_group = nullptr; + if (new_session.GetType() == SdpType::kOffer) { + auto bundle_group_or_error = + GetEarlyBundleGroup(*new_session.description()); + if (!bundle_group_or_error.ok()) { + return bundle_group_or_error.MoveError(); + } + bundle_group = bundle_group_or_error.MoveValue(); + } + + const ContentInfos& new_contents = new_session.description()->contents(); + for (size_t i = 0; i < new_contents.size(); ++i) { + const cricket::ContentInfo& new_content = new_contents[i]; + cricket::MediaType media_type = new_content.media_description()->type(); + mid_generator_.AddKnownId(new_content.name); + if (media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO) { + const cricket::ContentInfo* old_local_content = nullptr; + if (old_local_description && + i < old_local_description->description()->contents().size()) { + old_local_content = + 
&old_local_description->description()->contents()[i]; + } + const cricket::ContentInfo* old_remote_content = nullptr; + if (old_remote_description && + i < old_remote_description->description()->contents().size()) { + old_remote_content = + &old_remote_description->description()->contents()[i]; + } + auto transceiver_or_error = + AssociateTransceiver(source, new_session.GetType(), i, new_content, + old_local_content, old_remote_content); + if (!transceiver_or_error.ok()) { + // In the case where a transceiver is rejected locally, we don't + // expect to find a transceiver, but might find it in the case + // where state is still "stopping", not "stopped". + if (new_content.rejected) { + continue; + } + return transceiver_or_error.MoveError(); + } + auto transceiver = transceiver_or_error.MoveValue(); + RTCError error = + UpdateTransceiverChannel(transceiver, new_content, bundle_group); + if (!error.ok()) { + return error; + } + } else if (media_type == cricket::MEDIA_TYPE_DATA) { + if (pc_->GetDataMid() && new_content.name != *(pc_->GetDataMid())) { + // Ignore all but the first data section. + RTC_LOG(LS_INFO) << "Ignoring data media section with MID=" + << new_content.name; + continue; + } + RTCError error = UpdateDataChannel(source, new_content, bundle_group); + if (!error.ok()) { + return error; + } + } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + RTC_LOG(LS_INFO) << "Ignoring unsupported media type"; + } else { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Unknown section type."); + } + } + + return RTCError::OK(); +} + +RTCErrorOr>> +SdpOfferAnswerHandler::AssociateTransceiver( + cricket::ContentSource source, + SdpType type, + size_t mline_index, + const ContentInfo& content, + const ContentInfo* old_local_content, + const ContentInfo* old_remote_content) { + RTC_DCHECK(IsUnifiedPlan()); +#if RTC_DCHECK_IS_ON + // If this is an offer then the m= section might be recycled. 
If the m= + // section is being recycled (defined as: rejected in the current local or + // remote description and not rejected in new description), the transceiver + // should have been removed by RemoveStoppedtransceivers()-> + if (IsMediaSectionBeingRecycled(type, content, old_local_content, + old_remote_content)) { + const std::string& old_mid = + (old_local_content && old_local_content->rejected) + ? old_local_content->name + : old_remote_content->name; + auto old_transceiver = transceivers()->FindByMid(old_mid); + // The transceiver should be disassociated in RemoveStoppedTransceivers() + RTC_DCHECK(!old_transceiver); + } +#endif + + const MediaContentDescription* media_desc = content.media_description(); + auto transceiver = transceivers()->FindByMid(content.name); + if (source == cricket::CS_LOCAL) { + // Find the RtpTransceiver that corresponds to this m= section, using the + // mapping between transceivers and m= section indices established when + // creating the offer. + if (!transceiver) { + transceiver = transceivers()->FindByMLineIndex(mline_index); + } + if (!transceiver) { + // This may happen normally when media sections are rejected. + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Transceiver not found based on m-line index"); + } + } else { + RTC_DCHECK_EQ(source, cricket::CS_REMOTE); + // If the m= section is sendrecv or recvonly, and there are RtpTransceivers + // of the same type... + // When simulcast is requested, a transceiver cannot be associated because + // AddTrack cannot be called to initialize it. + if (!transceiver && + RtpTransceiverDirectionHasRecv(media_desc->direction()) && + !media_desc->HasSimulcast()) { + transceiver = FindAvailableTransceiverToReceive(media_desc->type()); + } + // If no RtpTransceiver was found in the previous step, create one with a + // recvonly direction. 
+ if (!transceiver) { + RTC_LOG(LS_INFO) << "Adding " + << cricket::MediaTypeToString(media_desc->type()) + << " transceiver for MID=" << content.name + << " at i=" << mline_index + << " in response to the remote description."; + std::string sender_id = rtc::CreateRandomUuid(); + std::vector send_encodings = + GetSendEncodingsFromRemoteDescription(*media_desc); + auto sender = rtp_manager()->CreateSender(media_desc->type(), sender_id, + nullptr, {}, send_encodings); + std::string receiver_id; + if (!media_desc->streams().empty()) { + receiver_id = media_desc->streams()[0].id; + } else { + receiver_id = rtc::CreateRandomUuid(); + } + auto receiver = + rtp_manager()->CreateReceiver(media_desc->type(), receiver_id); + transceiver = rtp_manager()->CreateAndAddTransceiver(sender, receiver); + transceiver->internal()->set_direction( + RtpTransceiverDirection::kRecvOnly); + if (type == SdpType::kOffer) { + transceivers()->StableState(transceiver)->set_newly_created(); + } + } + + RTC_DCHECK(transceiver); + + // Check if the offer indicated simulcast but the answer rejected it. + // This can happen when simulcast is not supported on the remote party. + if (SimulcastIsRejected(old_local_content, *media_desc)) { + RTC_HISTOGRAM_BOOLEAN(kSimulcastDisabled, true); + RTCError error = + DisableSimulcastInSender(transceiver->internal()->sender_internal()); + if (!error.ok()) { + RTC_LOG(LS_ERROR) << "Failed to remove rejected simulcast."; + return std::move(error); + } + } + } + + if (transceiver->media_type() != media_desc->type()) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "Transceiver type does not match media description type."); + } + + if (media_desc->HasSimulcast()) { + std::vector layers = + source == cricket::CS_LOCAL + ? 
media_desc->simulcast_description().send_layers().GetAllLayers() + : media_desc->simulcast_description() + .receive_layers() + .GetAllLayers(); + RTCError error = UpdateSimulcastLayerStatusInSender( + layers, transceiver->internal()->sender_internal()); + if (!error.ok()) { + RTC_LOG(LS_ERROR) << "Failed updating status for simulcast layers."; + return std::move(error); + } + } + if (type == SdpType::kOffer) { + bool state_changes = transceiver->internal()->mid() != content.name || + transceiver->internal()->mline_index() != mline_index; + if (state_changes) { + transceivers() + ->StableState(transceiver) + ->SetMSectionIfUnset(transceiver->internal()->mid(), + transceiver->internal()->mline_index()); + } + } + // Associate the found or created RtpTransceiver with the m= section by + // setting the value of the RtpTransceiver's mid property to the MID of the m= + // section, and establish a mapping between the transceiver and the index of + // the m= section. + transceiver->internal()->set_mid(content.name); + transceiver->internal()->set_mline_index(mline_index); + return std::move(transceiver); +} + +RTCErrorOr +SdpOfferAnswerHandler::GetEarlyBundleGroup( + const SessionDescription& desc) const { + const cricket::ContentGroup* bundle_group = nullptr; + if (pc_->configuration()->bundle_policy == + PeerConnectionInterface::kBundlePolicyMaxBundle) { + bundle_group = desc.GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + if (!bundle_group) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "max-bundle configured but session description " + "has no BUNDLE group"); + } + } + return bundle_group; +} + +RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( + rtc::scoped_refptr> + transceiver, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) { + RTC_DCHECK(IsUnifiedPlan()); + RTC_DCHECK(transceiver); + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (content.rejected) { + if (channel) { + 
transceiver->internal()->SetChannel(nullptr); + DestroyChannelInterface(channel); + } + } else { + if (!channel) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + channel = CreateVoiceChannel(content.name); + } else { + RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->media_type()); + channel = CreateVideoChannel(content.name); + } + if (!channel) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "Failed to create channel for mid=" + content.name); + } + transceiver->internal()->SetChannel(channel); + } + } + return RTCError::OK(); +} + +RTCError SdpOfferAnswerHandler::UpdateDataChannel( + cricket::ContentSource source, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) { + if (pc_->data_channel_type() == cricket::DCT_NONE) { + // If data channels are disabled, ignore this media section. CreateAnswer + // will take care of rejecting it. + return RTCError::OK(); + } + if (content.rejected) { + RTC_LOG(LS_INFO) << "Rejected data channel, mid=" << content.mid(); + DestroyDataChannelTransport(); + } else { + if (!data_channel_controller()->rtp_data_channel() && + !data_channel_controller()->data_channel_transport()) { + RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid(); + if (!CreateDataChannel(content.name)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); + } + } + if (source == cricket::CS_REMOTE) { + const MediaContentDescription* data_desc = content.media_description(); + if (data_desc && cricket::IsRtpProtocol(data_desc->protocol())) { + data_channel_controller()->UpdateRemoteRtpDataChannels( + GetActiveStreams(data_desc)); + } + } + } + return RTCError::OK(); +} + +bool SdpOfferAnswerHandler::ExpectSetLocalDescription(SdpType type) { + PeerConnectionInterface::SignalingState state = signaling_state(); + if (type == SdpType::kOffer) { + return (state == PeerConnectionInterface::kStable) || + (state == 
PeerConnectionInterface::kHaveLocalOffer); + } else { + RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); + return (state == PeerConnectionInterface::kHaveRemoteOffer) || + (state == PeerConnectionInterface::kHaveLocalPrAnswer); + } +} + +bool SdpOfferAnswerHandler::ExpectSetRemoteDescription(SdpType type) { + PeerConnectionInterface::SignalingState state = signaling_state(); + if (type == SdpType::kOffer) { + return (state == PeerConnectionInterface::kStable) || + (state == PeerConnectionInterface::kHaveRemoteOffer); + } else { + RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); + return (state == PeerConnectionInterface::kHaveLocalOffer) || + (state == PeerConnectionInterface::kHaveRemotePrAnswer); + } +} + +void SdpOfferAnswerHandler::FillInMissingRemoteMids( + cricket::SessionDescription* new_remote_description) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(new_remote_description); + const cricket::ContentInfos no_infos; + const cricket::ContentInfos& local_contents = + (local_description() ? local_description()->description()->contents() + : no_infos); + const cricket::ContentInfos& remote_contents = + (remote_description() ? 
remote_description()->description()->contents() + : no_infos); + for (size_t i = 0; i < new_remote_description->contents().size(); ++i) { + cricket::ContentInfo& content = new_remote_description->contents()[i]; + if (!content.name.empty()) { + continue; + } + std::string new_mid; + absl::string_view source_explanation; + if (IsUnifiedPlan()) { + if (i < local_contents.size()) { + new_mid = local_contents[i].name; + source_explanation = "from the matching local media section"; + } else if (i < remote_contents.size()) { + new_mid = remote_contents[i].name; + source_explanation = "from the matching previous remote media section"; + } else { + new_mid = mid_generator_.GenerateString(); + source_explanation = "generated just now"; + } + } else { + new_mid = std::string( + GetDefaultMidForPlanB(content.media_description()->type())); + source_explanation = "to match pre-existing behavior"; + } + RTC_DCHECK(!new_mid.empty()); + content.name = new_mid; + new_remote_description->transport_infos()[i].content_name = new_mid; + RTC_LOG(LS_INFO) << "SetRemoteDescription: Remote media section at i=" << i + << " is missing an a=mid line. Filling in the value '" + << new_mid << "' " << source_explanation << "."; + } +} + +rtc::scoped_refptr> +SdpOfferAnswerHandler::FindAvailableTransceiverToReceive( + cricket::MediaType media_type) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + // From JSEP section 5.10 (Applying a Remote Description): + // If the m= section is sendrecv or recvonly, and there are RtpTransceivers of + // the same type that were added to the PeerConnection by addTrack and are not + // associated with any m= section and are not stopped, find the first such + // RtpTransceiver. 
+ for (auto transceiver : transceivers()->List()) { + if (transceiver->media_type() == media_type && + transceiver->internal()->created_by_addtrack() && !transceiver->mid() && + !transceiver->stopped()) { + return transceiver; + } + } + return nullptr; +} + +const cricket::ContentInfo* +SdpOfferAnswerHandler::FindMediaSectionForTransceiver( + rtc::scoped_refptr> + transceiver, + const SessionDescriptionInterface* sdesc) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(transceiver); + RTC_DCHECK(sdesc); + if (IsUnifiedPlan()) { + if (!transceiver->internal()->mid()) { + // This transceiver is not associated with a media section yet. + return nullptr; + } + return sdesc->description()->GetContentByName( + *transceiver->internal()->mid()); + } else { + // Plan B only allows at most one audio and one video section, so use the + // first media section of that type. + return cricket::GetFirstMediaContent(sdesc->description()->contents(), + transceiver->media_type()); + } +} + +void SdpOfferAnswerHandler::GetOptionsForOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + ExtractSharedMediaSessionOptions(offer_answer_options, session_options); + + if (IsUnifiedPlan()) { + GetOptionsForUnifiedPlanOffer(offer_answer_options, session_options); + } else { + GetOptionsForPlanBOffer(offer_answer_options, session_options); + } + + // Intentionally unset the data channel type for RTP data channel with the + // second condition. Otherwise the RTP data channels would be successfully + // negotiated by default and the unit tests in WebRtcDataBrowserTest will fail + // when building with chromium. We want to leave RTP data channels broken, so + // people won't try to use them. 
+ if (data_channel_controller()->HasRtpDataChannels() || + pc_->data_channel_type() != cricket::DCT_RTP) { + session_options->data_channel_type = pc_->data_channel_type(); + } + + // Apply ICE restart flag and renomination flag. + bool ice_restart = offer_answer_options.ice_restart || HasNewIceCredentials(); + for (auto& options : session_options->media_description_options) { + options.transport_options.ice_restart = ice_restart; + options.transport_options.enable_ice_renomination = + pc_->configuration()->enable_ice_renomination; + } + + session_options->rtcp_cname = rtcp_cname_; + session_options->crypto_options = pc_->GetCryptoOptions(); + session_options->pooled_ice_credentials = + pc_->network_thread()->Invoke>( + RTC_FROM_HERE, + rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, + port_allocator())); + session_options->offer_extmap_allow_mixed = + pc_->configuration()->offer_extmap_allow_mixed; + + // Allow fallback for using obsolete SCTP syntax. + // Note that the default in |session_options| is true, while + // the default in |options| is false. + session_options->use_obsolete_sctp_sdp = + offer_answer_options.use_obsolete_sctp_sdp; +} + +void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Figure out transceiver directional preferences. + bool send_audio = + !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); + bool send_video = + !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); + + // By default, generate sendrecv/recvonly m= sections. + bool recv_audio = true; + bool recv_video = true; + + // By default, only offer a new m= section if we have media to send with it. 
+ bool offer_new_audio_description = send_audio; + bool offer_new_video_description = send_video; + bool offer_new_data_description = + data_channel_controller()->HasDataChannels(); + + // The "offer_to_receive_X" options allow those defaults to be overridden. + if (offer_answer_options.offer_to_receive_audio != + PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { + recv_audio = (offer_answer_options.offer_to_receive_audio > 0); + offer_new_audio_description = + offer_new_audio_description || + (offer_answer_options.offer_to_receive_audio > 0); + } + if (offer_answer_options.offer_to_receive_video != + RTCOfferAnswerOptions::kUndefined) { + recv_video = (offer_answer_options.offer_to_receive_video > 0); + offer_new_video_description = + offer_new_video_description || + (offer_answer_options.offer_to_receive_video > 0); + } + + absl::optional audio_index; + absl::optional video_index; + absl::optional data_index; + // If a current description exists, generate m= sections in the same order, + // using the first audio/video/data section that appears and rejecting + // extraneous ones. + if (local_description()) { + GenerateMediaDescriptionOptions( + local_description(), + RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), + RtpTransceiverDirectionFromSendRecv(send_video, recv_video), + &audio_index, &video_index, &data_index, session_options); + } + + // Add audio/video/data m= sections to the end if needed. 
+ if (!audio_index && offer_new_audio_description) { + cricket::MediaDescriptionOptions options( + cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO, + RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false); + options.header_extensions = + channel_manager()->GetSupportedAudioRtpHeaderExtensions(); + session_options->media_description_options.push_back(options); + audio_index = session_options->media_description_options.size() - 1; + } + if (!video_index && offer_new_video_description) { + cricket::MediaDescriptionOptions options( + cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO, + RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false); + options.header_extensions = + channel_manager()->GetSupportedVideoRtpHeaderExtensions(); + session_options->media_description_options.push_back(options); + video_index = session_options->media_description_options.size() - 1; + } + if (!data_index && offer_new_data_description) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(cricket::CN_DATA)); + data_index = session_options->media_description_options.size() - 1; + } + + cricket::MediaDescriptionOptions* audio_media_description_options = + !audio_index ? nullptr + : &session_options->media_description_options[*audio_index]; + cricket::MediaDescriptionOptions* video_media_description_options = + !video_index ? nullptr + : &session_options->media_description_options[*video_index]; + + AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), + audio_media_description_options, + video_media_description_options, + offer_answer_options.num_simulcast_layers); +} + +void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( + const RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Rules for generating an offer are dictated by JSEP sections 5.2.1 (Initial + // Offers) and 5.2.2 (Subsequent Offers). 
+ RTC_DCHECK_EQ(session_options->media_description_options.size(), 0); + const ContentInfos no_infos; + const ContentInfos& local_contents = + (local_description() ? local_description()->description()->contents() + : no_infos); + const ContentInfos& remote_contents = + (remote_description() ? remote_description()->description()->contents() + : no_infos); + // The mline indices that can be recycled. New transceivers should reuse these + // slots first. + std::queue recycleable_mline_indices; + // First, go through each media section that exists in either the local or + // remote description and generate a media section in this offer for the + // associated transceiver. If a media section can be recycled, generate a + // default, rejected media section here that can be later overwritten. + for (size_t i = 0; + i < std::max(local_contents.size(), remote_contents.size()); ++i) { + // Either |local_content| or |remote_content| is non-null. + const ContentInfo* local_content = + (i < local_contents.size() ? &local_contents[i] : nullptr); + const ContentInfo* current_local_content = + GetContentByIndex(current_local_description(), i); + const ContentInfo* remote_content = + (i < remote_contents.size() ? &remote_contents[i] : nullptr); + const ContentInfo* current_remote_content = + GetContentByIndex(current_remote_description(), i); + bool had_been_rejected = + (current_local_content && current_local_content->rejected) || + (current_remote_content && current_remote_content->rejected); + const std::string& mid = + (local_content ? local_content->name : remote_content->name); + cricket::MediaType media_type = + (local_content ? local_content->media_description()->type() + : remote_content->media_description()->type()); + if (media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO) { + // A media section is considered eligible for recycling if it is marked as + // rejected in either the current local or current remote description. 
+ auto transceiver = transceivers()->FindByMid(mid); + if (!transceiver) { + // No associated transceiver. The media section has been stopped. + recycleable_mline_indices.push(i); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + // NOTE: a stopping transceiver should be treated as a stopped one in + // createOffer as specified in + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. + if (had_been_rejected && transceiver->stopping()) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions( + transceiver->media_type(), mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + recycleable_mline_indices.push(i); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid, + /*is_create_offer=*/true)); + // CreateOffer shouldn't really cause any state changes in + // PeerConnection, but we need a way to match new transceivers to new + // media sections in SetLocalDescription and JSEP specifies this is + // done by recording the index of the media section generated for the + // transceiver in the offer. 
+ transceiver->internal()->set_mline_index(i); + } + } + } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + RTC_DCHECK(local_content->rejected); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); + if (had_been_rejected) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(mid)); + } else { + RTC_CHECK(pc_->GetDataMid()); + if (mid == *(pc_->GetDataMid())) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(mid)); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(mid)); + } + } + } + } + + // Next, look for transceivers that are newly added (that is, are not stopped + // and not associated). Reuse media sections marked as recyclable first, + // otherwise append to the end of the offer. New media sections should be + // added in the order they were added to the PeerConnection. + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->mid() || transceiver->stopping()) { + continue; + } + size_t mline_index; + if (!recycleable_mline_indices.empty()) { + mline_index = recycleable_mline_indices.front(); + recycleable_mline_indices.pop(); + session_options->media_description_options[mline_index] = + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid_generator_.GenerateString(), + /*is_create_offer=*/true); + } else { + mline_index = session_options->media_description_options.size(); + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid_generator_.GenerateString(), + /*is_create_offer=*/true)); + } + // See comment above for why CreateOffer changes the transceiver's state. 
+ transceiver->internal()->set_mline_index(mline_index); + } + // Lastly, add a m-section if we have local data channels and an m section + // does not already exist. + if (!pc_->GetDataMid() && data_channel_controller()->HasDataChannels()) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData( + mid_generator_.GenerateString())); + } +} + +void SdpOfferAnswerHandler::GetOptionsForAnswer( + const RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + ExtractSharedMediaSessionOptions(offer_answer_options, session_options); + + if (IsUnifiedPlan()) { + GetOptionsForUnifiedPlanAnswer(offer_answer_options, session_options); + } else { + GetOptionsForPlanBAnswer(offer_answer_options, session_options); + } + + // Intentionally unset the data channel type for RTP data channel. Otherwise + // the RTP data channels would be successfully negotiated by default and the + // unit tests in WebRtcDataBrowserTest will fail when building with chromium. + // We want to leave RTP data channels broken, so people won't try to use them. + if (data_channel_controller()->HasRtpDataChannels() || + pc_->data_channel_type() != cricket::DCT_RTP) { + session_options->data_channel_type = pc_->data_channel_type(); + } + + // Apply ICE renomination flag. 
+ for (auto& options : session_options->media_description_options) { + options.transport_options.enable_ice_renomination = + pc_->configuration()->enable_ice_renomination; + } + + session_options->rtcp_cname = rtcp_cname_; + session_options->crypto_options = pc_->GetCryptoOptions(); + session_options->pooled_ice_credentials = + pc_->network_thread()->Invoke>( + RTC_FROM_HERE, + rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, + port_allocator())); +} + +void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Figure out transceiver directional preferences. + bool send_audio = + !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); + bool send_video = + !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); + + // By default, generate sendrecv/recvonly m= sections. The direction is also + // restricted by the direction in the offer. + bool recv_audio = true; + bool recv_video = true; + + // The "offer_to_receive_X" options allow those defaults to be overridden. + if (offer_answer_options.offer_to_receive_audio != + RTCOfferAnswerOptions::kUndefined) { + recv_audio = (offer_answer_options.offer_to_receive_audio > 0); + } + if (offer_answer_options.offer_to_receive_video != + RTCOfferAnswerOptions::kUndefined) { + recv_video = (offer_answer_options.offer_to_receive_video > 0); + } + + absl::optional audio_index; + absl::optional video_index; + absl::optional data_index; + + // Generate m= sections that match those in the offer. + // Note that mediasession.cc will handle intersection our preferred + // direction with the offered direction. 
+ GenerateMediaDescriptionOptions( + remote_description(), + RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), + RtpTransceiverDirectionFromSendRecv(send_video, recv_video), &audio_index, + &video_index, &data_index, session_options); + + cricket::MediaDescriptionOptions* audio_media_description_options = + !audio_index ? nullptr + : &session_options->media_description_options[*audio_index]; + cricket::MediaDescriptionOptions* video_media_description_options = + !video_index ? nullptr + : &session_options->media_description_options[*video_index]; + + AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), + audio_media_description_options, + video_media_description_options, + offer_answer_options.num_simulcast_layers); +} + +void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Rules for generating an answer are dictated by JSEP sections 5.3.1 (Initial + // Answers) and 5.3.2 (Subsequent Answers). + RTC_DCHECK(remote_description()); + RTC_DCHECK(remote_description()->GetType() == SdpType::kOffer); + for (const ContentInfo& content : + remote_description()->description()->contents()) { + cricket::MediaType media_type = content.media_description()->type(); + if (media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO) { + auto transceiver = transceivers()->FindByMid(content.name); + if (transceiver) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForTransceiver( + transceiver, content.name, + /*is_create_offer=*/false)); + } else { + // This should only happen with rejected transceivers. 
+ RTC_DCHECK(content.rejected); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, content.name, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } + } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + RTC_DCHECK(content.rejected); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, content.name, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); + // Reject all data sections if data channels are disabled. + // Reject a data section if it has already been rejected. + // Reject all data sections except for the first one. + if (pc_->data_channel_type() == cricket::DCT_NONE || content.rejected || + content.name != *(pc_->GetDataMid())) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(content.name)); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(content.name)); + } + } + } +} + +const char* SdpOfferAnswerHandler::SessionErrorToString( + SessionError error) const { + switch (error) { + case SessionError::kNone: + return "ERROR_NONE"; + case SessionError::kContent: + return "ERROR_CONTENT"; + case SessionError::kTransport: + return "ERROR_TRANSPORT"; + } + RTC_NOTREACHED(); + return ""; +} + +std::string SdpOfferAnswerHandler::GetSessionErrorMsg() { + RTC_DCHECK_RUN_ON(signaling_thread()); + rtc::StringBuilder desc; + desc << kSessionError << SessionErrorToString(session_error()) << ". 
"; + desc << kSessionErrorDesc << session_error_desc() << "."; + return desc.Release(); +} + +void SdpOfferAnswerHandler::SetSessionError(SessionError error, + const std::string& error_desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (error != session_error_) { + session_error_ = error; + session_error_desc_ = error_desc; + } +} + +RTCError SdpOfferAnswerHandler::HandleLegacyOfferOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + + if (options.offer_to_receive_audio == 0) { + RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MEDIA_TYPE_AUDIO); + } else if (options.offer_to_receive_audio == 1) { + AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_AUDIO); + } else if (options.offer_to_receive_audio > 1) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, + "offer_to_receive_audio > 1 is not supported."); + } + + if (options.offer_to_receive_video == 0) { + RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MEDIA_TYPE_VIDEO); + } else if (options.offer_to_receive_video == 1) { + AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); + } else if (options.offer_to_receive_video > 1) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, + "offer_to_receive_video > 1 is not supported."); + } + + return RTCError::OK(); +} + +void SdpOfferAnswerHandler::RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MediaType media_type) { + for (const auto& transceiver : GetReceivingTransceiversOfType(media_type)) { + RtpTransceiverDirection new_direction = + RtpTransceiverDirectionWithRecvSet(transceiver->direction(), false); + if (new_direction != transceiver->direction()) { + RTC_LOG(LS_INFO) << "Changing " << cricket::MediaTypeToString(media_type) + << " transceiver (MID=" + << transceiver->mid().value_or("") << ") from " + << RtpTransceiverDirectionToString( + transceiver->direction()) + << " to " + << 
RtpTransceiverDirectionToString(new_direction) + << " since CreateOffer specified offer_to_receive=0"; + transceiver->internal()->set_direction(new_direction); + } + } +} + +void SdpOfferAnswerHandler::AddUpToOneReceivingTransceiverOfType( + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (GetReceivingTransceiversOfType(media_type).empty()) { + RTC_LOG(LS_INFO) + << "Adding one recvonly " << cricket::MediaTypeToString(media_type) + << " transceiver since CreateOffer specified offer_to_receive=1"; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kRecvOnly; + pc_->AddTransceiver(media_type, nullptr, init, + /*update_negotiation_needed=*/false); + } +} + +std::vector>> +SdpOfferAnswerHandler::GetReceivingTransceiversOfType( + cricket::MediaType media_type) { + std::vector< + rtc::scoped_refptr>> + receiving_transceivers; + for (const auto& transceiver : transceivers()->List()) { + if (!transceiver->stopped() && transceiver->media_type() == media_type && + RtpTransceiverDirectionHasRecv(transceiver->direction())) { + receiving_transceivers.push_back(transceiver); + } + } + return receiving_transceivers; +} + +void SdpOfferAnswerHandler::ProcessRemovalOfRemoteTrack( + rtc::scoped_refptr> + transceiver, + std::vector>* remove_list, + std::vector>* removed_streams) { + RTC_DCHECK(transceiver->mid()); + RTC_LOG(LS_INFO) << "Processing the removal of a track for MID=" + << *transceiver->mid(); + std::vector> previous_streams = + transceiver->internal()->receiver_internal()->streams(); + // This will remove the remote track from the streams. 
+ transceiver->internal()->receiver_internal()->set_stream_ids({}); + remove_list->push_back(transceiver); + RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); +} + +void SdpOfferAnswerHandler::RemoveRemoteStreamsIfEmpty( + const std::vector>& remote_streams, + std::vector>* removed_streams) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // TODO(https://crbug.com/webrtc/9480): When we use stream IDs instead of + // streams, see if the stream was removed by checking if this was the last + // receiver with that stream ID. + for (const auto& remote_stream : remote_streams) { + if (remote_stream->GetAudioTracks().empty() && + remote_stream->GetVideoTracks().empty()) { + remote_streams_->RemoveStream(remote_stream); + removed_streams->push_back(remote_stream); + } + } +} + +void SdpOfferAnswerHandler::RemoveSenders(cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + UpdateLocalSenders(std::vector(), media_type); + UpdateRemoteSendersList(std::vector(), false, + media_type, nullptr); +} + +void SdpOfferAnswerHandler::UpdateLocalSenders( + const std::vector& streams, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector* current_senders = + rtp_manager()->GetLocalSenderInfos(media_type); + + // Find removed tracks. I.e., tracks where the track id, stream id or ssrc + // don't match the new StreamParam. + for (auto sender_it = current_senders->begin(); + sender_it != current_senders->end(); + /* incremented manually */) { + const RtpSenderInfo& info = *sender_it; + const cricket::StreamParams* params = + cricket::GetStreamBySsrc(streams, info.first_ssrc); + if (!params || params->id != info.sender_id || + params->first_stream_id() != info.stream_id) { + rtp_manager()->OnLocalSenderRemoved(info, media_type); + sender_it = current_senders->erase(sender_it); + } else { + ++sender_it; + } + } + + // Find new and active senders. 
+ for (const cricket::StreamParams& params : streams) { + // The sync_label is the MediaStream label and the |stream.id| is the + // sender id. + const std::string& stream_id = params.first_stream_id(); + const std::string& sender_id = params.id; + uint32_t ssrc = params.first_ssrc(); + const RtpSenderInfo* sender_info = + rtp_manager()->FindSenderInfo(*current_senders, stream_id, sender_id); + if (!sender_info) { + current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); + rtp_manager()->OnLocalSenderAdded(current_senders->back(), media_type); + } + } +} + +void SdpOfferAnswerHandler::UpdateRemoteSendersList( + const cricket::StreamParamsVec& streams, + bool default_sender_needed, + cricket::MediaType media_type, + StreamCollection* new_streams) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + + std::vector* current_senders = + rtp_manager()->GetRemoteSenderInfos(media_type); + + // Find removed senders. I.e., senders where the sender id or ssrc don't match + // the new StreamParam. + for (auto sender_it = current_senders->begin(); + sender_it != current_senders->end(); + /* incremented manually */) { + const RtpSenderInfo& info = *sender_it; + const cricket::StreamParams* params = + cricket::GetStreamBySsrc(streams, info.first_ssrc); + std::string params_stream_id; + if (params) { + params_stream_id = + (!params->first_stream_id().empty() ? params->first_stream_id() + : kDefaultStreamId); + } + bool sender_exists = params && params->id == info.sender_id && + params_stream_id == info.stream_id; + // If this is a default track, and we still need it, don't remove it. + if ((info.stream_id == kDefaultStreamId && default_sender_needed) || + sender_exists) { + ++sender_it; + } else { + rtp_manager()->OnRemoteSenderRemoved( + info, remote_streams_->find(info.stream_id), media_type); + sender_it = current_senders->erase(sender_it); + } + } + + // Find new and active senders. 
+ for (const cricket::StreamParams& params : streams) { + if (!params.has_ssrcs()) { + // The remote endpoint has streams, but didn't signal ssrcs. For an active + // sender, this means it is coming from a Unified Plan endpoint,so we just + // create a default. + default_sender_needed = true; + break; + } + + // |params.id| is the sender id and the stream id uses the first of + // |params.stream_ids|. The remote description could come from a Unified + // Plan endpoint, with multiple or no stream_ids() signaled. Since this is + // not supported in Plan B, we just take the first here and create the + // default stream ID if none is specified. + const std::string& stream_id = + (!params.first_stream_id().empty() ? params.first_stream_id() + : kDefaultStreamId); + const std::string& sender_id = params.id; + uint32_t ssrc = params.first_ssrc(); + + rtc::scoped_refptr stream = + remote_streams_->find(stream_id); + if (!stream) { + // This is a new MediaStream. Create a new remote MediaStream. + stream = MediaStreamProxy::Create(rtc::Thread::Current(), + MediaStream::Create(stream_id)); + remote_streams_->AddStream(stream); + new_streams->AddStream(stream); + } + + const RtpSenderInfo* sender_info = + rtp_manager()->FindSenderInfo(*current_senders, stream_id, sender_id); + if (!sender_info) { + current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); + rtp_manager()->OnRemoteSenderAdded(current_senders->back(), stream, + media_type); + } + } + + // Add default sender if necessary. + if (default_sender_needed) { + rtc::scoped_refptr default_stream = + remote_streams_->find(kDefaultStreamId); + if (!default_stream) { + // Create the new default MediaStream. + default_stream = MediaStreamProxy::Create( + rtc::Thread::Current(), MediaStream::Create(kDefaultStreamId)); + remote_streams_->AddStream(default_stream); + new_streams->AddStream(default_stream); + } + std::string default_sender_id = (media_type == cricket::MEDIA_TYPE_AUDIO) + ? 
kDefaultAudioSenderId + : kDefaultVideoSenderId; + const RtpSenderInfo* default_sender_info = rtp_manager()->FindSenderInfo( + *current_senders, kDefaultStreamId, default_sender_id); + if (!default_sender_info) { + current_senders->push_back( + RtpSenderInfo(kDefaultStreamId, default_sender_id, /*ssrc=*/0)); + rtp_manager()->OnRemoteSenderAdded(current_senders->back(), + default_stream, media_type); + } + } +} + +void SdpOfferAnswerHandler::EnableSending() { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers()->List()) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (channel && !channel->enabled()) { + channel->Enable(true); + } + } + + if (data_channel_controller()->rtp_data_channel() && + !data_channel_controller()->rtp_data_channel()->enabled()) { + data_channel_controller()->rtp_data_channel()->Enable(true); + } +} + +RTCError SdpOfferAnswerHandler::PushdownMediaDescription( + SdpType type, + cricket::ContentSource source) { + const SessionDescriptionInterface* sdesc = + (source == cricket::CS_LOCAL ? local_description() + : remote_description()); + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(sdesc); + + if (!UpdatePayloadTypeDemuxingState(source)) { + // Note that this is never expected to fail, since RtpDemuxer doesn't return + // an error when changing payload type demux criteria, which is all this + // does. + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to update payload type demuxing state."); + } + + // Push down the new SDP media section for each audio/video transceiver. 
+ for (const auto& transceiver : transceivers()->List()) { + const ContentInfo* content_info = + FindMediaSectionForTransceiver(transceiver, sdesc); + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (!channel || !content_info || content_info->rejected) { + continue; + } + const MediaContentDescription* content_desc = + content_info->media_description(); + if (!content_desc) { + continue; + } + std::string error; + bool success = (source == cricket::CS_LOCAL) + ? channel->SetLocalContent(content_desc, type, &error) + : channel->SetRemoteContent(content_desc, type, &error); + if (!success) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); + } + } + + // If using the RtpDataChannel, push down the new SDP section for it too. + if (data_channel_controller()->rtp_data_channel()) { + const ContentInfo* data_content = + cricket::GetFirstDataContent(sdesc->description()); + if (data_content && !data_content->rejected) { + const MediaContentDescription* data_desc = + data_content->media_description(); + if (data_desc) { + std::string error; + bool success = (source == cricket::CS_LOCAL) + ? 
data_channel_controller() + ->rtp_data_channel() + ->SetLocalContent(data_desc, type, &error) + : data_channel_controller() + ->rtp_data_channel() + ->SetRemoteContent(data_desc, type, &error); + if (!success) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); + } + } + } + } + + // Need complete offer/answer with an SCTP m= section before starting SCTP, + // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19 + if (pc_->sctp_mid() && local_description() && remote_description()) { + rtc::scoped_refptr sctp_transport = + transport_controller()->GetSctpTransport(*(pc_->sctp_mid())); + auto local_sctp_description = cricket::GetFirstSctpDataContentDescription( + local_description()->description()); + auto remote_sctp_description = cricket::GetFirstSctpDataContentDescription( + remote_description()->description()); + if (sctp_transport && local_sctp_description && remote_sctp_description) { + int max_message_size; + // A remote max message size of zero means "any size supported". + // We configure the connection with our own max message size. 
+ if (remote_sctp_description->max_message_size() == 0) { + max_message_size = local_sctp_description->max_message_size(); + } else { + max_message_size = + std::min(local_sctp_description->max_message_size(), + remote_sctp_description->max_message_size()); + } + sctp_transport->Start(local_sctp_description->port(), + remote_sctp_description->port(), max_message_size); + } + } + + return RTCError::OK(); +} + +RTCError SdpOfferAnswerHandler::PushdownTransportDescription( + cricket::ContentSource source, + SdpType type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + if (source == cricket::CS_LOCAL) { + const SessionDescriptionInterface* sdesc = local_description(); + RTC_DCHECK(sdesc); + return transport_controller()->SetLocalDescription(type, + sdesc->description()); + } else { + const SessionDescriptionInterface* sdesc = remote_description(); + RTC_DCHECK(sdesc); + return transport_controller()->SetRemoteDescription(type, + sdesc->description()); + } +} + +void SdpOfferAnswerHandler::RemoveStoppedTransceivers() { + RTC_DCHECK_RUN_ON(signaling_thread()); + // 3.2.10.1: For each transceiver in the connection's set of transceivers + // run the following steps: + if (!IsUnifiedPlan()) + return; + // Traverse a copy of the transceiver list. + auto transceiver_list = transceivers()->List(); + for (auto transceiver : transceiver_list) { + // 3.2.10.1.1: If transceiver is stopped, associated with an m= section + // and the associated m= section is rejected in + // connection.[[CurrentLocalDescription]] or + // connection.[[CurrentRemoteDescription]], remove the + // transceiver from the connection's set of transceivers. 
+ if (!transceiver->stopped()) { + continue; + } + const ContentInfo* local_content = + FindMediaSectionForTransceiver(transceiver, local_description()); + const ContentInfo* remote_content = + FindMediaSectionForTransceiver(transceiver, remote_description()); + if ((local_content && local_content->rejected) || + (remote_content && remote_content->rejected)) { + RTC_LOG(LS_INFO) << "Dissociating transceiver" + << " since the media section is being recycled."; + transceiver->internal()->set_mid(absl::nullopt); + transceiver->internal()->set_mline_index(absl::nullopt); + transceivers()->Remove(transceiver); + continue; + } + if (!local_content && !remote_content) { + // TODO(bugs.webrtc.org/11973): Consider if this should be removed already + // See https://github.com/w3c/webrtc-pc/issues/2576 + RTC_LOG(LS_INFO) + << "Dropping stopped transceiver that was never associated"; + transceivers()->Remove(transceiver); + continue; + } + } +} + +void SdpOfferAnswerHandler::RemoveUnusedChannels( + const SessionDescription* desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Destroy video channel first since it may have a pointer to the + // voice channel. 
+ const cricket::ContentInfo* video_info = cricket::GetFirstVideoContent(desc); + if (!video_info || video_info->rejected) { + DestroyTransceiverChannel(rtp_manager()->GetVideoTransceiver()); + } + + const cricket::ContentInfo* audio_info = cricket::GetFirstAudioContent(desc); + if (!audio_info || audio_info->rejected) { + DestroyTransceiverChannel(rtp_manager()->GetAudioTransceiver()); + } + + const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc); + if (!data_info || data_info->rejected) { + DestroyDataChannelTransport(); + } +} + +void SdpOfferAnswerHandler::ReportNegotiatedSdpSemantics( + const SessionDescriptionInterface& answer) { + SdpSemanticNegotiated semantics_negotiated; + switch (answer.description()->msid_signaling()) { + case 0: + semantics_negotiated = kSdpSemanticNegotiatedNone; + break; + case cricket::kMsidSignalingMediaSection: + semantics_negotiated = kSdpSemanticNegotiatedUnifiedPlan; + break; + case cricket::kMsidSignalingSsrcAttribute: + semantics_negotiated = kSdpSemanticNegotiatedPlanB; + break; + case cricket::kMsidSignalingMediaSection | + cricket::kMsidSignalingSsrcAttribute: + semantics_negotiated = kSdpSemanticNegotiatedMixed; + break; + default: + RTC_NOTREACHED(); + } + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpSemanticNegotiated", + semantics_negotiated, kSdpSemanticNegotiatedMax); +} + +void SdpOfferAnswerHandler::UpdateEndedRemoteMediaStreams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector> streams_to_remove; + for (size_t i = 0; i < remote_streams_->count(); ++i) { + MediaStreamInterface* stream = remote_streams_->at(i); + if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { + streams_to_remove.push_back(stream); + } + } + + for (auto& stream : streams_to_remove) { + remote_streams_->RemoveStream(stream); + pc_->Observer()->OnRemoveStream(std::move(stream)); + } +} + +bool SdpOfferAnswerHandler::UseCandidatesInSessionDescription( + const SessionDescriptionInterface* 
remote_desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!remote_desc) { + return true; + } + bool ret = true; + + for (size_t m = 0; m < remote_desc->number_of_mediasections(); ++m) { + const IceCandidateCollection* candidates = remote_desc->candidates(m); + for (size_t n = 0; n < candidates->count(); ++n) { + const IceCandidateInterface* candidate = candidates->at(n); + bool valid = false; + if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) { + if (valid) { + RTC_LOG(LS_INFO) + << "UseCandidatesInSessionDescription: Not ready to use " + "candidate."; + } + continue; + } + ret = UseCandidate(candidate); + if (!ret) { + break; + } + } + } + return ret; +} + +bool SdpOfferAnswerHandler::UseCandidate( + const IceCandidateInterface* candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTCErrorOr result = + FindContentInfo(remote_description(), candidate); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "UseCandidate: Invalid candidate. " + << result.error().message(); + return false; + } + std::vector candidates; + candidates.push_back(candidate->candidate()); + // Invoking BaseSession method to handle remote candidates. + RTCError error = transport_controller()->AddRemoteCandidates( + result.value()->name, candidates); + if (error.ok()) { + ReportRemoteIceCandidateAdded(candidate->candidate()); + // Candidates successfully submitted for checking. + if (pc_->ice_connection_state() == + PeerConnectionInterface::kIceConnectionNew || + pc_->ice_connection_state() == + PeerConnectionInterface::kIceConnectionDisconnected) { + // If state is New, then the session has just gotten its first remote ICE + // candidates, so go to Checking. + // If state is Disconnected, the session is re-using old candidates or + // receiving additional ones, so go to Checking. + // If state is Connected, stay Connected. + // TODO(bemasc): If state is Connected, and the new candidates are for a + // newly added transport, then the state actually _should_ move to + // checking. 
Add a way to distinguish that case. + pc_->SetIceConnectionState( + PeerConnectionInterface::kIceConnectionChecking); + } + // TODO(bemasc): If state is Completed, go back to Connected. + } else { + RTC_LOG(LS_WARNING) << error.message(); + } + return true; +} + +// We need to check the local/remote description for the Transport instead of +// the session, because a new Transport added during renegotiation may have +// them unset while the session has them set from the previous negotiation. +// Not doing so may trigger the auto generation of transport description and +// mess up DTLS identity information, ICE credential, etc. +bool SdpOfferAnswerHandler::ReadyToUseRemoteCandidate( + const IceCandidateInterface* candidate, + const SessionDescriptionInterface* remote_desc, + bool* valid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + *valid = true; + + const SessionDescriptionInterface* current_remote_desc = + remote_desc ? remote_desc : remote_description(); + + if (!current_remote_desc) { + return false; + } + + RTCErrorOr result = + FindContentInfo(current_remote_desc, candidate); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "ReadyToUseRemoteCandidate: Invalid candidate. 
" + << result.error().message(); + + *valid = false; + return false; + } + + std::string transport_name = GetTransportName(result.value()->name); + return !transport_name.empty(); +} + +void SdpOfferAnswerHandler::ReportRemoteIceCandidateAdded( + const cricket::Candidate& candidate) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED); + if (candidate.address().IsPrivateIP()) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED); + } + if (candidate.address().IsUnresolvedIP()) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED); + } + if (candidate.address().family() == AF_INET6) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED); + } +} + +RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( + const SessionDescriptionInterface* description, + const IceCandidateInterface* candidate) { + if (candidate->sdp_mline_index() >= 0) { + size_t mediacontent_index = + static_cast(candidate->sdp_mline_index()); + size_t content_size = description->description()->contents().size(); + if (mediacontent_index < content_size) { + return &description->description()->contents()[mediacontent_index]; + } else { + return RTCError(RTCErrorType::INVALID_RANGE, + "Media line index (" + + rtc::ToString(candidate->sdp_mline_index()) + + ") out of range (number of mlines: " + + rtc::ToString(content_size) + ")."); + } + } else if (!candidate->sdp_mid().empty()) { + auto& contents = description->description()->contents(); + auto it = absl::c_find_if( + contents, [candidate](const cricket::ContentInfo& content_info) { + return content_info.mid() == candidate->sdp_mid(); + }); + if (it == contents.end()) { + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "Mid " + candidate->sdp_mid() + + " specified but no media section with that mid found."); + } else { + return &*it; + } + } + + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Neither sdp_mline_index nor sdp_mid specified."); +} + +RTCError SdpOfferAnswerHandler::CreateChannels(const 
SessionDescription& desc) { + // Creating the media channels. Transports should already have been created + // at this point. + RTC_DCHECK_RUN_ON(signaling_thread()); + const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(&desc); + if (voice && !voice->rejected && + !rtp_manager()->GetAudioTransceiver()->internal()->channel()) { + cricket::VoiceChannel* voice_channel = CreateVoiceChannel(voice->name); + if (!voice_channel) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create voice channel."); + } + rtp_manager()->GetAudioTransceiver()->internal()->SetChannel(voice_channel); + } + + const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc); + if (video && !video->rejected && + !rtp_manager()->GetVideoTransceiver()->internal()->channel()) { + cricket::VideoChannel* video_channel = CreateVideoChannel(video->name); + if (!video_channel) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create video channel."); + } + rtp_manager()->GetVideoTransceiver()->internal()->SetChannel(video_channel); + } + + const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); + if (pc_->data_channel_type() != cricket::DCT_NONE && data && + !data->rejected && !data_channel_controller()->rtp_data_channel() && + !data_channel_controller()->data_channel_transport()) { + if (!CreateDataChannel(data->name)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); + } + } + + return RTCError::OK(); +} + +// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. +cricket::VoiceChannel* SdpOfferAnswerHandler::CreateVoiceChannel( + const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + + // TODO(bugs.webrtc.org/11992): CreateVoiceChannel internally switches to the + // worker thread. 
We shouldn't be using the |call_ptr_| hack here but simply + // be on the worker thread and use |call_| (update upstream code). + cricket::VoiceChannel* voice_channel; + { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + voice_channel = channel_manager()->CreateVoiceChannel( + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, audio_options()); + } + if (!voice_channel) { + return nullptr; + } + voice_channel->SignalSentPacket().connect(pc_, + &PeerConnection::OnSentPacket_w); + voice_channel->SetRtpTransport(rtp_transport); + + return voice_channel; +} + +// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. +cricket::VideoChannel* SdpOfferAnswerHandler::CreateVideoChannel( + const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + + // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to the + // worker thread. We shouldn't be using the |call_ptr_| hack here but simply + // be on the worker thread and use |call_| (update upstream code). 
+ cricket::VideoChannel* video_channel; + { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + video_channel = channel_manager()->CreateVideoChannel( + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, video_options(), + video_bitrate_allocator_factory_.get()); + } + if (!video_channel) { + return nullptr; + } + video_channel->SignalSentPacket().connect(pc_, + &PeerConnection::OnSentPacket_w); + video_channel->SetRtpTransport(rtp_transport); + + return video_channel; +} + +bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + switch (pc_->data_channel_type()) { + case cricket::DCT_SCTP: + if (pc_->network_thread()->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnection::SetupDataChannelTransport_n, pc_, + mid))) { + pc_->SetSctpDataMid(mid); + } else { + return false; + } + return true; + case cricket::DCT_RTP: + default: + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + // TODO(bugs.webrtc.org/9987): set_rtp_data_channel() should be called on + // the network thread like set_data_channel_transport is. 
+ { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + data_channel_controller()->set_rtp_data_channel( + channel_manager()->CreateRtpDataChannel( + pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), + pc_->GetCryptoOptions(), &ssrc_generator_)); + } + if (!data_channel_controller()->rtp_data_channel()) { + return false; + } + data_channel_controller()->rtp_data_channel()->SignalSentPacket().connect( + pc_, &PeerConnection::OnSentPacket_w); + data_channel_controller()->rtp_data_channel()->SetRtpTransport( + rtp_transport); + SetHavePendingRtpDataChannel(); + return true; + } + return false; +} + +void SdpOfferAnswerHandler::DestroyTransceiverChannel( + rtc::scoped_refptr> + transceiver) { + RTC_DCHECK(transceiver); + + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (channel) { + transceiver->internal()->SetChannel(nullptr); + DestroyChannelInterface(channel); + } +} + +void SdpOfferAnswerHandler::DestroyDataChannelTransport() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (data_channel_controller()->rtp_data_channel()) { + data_channel_controller()->OnTransportChannelClosed(); + DestroyChannelInterface(data_channel_controller()->rtp_data_channel()); + data_channel_controller()->set_rtp_data_channel(nullptr); + } + + // Note: Cannot use rtc::Bind to create a functor to invoke because it will + // grab a reference to this PeerConnection. If this is called from the + // PeerConnection destructor, the RefCountedObject vtable will have already + // been destroyed (since it is a subclass of PeerConnection) and using + // rtc::Bind will cause "Pure virtual function called" error to appear. 
+ + if (pc_->sctp_mid()) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + data_channel_controller()->OnTransportChannelClosed(); + pc_->network_thread()->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(pc_->network_thread()); + pc_->TeardownDataChannelTransport_n(); + }); + pc_->ResetSctpDataMid(); + } +} + +void SdpOfferAnswerHandler::DestroyChannelInterface( + cricket::ChannelInterface* channel) { + // TODO(bugs.webrtc.org/11992): All the below methods should be called on the + // worker thread. (they switch internally anyway). Change + // DestroyChannelInterface to either be called on the worker thread, or do + // this asynchronously on the worker. + RTC_DCHECK(channel); + switch (channel->media_type()) { + case cricket::MEDIA_TYPE_AUDIO: + channel_manager()->DestroyVoiceChannel( + static_cast(channel)); + break; + case cricket::MEDIA_TYPE_VIDEO: + channel_manager()->DestroyVideoChannel( + static_cast(channel)); + break; + case cricket::MEDIA_TYPE_DATA: + channel_manager()->DestroyRtpDataChannel( + static_cast(channel)); + break; + default: + RTC_NOTREACHED() << "Unknown media type: " << channel->media_type(); + break; + } +} + +void SdpOfferAnswerHandler::DestroyAllChannels() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!transceivers()) { + return; + } + // Destroy video channels first since they may have a pointer to a voice + // channel. 
+ for (const auto& transceiver : transceivers()->List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { + DestroyTransceiverChannel(transceiver); + } + } + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + DestroyTransceiverChannel(transceiver); + } + } + DestroyDataChannelTransport(); +} + +void SdpOfferAnswerHandler::GenerateMediaDescriptionOptions( + const SessionDescriptionInterface* session_desc, + RtpTransceiverDirection audio_direction, + RtpTransceiverDirection video_direction, + absl::optional* audio_index, + absl::optional* video_index, + absl::optional* data_index, + cricket::MediaSessionOptions* session_options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const cricket::ContentInfo& content : + session_desc->description()->contents()) { + if (IsAudioContent(&content)) { + // If we already have an audio m= section, reject this extra one. + if (*audio_index) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions( + cricket::MEDIA_TYPE_AUDIO, content.name, + RtpTransceiverDirection::kInactive, /*stopped=*/true)); + } else { + bool stopped = (audio_direction == RtpTransceiverDirection::kInactive); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_AUDIO, + content.name, audio_direction, + stopped)); + *audio_index = session_options->media_description_options.size() - 1; + } + session_options->media_description_options.back().header_extensions = + channel_manager()->GetSupportedAudioRtpHeaderExtensions(); + } else if (IsVideoContent(&content)) { + // If we already have an video m= section, reject this extra one. 
+ if (*video_index) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions( + cricket::MEDIA_TYPE_VIDEO, content.name, + RtpTransceiverDirection::kInactive, /*stopped=*/true)); + } else { + bool stopped = (video_direction == RtpTransceiverDirection::kInactive); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_VIDEO, + content.name, video_direction, + stopped)); + *video_index = session_options->media_description_options.size() - 1; + } + session_options->media_description_options.back().header_extensions = + channel_manager()->GetSupportedVideoRtpHeaderExtensions(); + } else if (IsUnsupportedContent(&content)) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_UNSUPPORTED, + content.name, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + RTC_DCHECK(IsDataContent(&content)); + // If we already have an data m= section, reject this extra one. + if (*data_index) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(content.name)); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(content.name)); + *data_index = session_options->media_description_options.size() - 1; + } + } + } +} + +cricket::MediaDescriptionOptions +SdpOfferAnswerHandler::GetMediaDescriptionOptionsForActiveData( + const std::string& mid) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Direction for data sections is meaningless, but legacy endpoints might + // expect sendrecv. 
+ cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, + RtpTransceiverDirection::kSendRecv, + /*stopped=*/false); + AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()), + &options); + return options; +} + +cricket::MediaDescriptionOptions +SdpOfferAnswerHandler::GetMediaDescriptionOptionsForRejectedData( + const std::string& mid) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true); + AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()), + &options); + return options; +} + +const std::string SdpOfferAnswerHandler::GetTransportName( + const std::string& content_name) { + RTC_DCHECK_RUN_ON(signaling_thread()); + cricket::ChannelInterface* channel = pc_->GetChannel(content_name); + if (channel) { + return channel->transport_name(); + } + if (data_channel_controller()->data_channel_transport()) { + RTC_DCHECK(pc_->sctp_mid()); + if (content_name == *(pc_->sctp_mid())) { + return *(pc_->sctp_transport_name()); + } + } + // Return an empty string if failed to retrieve the transport name. + return ""; +} + +bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( + cricket::ContentSource source) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // We may need to delete any created default streams and disable creation of + // new ones on the basis of payload type. This is needed to avoid SSRC + // collisions in Call's RtpDemuxer, in the case that a transceiver has + // created a default stream, and then some other channel gets the SSRC + // signaled in the corresponding Unified Plan "m=" section. Specifically, we + // need to disable payload type based demuxing when two bundled "m=" sections + // are using the same payload type(s). 
For more context + // see https://bugs.chromium.org/p/webrtc/issues/detail?id=11477 + const SessionDescriptionInterface* sdesc = + (source == cricket::CS_LOCAL ? local_description() + : remote_description()); + const cricket::ContentGroup* bundle_group = + sdesc->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + std::set audio_payload_types; + std::set video_payload_types; + bool pt_demuxing_enabled_audio = true; + bool pt_demuxing_enabled_video = true; + for (auto& content_info : sdesc->description()->contents()) { + // If this m= section isn't bundled, it's safe to demux by payload type + // since other m= sections using the same payload type will also be using + // different transports. + if (!bundle_group || !bundle_group->HasContentName(content_info.name)) { + continue; + } + if (content_info.rejected || + (source == cricket::ContentSource::CS_LOCAL && + !RtpTransceiverDirectionHasRecv( + content_info.media_description()->direction())) || + (source == cricket::ContentSource::CS_REMOTE && + !RtpTransceiverDirectionHasSend( + content_info.media_description()->direction()))) { + // Ignore transceivers that are not receiving. + continue; + } + switch (content_info.media_description()->type()) { + case cricket::MediaType::MEDIA_TYPE_AUDIO: { + const cricket::AudioContentDescription* audio_desc = + content_info.media_description()->as_audio(); + for (const cricket::AudioCodec& audio : audio_desc->codecs()) { + if (audio_payload_types.count(audio.id)) { + // Two m= sections are using the same payload type, thus demuxing + // by payload type is not possible. 
+ pt_demuxing_enabled_audio = false; + } + audio_payload_types.insert(audio.id); + } + break; + } + case cricket::MediaType::MEDIA_TYPE_VIDEO: { + const cricket::VideoContentDescription* video_desc = + content_info.media_description()->as_video(); + for (const cricket::VideoCodec& video : video_desc->codecs()) { + if (video_payload_types.count(video.id)) { + // Two m= sections are using the same payload type, thus demuxing + // by payload type is not possible. + pt_demuxing_enabled_video = false; + } + video_payload_types.insert(video.id); + } + break; + } + default: + // Ignore data channels. + continue; + } + } + + // Gather all updates ahead of time so that all channels can be updated in a + // single Invoke; necessary due to thread guards. + std::vector> + channels_to_update; + for (const auto& transceiver : transceivers()->List()) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, sdesc); + if (!channel || !content) { + continue; + } + RtpTransceiverDirection local_direction = + content->media_description()->direction(); + if (source == cricket::CS_REMOTE) { + local_direction = RtpTransceiverDirectionReversed(local_direction); + } + channels_to_update.emplace_back(local_direction, + transceiver->internal()->channel()); + } + + if (channels_to_update.empty()) { + return true; + } + return pc_->worker_thread()->Invoke( + RTC_FROM_HERE, [&channels_to_update, bundle_group, + pt_demuxing_enabled_audio, pt_demuxing_enabled_video]() { + for (const auto& it : channels_to_update) { + RtpTransceiverDirection local_direction = it.first; + cricket::ChannelInterface* channel = it.second; + cricket::MediaType media_type = channel->media_type(); + bool in_bundle_group = (bundle_group && bundle_group->HasContentName( + channel->content_name())); + if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { + if (!channel->SetPayloadTypeDemuxingEnabled( + (!in_bundle_group || 
pt_demuxing_enabled_audio) && + RtpTransceiverDirectionHasRecv(local_direction))) { + return false; + } + } else if (media_type == cricket::MediaType::MEDIA_TYPE_VIDEO) { + if (!channel->SetPayloadTypeDemuxingEnabled( + (!in_bundle_group || pt_demuxing_enabled_video) && + RtpTransceiverDirectionHasRecv(local_direction))) { + return false; + } + } + } + return true; + }); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h new file mode 100644 index 000000000..43a3dbb5a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h @@ -0,0 +1,676 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef PC_SDP_OFFER_ANSWER_H_ +#define PC_SDP_OFFER_ANSWER_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_options.h" +#include "api/candidate.h" +#include "api/jsep.h" +#include "api/jsep_ice_candidate.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_transceiver_direction.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/transport/data_channel_transport_interface.h" +#include "api/turn_customizer.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "media/base/media_channel.h" +#include "media/base/stream_params.h" +#include "p2p/base/port_allocator.h" +#include "pc/channel.h" +#include "pc/channel_interface.h" +#include "pc/channel_manager.h" +#include "pc/data_channel_controller.h" +#include "pc/ice_server_parsing.h" +#include "pc/jsep_transport_controller.h" +#include "pc/media_session.h" +#include "pc/media_stream_observer.h" +#include "pc/peer_connection_factory.h" +#include "pc/peer_connection_internal.h" +#include "pc/rtc_stats_collector.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/sctp_transport.h" +#include "pc/sdp_state_provider.h" +#include "pc/session_description.h" +#include "pc/stats_collector.h" +#include "pc/stream_collection.h" +#include "pc/transceiver_list.h" +#include "pc/webrtc_session_description_factory.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/operations_chain.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/rtc_certificate.h" +#include 
"rtc_base/ssl_stream_adapter.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/unique_id_generator.h" +#include "rtc_base/weak_ptr.h" + +namespace webrtc { + +// SdpOfferAnswerHandler is a component +// of the PeerConnection object as defined +// by the PeerConnectionInterface API surface. +// The class is responsible for the following: +// - Parsing and interpreting SDP. +// - Generating offers and answers based on the current state. +// This class lives on the signaling thread. +class SdpOfferAnswerHandler : public SdpStateProvider, + public sigslot::has_slots<> { + public: + ~SdpOfferAnswerHandler(); + + // Creates an SdpOfferAnswerHandler. Modifies dependencies. + static std::unique_ptr Create( + PeerConnection* pc, + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies); + + void ResetSessionDescFactory() { + RTC_DCHECK_RUN_ON(signaling_thread()); + webrtc_session_desc_factory_.reset(); + } + const WebRtcSessionDescriptionFactory* webrtc_session_desc_factory() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return webrtc_session_desc_factory_.get(); + } + + // Change signaling state to Closed, and perform appropriate actions. + void Close(); + + // Called as part of destroying the owning PeerConnection. 
+ void PrepareForShutdown(); + + // Implementation of SdpStateProvider + PeerConnectionInterface::SignalingState signaling_state() const override; + + const SessionDescriptionInterface* local_description() const override; + const SessionDescriptionInterface* remote_description() const override; + const SessionDescriptionInterface* current_local_description() const override; + const SessionDescriptionInterface* current_remote_description() + const override; + const SessionDescriptionInterface* pending_local_description() const override; + const SessionDescriptionInterface* pending_remote_description() + const override; + + bool NeedsIceRestart(const std::string& content_name) const override; + bool IceRestartPending(const std::string& content_name) const override; + absl::optional GetDtlsRole( + const std::string& mid) const override; + + void RestartIce(); + + // JSEP01 + void CreateOffer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options); + void CreateAnswer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options); + + void SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + void SetLocalDescription( + rtc::scoped_refptr observer); + void SetLocalDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc); + void SetLocalDescription(SetSessionDescriptionObserver* observer); + + void SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + void SetRemoteDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc); + + PeerConnectionInterface::RTCConfiguration GetConfiguration(); + RTCError SetConfiguration( + const PeerConnectionInterface::RTCConfiguration& configuration); + bool AddIceCandidate(const IceCandidateInterface* candidate); + void AddIceCandidate(std::unique_ptr candidate, + std::function callback); + bool RemoveIceCandidates(const 
std::vector& candidates); + // Adds a locally generated candidate to the local description. + void AddLocalIceCandidate(const JsepIceCandidate* candidate); + void RemoveLocalIceCandidates( + const std::vector& candidates); + bool ShouldFireNegotiationNeededEvent(uint32_t event_id); + + bool AddStream(MediaStreamInterface* local_stream); + void RemoveStream(MediaStreamInterface* local_stream); + + absl::optional is_caller(); + bool HasNewIceCredentials(); + void UpdateNegotiationNeeded(); + void SetHavePendingRtpDataChannel() { + RTC_DCHECK_RUN_ON(signaling_thread()); + have_pending_rtp_data_channel_ = true; + } + + // Returns the media section in the given session description that is + // associated with the RtpTransceiver. Returns null if none found or this + // RtpTransceiver is not associated. Logic varies depending on the + // SdpSemantics specified in the configuration. + const cricket::ContentInfo* FindMediaSectionForTransceiver( + rtc::scoped_refptr> + transceiver, + const SessionDescriptionInterface* sdesc) const; + + // Destroys all BaseChannels and destroys the SCTP data channel, if present. + void DestroyAllChannels(); + + rtc::scoped_refptr local_streams(); + rtc::scoped_refptr remote_streams(); + + private: + class ImplicitCreateSessionDescriptionObserver; + + friend class ImplicitCreateSessionDescriptionObserver; + class SetSessionDescriptionObserverAdapter; + + friend class SetSessionDescriptionObserverAdapter; + + enum class SessionError { + kNone, // No error. + kContent, // Error in BaseChannel SetLocalContent/SetRemoteContent. + kTransport, // Error from the underlying transport. + }; + + // Represents the [[LocalIceCredentialsToReplace]] internal slot in the spec. + // It makes the next CreateOffer() produce new ICE credentials even if + // RTCOfferAnswerOptions::ice_restart is false. 
+ // https://w3c.github.io/webrtc-pc/#dfn-localufragstoreplace + // TODO(hbos): When JsepTransportController/JsepTransport supports rollback, + // move this type of logic to JsepTransportController/JsepTransport. + class LocalIceCredentialsToReplace; + + // Only called by the Create() function. + explicit SdpOfferAnswerHandler(PeerConnection* pc); + // Called from the `Create()` function. Can only be called + // once. Modifies dependencies. + void Initialize( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies); + + rtc::Thread* signaling_thread() const; + // Non-const versions of local_description()/remote_description(), for use + // internally. + SessionDescriptionInterface* mutable_local_description() + RTC_RUN_ON(signaling_thread()) { + return pending_local_description_ ? pending_local_description_.get() + : current_local_description_.get(); + } + SessionDescriptionInterface* mutable_remote_description() + RTC_RUN_ON(signaling_thread()) { + return pending_remote_description_ ? pending_remote_description_.get() + : current_remote_description_.get(); + } + + // Synchronous implementations of SetLocalDescription/SetRemoteDescription + // that return an RTCError instead of invoking a callback. + RTCError ApplyLocalDescription( + std::unique_ptr desc); + RTCError ApplyRemoteDescription( + std::unique_ptr desc); + + // Implementation of the offer/answer exchange operations. These are chained + // onto the |operations_chain_| when the public CreateOffer(), CreateAnswer(), + // SetLocalDescription() and SetRemoteDescription() methods are invoked. 
+ void DoCreateOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer); + void DoCreateAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer); + void DoSetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + void DoSetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + + // Update the state, signaling if necessary. + void ChangeSignalingState( + PeerConnectionInterface::SignalingState signaling_state); + + RTCError UpdateSessionState(SdpType type, + cricket::ContentSource source, + const cricket::SessionDescription* description); + + bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread()); + + // Signals from MediaStreamObserver. + void OnAudioTrackAdded(AudioTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + void OnAudioTrackRemoved(AudioTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + void OnVideoTrackAdded(VideoTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + void OnVideoTrackRemoved(VideoTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + + // | desc_type | is the type of the description that caused the rollback. + RTCError Rollback(SdpType desc_type); + void OnOperationsChainEmpty(); + + // Runs the algorithm **set the associated remote streams** specified in + // https://w3c.github.io/webrtc-pc/#set-associated-remote-streams. + void SetAssociatedRemoteStreams( + rtc::scoped_refptr receiver, + const std::vector& stream_ids, + std::vector>* added_streams, + std::vector>* removed_streams); + + bool CheckIfNegotiationIsNeeded(); + void GenerateNegotiationNeededEvent(); + // Helper method which verifies SDP. 
+ RTCError ValidateSessionDescription(const SessionDescriptionInterface* sdesc, + cricket::ContentSource source) + RTC_RUN_ON(signaling_thread()); + + // Updates the local RtpTransceivers according to the JSEP rules. Called as + // part of setting the local/remote description. + RTCError UpdateTransceiversAndDataChannels( + cricket::ContentSource source, + const SessionDescriptionInterface& new_session, + const SessionDescriptionInterface* old_local_description, + const SessionDescriptionInterface* old_remote_description); + + // Associate the given transceiver according to the JSEP rules. + RTCErrorOr< + rtc::scoped_refptr>> + AssociateTransceiver(cricket::ContentSource source, + SdpType type, + size_t mline_index, + const cricket::ContentInfo& content, + const cricket::ContentInfo* old_local_content, + const cricket::ContentInfo* old_remote_content) + RTC_RUN_ON(signaling_thread()); + + // If the BUNDLE policy is max-bundle, then we know for sure that all + // transports will be bundled from the start. This method returns the BUNDLE + // group if that's the case, or null if BUNDLE will be negotiated later. An + // error is returned if max-bundle is specified but the session description + // does not have a BUNDLE group. + RTCErrorOr GetEarlyBundleGroup( + const cricket::SessionDescription& desc) const + RTC_RUN_ON(signaling_thread()); + + // Either creates or destroys the transceiver's BaseChannel according to the + // given media section. + RTCError UpdateTransceiverChannel( + rtc::scoped_refptr> + transceiver, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread()); + + // Either creates or destroys the local data channel according to the given + // media section. 
+ RTCError UpdateDataChannel(cricket::ContentSource source, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) + RTC_RUN_ON(signaling_thread()); + // Check if a call to SetLocalDescription is acceptable with a session + // description of the given type. + bool ExpectSetLocalDescription(SdpType type); + // Check if a call to SetRemoteDescription is acceptable with a session + // description of the given type. + bool ExpectSetRemoteDescription(SdpType type); + + // The offer/answer machinery assumes the media section MID is present and + // unique. To support legacy end points that do not supply a=mid lines, this + // method will modify the session description to add MIDs generated according + // to the SDP semantics. + void FillInMissingRemoteMids(cricket::SessionDescription* remote_description); + + // Returns an RtpTransciever, if available, that can be used to receive the + // given media type according to JSEP rules. + rtc::scoped_refptr> + FindAvailableTransceiverToReceive(cricket::MediaType media_type) const; + + // Returns a MediaSessionOptions struct with options decided by |options|, + // the local MediaStreams and DataChannels. + void GetOptionsForOffer(const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options); + void GetOptionsForPlanBOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + void GetOptionsForUnifiedPlanOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + + // Returns a MediaSessionOptions struct with options decided by + // |constraints|, the local MediaStreams and DataChannels. 
+ void GetOptionsForAnswer(const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options); + void GetOptionsForPlanBAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + void GetOptionsForUnifiedPlanAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + + const char* SessionErrorToString(SessionError error) const; + std::string GetSessionErrorMsg(); + // Returns the last error in the session. See the enum above for details. + SessionError session_error() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return session_error_; + } + const std::string& session_error_desc() const { return session_error_desc_; } + + RTCError HandleLegacyOfferOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& options); + void RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MediaType media_type) RTC_RUN_ON(signaling_thread()); + void AddUpToOneReceivingTransceiverOfType(cricket::MediaType media_type); + + std::vector< + rtc::scoped_refptr>> + GetReceivingTransceiversOfType(cricket::MediaType media_type) + RTC_RUN_ON(signaling_thread()); + + // Runs the algorithm specified in + // https://w3c.github.io/webrtc-pc/#process-remote-track-removal + // This method will update the following lists: + // |remove_list| is the list of transceivers for which the receiving track is + // being removed. + // |removed_streams| is the list of streams which no longer have a receiving + // track so should be removed. 
+ void ProcessRemovalOfRemoteTrack( + rtc::scoped_refptr> + transceiver, + std::vector>* remove_list, + std::vector>* removed_streams); + + void RemoveRemoteStreamsIfEmpty( + const std::vector>& + remote_streams, + std::vector>* removed_streams); + + // Remove all local and remote senders of type |media_type|. + // Called when a media type is rejected (m-line set to port 0). + void RemoveSenders(cricket::MediaType media_type); + + // Loops through the vector of |streams| and finds added and removed + // StreamParams since last time this method was called. + // For each new or removed StreamParam, OnLocalSenderSeen or + // OnLocalSenderRemoved is invoked. + void UpdateLocalSenders(const std::vector& streams, + cricket::MediaType media_type); + + // Makes sure a MediaStreamTrack is created for each StreamParam in |streams|, + // and existing MediaStreamTracks are removed if there is no corresponding + // StreamParam. If |default_track_needed| is true, a default MediaStreamTrack + // is created if it doesn't exist; if false, it's removed if it exists. + // |media_type| is the type of the |streams| and can be either audio or video. + // If a new MediaStream is created it is added to |new_streams|. + void UpdateRemoteSendersList( + const std::vector& streams, + bool default_track_needed, + cricket::MediaType media_type, + StreamCollection* new_streams); + + // Enables media channels to allow sending of media. + // This enables media to flow on all configured audio/video channels and the + // RtpDataChannel. + void EnableSending(); + // Push the media parts of the local or remote session description + // down to all of the channels. + RTCError PushdownMediaDescription(SdpType type, + cricket::ContentSource source); + + RTCError PushdownTransportDescription(cricket::ContentSource source, + SdpType type); + // Helper function to remove stopped transceivers. + void RemoveStoppedTransceivers(); + // Deletes the corresponding channel of contents that don't exist in |desc|. 
+ // |desc| can be null. This means that all channels are deleted. + void RemoveUnusedChannels(const cricket::SessionDescription* desc); + + // Report inferred negotiated SDP semantics from a local/remote answer to the + // UMA observer. + void ReportNegotiatedSdpSemantics(const SessionDescriptionInterface& answer); + + // Finds remote MediaStreams without any tracks and removes them from + // |remote_streams_| and notifies the observer that the MediaStreams no longer + // exist. + void UpdateEndedRemoteMediaStreams(); + + // Uses all remote candidates in |remote_desc| in this session. + bool UseCandidatesInSessionDescription( + const SessionDescriptionInterface* remote_desc); + // Uses |candidate| in this session. + bool UseCandidate(const IceCandidateInterface* candidate); + // Returns true if we are ready to push down the remote candidate. + // |remote_desc| is the new remote description, or NULL if the current remote + // description should be used. Output |valid| is true if the candidate media + // index is valid. + bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate, + const SessionDescriptionInterface* remote_desc, + bool* valid); + void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate) + RTC_RUN_ON(signaling_thread()); + + RTCErrorOr FindContentInfo( + const SessionDescriptionInterface* description, + const IceCandidateInterface* candidate) RTC_RUN_ON(signaling_thread()); + + // Functions for dealing with transports. + // Note that cricket code uses the term "channel" for what other code + // refers to as "transport". + + // Allocates media channels based on the |desc|. If |desc| doesn't have + // the BUNDLE option, this method will disable BUNDLE in PortAllocator. + // This method will also delete any existing media channels before creating. + RTCError CreateChannels(const cricket::SessionDescription& desc); + + // Helper methods to create media channels. 
+ cricket::VoiceChannel* CreateVoiceChannel(const std::string& mid); + cricket::VideoChannel* CreateVideoChannel(const std::string& mid); + bool CreateDataChannel(const std::string& mid); + + // Destroys and clears the BaseChannel associated with the given transceiver, + // if such channel is set. + void DestroyTransceiverChannel( + rtc::scoped_refptr> + transceiver); + + // Destroys the RTP data channel transport and/or the SCTP data channel + // transport and clears it. + void DestroyDataChannelTransport(); + + // Destroys the given ChannelInterface. + // The channel cannot be accessed after this method is called. + void DestroyChannelInterface(cricket::ChannelInterface* channel); + // Generates MediaDescriptionOptions for the |session_opts| based on existing + // local description or remote description. + + void GenerateMediaDescriptionOptions( + const SessionDescriptionInterface* session_desc, + RtpTransceiverDirection audio_direction, + RtpTransceiverDirection video_direction, + absl::optional* audio_index, + absl::optional* video_index, + absl::optional* data_index, + cricket::MediaSessionOptions* session_options); + + // Generates the active MediaDescriptionOptions for the local data channel + // given the specified MID. + cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForActiveData( + const std::string& mid) const; + + // Generates the rejected MediaDescriptionOptions for the local data channel + // given the specified MID. + cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData( + const std::string& mid) const; + + const std::string GetTransportName(const std::string& content_name); + // Based on number of transceivers per media type, enabled or disable + // payload type based demuxing in the affected channels. 
+ bool UpdatePayloadTypeDemuxingState(cricket::ContentSource source); + + // ================================================================== + // Access to pc_ variables + cricket::ChannelManager* channel_manager() const; + TransceiverList* transceivers(); + const TransceiverList* transceivers() const; + DataChannelController* data_channel_controller(); + const DataChannelController* data_channel_controller() const; + cricket::PortAllocator* port_allocator(); + const cricket::PortAllocator* port_allocator() const; + RtpTransmissionManager* rtp_manager(); + const RtpTransmissionManager* rtp_manager() const; + JsepTransportController* transport_controller(); + const JsepTransportController* transport_controller() const; + // =================================================================== + const cricket::AudioOptions& audio_options() { return audio_options_; } + const cricket::VideoOptions& video_options() { return video_options_; } + + PeerConnection* const pc_; + + std::unique_ptr webrtc_session_desc_factory_ + RTC_GUARDED_BY(signaling_thread()); + + std::unique_ptr current_local_description_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr pending_local_description_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr current_remote_description_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr pending_remote_description_ + RTC_GUARDED_BY(signaling_thread()); + + PeerConnectionInterface::SignalingState signaling_state_ + RTC_GUARDED_BY(signaling_thread()) = PeerConnectionInterface::kStable; + + // Whether this peer is the caller. Set when the local description is applied. + absl::optional is_caller_ RTC_GUARDED_BY(signaling_thread()); + + // Streams added via AddStream. + const rtc::scoped_refptr local_streams_ + RTC_GUARDED_BY(signaling_thread()); + // Streams created as a result of SetRemoteDescription. 
+ const rtc::scoped_refptr remote_streams_ + RTC_GUARDED_BY(signaling_thread()); + + std::vector> stream_observers_ + RTC_GUARDED_BY(signaling_thread()); + + // The operations chain is used by the offer/answer exchange methods to ensure + // they are executed in the right order. For example, if + // SetRemoteDescription() is invoked while CreateOffer() is still pending, the + // SRD operation will not start until CreateOffer() has completed. See + // https://w3c.github.io/webrtc-pc/#dfn-operations-chain. + rtc::scoped_refptr operations_chain_ + RTC_GUARDED_BY(signaling_thread()); + + // One PeerConnection has only one RTCP CNAME. + // https://tools.ietf.org/html/draft-ietf-rtcweb-rtp-usage-26#section-4.9 + const std::string rtcp_cname_; + + // MIDs will be generated using this generator which will keep track of + // all the MIDs that have been seen over the life of the PeerConnection. + rtc::UniqueStringGenerator mid_generator_ RTC_GUARDED_BY(signaling_thread()); + + // List of content names for which the remote side triggered an ICE restart. + std::set pending_ice_restarts_ + RTC_GUARDED_BY(signaling_thread()); + + std::unique_ptr + local_ice_credentials_to_replace_ RTC_GUARDED_BY(signaling_thread()); + + bool remote_peer_supports_msid_ RTC_GUARDED_BY(signaling_thread()) = false; + bool is_negotiation_needed_ RTC_GUARDED_BY(signaling_thread()) = false; + uint32_t negotiation_needed_event_id_ = 0; + bool update_negotiation_needed_on_empty_chain_ + RTC_GUARDED_BY(signaling_thread()) = false; + + // In Unified Plan, if we encounter remote SDP that does not contain an a=msid + // line we create and use a stream with a random ID for our receivers. This is + // to support legacy endpoints that do not support the a=msid attribute (as + // opposed to streamless tracks with "a=msid:-"). + rtc::scoped_refptr missing_msid_default_stream_ + RTC_GUARDED_BY(signaling_thread()); + + // Used when rolling back RTP data channels. 
+ bool have_pending_rtp_data_channel_ RTC_GUARDED_BY(signaling_thread()) = + false; + + // Updates the error state, signaling if necessary. + void SetSessionError(SessionError error, const std::string& error_desc); + + SessionError session_error_ RTC_GUARDED_BY(signaling_thread()) = + SessionError::kNone; + std::string session_error_desc_ RTC_GUARDED_BY(signaling_thread()); + + // Member variables for caching global options. + cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); + cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); + + // This object should be used to generate any SSRC that is not explicitly + // specified by the user (or by the remote party). + // The generator is not used directly, instead it is passed on to the + // channel manager and the session description factory. + rtc::UniqueRandomIdGenerator ssrc_generator_ + RTC_GUARDED_BY(signaling_thread()); + + // A video bitrate allocator factory. + // This can be injected using the PeerConnectionDependencies, + // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called. + // Note that one can still choose to override this in a MediaEngine + // if one wants too. + std::unique_ptr + video_bitrate_allocator_factory_; + + rtc::WeakPtrFactory weak_ptr_factory_ + RTC_GUARDED_BY(signaling_thread()); +}; + +} // namespace webrtc + +#endif // PC_SDP_OFFER_ANSWER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_state_provider.h b/TMessagesProj/jni/voip/webrtc/pc/sdp_state_provider.h new file mode 100644 index 000000000..23ffc91bd --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_state_provider.h @@ -0,0 +1,54 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_SDP_STATE_PROVIDER_H_ +#define PC_SDP_STATE_PROVIDER_H_ + +#include + +#include "api/jsep.h" +#include "api/peer_connection_interface.h" + +namespace webrtc { + +// This interface provides access to the state of an SDP offer/answer +// negotiation. +// +// All the functions are const, so using this interface serves as +// assurance that the user is not modifying the state. +class SdpStateProvider { + public: + virtual ~SdpStateProvider() {} + + virtual PeerConnectionInterface::SignalingState signaling_state() const = 0; + + virtual const SessionDescriptionInterface* local_description() const = 0; + virtual const SessionDescriptionInterface* remote_description() const = 0; + virtual const SessionDescriptionInterface* current_local_description() + const = 0; + virtual const SessionDescriptionInterface* current_remote_description() + const = 0; + virtual const SessionDescriptionInterface* pending_local_description() + const = 0; + virtual const SessionDescriptionInterface* pending_remote_description() + const = 0; + + // Whether an ICE restart has been asked for. Used in CreateOffer. + virtual bool NeedsIceRestart(const std::string& content_name) const = 0; + // Whether an ICE restart was indicated in the remote offer. + // Used in CreateAnswer. 
+ virtual bool IceRestartPending(const std::string& content_name) const = 0; + virtual absl::optional GetDtlsRole( + const std::string& mid) const = 0; +}; + +} // namespace webrtc + +#endif // PC_SDP_STATE_PROVIDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/session_description.h b/TMessagesProj/jni/voip/webrtc/pc/session_description.h index 53c981a34..52a3a1fe0 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/session_description.h +++ b/TMessagesProj/jni/voip/webrtc/pc/session_description.h @@ -58,6 +58,7 @@ class AudioContentDescription; class VideoContentDescription; class RtpDataContentDescription; class SctpDataContentDescription; +class UnsupportedContentDescription; // Describes a session description media section. There are subclasses for each // media type (audio, video, data) that will have additional information. @@ -86,6 +87,11 @@ class MediaContentDescription { virtual SctpDataContentDescription* as_sctp() { return nullptr; } virtual const SctpDataContentDescription* as_sctp() const { return nullptr; } + virtual UnsupportedContentDescription* as_unsupported() { return nullptr; } + virtual const UnsupportedContentDescription* as_unsupported() const { + return nullptr; + } + virtual bool has_codecs() const = 0; // Copy operator that returns an unique_ptr. 
@@ -406,13 +412,37 @@ class SctpDataContentDescription : public MediaContentDescription { int max_message_size_ = 64 * 1024; }; +class UnsupportedContentDescription : public MediaContentDescription { + public: + explicit UnsupportedContentDescription(const std::string& media_type) + : media_type_(media_type) {} + MediaType type() const override { return MEDIA_TYPE_UNSUPPORTED; } + + UnsupportedContentDescription* as_unsupported() override { return this; } + const UnsupportedContentDescription* as_unsupported() const override { + return this; + } + + bool has_codecs() const override { return false; } + const std::string& media_type() const { return media_type_; } + + private: + UnsupportedContentDescription* CloneInternal() const override { + return new UnsupportedContentDescription(*this); + } + + std::string media_type_; +}; + // Protocol used for encoding media. This is the "top level" protocol that may // be wrapped by zero or many transport protocols (UDP, ICE, etc.). enum class MediaProtocolType { - kRtp, // Section will use the RTP protocol (e.g., for audio or video). - // https://tools.ietf.org/html/rfc3550 - kSctp // Section will use the SCTP protocol (e.g., for a data channel). - // https://tools.ietf.org/html/rfc4960 + kRtp, // Section will use the RTP protocol (e.g., for audio or video). + // https://tools.ietf.org/html/rfc3550 + kSctp, // Section will use the SCTP protocol (e.g., for a data channel). + // https://tools.ietf.org/html/rfc4960 + kOther // Section will use another top protocol which is not + // explicitly supported. }; // Represents a session description section. 
Most information about the section diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc index 73d4510fa..991cc4eb2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc @@ -16,7 +16,6 @@ #include #include "pc/channel.h" -#include "pc/peer_connection.h" #include "rtc_base/checks.h" #include "rtc_base/third_party/base64/base64.h" #include "system_wrappers/include/field_trial.h" @@ -991,7 +990,8 @@ class VoiceMediaChannelStatsGatherer final : public MediaChannelStatsGatherer { } bool GetStatsOnWorkerThread() override { - return voice_media_channel_->GetStats(&voice_media_info); + return voice_media_channel_->GetStats(&voice_media_info, + /*get_and_clear_legacy_stats=*/true); } void ExtractStats(StatsCollector* collector) const override { diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h index 041fe2f8f..befbcabbf 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h +++ b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h @@ -27,6 +27,7 @@ #include "api/stats_types.h" #include "p2p/base/port.h" #include "pc/peer_connection_internal.h" +#include "pc/stats_collector_interface.h" #include "rtc_base/network_constants.h" #include "rtc_base/ssl_certificate.h" @@ -44,7 +45,7 @@ const char* AdapterTypeToStatsType(rtc::AdapterType type); // A mapping between track ids and their StatsReport. typedef std::map TrackIdMap; -class StatsCollector { +class StatsCollector : public StatsCollectorInterface { public: // The caller is responsible for ensuring that the pc outlives the // StatsCollector instance. @@ -57,11 +58,13 @@ class StatsCollector { void AddTrack(MediaStreamTrackInterface* track); // Adds a local audio track that is used for getting some voice statistics. 
- void AddLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc); + void AddLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) override; // Removes a local audio tracks that is used for getting some voice // statistics. - void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc); + void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) override; // Gather statistics from the session and store them for future use. void UpdateStats(PeerConnectionInterface::StatsOutputLevel level); @@ -74,7 +77,8 @@ class StatsCollector { // of filling in |reports|. As is, there's a requirement that the caller // uses |reports| immediately without allowing any async activity on // the thread (message handling etc) and then discard the results. - void GetStats(MediaStreamTrackInterface* track, StatsReports* reports); + void GetStats(MediaStreamTrackInterface* track, + StatsReports* reports) override; // Prepare a local or remote SSRC report for the given ssrc. Used internally // in the ExtractStatsFromList template. diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector_interface.h b/TMessagesProj/jni/voip/webrtc/pc/stats_collector_interface.h new file mode 100644 index 000000000..4d5c98a4a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/stats_collector_interface.h @@ -0,0 +1,43 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains an interface for the (obsolete) StatsCollector class that +// is used by compilation units that do not wish to depend on the StatsCollector +// implementation. 
+ +#ifndef PC_STATS_COLLECTOR_INTERFACE_H_ +#define PC_STATS_COLLECTOR_INTERFACE_H_ + +#include + +#include "api/media_stream_interface.h" +#include "api/stats_types.h" + +namespace webrtc { + +class StatsCollectorInterface { + public: + virtual ~StatsCollectorInterface() {} + + // Adds a local audio track that is used for getting some voice statistics. + virtual void AddLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) = 0; + + // Removes a local audio tracks that is used for getting some voice + // statistics. + virtual void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) = 0; + virtual void GetStats(MediaStreamTrackInterface* track, + StatsReports* reports) = 0; +}; + +} // namespace webrtc + +#endif // PC_STATS_COLLECTOR_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc new file mode 100644 index 000000000..5fe148a22 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc @@ -0,0 +1,67 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/transceiver_list.h" + +namespace webrtc { + +void TransceiverStableState::set_newly_created() { + RTC_DCHECK(!has_m_section_); + newly_created_ = true; +} + +void TransceiverStableState::SetMSectionIfUnset( + absl::optional mid, + absl::optional mline_index) { + if (!has_m_section_) { + mid_ = mid; + mline_index_ = mline_index; + has_m_section_ = true; + } +} + +void TransceiverStableState::SetRemoteStreamIdsIfUnset( + const std::vector& ids) { + if (!remote_stream_ids_.has_value()) { + remote_stream_ids_ = ids; + } +} + +RtpTransceiverProxyRefPtr TransceiverList::FindBySender( + rtc::scoped_refptr sender) const { + for (auto transceiver : transceivers_) { + if (transceiver->sender() == sender) { + return transceiver; + } + } + return nullptr; +} + +RtpTransceiverProxyRefPtr TransceiverList::FindByMid( + const std::string& mid) const { + for (auto transceiver : transceivers_) { + if (transceiver->mid() == mid) { + return transceiver; + } + } + return nullptr; +} + +RtpTransceiverProxyRefPtr TransceiverList::FindByMLineIndex( + size_t mline_index) const { + for (auto transceiver : transceivers_) { + if (transceiver->internal()->mline_index() == mline_index) { + return transceiver; + } + } + return nullptr; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h new file mode 100644 index 000000000..cd77d67f4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h @@ -0,0 +1,100 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef PC_TRANSCEIVER_LIST_H_ +#define PC_TRANSCEIVER_LIST_H_ + +#include +#include +#include +#include + +#include "pc/rtp_transceiver.h" + +namespace webrtc { + +typedef rtc::scoped_refptr> + RtpTransceiverProxyRefPtr; + +// Captures partial state to be used for rollback. Applicable only in +// Unified Plan. +class TransceiverStableState { + public: + TransceiverStableState() {} + void set_newly_created(); + void SetMSectionIfUnset(absl::optional mid, + absl::optional mline_index); + void SetRemoteStreamIdsIfUnset(const std::vector& ids); + absl::optional mid() const { return mid_; } + absl::optional mline_index() const { return mline_index_; } + absl::optional> remote_stream_ids() const { + return remote_stream_ids_; + } + bool has_m_section() const { return has_m_section_; } + bool newly_created() const { return newly_created_; } + + private: + absl::optional mid_; + absl::optional mline_index_; + absl::optional> remote_stream_ids_; + // Indicates that mid value from stable state has been captured and + // that rollback has to restore the transceiver. Also protects against + // subsequent overwrites. + bool has_m_section_ = false; + // Indicates that the transceiver was created as part of applying a + // description to track potential need for removing transceiver during + // rollback. + bool newly_created_ = false; +}; + +class TransceiverList { + public: + std::vector List() const { return transceivers_; } + + void Add(RtpTransceiverProxyRefPtr transceiver) { + transceivers_.push_back(transceiver); + } + void Remove(RtpTransceiverProxyRefPtr transceiver) { + transceivers_.erase( + std::remove(transceivers_.begin(), transceivers_.end(), transceiver), + transceivers_.end()); + } + RtpTransceiverProxyRefPtr FindBySender( + rtc::scoped_refptr sender) const; + RtpTransceiverProxyRefPtr FindByMid(const std::string& mid) const; + RtpTransceiverProxyRefPtr FindByMLineIndex(size_t mline_index) const; + + // Find or create the stable state for a transceiver. 
+ TransceiverStableState* StableState(RtpTransceiverProxyRefPtr transceiver) { + return &(transceiver_stable_states_by_transceivers_[transceiver]); + } + + void DiscardStableStates() { + transceiver_stable_states_by_transceivers_.clear(); + } + + std::map& StableStates() { + return transceiver_stable_states_by_transceivers_; + } + + private: + std::vector transceivers_; + // Holds changes made to transceivers during applying descriptors for + // potential rollback. Gets cleared once signaling state goes to stable. + std::map + transceiver_stable_states_by_transceivers_; + // Holds remote stream ids for transceivers from stable state. + std::map> + remote_stream_ids_by_transceivers_; +}; + +} // namespace webrtc + +#endif // PC_TRANSCEIVER_LIST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.cc b/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.cc new file mode 100644 index 000000000..848472148 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.cc @@ -0,0 +1,49 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/usage_pattern.h" + +#include "api/peer_connection_interface.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +void UsagePattern::NoteUsageEvent(UsageEvent event) { + usage_event_accumulator_ |= static_cast(event); +} + +void UsagePattern::ReportUsagePattern(PeerConnectionObserver* observer) const { + RTC_DLOG(LS_INFO) << "Usage signature is " << usage_event_accumulator_; + RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.PeerConnection.UsagePattern", + usage_event_accumulator_, + static_cast(UsageEvent::MAX_VALUE)); + const int bad_bits = + static_cast(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED) | + static_cast(UsageEvent::CANDIDATE_COLLECTED); + const int good_bits = + static_cast(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED) | + static_cast(UsageEvent::REMOTE_CANDIDATE_ADDED) | + static_cast(UsageEvent::ICE_STATE_CONNECTED); + if ((usage_event_accumulator_ & bad_bits) == bad_bits && + (usage_event_accumulator_ & good_bits) == 0) { + // If called after close(), we can't report, because observer may have + // been deallocated, and therefore pointer is null. Write to log instead. + if (observer) { + observer->OnInterestingUsage(usage_event_accumulator_); + } else { + RTC_LOG(LS_INFO) << "Interesting usage signature " + << usage_event_accumulator_ + << " observed after observer shutdown"; + } + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.h b/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.h new file mode 100644 index 000000000..c4a8918ac --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.h @@ -0,0 +1,75 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_USAGE_PATTERN_H_ +#define PC_USAGE_PATTERN_H_ + +namespace webrtc { + +class PeerConnectionObserver; + +// A bit in the usage pattern is registered when its defining event occurs +// at least once. +enum class UsageEvent : int { + TURN_SERVER_ADDED = 0x01, + STUN_SERVER_ADDED = 0x02, + DATA_ADDED = 0x04, + AUDIO_ADDED = 0x08, + VIDEO_ADDED = 0x10, + // |SetLocalDescription| returns successfully. + SET_LOCAL_DESCRIPTION_SUCCEEDED = 0x20, + // |SetRemoteDescription| returns successfully. + SET_REMOTE_DESCRIPTION_SUCCEEDED = 0x40, + // A local candidate (with type host, server-reflexive, or relay) is + // collected. + CANDIDATE_COLLECTED = 0x80, + // A remote candidate is successfully added via |AddIceCandidate|. + ADD_ICE_CANDIDATE_SUCCEEDED = 0x100, + ICE_STATE_CONNECTED = 0x200, + CLOSE_CALLED = 0x400, + // A local candidate with private IP is collected. + PRIVATE_CANDIDATE_COLLECTED = 0x800, + // A remote candidate with private IP is added, either via AddiceCandidate + // or from the remote description. + REMOTE_PRIVATE_CANDIDATE_ADDED = 0x1000, + // A local mDNS candidate is collected. + MDNS_CANDIDATE_COLLECTED = 0x2000, + // A remote mDNS candidate is added, either via AddIceCandidate or from the + // remote description. + REMOTE_MDNS_CANDIDATE_ADDED = 0x4000, + // A local candidate with IPv6 address is collected. + IPV6_CANDIDATE_COLLECTED = 0x8000, + // A remote candidate with IPv6 address is added, either via AddIceCandidate + // or from the remote description. + REMOTE_IPV6_CANDIDATE_ADDED = 0x10000, + // A remote candidate (with type host, server-reflexive, or relay) is + // successfully added, either via AddIceCandidate or from the remote + // description. + REMOTE_CANDIDATE_ADDED = 0x20000, + // An explicit host-host candidate pair is selected, i.e. both the local and + // the remote candidates have the host type. 
This does not include candidate + // pairs formed with equivalent prflx remote candidates, e.g. a host-prflx + // pair where the prflx candidate has the same base as a host candidate of + // the remote peer. + DIRECT_CONNECTION_SELECTED = 0x40000, + MAX_VALUE = 0x80000, +}; + +class UsagePattern { + public: + void NoteUsageEvent(UsageEvent event); + void ReportUsagePattern(PeerConnectionObserver* observer) const; + + private: + int usage_event_accumulator_ = 0; +}; + +} // namespace webrtc +#endif // PC_USAGE_PATTERN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc index f093bf4b3..dd601259e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc @@ -16,7 +16,6 @@ #include #include "api/media_stream_proxy.h" -#include "api/media_stream_track_proxy.h" #include "api/video_track_source_proxy.h" #include "pc/jitter_buffer_delay.h" #include "pc/jitter_buffer_delay_proxy.h" @@ -43,7 +42,7 @@ VideoRtpReceiver::VideoRtpReceiver( : worker_thread_(worker_thread), id_(receiver_id), source_(new RefCountedObject(this)), - track_(VideoTrackProxy::Create( + track_(VideoTrackProxyWithInternal::Create( rtc::Thread::Current(), worker_thread, VideoTrack::Create( @@ -136,6 +135,11 @@ void VideoRtpReceiver::Stop() { stopped_ = true; } +void VideoRtpReceiver::StopAndEndTrack() { + Stop(); + track_->internal()->set_ended(); +} + void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK(media_channel_); if (!stopped_ && ssrc_ == ssrc) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h index f66a8a789..74ae44431 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h @@ -20,6 +20,7 @@ #include "api/crypto/frame_decryptor_interface.h" #include "api/frame_transformer_interface.h" 
#include "api/media_stream_interface.h" +#include "api/media_stream_track_proxy.h" #include "api/media_types.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" @@ -31,6 +32,7 @@ #include "pc/jitter_buffer_delay_interface.h" #include "pc/rtp_receiver.h" #include "pc/video_rtp_track_source.h" +#include "pc/video_track.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/thread.h" @@ -89,6 +91,7 @@ class VideoRtpReceiver : public rtc::RefCountedObject, // RtpReceiverInternal implementation. void Stop() override; + void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override { return ssrc_.value_or(0); } @@ -130,7 +133,7 @@ class VideoRtpReceiver : public rtc::RefCountedObject, // |source_| is held here to be able to change the state of the source when // the VideoRtpReceiver is stopped. rtc::scoped_refptr source_; - rtc::scoped_refptr track_; + rtc::scoped_refptr> track_; std::vector> streams_; bool stopped_ = true; RtpReceiverObserverInterface* observer_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track.h b/TMessagesProj/jni/voip/webrtc/pc/video_track.h index 90e0758a6..b7835dee2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track.h @@ -14,12 +14,12 @@ #include #include "api/media_stream_interface.h" +#include "api/media_stream_track.h" #include "api/scoped_refptr.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/video_source_base.h" -#include "pc/media_stream_track.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc index 4af121ddc..9643dcc16 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc 
+++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc @@ -75,6 +75,7 @@ using cricket::StreamParams; using cricket::StreamParamsVec; using cricket::TransportDescription; using cricket::TransportInfo; +using cricket::UnsupportedContentDescription; using cricket::VideoContentDescription; using rtc::SocketAddress; @@ -276,9 +277,6 @@ static bool ParseSessionDescription(const std::string& message, rtc::SocketAddress* connection_addr, cricket::SessionDescription* desc, SdpParseError* error); -static bool ParseGroupAttribute(const std::string& line, - cricket::SessionDescription* desc, - SdpParseError* error); static bool ParseMediaDescription( const std::string& message, const TransportDescription& session_td, @@ -302,6 +300,9 @@ static bool ParseContent( TransportDescription* transport, std::vector>* candidates, SdpParseError* error); +static bool ParseGroupAttribute(const std::string& line, + cricket::SessionDescription* desc, + SdpParseError* error); static bool ParseSsrcAttribute(const std::string& line, SsrcInfoVec* ssrc_infos, int* msid_signaling, @@ -1346,30 +1347,24 @@ void BuildMediaDescription(const ContentInfo* content_info, // RFC 4566 // m= // fmt is a list of payload type numbers that MAY be used in the session. 
- const char* type = NULL; - if (media_type == cricket::MEDIA_TYPE_AUDIO) - type = kMediaTypeAudio; - else if (media_type == cricket::MEDIA_TYPE_VIDEO) - type = kMediaTypeVideo; - else if (media_type == cricket::MEDIA_TYPE_DATA) - type = kMediaTypeData; - else - RTC_NOTREACHED(); - + std::string type; std::string fmt; if (media_type == cricket::MEDIA_TYPE_VIDEO) { + type = kMediaTypeVideo; const VideoContentDescription* video_desc = media_desc->as_video(); for (const cricket::VideoCodec& codec : video_desc->codecs()) { fmt.append(" "); fmt.append(rtc::ToString(codec.id)); } } else if (media_type == cricket::MEDIA_TYPE_AUDIO) { + type = kMediaTypeAudio; const AudioContentDescription* audio_desc = media_desc->as_audio(); for (const cricket::AudioCodec& codec : audio_desc->codecs()) { fmt.append(" "); fmt.append(rtc::ToString(codec.id)); } } else if (media_type == cricket::MEDIA_TYPE_DATA) { + type = kMediaTypeData; const cricket::SctpDataContentDescription* sctp_data_desc = media_desc->as_sctp(); if (sctp_data_desc) { @@ -1388,6 +1383,12 @@ void BuildMediaDescription(const ContentInfo* content_info, fmt.append(rtc::ToString(codec.id)); } } + } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + const UnsupportedContentDescription* unsupported_desc = + media_desc->as_unsupported(); + type = unsupported_desc->media_type(); + } else { + RTC_NOTREACHED(); } // The fmt must never be empty. If no codecs are found, set the fmt attribute // to 0. 
@@ -2711,7 +2712,17 @@ bool ParseMediaDescription( } } else { RTC_LOG(LS_WARNING) << "Unsupported media type: " << line; - continue; + auto unsupported_desc = + std::make_unique(media_type); + if (!ParseContent(message, cricket::MEDIA_TYPE_UNSUPPORTED, mline_index, + protocol, payload_types, pos, &content_name, + &bundle_only, §ion_msid_signaling, + unsupported_desc.get(), &transport, candidates, + error)) { + return false; + } + unsupported_desc->set_protocol(protocol); + content = std::move(unsupported_desc); } if (!content.get()) { // ParseContentDescription returns NULL if failed. @@ -2739,7 +2750,9 @@ bool ParseMediaDescription( content_rejected = port_rejected; } - if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) { + if (content->as_unsupported()) { + content_rejected = true; + } else if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) { content->set_protocol(protocol); // Set the extmap. if (!session_extmaps.empty() && @@ -3031,11 +3044,11 @@ bool ParseContent(const std::string& message, // the first place and provides another way to get around the limitation. if (media_type == cricket::MEDIA_TYPE_DATA && cricket::IsRtpProtocol(protocol) && - (b > cricket::kDataMaxBandwidth / 1000 || + (b > cricket::kRtpDataMaxBandwidth / 1000 || bandwidth_type == kTransportSpecificBandwidth)) { rtc::StringBuilder description; description << "RTP-based data channels may not send more than " - << cricket::kDataMaxBandwidth / 1000 << "kbps."; + << cricket::kRtpDataMaxBandwidth / 1000 << "kbps."; return ParseFailed(line, description.str(), error); } // Convert values. Prevent integer overflow. 
diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc index aaef7fdeb..2a9dc3fbd 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc @@ -11,9 +11,10 @@ #include "pc/webrtc_session_description_factory.h" #include - +#include #include #include +#include #include #include @@ -22,6 +23,7 @@ #include "api/jsep.h" #include "api/jsep_session_description.h" #include "api/rtc_error.h" +#include "pc/sdp_state_provider.h" #include "pc/session_description.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" @@ -125,11 +127,14 @@ void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( rtc::Thread* signaling_thread, cricket::ChannelManager* channel_manager, - PeerConnectionInternal* pc, + const SdpStateProvider* sdp_info, const std::string& session_id, + bool dtls_enabled, std::unique_ptr cert_generator, const rtc::scoped_refptr& certificate, - UniqueRandomIdGenerator* ssrc_generator) + UniqueRandomIdGenerator* ssrc_generator, + std::function&)> + on_certificate_ready) : signaling_thread_(signaling_thread), session_desc_factory_(channel_manager, &transport_desc_factory_, @@ -139,20 +144,21 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( // to just use a random number as session id and start version from // |kInitSessionVersion|. session_version_(kInitSessionVersion), - cert_generator_(std::move(cert_generator)), - pc_(pc), + cert_generator_(dtls_enabled ? 
std::move(cert_generator) : nullptr), + sdp_info_(sdp_info), session_id_(session_id), - certificate_request_state_(CERTIFICATE_NOT_NEEDED) { + certificate_request_state_(CERTIFICATE_NOT_NEEDED), + on_certificate_ready_(on_certificate_ready) { RTC_DCHECK(signaling_thread_); - RTC_DCHECK(!(cert_generator_ && certificate)); - bool dtls_enabled = cert_generator_ || certificate; - // SRTP-SDES is disabled if DTLS is on. - SetSdesPolicy(dtls_enabled ? cricket::SEC_DISABLED : cricket::SEC_REQUIRED); + if (!dtls_enabled) { + SetSdesPolicy(cricket::SEC_REQUIRED); RTC_LOG(LS_VERBOSE) << "DTLS-SRTP disabled."; return; } + // SRTP-SDES is disabled if DTLS is on. + SetSdesPolicy(cricket::SEC_DISABLED); if (certificate) { // Use |certificate|. certificate_request_state_ = CERTIFICATE_WAITING; @@ -252,13 +258,13 @@ void WebRtcSessionDescriptionFactory::CreateAnswer( PostCreateSessionDescriptionFailed(observer, error); return; } - if (!pc_->remote_description()) { + if (!sdp_info_->remote_description()) { error += " can't be called before SetRemoteDescription."; RTC_LOG(LS_ERROR) << error; PostCreateSessionDescriptionFailed(observer, error); return; } - if (pc_->remote_description()->GetType() != SdpType::kOffer) { + if (sdp_info_->remote_description()->GetType() != SdpType::kOffer) { error += " failed because remote_description is not an offer."; RTC_LOG(LS_ERROR) << error; PostCreateSessionDescriptionFailed(observer, error); @@ -325,12 +331,12 @@ void WebRtcSessionDescriptionFactory::OnMessage(rtc::Message* msg) { void WebRtcSessionDescriptionFactory::InternalCreateOffer( CreateSessionDescriptionRequest request) { - if (pc_->local_description()) { + if (sdp_info_->local_description()) { // If the needs-ice-restart flag is set as described by JSEP, we should // generate an offer with a new ufrag/password to trigger an ICE restart. 
for (cricket::MediaDescriptionOptions& options : request.options.media_description_options) { - if (pc_->NeedsIceRestart(options.mid)) { + if (sdp_info_->NeedsIceRestart(options.mid)) { options.transport_options.ice_restart = true; } } @@ -338,8 +344,8 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( std::unique_ptr desc = session_desc_factory_.CreateOffer( - request.options, pc_->local_description() - ? pc_->local_description()->description() + request.options, sdp_info_->local_description() + ? sdp_info_->local_description()->description() : nullptr); if (!desc) { PostCreateSessionDescriptionFailed(request.observer, @@ -360,11 +366,11 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( auto offer = std::make_unique( SdpType::kOffer, std::move(desc), session_id_, rtc::ToString(session_version_++)); - if (pc_->local_description()) { + if (sdp_info_->local_description()) { for (const cricket::MediaDescriptionOptions& options : request.options.media_description_options) { if (!options.transport_options.ice_restart) { - CopyCandidatesFromSessionDescription(pc_->local_description(), + CopyCandidatesFromSessionDescription(sdp_info_->local_description(), options.mid, offer.get()); } } @@ -374,31 +380,34 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( void WebRtcSessionDescriptionFactory::InternalCreateAnswer( CreateSessionDescriptionRequest request) { - if (pc_->remote_description()) { + if (sdp_info_->remote_description()) { for (cricket::MediaDescriptionOptions& options : request.options.media_description_options) { // According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1 // an answer should also contain new ICE ufrag and password if an offer // has been received with new ufrag and password. 
options.transport_options.ice_restart = - pc_->IceRestartPending(options.mid); - // We should pass the current SSL role to the transport description + sdp_info_->IceRestartPending(options.mid); + // We should pass the current DTLS role to the transport description // factory, if there is already an existing ongoing session. - rtc::SSLRole ssl_role; - if (pc_->GetSslRole(options.mid, &ssl_role)) { + absl::optional dtls_role = + sdp_info_->GetDtlsRole(options.mid); + if (dtls_role) { options.transport_options.prefer_passive_role = - (rtc::SSL_SERVER == ssl_role); + (rtc::SSL_SERVER == *dtls_role); } } } std::unique_ptr desc = session_desc_factory_.CreateAnswer( - pc_->remote_description() ? pc_->remote_description()->description() - : nullptr, + sdp_info_->remote_description() + ? sdp_info_->remote_description()->description() + : nullptr, request.options, - pc_->local_description() ? pc_->local_description()->description() - : nullptr); + sdp_info_->local_description() + ? sdp_info_->local_description()->description() + : nullptr); if (!desc) { PostCreateSessionDescriptionFailed(request.observer, "Failed to initialize the answer."); @@ -416,13 +425,13 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer( auto answer = std::make_unique( SdpType::kAnswer, std::move(desc), session_id_, rtc::ToString(session_version_++)); - if (pc_->local_description()) { + if (sdp_info_->local_description()) { // Include all local ICE candidates in the SessionDescription unless // the remote peer has requested an ICE restart. 
for (const cricket::MediaDescriptionOptions& options : request.options.media_description_options) { if (!options.transport_options.ice_restart) { - CopyCandidatesFromSessionDescription(pc_->local_description(), + CopyCandidatesFromSessionDescription(sdp_info_->local_description(), options.mid, answer.get()); } } @@ -481,7 +490,8 @@ void WebRtcSessionDescriptionFactory::SetCertificate( RTC_LOG(LS_VERBOSE) << "Setting new certificate."; certificate_request_state_ = CERTIFICATE_SUCCEEDED; - SignalCertificateReady(certificate); + + on_certificate_ready_(certificate); transport_desc_factory_.set_certificate(certificate); transport_desc_factory_.set_secure(cricket::SEC_ENABLED); diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h index f70b847b4..9256045d6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h @@ -12,7 +12,6 @@ #define PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_ #include - #include #include #include @@ -22,22 +21,23 @@ #include "api/scoped_refptr.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" +#include "pc/channel_manager.h" #include "pc/media_session.h" -#include "pc/peer_connection_internal.h" +#include "pc/sdp_state_provider.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/message_handler.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_message.h" #include "rtc_base/unique_id_generator.h" namespace webrtc { // DTLS certificate request callback class. 
class WebRtcCertificateGeneratorCallback - : public rtc::RTCCertificateGeneratorCallback, - public sigslot::has_slots<> { + : public rtc::RTCCertificateGeneratorCallback { public: // |rtc::RTCCertificateGeneratorCallback| overrides. void OnSuccess( @@ -80,11 +80,14 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, WebRtcSessionDescriptionFactory( rtc::Thread* signaling_thread, cricket::ChannelManager* channel_manager, - PeerConnectionInternal* pc, + const SdpStateProvider* sdp_info, const std::string& session_id, + bool dtls_enabled, std::unique_ptr cert_generator, const rtc::scoped_refptr& certificate, - rtc::UniqueRandomIdGenerator* ssrc_generator); + rtc::UniqueRandomIdGenerator* ssrc_generator, + std::function&)> + on_certificate_ready); virtual ~WebRtcSessionDescriptionFactory(); static void CopyCandidatesFromSessionDescription( @@ -110,9 +113,6 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, session_desc_factory_.set_is_unified_plan(is_unified_plan); } - sigslot::signal1&> - SignalCertificateReady; - // For testing. bool waiting_for_certificate_for_testing() const { return certificate_request_state_ == CERTIFICATE_WAITING; @@ -151,12 +151,13 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, cricket::MediaSessionDescriptionFactory session_desc_factory_; uint64_t session_version_; const std::unique_ptr cert_generator_; - // TODO(jiayl): remove the dependency on peer connection once bug 2264 is - // fixed. 
- PeerConnectionInternal* const pc_; + const SdpStateProvider* sdp_info_; const std::string session_id_; CertificateRequestState certificate_request_state_; + std::function&)> + on_certificate_ready_; + RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h index ed2df1cdc..983e710bc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h @@ -87,7 +87,7 @@ namespace rtc { // destruction. This can be done by starting each chain of invocations on the // same thread on which it will be destroyed, or by using some other // synchronization method. -class AsyncInvoker : public MessageHandler { +class AsyncInvoker : public MessageHandlerAutoCleanup { public: AsyncInvoker(); ~AsyncInvoker() override; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.cc index adad9dda1..7879e933c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.cc @@ -21,23 +21,20 @@ BufferQueue::BufferQueue(size_t capacity, size_t default_size) : capacity_(capacity), default_size_(default_size) {} BufferQueue::~BufferQueue() { - webrtc::MutexLock lock(&mutex_); - - for (Buffer* buffer : queue_) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + for (Buffer* buffer : queue_) delete buffer; - } - for (Buffer* buffer : free_list_) { + for (Buffer* buffer : free_list_) delete buffer; - } } size_t BufferQueue::size() const { - webrtc::MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return queue_.size(); } void BufferQueue::Clear() { - webrtc::MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); while (!queue_.empty()) { free_list_.push_back(queue_.front()); queue_.pop_front(); @@ -45,36 +42,30 @@ void 
BufferQueue::Clear() { } bool BufferQueue::ReadFront(void* buffer, size_t bytes, size_t* bytes_read) { - webrtc::MutexLock lock(&mutex_); - if (queue_.empty()) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (queue_.empty()) return false; - } - bool was_writable = queue_.size() < capacity_; Buffer* packet = queue_.front(); queue_.pop_front(); bytes = std::min(bytes, packet->size()); memcpy(buffer, packet->data(), bytes); - if (bytes_read) { + + if (bytes_read) *bytes_read = bytes; - } + free_list_.push_back(packet); - if (!was_writable) { - NotifyWritableForTest(); - } return true; } bool BufferQueue::WriteBack(const void* buffer, size_t bytes, size_t* bytes_written) { - webrtc::MutexLock lock(&mutex_); - if (queue_.size() == capacity_) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (queue_.size() == capacity_) return false; - } - bool was_readable = !queue_.empty(); Buffer* packet; if (!free_list_.empty()) { packet = free_list_.back(); @@ -84,13 +75,10 @@ bool BufferQueue::WriteBack(const void* buffer, } packet->SetData(static_cast(buffer), bytes); - if (bytes_written) { + if (bytes_written) *bytes_written = bytes; - } + queue_.push_back(packet); - if (!was_readable) { - NotifyReadableForTest(); - } return true; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h index 29d1a5b13..24a9b04dc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h @@ -18,16 +18,16 @@ #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" namespace rtc { -class BufferQueue { +class BufferQueue final { public: // Creates a buffer queue with a given capacity and default buffer size. 
BufferQueue(size_t capacity, size_t default_size); - virtual ~BufferQueue(); + ~BufferQueue(); // Return number of queued buffers. size_t size() const; @@ -44,17 +44,22 @@ class BufferQueue { // Returns true unless no data could be written. bool WriteBack(const void* data, size_t bytes, size_t* bytes_written); - protected: - // These methods are called when the state of the queue changes. - virtual void NotifyReadableForTest() {} - virtual void NotifyWritableForTest() {} + bool is_writable() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return queue_.size() < capacity_; + } + + bool is_readable() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return !queue_.empty(); + } private: - size_t capacity_; - size_t default_size_; - mutable webrtc::Mutex mutex_; - std::deque queue_ RTC_GUARDED_BY(mutex_); - std::vector free_list_ RTC_GUARDED_BY(mutex_); + webrtc::SequenceChecker sequence_checker_; + const size_t capacity_; + const size_t default_size_; + std::deque queue_ RTC_GUARDED_BY(sequence_checker_); + std::vector free_list_ RTC_GUARDED_BY(sequence_checker_); RTC_DISALLOW_COPY_AND_ASSIGN(BufferQueue); }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h.pump b/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h.pump new file mode 100644 index 000000000..dc5fb3ae1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h.pump @@ -0,0 +1,104 @@ +/* + * Copyright 2012 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// To generate callback.h from callback.h.pump, execute: +// ../third_party/googletest/src/googletest/scripts/pump.py callback.h.pump + +// Callbacks are callable object containers. 
They can hold a function pointer +// or a function object and behave like a value type. Internally, data is +// reference-counted, making copies and pass-by-value inexpensive. +// +// Callbacks are typed using template arguments. The format is: +// CallbackN +// where N is the number of arguments supplied to the callable object. +// Callbacks are invoked using operator(), just like a function or a function +// object. Default-constructed callbacks are "empty," and executing an empty +// callback does nothing. A callback can be made empty by assigning it from +// a default-constructed callback. +// +// Callbacks are similar in purpose to std::function (which isn't available on +// all platforms we support) and a lightweight alternative to sigslots. Since +// they effectively hide the type of the object they call, they're useful in +// breaking dependencies between objects that need to interact with one another. +// Notably, they can hold the results of Bind(), std::bind*, etc, without needing +// to know the resulting object type of those calls. +// +// Sigslots, on the other hand, provide a fuller feature set, such as multiple +// subscriptions to a signal, optional thread-safety, and lifetime tracking of +// slots. When these features are needed, choose sigslots. 
+// +// Example: +// int sqr(int x) { return x * x; } +// struct AddK { +// int k; +// int operator()(int x) const { return x + k; } +// } add_k = {5}; +// +// Callback1 my_callback; +// cout << my_callback.empty() << endl; // true +// +// my_callback = Callback1(&sqr); +// cout << my_callback.empty() << endl; // false +// cout << my_callback(3) << endl; // 9 +// +// my_callback = Callback1(add_k); +// cout << my_callback(10) << endl; // 15 +// +// my_callback = Callback1(); +// cout << my_callback.empty() << endl; // true + +#ifndef RTC_BASE_CALLBACK_H_ +#define RTC_BASE_CALLBACK_H_ + +#include "rtc_base/ref_count.h" +#include "rtc_base/ref_counted_object.h" +#include "api/scoped_refptr.h" + +namespace rtc { + +$var n = 5 +$range i 0..n +$for i [[ +$range j 1..i + +template +class Callback$i { + public: + // Default copy operations are appropriate for this class. + Callback$i() {} + template Callback$i(const T& functor) + : helper_(new RefCountedObject< HelperImpl >(functor)) {} + R operator()($for j , [[P$j p$j]]) { + if (empty()) + return R(); + return helper_->Run($for j , [[p$j]]); + } + bool empty() const { return !helper_; } + + private: + struct Helper : RefCountInterface { + virtual ~Helper() {} + virtual R Run($for j , [[P$j p$j]]) = 0; + }; + template struct HelperImpl : Helper { + explicit HelperImpl(const T& functor) : functor_(functor) {} + virtual R Run($for j , [[P$j p$j]]) { + return functor_($for j , [[p$j]]); + } + T functor_; + }; + scoped_refptr helper_; +}; + +]] +} // namespace rtc + +#endif // RTC_BASE_CALLBACK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc new file mode 100644 index 000000000..ac947e225 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc @@ -0,0 +1,48 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/callback_list.h" + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace callback_list_impl { + +CallbackListReceivers::CallbackListReceivers() = default; + +CallbackListReceivers::~CallbackListReceivers() { + RTC_CHECK(!send_in_progress_); +} + +void CallbackListReceivers::Foreach( + rtc::FunctionView fv) { + RTC_CHECK(!send_in_progress_); + send_in_progress_ = true; + for (auto& r : receivers_) { + fv(r); + } + send_in_progress_ = false; +} + +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<1>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<2>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<3>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<4>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::NontrivialUntypedFunctionArgs); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::FunctionPointerUntypedFunctionArgs); + +} // namespace callback_list_impl +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h new file mode 100644 index 000000000..659b838d0 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h @@ -0,0 +1,167 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_CALLBACK_LIST_H_ +#define RTC_BASE_CALLBACK_LIST_H_ + +#include +#include + +#include "api/function_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/assume.h" +#include "rtc_base/system/inline.h" +#include "rtc_base/untyped_function.h" + +namespace webrtc { +namespace callback_list_impl { + +class CallbackListReceivers { + public: + CallbackListReceivers(); + CallbackListReceivers(const CallbackListReceivers&) = delete; + CallbackListReceivers& operator=(const CallbackListReceivers&) = delete; + CallbackListReceivers(CallbackListReceivers&&) = delete; + CallbackListReceivers& operator=(CallbackListReceivers&&) = delete; + ~CallbackListReceivers(); + + template + RTC_NO_INLINE void AddReceiver(UntypedFunctionArgsT args) { + RTC_CHECK(!send_in_progress_); + receivers_.push_back(UntypedFunction::Create(args)); + } + + void Foreach(rtc::FunctionView fv); + + private: + std::vector receivers_; + bool send_in_progress_ = false; +}; + +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<1>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<2>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<3>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<4>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::NontrivialUntypedFunctionArgs); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::FunctionPointerUntypedFunctionArgs); + +} // namespace callback_list_impl + +// A collection of receivers (callable objects) that can be called all at once. +// Optimized for minimal binary size. 
The template arguments dictate what +// signature the callbacks must have; for example, a CallbackList +// will require callbacks with signature void(int, float). +// +// CallbackList is neither copyable nor movable (could easily be made movable if +// necessary). Callbacks must be movable, but need not be copyable. +// +// Usage example: +// +// // Declaration (usually a member variable). +// CallbackList foo_; +// +// // Register callbacks. This can be done zero or more times. The +// // callbacks must accept the arguments types listed in the CallbackList's +// // template argument list, and must return void. +// foo_.AddReceiver([...](int a, float b) {...}); // Lambda. +// foo_.AddReceiver(SomeFunction); // Function pointer. +// +// // Call the zero or more receivers, one after the other. +// foo_.Send(17, 3.14); +// +// Callback lifetime considerations +// -------------------------------- +// +// CallbackList::AddReceiver() takes ownership of the given callback by moving +// it in place. The callback can be any callable object; in particular, it may +// have a nontrivial destructor, which will be run when the CallbackList is +// destroyed. The callback may thus access data via any type of smart pointer, +// expressing e.g. unique, shared, or weak ownership. Of course, if the data is +// guaranteed to outlive the callback, a plain raw pointer can be used. +// +// Take care when trying to have the callback own reference-counted data. The +// CallbackList will keep the callback alive, and the callback will keep its +// data alive, so as usual with reference-counted ownership, keep an eye out for +// cycles! +// +// Thread safety +// ------------- +// +// Like most C++ types, CallbackList is thread compatible: it's not safe to +// access it concurrently from multiple threads, but it can be made safe if it +// is protected by a mutex, for example. +// +// Excercise some care when deciding what mutexes to hold when you call +// CallbackList::Send(). 
In particular, do not hold mutexes that callbacks may +// need to grab. If a larger object has a CallbackList member and a single mutex +// that protects all of its data members, this may e.g. make it necessary to +// protect its CallbackList with a separate mutex; otherwise, there will be a +// deadlock if the callbacks try to access the object. +// +// CallbackList as a class data member +// ----------------------------------- +// +// CallbackList is a normal C++ data type, and should be private when it is a +// data member of a class. For thread safety reasons (see above), it is likely +// best to not have an accessor for the entire CallbackList, and instead only +// allow callers to add callbacks: +// +// template +// void AddFooCallback(F&& callback) { +// // Maybe grab a mutex here? +// foo_callbacks_.AddReceiver(std::forward(callback)); +// } +// +// Removing callbacks +// ------------------ +// +// TODO(kwiberg): The current design doesn’t support removing callbacks, only +// adding them, but removal support can easily be added. +template +class CallbackList { + public: + CallbackList() = default; + CallbackList(const CallbackList&) = delete; + CallbackList& operator=(const CallbackList&) = delete; + CallbackList(CallbackList&&) = delete; + CallbackList& operator=(CallbackList&&) = delete; + + // Adds a new receiver. The receiver (a callable object or a function pointer) + // must be movable, but need not be copyable. Its call signature should be + // `void(ArgT...)`. + template + void AddReceiver(F&& f) { + receivers_.AddReceiver( + UntypedFunction::PrepareArgs(std::forward(f))); + } + + // Calls all receivers with the given arguments. While the Send is in + // progress, no method calls are allowed; specifically, this means that the + // callbacks may not do anything with this CallbackList instance. + template + void Send(ArgU&&... 
args) { + receivers_.Foreach([&](UntypedFunction& f) { + f.Call(std::forward(args)...); + }); + } + + private: + callback_list_impl::CallbackListReceivers receivers_; +}; + +} // namespace webrtc + +#endif // RTC_BASE_CALLBACK_LIST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc index e5fc2ed41..239ea9f0d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc @@ -36,6 +36,21 @@ #include "rtc_base/checks.h" namespace { + +RTC_NORETURN void WriteFatalLogAndAbort(const std::string& output) { + const char* output_c = output.c_str(); +#if defined(WEBRTC_ANDROID) + __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", output_c); +#endif + fflush(stdout); + fprintf(stderr, "%s", output_c); + fflush(stderr); +#if defined(WEBRTC_WIN) + DebugBreak(); +#endif + abort(); +} + #if defined(__GNUC__) __attribute__((__format__(__printf__, 2, 3))) #endif @@ -149,19 +164,7 @@ RTC_NORETURN void FatalLog(const char* file, va_end(args); - const char* output = s.c_str(); - -#if defined(WEBRTC_ANDROID) - __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", output); -#endif - - fflush(stdout); - fprintf(stderr, "%s", output); - fflush(stderr); -#if defined(WEBRTC_WIN) - DebugBreak(); -#endif - abort(); + WriteFatalLogAndAbort(s); } #else // RTC_CHECK_MSG_ENABLED RTC_NORETURN void FatalLog(const char* file, int line) { @@ -174,22 +177,40 @@ RTC_NORETURN void FatalLog(const char* file, int line) { "# Check failed.\n" "# ", file, line, LAST_SYSTEM_ERROR); - const char* output = s.c_str(); - -#if defined(WEBRTC_ANDROID) - __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", output); -#endif - - fflush(stdout); - fprintf(stderr, "%s", output); - fflush(stderr); -#if defined(WEBRTC_WIN) - DebugBreak(); -#endif - abort(); + WriteFatalLogAndAbort(s); } #endif // RTC_CHECK_MSG_ENABLED +#if RTC_DCHECK_IS_ON + +RTC_NORETURN void 
UnreachableCodeReached(const char* file, int line) { + std::string s; + AppendFormat(&s, + "\n\n" + "#\n" + "# Unreachable code reached: %s, line %d\n" + "# last system error: %u\n" + "# ", + file, line, LAST_SYSTEM_ERROR); + WriteFatalLogAndAbort(s); +} + +#else // !RTC_DCHECK_IS_ON + +RTC_NORETURN void UnreachableCodeReached() { + std::string s; + AppendFormat(&s, + "\n\n" + "#\n" + "# Unreachable code reached (file and line unknown)\n" + "# last system error: %u\n" + "# ", + LAST_SYSTEM_ERROR); + WriteFatalLogAndAbort(s); +} + +#endif // !RTC_DCHECK_IS_ON + } // namespace webrtc_checks_impl } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h index 61c074ac8..508de2a57 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h @@ -338,6 +338,22 @@ class FatalLogCall final { const char* message_; }; +#if RTC_DCHECK_IS_ON + +// Be helpful, and include file and line in the RTC_CHECK_NOTREACHED error +// message. +#define RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS __FILE__, __LINE__ +RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(const char* file, int line); + +#else + +// Be mindful of binary size, and don't include file and line in the +// RTC_CHECK_NOTREACHED error message. +#define RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS +RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(); + +#endif + } // namespace webrtc_checks_impl // The actual stream used isn't important. We reference |ignored| in the code @@ -430,6 +446,14 @@ class FatalLogCall final { #define RTC_UNREACHABLE_CODE_HIT false #define RTC_NOTREACHED() RTC_DCHECK(RTC_UNREACHABLE_CODE_HIT) +// Kills the process with an error message. Never returns. Use when you wish to +// assert that a point in the code is never reached. 
+#define RTC_CHECK_NOTREACHED() \ + do { \ + ::rtc::webrtc_checks_impl::UnreachableCodeReached( \ + RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS); \ + } while (0) + // TODO(bugs.webrtc.org/8454): Add an RTC_ prefix or rename differently. #define FATAL() \ ::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/constructor_magic.h b/TMessagesProj/jni/voip/webrtc/rtc_base/constructor_magic.h index e63c2ff62..8d12a7b13 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/constructor_magic.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/constructor_magic.h @@ -11,24 +11,10 @@ #ifndef RTC_BASE_CONSTRUCTOR_MAGIC_H_ #define RTC_BASE_CONSTRUCTOR_MAGIC_H_ -// Put this in the declarations for a class to be unassignable. -#define RTC_DISALLOW_ASSIGN(TypeName) \ - TypeName& operator=(const TypeName&) = delete - // A macro to disallow the copy constructor and operator= functions. This should // be used in the declarations for a class. #define RTC_DISALLOW_COPY_AND_ASSIGN(TypeName) \ TypeName(const TypeName&) = delete; \ - RTC_DISALLOW_ASSIGN(TypeName) - -// A macro to disallow all the implicit constructors, namely the default -// constructor, copy constructor and operator= functions. -// -// This should be used in the declarations for a class that wants to prevent -// anyone from instantiating it. This is especially useful for classes -// containing only static methods. 
-#define RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \ - TypeName() = delete; \ - RTC_DISALLOW_COPY_AND_ASSIGN(TypeName) + TypeName& operator=(const TypeName&) = delete #endif // RTC_BASE_CONSTRUCTOR_MAGIC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.h index 3612f5a1c..10805ad45 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.h @@ -45,7 +45,7 @@ namespace rtc { /////////////////////////////////////////////////////////////////////////////// class DEPRECATED_SignalThread : public sigslot::has_slots<>, - protected MessageHandler { + protected MessageHandlerAutoCleanup { public: DEPRECATED_SignalThread(); @@ -110,14 +110,17 @@ class DEPRECATED_SignalThread : public sigslot::has_slots<>, class Worker : public Thread { public: explicit Worker(DEPRECATED_SignalThread* parent); + + Worker() = delete; + Worker(const Worker&) = delete; + Worker& operator=(const Worker&) = delete; + ~Worker() override; void Run() override; bool IsProcessingMessagesForTesting() override; private: DEPRECATED_SignalThread* parent_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Worker); }; class RTC_SCOPED_LOCKABLE EnterExit { @@ -131,6 +134,11 @@ class DEPRECATED_SignalThread : public sigslot::has_slots<>, RTC_DCHECK_NE(0, t_->refcount_); ++t_->refcount_; } + + EnterExit() = delete; + EnterExit(const EnterExit&) = delete; + EnterExit& operator=(const EnterExit&) = delete; + ~EnterExit() RTC_UNLOCK_FUNCTION() { bool d = (0 == --t_->refcount_); t_->cs_.Leave(); @@ -140,8 +148,6 @@ class DEPRECATED_SignalThread : public sigslot::has_slots<>, private: DEPRECATED_SignalThread* t_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(EnterExit); }; void Run(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc 
b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc index 6d5650acc..0f5332009 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc @@ -12,8 +12,7 @@ #include -#include - +#include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -22,30 +21,18 @@ namespace { constexpr char kFieldTrial[] = "WebRTC-VP8-CpuSpeed-Arm"; constexpr int kMinSetting = -16; constexpr int kMaxSetting = -1; -} // namespace -absl::optional> -CpuSpeedExperiment::GetConfigs() { - if (!webrtc::field_trial::IsEnabled(kFieldTrial)) - return absl::nullopt; - - const std::string group = webrtc::field_trial::FindFullName(kFieldTrial); - if (group.empty()) - return absl::nullopt; - - std::vector configs(3); - if (sscanf(group.c_str(), "Enabled-%d,%d,%d,%d,%d,%d", &(configs[0].pixels), - &(configs[0].cpu_speed), &(configs[1].pixels), - &(configs[1].cpu_speed), &(configs[2].pixels), - &(configs[2].cpu_speed)) != 6) { - RTC_LOG(LS_WARNING) << "Too few parameters provided."; - return absl::nullopt; +std::vector GetValidOrEmpty( + const std::vector& configs) { + if (configs.empty()) { + RTC_LOG(LS_WARNING) << "Unsupported size, value ignored."; + return {}; } for (const auto& config : configs) { if (config.cpu_speed < kMinSetting || config.cpu_speed > kMaxSetting) { RTC_LOG(LS_WARNING) << "Unsupported cpu speed setting, value ignored."; - return absl::nullopt; + return {}; } } @@ -53,20 +40,51 @@ CpuSpeedExperiment::GetConfigs() { if (configs[i].pixels < configs[i - 1].pixels || configs[i].cpu_speed > configs[i - 1].cpu_speed) { RTC_LOG(LS_WARNING) << "Invalid parameter value provided."; - return absl::nullopt; + return {}; } } - return absl::optional>(configs); + return configs; } -int CpuSpeedExperiment::GetValue(int pixels, - const std::vector& configs) { +bool HasLeCores(const std::vector& 
configs) { for (const auto& config : configs) { - if (pixels <= config.pixels) - return config.cpu_speed; + if (config.cpu_speed_le_cores == 0) + return false; } - return kMinSetting; + return true; +} +} // namespace + +CpuSpeedExperiment::CpuSpeedExperiment() : cores_("cores") { + FieldTrialStructList configs( + {FieldTrialStructMember("pixels", [](Config* c) { return &c->pixels; }), + FieldTrialStructMember("cpu_speed", + [](Config* c) { return &c->cpu_speed; }), + FieldTrialStructMember( + "cpu_speed_le_cores", + [](Config* c) { return &c->cpu_speed_le_cores; })}, + {}); + ParseFieldTrial({&configs, &cores_}, field_trial::FindFullName(kFieldTrial)); + + configs_ = GetValidOrEmpty(configs.Get()); +} + +CpuSpeedExperiment::~CpuSpeedExperiment() {} + +absl::optional CpuSpeedExperiment::GetValue(int pixels, + int num_cores) const { + if (configs_.empty()) + return absl::nullopt; + + bool use_le = HasLeCores(configs_) && cores_ && num_cores <= cores_.Value(); + + for (const auto& config : configs_) { + if (pixels <= config.pixels) + return use_le ? absl::optional(config.cpu_speed_le_cores) + : absl::optional(config.cpu_speed); + } + return absl::optional(kMinSetting); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.h index e6c834094..7c7268c55 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.h @@ -15,25 +15,49 @@ #include "absl/types/optional.h" +#include "rtc_base/experiments/field_trial_parser.h" + namespace webrtc { class CpuSpeedExperiment { public: - struct Config { - bool operator==(const Config& o) const { - return pixels == o.pixels && cpu_speed == o.cpu_speed; - } + CpuSpeedExperiment(); + ~CpuSpeedExperiment(); - int pixels; // The video frame size. 
- int cpu_speed; // The |cpu_speed| to be used if the frame size is less - // than or equal to |pixels|. + // Example: + // WebRTC-VP8-CpuSpeed-Arm/pixels:100|200|300,cpu_speed:-1|-2|-3/ + // pixels <= 100 -> cpu speed: -1 + // pixels <= 200 -> cpu speed: -2 + // pixels <= 300 -> cpu speed: -3 + + // WebRTC-VP8-CpuSpeed-Arm/pixels:100|200|300,cpu_speed:-1|-2|-3/, + // cpu_speed_le_cores:-4|-5|-6,cores:3/ + // If |num_cores| > 3 + // pixels <= 100 -> cpu speed: -1 + // pixels <= 200 -> cpu speed: -2 + // pixels <= 300 -> cpu speed: -3 + // else + // pixels <= 100 -> cpu speed: -4 + // pixels <= 200 -> cpu speed: -5 + // pixels <= 300 -> cpu speed: -6 + + struct Config { + int pixels = 0; // The video frame size. + int cpu_speed = 0; // The |cpu_speed| to be used if the frame size is less + // than or equal to |pixels|. + // Optional. + int cpu_speed_le_cores = 0; // Same as |cpu_speed| above but only used if + // |num_cores| <= |cores_|. }; - // Returns the configurations from field trial on success. - static absl::optional> GetConfigs(); + // Gets the cpu speed based on |pixels| and |num_cores|. + absl::optional GetValue(int pixels, int num_cores) const; - // Gets the cpu speed from the |configs| based on |pixels|. - static int GetValue(int pixels, const std::vector& configs); + private: + std::vector configs_; + + // Threshold for when to use |cpu_speed_le_cores|. 
+ FieldTrialOptional cores_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc index c8d83ebe4..d2443b05c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc @@ -21,14 +21,17 @@ const double kMinScaleFactor = 0.01; QualityScalerSettings::QualityScalerSettings( const WebRtcKeyValueConfig* const key_value_config) - : min_frames_("min_frames"), + : sampling_period_ms_("sampling_period_ms"), + average_qp_window_("average_qp_window"), + min_frames_("min_frames"), initial_scale_factor_("initial_scale_factor"), scale_factor_("scale_factor"), initial_bitrate_interval_ms_("initial_bitrate_interval_ms"), initial_bitrate_factor_("initial_bitrate_factor") { ParseFieldTrial( - {&min_frames_, &initial_scale_factor_, &scale_factor_, - &initial_bitrate_interval_ms_, &initial_bitrate_factor_}, + {&sampling_period_ms_, &average_qp_window_, &min_frames_, + &initial_scale_factor_, &scale_factor_, &initial_bitrate_interval_ms_, + &initial_bitrate_factor_}, key_value_config->Lookup("WebRTC-Video-QualityScalerSettings")); } @@ -37,6 +40,22 @@ QualityScalerSettings QualityScalerSettings::ParseFromFieldTrials() { return QualityScalerSettings(&field_trial_config); } +absl::optional QualityScalerSettings::SamplingPeriodMs() const { + if (sampling_period_ms_ && sampling_period_ms_.Value() <= 0) { + RTC_LOG(LS_WARNING) << "Unsupported sampling_period_ms value, ignored."; + return absl::nullopt; + } + return sampling_period_ms_.GetOptional(); +} + +absl::optional QualityScalerSettings::AverageQpWindow() const { + if (average_qp_window_ && average_qp_window_.Value() <= 0) { + RTC_LOG(LS_WARNING) << "Unsupported average_qp_window value, ignored."; + return absl::nullopt; + } + return average_qp_window_.GetOptional(); +} + 
absl::optional QualityScalerSettings::MinFrames() const { if (min_frames_ && min_frames_.Value() < kMinFrames) { RTC_LOG(LS_WARNING) << "Unsupported min_frames value, ignored."; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h index e3b12c54e..b4b6a427a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h @@ -21,6 +21,8 @@ class QualityScalerSettings final { public: static QualityScalerSettings ParseFromFieldTrials(); + absl::optional SamplingPeriodMs() const; + absl::optional AverageQpWindow() const; absl::optional MinFrames() const; absl::optional InitialScaleFactor() const; absl::optional ScaleFactor() const; @@ -31,6 +33,8 @@ class QualityScalerSettings final { explicit QualityScalerSettings( const WebRtcKeyValueConfig* const key_value_config); + FieldTrialOptional sampling_period_ms_; + FieldTrialOptional average_qp_window_; FieldTrialOptional min_frames_; FieldTrialOptional initial_scale_factor_; FieldTrialOptional scale_factor_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc index 71c2eb198..6766db62c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc @@ -75,21 +75,19 @@ constexpr char VideoRateControlConfig::kKey[]; std::unique_ptr VideoRateControlConfig::Parser() { // The empty comments ensures that each pair is on a separate line. 
return StructParametersParser::Create( - "pacing_factor", &pacing_factor, // - "alr_probing", &alr_probing, // - "vp8_qp_max", &vp8_qp_max, // - "vp8_min_pixels", &vp8_min_pixels, // - "trust_vp8", &trust_vp8, // - "trust_vp9", &trust_vp9, // - "video_hysteresis", &video_hysteresis, // - "screenshare_hysteresis", &screenshare_hysteresis, // - "probe_max_allocation", &probe_max_allocation, // - "bitrate_adjuster", &bitrate_adjuster, // - "adjuster_use_headroom", &adjuster_use_headroom, // - "vp8_s0_boost", &vp8_s0_boost, // - "vp8_base_heavy_tl3_alloc", &vp8_base_heavy_tl3_alloc, // - "vp8_dynamic_rate", &vp8_dynamic_rate, // - "vp9_dynamic_rate", &vp9_dynamic_rate); + "pacing_factor", &pacing_factor, // + "alr_probing", &alr_probing, // + "vp8_qp_max", &vp8_qp_max, // + "vp8_min_pixels", &vp8_min_pixels, // + "trust_vp8", &trust_vp8, // + "trust_vp9", &trust_vp9, // + "video_hysteresis", &video_hysteresis, // + "screenshare_hysteresis", &screenshare_hysteresis, // + "probe_max_allocation", &probe_max_allocation, // + "bitrate_adjuster", &bitrate_adjuster, // + "adjuster_use_headroom", &adjuster_use_headroom, // + "vp8_s0_boost", &vp8_s0_boost, // + "vp8_base_heavy_tl3_alloc", &vp8_base_heavy_tl3_alloc); } RateControlSettings::RateControlSettings( @@ -182,18 +180,10 @@ bool RateControlSettings::Vp8BoostBaseLayerQuality() const { return video_config_.vp8_s0_boost; } -bool RateControlSettings::Vp8DynamicRateSettings() const { - return video_config_.vp8_dynamic_rate; -} - bool RateControlSettings::LibvpxVp9TrustedRateController() const { return video_config_.trust_vp9; } -bool RateControlSettings::Vp9DynamicRateSettings() const { - return video_config_.vp9_dynamic_rate; -} - double RateControlSettings::GetSimulcastHysteresisFactor( VideoCodecMode mode) const { if (mode == VideoCodecMode::kScreensharing) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h 
index 6898bf6dd..db7f1cd13 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h @@ -36,18 +36,16 @@ struct VideoRateControlConfig { bool alr_probing = false; absl::optional vp8_qp_max; absl::optional vp8_min_pixels; - bool trust_vp8 = false; - bool trust_vp9 = false; - double video_hysteresis = 1.0; + bool trust_vp8 = true; + bool trust_vp9 = true; + double video_hysteresis = 1.2; // Default to 35% hysteresis for simulcast screenshare. double screenshare_hysteresis = 1.35; bool probe_max_allocation = true; - bool bitrate_adjuster = false; - bool adjuster_use_headroom = false; - bool vp8_s0_boost = true; + bool bitrate_adjuster = true; + bool adjuster_use_headroom = true; + bool vp8_s0_boost = false; bool vp8_base_heavy_tl3_alloc = false; - bool vp8_dynamic_rate = false; - bool vp9_dynamic_rate = false; std::unique_ptr Parser(); }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h index 040b24205..8bd50b69f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h @@ -31,7 +31,8 @@ const int kFakeIPv4NetworkPrefixLength = 24; const int kFakeIPv6NetworkPrefixLength = 64; // Fake network manager that allows us to manually specify the IPs to use. -class FakeNetworkManager : public NetworkManagerBase, public MessageHandler { +class FakeNetworkManager : public NetworkManagerBase, + public MessageHandlerAutoCleanup { public: FakeNetworkManager() {} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.cc new file mode 100644 index 000000000..83ee8075f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.cc @@ -0,0 +1,42 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/gunit.h" + +#include + +#include "absl/strings/match.h" + +::testing::AssertionResult AssertStartsWith(const char* text_expr, + const char* prefix_expr, + absl::string_view text, + absl::string_view prefix) { + if (absl::StartsWith(text, prefix)) { + return ::testing::AssertionSuccess(); + } else { + return ::testing::AssertionFailure() + << text_expr << "\nwhich is\n\"" << text + << "\"\ndoes not start with\n" + << prefix_expr << "\nwhich is\n\"" << prefix << "\""; + } +} + +::testing::AssertionResult AssertStringContains(const char* str_expr, + const char* substr_expr, + const std::string& str, + const std::string& substr) { + if (str.find(substr) != std::string::npos) { + return ::testing::AssertionSuccess(); + } else { + return ::testing::AssertionFailure() + << str_expr << "\nwhich is\n\"" << str << "\"\ndoes not contain\n" + << substr_expr << "\nwhich is\n\"" << substr << "\""; + } +} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.h b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.h new file mode 100644 index 000000000..dedf3ee06 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.h @@ -0,0 +1,168 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_GUNIT_H_ +#define RTC_BASE_GUNIT_H_ + +#include "rtc_base/fake_clock.h" +#include "rtc_base/logging.h" +#include "rtc_base/thread.h" +#include "test/gtest.h" + +// Wait until "ex" is true, or "timeout" expires. +#define WAIT(ex, timeout) \ + for (int64_t start = rtc::SystemTimeMillis(); \ + !(ex) && rtc::SystemTimeMillis() < start + (timeout);) { \ + rtc::Thread::Current()->ProcessMessages(0); \ + rtc::Thread::Current()->SleepMs(1); \ + } + +// This returns the result of the test in res, so that we don't re-evaluate +// the expression in the XXXX_WAIT macros below, since that causes problems +// when the expression is only true the first time you check it. +#define WAIT_(ex, timeout, res) \ + do { \ + int64_t start = rtc::SystemTimeMillis(); \ + res = (ex); \ + while (!res && rtc::SystemTimeMillis() < start + (timeout)) { \ + rtc::Thread::Current()->ProcessMessages(0); \ + rtc::Thread::Current()->SleepMs(1); \ + res = (ex); \ + } \ + } while (0) + +// The typical EXPECT_XXXX and ASSERT_XXXXs, but done until true or a timeout. +// One can add failure message by appending "<< msg". 
+#define EXPECT_TRUE_WAIT(ex, timeout) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + WAIT_(ex, timeout, res); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_TRUE(ex) + +#define EXPECT_EQ_WAIT(v1, v2, timeout) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + WAIT_(v1 == v2, timeout, res); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_EQ(v1, v2) + +#define ASSERT_TRUE_WAIT(ex, timeout) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + WAIT_(ex, timeout, res); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_TRUE(ex) + +#define ASSERT_EQ_WAIT(v1, v2, timeout) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + WAIT_(v1 == v2, timeout, res); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_EQ(v1, v2) + +// Version with a "soft" timeout and a margin. This logs if the timeout is +// exceeded, but it only fails if the expression still isn't true after the +// margin time passes. +#define EXPECT_TRUE_WAIT_MARGIN(ex, timeout, margin) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + WAIT_(ex, timeout, res); \ + if (res) \ + break; \ + RTC_LOG(LS_WARNING) << "Expression " << #ex << " still not true after " \ + << (timeout) << "ms; waiting an additional " << margin \ + << "ms"; \ + WAIT_(ex, margin, res); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_TRUE(ex) + +// Wait until "ex" is true, or "timeout" expires, using fake clock where +// messages are processed every millisecond. +// TODO(pthatcher): Allow tests to control how many milliseconds to advance. 
+#define SIMULATED_WAIT(ex, timeout, clock) \ + for (int64_t start = rtc::TimeMillis(); \ + !(ex) && rtc::TimeMillis() < start + (timeout);) { \ + (clock).AdvanceTime(webrtc::TimeDelta::Millis(1)); \ + } + +// This returns the result of the test in res, so that we don't re-evaluate +// the expression in the XXXX_WAIT macros below, since that causes problems +// when the expression is only true the first time you check it. +#define SIMULATED_WAIT_(ex, timeout, res, clock) \ + do { \ + int64_t start = rtc::TimeMillis(); \ + res = (ex); \ + while (!res && rtc::TimeMillis() < start + (timeout)) { \ + (clock).AdvanceTime(webrtc::TimeDelta::Millis(1)); \ + res = (ex); \ + } \ + } while (0) + +// The typical EXPECT_XXXX, but done until true or a timeout with a fake clock. +#define EXPECT_TRUE_SIMULATED_WAIT(ex, timeout, clock) \ + do { \ + bool res; \ + SIMULATED_WAIT_(ex, timeout, res, clock); \ + if (!res) { \ + EXPECT_TRUE(ex); \ + } \ + } while (0) + +#define EXPECT_EQ_SIMULATED_WAIT(v1, v2, timeout, clock) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + SIMULATED_WAIT_(v1 == v2, timeout, res, clock); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_EQ(v1, v2) + +#define ASSERT_TRUE_SIMULATED_WAIT(ex, timeout, clock) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + SIMULATED_WAIT_(ex, timeout, res, clock); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_TRUE(ex) + +#define ASSERT_EQ_SIMULATED_WAIT(v1, v2, timeout, clock) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (bool res = true) { \ + SIMULATED_WAIT_(v1 == v2, timeout, res, clock); \ + if (!res) \ + goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ + } else \ + GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_EQ(v1, v2) + +// Usage: EXPECT_PRED_FORMAT2(AssertStartsWith, text, "prefix"); 
+testing::AssertionResult AssertStartsWith(const char* text_expr, + const char* prefix_expr, + absl::string_view text, + absl::string_view prefix); + +// Usage: EXPECT_PRED_FORMAT2(AssertStringContains, str, "substring"); +testing::AssertionResult AssertStringContains(const char* str_expr, + const char* substr_expr, + const std::string& str, + const std::string& substr); + +#endif // RTC_BASE_GUNIT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/location.h b/TMessagesProj/jni/voip/webrtc/rtc_base/location.h index ad8f47913..ff1eea95a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/location.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/location.h @@ -13,7 +13,6 @@ #include -#include "rtc_base/stringize_macros.h" #include "rtc_base/system/rtc_export.h" namespace rtc { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc index d07a7e75e..13a5f0259 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc @@ -33,6 +33,7 @@ static const int kMaxLogLineSize = 1024 - 60; #endif // WEBRTC_MAC && !defined(WEBRTC_IOS) || WEBRTC_ANDROID +#include #include #include @@ -110,9 +111,13 @@ LogMessage::LogMessage(const char* file, // Also ensure WallClockStartTime is initialized, so that it matches // LogStartTime. WallClockStartTime(); - print_stream_ << "[" << rtc::LeftPad('0', 3, rtc::ToString(time / 1000)) - << ":" << rtc::LeftPad('0', 3, rtc::ToString(time % 1000)) - << "] "; + // TODO(kwiberg): Switch to absl::StrFormat, if binary size is ok. + char timestamp[50]; // Maximum string length of an int64_t is 20. + int len = + snprintf(timestamp, sizeof(timestamp), "[%03" PRId64 ":%03" PRId64 "]", + time / 1000, time % 1000); + RTC_DCHECK_LT(len, sizeof(timestamp)); + print_stream_ << timestamp; } if (thread_) { @@ -481,11 +486,6 @@ void Log(const LogArgType* fmt, ...) 
{ } } - if (LogMessage::IsNoop(meta.meta.Severity())) { - va_end(args); - return; - } - LogMessage log_message(meta.meta.File(), meta.meta.Line(), meta.meta.Severity(), meta.err_ctx, meta.err); if (tag) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h index 0852c0618..d2607c28b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h @@ -487,6 +487,12 @@ class LogMessage { // |streams_| collection is empty, the LogMessage will be considered a noop // LogMessage. static bool IsNoop(LoggingSeverity severity); + // Version of IsNoop that uses fewer instructions at the call site, since the + // caller doesn't have to pass an argument. + template + RTC_NO_INLINE static bool IsNoop() { + return IsNoop(S); + } #else // Next methods do nothing; no one will call these functions. LogMessage(const char* file, int line, LoggingSeverity sev) {} @@ -525,7 +531,11 @@ class LogMessage { inline static int GetLogToStream(LogSink* stream = nullptr) { return 0; } inline static int GetMinLogSeverity() { return 0; } inline static void ConfigureLogging(const char* params) {} - inline static bool IsNoop(LoggingSeverity severity) { return true; } + static constexpr bool IsNoop(LoggingSeverity severity) { return true; } + template + static constexpr bool IsNoop() { + return IsNoop(S); + } #endif // RTC_LOG_ENABLED() private: @@ -598,16 +608,18 @@ class LogMessage { // Logging Helpers ////////////////////////////////////////////////////////////////////// -#define RTC_LOG_FILE_LINE(sev, file, line) \ - RTC_LOG_ENABLED() && \ - ::rtc::webrtc_logging_impl::LogCall() & \ - ::rtc::webrtc_logging_impl::LogStreamer<>() \ - << ::rtc::webrtc_logging_impl::LogMetadata(file, line, sev) +#define RTC_LOG_FILE_LINE(sev, file, line) \ + ::rtc::webrtc_logging_impl::LogCall() & \ + ::rtc::webrtc_logging_impl::LogStreamer<>() \ + << ::rtc::webrtc_logging_impl::LogMetadata(file, line, 
sev) -#define RTC_LOG(sev) RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) +#define RTC_LOG(sev) \ + !rtc::LogMessage::IsNoop<::rtc::sev>() && \ + RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) // The _V version is for when a variable is passed in. -#define RTC_LOG_V(sev) RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) +#define RTC_LOG_V(sev) \ + !rtc::LogMessage::IsNoop(sev) && RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) // The _F version prefixes the message with the current function name. #if (defined(__GNUC__) && !defined(NDEBUG)) || defined(WANT_PRETTY_LOG_F) @@ -626,11 +638,12 @@ inline bool LogCheckLevel(LoggingSeverity sev) { return (LogMessage::GetMinLogSeverity() <= sev); } -#define RTC_LOG_E(sev, ctx, err) \ - RTC_LOG_ENABLED() && ::rtc::webrtc_logging_impl::LogCall() & \ - ::rtc::webrtc_logging_impl::LogStreamer<>() \ - << ::rtc::webrtc_logging_impl::LogMetadataErr { \ - {__FILE__, __LINE__, ::rtc::sev}, ::rtc::ERRCTX_##ctx, (err) \ +#define RTC_LOG_E(sev, ctx, err) \ + !rtc::LogMessage::IsNoop<::rtc::sev>() && \ + ::rtc::webrtc_logging_impl::LogCall() & \ + ::rtc::webrtc_logging_impl::LogStreamer<>() \ + << ::rtc::webrtc_logging_impl::LogMetadataErr { \ + {__FILE__, __LINE__, ::rtc::sev}, ::rtc::ERRCTX_##ctx, (err) \ } #define RTC_LOG_T(sev) RTC_LOG(sev) << this << ": " @@ -663,11 +676,12 @@ inline const char* AdaptString(const std::string& str) { } } // namespace webrtc_logging_impl -#define RTC_LOG_TAG(sev, tag) \ - RTC_LOG_ENABLED() && ::rtc::webrtc_logging_impl::LogCall() & \ - ::rtc::webrtc_logging_impl::LogStreamer<>() \ - << ::rtc::webrtc_logging_impl::LogMetadataTag { \ - sev, ::rtc::webrtc_logging_impl::AdaptString(tag) \ +#define RTC_LOG_TAG(sev, tag) \ + !rtc::LogMessage::IsNoop(sev) && \ + ::rtc::webrtc_logging_impl::LogCall() & \ + ::rtc::webrtc_logging_impl::LogStreamer<>() \ + << ::rtc::webrtc_logging_impl::LogMetadataTag { \ + sev, ::rtc::webrtc_logging_impl::AdaptString(tag) \ } #else diff --git 
a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.cc index 49e926719..3fbea8dc2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.cc @@ -104,7 +104,7 @@ StreamResult FifoBuffer::Read(void* buffer, // if we were full before, and now we're not, post an event if (!was_writable && copy > 0) { - PostEvent(owner_, SE_WRITE, 0); + PostEvent(SE_WRITE, 0); } } return result; @@ -129,7 +129,7 @@ StreamResult FifoBuffer::Write(const void* buffer, // if we didn't have any data to read before, and now we do, post an event if (!was_readable && copy > 0) { - PostEvent(owner_, SE_READ, 0); + PostEvent(SE_READ, 0); } } return result; @@ -155,7 +155,7 @@ void FifoBuffer::ConsumeReadData(size_t size) { read_position_ = (read_position_ + size) % buffer_length_; data_length_ -= size; if (!was_writable && size > 0) { - PostEvent(owner_, SE_WRITE, 0); + PostEvent(SE_WRITE, 0); } } @@ -185,7 +185,7 @@ void FifoBuffer::ConsumeWriteBuffer(size_t size) { const bool was_readable = (data_length_ > 0); data_length_ += size; if (!was_readable && size > 0) { - PostEvent(owner_, SE_READ, 0); + PostEvent(SE_READ, 0); } } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h index 04c4cbf33..bf2edf6e2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h @@ -15,6 +15,8 @@ #include "rtc_base/stream.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace rtc { @@ -98,6 +100,12 @@ class FifoBuffer final : public StreamInterface { bool GetWriteRemaining(size_t* size) const; private: + void PostEvent(int events, int err) { + owner_->PostTask(webrtc::ToQueuedTask(task_safety_, 
[this, events, err]() { + SignalEvent(this, events, err); + })); + } + // Helper method that implements ReadOffset. Caller must acquire a lock // when calling this method. StreamResult ReadOffsetLocked(void* buffer, @@ -114,6 +122,8 @@ class FifoBuffer final : public StreamInterface { size_t* bytes_written) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + webrtc::ScopedTaskSafety task_safety_; + // keeps the opened/closed state of the stream StreamState state_ RTC_GUARDED_BY(mutex_); // the allocated buffer @@ -125,7 +135,7 @@ class FifoBuffer final : public StreamInterface { // offset to the readable data size_t read_position_ RTC_GUARDED_BY(mutex_); // stream callbacks are dispatched on this thread - Thread* owner_; + Thread* const owner_; // object lock mutable webrtc::Mutex mutex_; RTC_DISALLOW_COPY_AND_ASSIGN(FifoBuffer); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.cc index 18a06e241..e6e973dbd 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.cc @@ -14,7 +14,16 @@ namespace rtc { -MessageHandler::~MessageHandler() { +MessageHandlerAutoCleanup::MessageHandlerAutoCleanup() {} + +MessageHandlerAutoCleanup::~MessageHandlerAutoCleanup() { + // Note that even though this clears currently pending messages for the + // message handler, it's still racy since it doesn't prevent threads that + // might be in the process of posting new messages with would-be dangling + // pointers. + // This is related to the design of Message having a raw pointer. + // We could consider whether it would be safer to require message handlers + // to be reference counted (as some are). 
ThreadManager::Clear(this); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h index 85cb78548..62c8344e1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h @@ -21,17 +21,27 @@ namespace rtc { struct Message; -// Messages get dispatched to a MessageHandler +// MessageQueue/Thread Messages get dispatched via the MessageHandler interface. class RTC_EXPORT MessageHandler { public: - virtual ~MessageHandler(); + virtual ~MessageHandler() {} virtual void OnMessage(Message* msg) = 0; +}; + +// Warning: Provided for backwards compatibility. +// +// This class performs expensive cleanup in the dtor that will affect all +// instances of Thread (and their pending message queues) and will block the +// current thread as well as all other threads. +class RTC_EXPORT MessageHandlerAutoCleanup : public MessageHandler { + public: + ~MessageHandlerAutoCleanup() override; protected: - MessageHandler() {} + MessageHandlerAutoCleanup(); private: - RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandler); + RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandlerAutoCleanup); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc index 0d518c0b4..8aabdcb7e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc @@ -806,6 +806,11 @@ bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const { } #endif + if (network_monitor_ && + !network_monitor_->IsAdapterAvailable(network.name())) { + return true; + } + // Ignore any networks with a 0.x.y.z IP if (network.prefix().family() == AF_INET) { return (network.prefix().v4AddressAsHostOrderInteger() < 0x01000000); @@ -845,15 +850,11 @@ void BasicNetworkManager::StopUpdating() { } void BasicNetworkManager::StartNetworkMonitor() { - NetworkMonitorFactory* 
factory = network_monitor_factory_; - if (factory == nullptr) { - factory = NetworkMonitorFactory::GetFactory(); - if (factory == nullptr) { - return; - } + if (network_monitor_factory_ == nullptr) { + return; } if (!network_monitor_) { - network_monitor_.reset(factory->CreateNetworkMonitor()); + network_monitor_.reset(network_monitor_factory_->CreateNetworkMonitor()); if (!network_monitor_) { return; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h index 3dad521a7..7103f0fa2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h @@ -224,7 +224,7 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { // Basic implementation of the NetworkManager interface that gets list // of networks using OS APIs. class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, - public MessageHandler, + public MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: BasicNetworkManager(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.cc index 8fd5f786d..70c2ad502 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.cc @@ -10,37 +10,21 @@ #include "rtc_base/network_monitor.h" -#include - #include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" - -namespace { -const uint32_t UPDATE_NETWORKS_MESSAGE = 1; -} // namespace namespace rtc { -NetworkMonitorInterface::NetworkMonitorInterface() {} +const char* NetworkPreferenceToString(NetworkPreference preference) { + switch (preference) { + case NetworkPreference::NEUTRAL: + return "NEUTRAL"; + case NetworkPreference::NOT_PREFERRED: + return "NOT_PREFERRED"; + } + RTC_CHECK_NOTREACHED(); +} + +NetworkMonitorInterface::NetworkMonitorInterface() {} NetworkMonitorInterface::~NetworkMonitorInterface() {} 
-NetworkMonitorBase::NetworkMonitorBase() : worker_thread_(Thread::Current()) {} -NetworkMonitorBase::~NetworkMonitorBase() {} - -void NetworkMonitorBase::OnNetworksChanged() { - RTC_LOG(LS_VERBOSE) << "Network change is received at the network monitor"; - worker_thread_->Post(RTC_FROM_HERE, this, UPDATE_NETWORKS_MESSAGE); -} - -void NetworkMonitorBase::OnMessage(Message* msg) { - RTC_DCHECK(msg->message_id == UPDATE_NETWORKS_MESSAGE); - SignalNetworksChanged(); -} - -AdapterType NetworkMonitorBase::GetVpnUnderlyingAdapterType( - const std::string& interface_name) { - return ADAPTER_TYPE_UNKNOWN; -} - } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h index eb3c3d65f..4a3002f42 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h @@ -12,11 +12,7 @@ #define RTC_BASE_NETWORK_MONITOR_H_ #include "rtc_base/network_constants.h" -// TODO(deadbeef): Remove this include when downstream code stops using -// NetworkMonitorFactory::SetFactory. -#include "rtc_base/network_monitor_factory.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" namespace rtc { @@ -38,6 +34,8 @@ enum class NetworkPreference { NOT_PREFERRED = -1, }; +const char* NetworkPreferenceToString(NetworkPreference preference); + class NetworkBinderInterface { public: // Binds a socket to the network that is attached to |address| so that all @@ -64,8 +62,7 @@ class NetworkBinderInterface { * * Memory consideration: * NetworkMonitor is owned by the caller (NetworkManager). The global network - * monitor factory is owned by the factory itself but needs to be released from - * the factory creator. + * monitor factory is owned by the PeerConnectionFactory. */ // Generic network monitor interface. It starts and stops monitoring network // changes, and fires the SignalNetworksChanged event when networks change. 
@@ -79,36 +76,25 @@ class NetworkMonitorInterface { virtual void Start() = 0; virtual void Stop() = 0; - // Implementations should call this method on the base when networks change, - // and the base will fire SignalNetworksChanged on the right thread. - virtual void OnNetworksChanged() = 0; - virtual AdapterType GetAdapterType(const std::string& interface_name) = 0; virtual AdapterType GetVpnUnderlyingAdapterType( const std::string& interface_name) = 0; + virtual NetworkPreference GetNetworkPreference( const std::string& interface_name) = 0; -}; -class NetworkMonitorBase : public NetworkMonitorInterface, - public MessageHandler, - public sigslot::has_slots<> { - public: - NetworkMonitorBase(); - ~NetworkMonitorBase() override; - - void OnNetworksChanged() override; - - void OnMessage(Message* msg) override; - - AdapterType GetVpnUnderlyingAdapterType( - const std::string& interface_name) override; - - protected: - Thread* worker_thread() { return worker_thread_; } - - private: - Thread* worker_thread_; + // Is this interface available to use? WebRTC shouldn't attempt to use it if + // this returns false. + // + // It's possible for this status to change, in which case + // SignalNetworksChanged will be fired. + // + // These specific use case this was added for was a phone with two SIM cards, + // where attempting to use all interfaces returned from getifaddrs caused the + // connection to be dropped. 
+ virtual bool IsAdapterAvailable(const std::string& interface_name) { + return true; + } }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.cc index 915f2e3c4..9fac4d95a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.cc @@ -10,33 +10,9 @@ #include "rtc_base/network_monitor_factory.h" -namespace { -// This is set by NetworkMonitorFactory::SetFactory and the caller of -// NetworkMonitorFactory::SetFactory must be responsible for calling -// ReleaseFactory to destroy the factory. -rtc::NetworkMonitorFactory* network_monitor_factory = nullptr; -} // namespace - namespace rtc { NetworkMonitorFactory::NetworkMonitorFactory() {} NetworkMonitorFactory::~NetworkMonitorFactory() {} -void NetworkMonitorFactory::SetFactory(NetworkMonitorFactory* factory) { - if (network_monitor_factory != nullptr) { - delete network_monitor_factory; - } - network_monitor_factory = factory; -} - -void NetworkMonitorFactory::ReleaseFactory(NetworkMonitorFactory* factory) { - if (factory == network_monitor_factory) { - SetFactory(nullptr); - } -} - -NetworkMonitorFactory* NetworkMonitorFactory::GetFactory() { - return network_monitor_factory; -} - } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h index ac463d82c..dadcd4aa8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h @@ -24,15 +24,6 @@ class NetworkMonitorInterface; */ class NetworkMonitorFactory { public: - // This is not thread-safe; it should be called once (or once per audio/video - // call) during the call initialization. 
- // DEPRECATED: Should pass NetworkMonitorFactory through - // PeerConnectionFactoryDependencies instead. - static void SetFactory(NetworkMonitorFactory* factory); - - static void ReleaseFactory(NetworkMonitorFactory* factory); - static NetworkMonitorFactory* GetFactory(); - virtual NetworkMonitorInterface* CreateNetworkMonitor() = 0; virtual ~NetworkMonitorFactory(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/math_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/math_utils.h index 4bf48e22b..0f1d51b09 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/math_utils.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/math_utils.h @@ -8,14 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_NUMERICS_MATH_UTILS_H_ -#define RTC_BASE_NUMERICS_MATH_UTILS_H_ +#ifndef API_NUMERICS_MATH_UTILS_H_ +#define API_NUMERICS_MATH_UTILS_H_ #include #include #include "rtc_base/checks.h" +namespace webrtc { +namespace webrtc_impl { // Given two numbers |x| and |y| such that x >= y, computes the difference // x - y without causing undefined behavior due to signed overflow. template @@ -67,4 +69,7 @@ constexpr T minus_infinity_or_min() { return std::numeric_limits::min(); } -#endif // RTC_BASE_NUMERICS_MATH_UTILS_H_ +} // namespace webrtc_impl +} // namespace webrtc + +#endif // API_NUMERICS_MATH_UTILS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/running_statistics.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/running_statistics.h index 4a3516d3f..bbcc7e2a7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/running_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/running_statistics.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef RTC_BASE_NUMERICS_RUNNING_STATISTICS_H_ -#define RTC_BASE_NUMERICS_RUNNING_STATISTICS_H_ +#ifndef API_NUMERICS_RUNNING_STATISTICS_H_ +#define API_NUMERICS_RUNNING_STATISTICS_H_ #include #include @@ -20,6 +20,7 @@ #include "rtc_base/numerics/math_utils.h" namespace webrtc { +namespace webrtc_impl { // tl;dr: Robust and efficient online computation of statistics, // using Welford's method for variance. [1] @@ -154,6 +155,7 @@ class RunningStatistics { double cumul_ = 0; // Variance * size_, sometimes noted m2. }; +} // namespace webrtc_impl } // namespace webrtc -#endif // RTC_BASE_NUMERICS_RUNNING_STATISTICS_H_ +#endif // API_NUMERICS_RUNNING_STATISTICS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/safe_conversions.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/safe_conversions.h index 5d5867251..e00219cbd 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/safe_conversions.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/safe_conversions.h @@ -63,12 +63,10 @@ inline constexpr Dst saturated_cast(Src value) { // Should fail only on attempting to assign NaN to a saturated integer. 
case internal::TYPE_INVALID: - FATAL(); - return std::numeric_limits::max(); + RTC_CHECK_NOTREACHED(); } - FATAL(); - return static_cast(value); + RTC_CHECK_NOTREACHED(); } } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/sample_stats.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/sample_stats.h index f6347414b..39af1c6a3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/sample_stats.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/sample_stats.h @@ -10,10 +10,10 @@ #ifndef RTC_BASE_NUMERICS_SAMPLE_STATS_H_ #define RTC_BASE_NUMERICS_SAMPLE_STATS_H_ +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/numerics/samples_stats_counter.h" namespace webrtc { template diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h index 0e76836ba..6f1f7dcca 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h @@ -32,7 +32,8 @@ namespace rtc { -class OpenSSLAdapter final : public SSLAdapter, public MessageHandler { +class OpenSSLAdapter final : public SSLAdapter, + public MessageHandlerAutoCleanup { public: static bool InitializeSSL(); static bool CleanupSSL(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc index 9459f76df..bd9bb04fd 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc @@ -244,13 +244,8 @@ std::unique_ptr OpenSSLCertificate::Clone() const { std::string OpenSSLCertificate::ToPEMString() const { BIO* bio = BIO_new(BIO_s_mem()); - if (!bio) { - FATAL() << "Unreachable code."; - } - if (!PEM_write_bio_X509(bio, x509_)) { - BIO_free(bio); - FATAL() << "Unreachable code."; - 
} + RTC_CHECK(bio); + RTC_CHECK(PEM_write_bio_X509(bio, x509_)); BIO_write(bio, "\0", 1); char* buffer; BIO_get_mem_data(bio, &buffer); @@ -264,13 +259,8 @@ void OpenSSLCertificate::ToDER(Buffer* der_buffer) const { der_buffer->SetSize(0); // Calculates the DER representation of the certificate, from scratch. BIO* bio = BIO_new(BIO_s_mem()); - if (!bio) { - FATAL() << "Unreachable code."; - } - if (!i2d_X509_bio(bio, x509_)) { - BIO_free(bio); - FATAL() << "Unreachable code."; - } + RTC_CHECK(bio); + RTC_CHECK(i2d_X509_bio(bio, x509_)); char* data = nullptr; size_t length = BIO_get_mem_data(bio, &data); der_buffer->SetData(data, length); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc index cd7f8025c..f59b4edf1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc @@ -21,6 +21,7 @@ #include #endif +#include #include #include #include @@ -34,6 +35,7 @@ #include "rtc_base/openssl_identity.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/stream.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/field_trial.h" @@ -50,7 +52,6 @@ namespace rtc { namespace { - // SRTP cipher suite table. |internal_name| is used to construct a // colon-separated profile strings which is needed by // SSL_CTX_set_tlsext_use_srtp(). 
@@ -265,9 +266,25 @@ static long stream_ctrl(BIO* b, int cmd, long num, void* ptr) { // OpenSSLStreamAdapter ///////////////////////////////////////////////////////////////////////////// +static std::atomic g_use_legacy_tls_protocols_override(false); +static std::atomic g_allow_legacy_tls_protocols(false); + +void SetAllowLegacyTLSProtocols(const absl::optional& allow) { + g_use_legacy_tls_protocols_override.store(allow.has_value()); + if (allow.has_value()) + g_allow_legacy_tls_protocols.store(allow.value()); +} + +bool ShouldAllowLegacyTLSProtocols() { + return g_use_legacy_tls_protocols_override.load() + ? g_allow_legacy_tls_protocols.load() + : webrtc::field_trial::IsEnabled("WebRTC-LegacyTlsProtocols"); +} + OpenSSLStreamAdapter::OpenSSLStreamAdapter( std::unique_ptr stream) : SSLStreamAdapter(std::move(stream)), + owner_(rtc::Thread::Current()), state_(SSL_NONE), role_(SSL_CLIENT), ssl_read_needs_write_(false), @@ -278,10 +295,10 @@ OpenSSLStreamAdapter::OpenSSLStreamAdapter( ssl_max_version_(SSL_PROTOCOL_TLS_12), // Default is to support legacy TLS protocols. // This will be changed to default non-support in M82 or M83. 
- support_legacy_tls_protocols_flag_( - !webrtc::field_trial::IsDisabled("WebRTC-LegacyTlsProtocols")) {} + support_legacy_tls_protocols_flag_(ShouldAllowLegacyTLSProtocols()) {} OpenSSLStreamAdapter::~OpenSSLStreamAdapter() { + timeout_task_.Stop(); Cleanup(0); } @@ -530,7 +547,7 @@ StreamResult OpenSSLStreamAdapter::Write(const void* data, size_t data_len, size_t* written, int* error) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data_len << ")"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data_len << ")"; switch (state_) { case SSL_NONE: @@ -570,18 +587,18 @@ StreamResult OpenSSLStreamAdapter::Write(const void* data, int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: - RTC_LOG(LS_VERBOSE) << " -- success"; + RTC_DLOG(LS_VERBOSE) << " -- success"; RTC_DCHECK_GT(code, 0); RTC_DCHECK_LE(code, data_len); if (written) *written = code; return SR_SUCCESS; case SSL_ERROR_WANT_READ: - RTC_LOG(LS_VERBOSE) << " -- error want read"; + RTC_DLOG(LS_VERBOSE) << " -- error want read"; ssl_write_needs_read_ = true; return SR_BLOCK; case SSL_ERROR_WANT_WRITE: - RTC_LOG(LS_VERBOSE) << " -- error want write"; + RTC_DLOG(LS_VERBOSE) << " -- error want write"; return SR_BLOCK; case SSL_ERROR_ZERO_RETURN: @@ -599,7 +616,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, size_t data_len, size_t* read, int* error) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data_len << ")"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data_len << ")"; switch (state_) { case SSL_NONE: // pass-through in clear text @@ -637,7 +654,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, switch (ssl_error) { case SSL_ERROR_NONE: - RTC_LOG(LS_VERBOSE) << " -- success"; + RTC_DLOG(LS_VERBOSE) << " -- success"; RTC_DCHECK_GT(code, 0); RTC_DCHECK_LE(code, data_len); if (read) { @@ -649,7 +666,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, unsigned int pending = SSL_pending(ssl_); if (pending) { - 
RTC_LOG(LS_INFO) << " -- short DTLS read. flushing"; + RTC_DLOG(LS_INFO) << " -- short DTLS read. flushing"; FlushInput(pending); if (error) { *error = SSE_MSG_TRUNC; @@ -659,14 +676,14 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, } return SR_SUCCESS; case SSL_ERROR_WANT_READ: - RTC_LOG(LS_VERBOSE) << " -- error want read"; + RTC_DLOG(LS_VERBOSE) << " -- error want read"; return SR_BLOCK; case SSL_ERROR_WANT_WRITE: - RTC_LOG(LS_VERBOSE) << " -- error want write"; + RTC_DLOG(LS_VERBOSE) << " -- error want write"; ssl_read_needs_write_ = true; return SR_BLOCK; case SSL_ERROR_ZERO_RETURN: - RTC_LOG(LS_VERBOSE) << " -- remote side closed"; + RTC_DLOG(LS_VERBOSE) << " -- remote side closed"; Close(); return SR_EOS; default: @@ -696,7 +713,7 @@ void OpenSSLStreamAdapter::FlushInput(unsigned int left) { return; } - RTC_LOG(LS_VERBOSE) << " -- flushed " << code << " bytes"; + RTC_DLOG(LS_VERBOSE) << " -- flushed " << code << " bytes"; left -= code; } } @@ -734,7 +751,7 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, RTC_DCHECK(stream == this->stream()); if ((events & SE_OPEN)) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent SE_OPEN"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent SE_OPEN"; if (state_ != SSL_WAIT) { RTC_DCHECK(state_ == SSL_NONE); events_to_signal |= SE_OPEN; @@ -748,9 +765,9 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } if ((events & (SE_READ | SE_WRITE))) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent" - << ((events & SE_READ) ? " SE_READ" : "") - << ((events & SE_WRITE) ? " SE_WRITE" : ""); + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent" + << ((events & SE_READ) ? " SE_READ" : "") + << ((events & SE_WRITE) ? 
" SE_WRITE" : ""); if (state_ == SSL_NONE) { events_to_signal |= events & (SE_READ | SE_WRITE); } else if (state_ == SSL_CONNECTING) { @@ -761,20 +778,20 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } else if (state_ == SSL_CONNECTED) { if (((events & SE_READ) && ssl_write_needs_read_) || (events & SE_WRITE)) { - RTC_LOG(LS_VERBOSE) << " -- onStreamWriteable"; + RTC_DLOG(LS_VERBOSE) << " -- onStreamWriteable"; events_to_signal |= SE_WRITE; } if (((events & SE_WRITE) && ssl_read_needs_write_) || (events & SE_READ)) { - RTC_LOG(LS_VERBOSE) << " -- onStreamReadable"; + RTC_DLOG(LS_VERBOSE) << " -- onStreamReadable"; events_to_signal |= SE_READ; } } } if ((events & SE_CLOSE)) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent(SE_CLOSE, " << err - << ")"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent(SE_CLOSE, " << err + << ")"; Cleanup(0); events_to_signal |= SE_CLOSE; // SE_CLOSE is the only event that uses the final parameter to OnEvent(). @@ -787,10 +804,37 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } } +void OpenSSLStreamAdapter::PostEvent(int events, int err) { + owner_->PostTask(webrtc::ToQueuedTask( + task_safety_, [this, events, err]() { SignalEvent(this, events, err); })); +} + +void OpenSSLStreamAdapter::SetTimeout(int delay_ms) { + // We need to accept 0 delay here as well as >0 delay, because + // DTLSv1_get_timeout seems to frequently return 0 ms. + RTC_DCHECK_GE(delay_ms, 0); + RTC_DCHECK(!timeout_task_.Running()); + + timeout_task_ = webrtc::RepeatingTaskHandle::DelayedStart( + owner_, webrtc::TimeDelta::Millis(delay_ms), + [flag = task_safety_.flag(), this]() { + if (flag->alive()) { + RTC_DLOG(LS_INFO) << "DTLS timeout expired"; + timeout_task_.Stop(); + DTLSv1_handle_timeout(ssl_); + ContinueSSL(); + } else { + RTC_NOTREACHED(); + } + // This callback will never run again (stopped above). 
+ return webrtc::TimeDelta::PlusInfinity(); + }); +} + int OpenSSLStreamAdapter::BeginSSL() { RTC_DCHECK(state_ == SSL_CONNECTING); // The underlying stream has opened. - RTC_LOG(LS_INFO) << "BeginSSL with peer."; + RTC_DLOG(LS_INFO) << "BeginSSL with peer."; BIO* bio = nullptr; @@ -833,18 +877,18 @@ int OpenSSLStreamAdapter::BeginSSL() { } int OpenSSLStreamAdapter::ContinueSSL() { - RTC_LOG(LS_VERBOSE) << "ContinueSSL"; + RTC_DLOG(LS_VERBOSE) << "ContinueSSL"; RTC_DCHECK(state_ == SSL_CONNECTING); // Clear the DTLS timer - Thread::Current()->Clear(this, MSG_TIMEOUT); + timeout_task_.Stop(); const int code = (role_ == SSL_CLIENT) ? SSL_connect(ssl_) : SSL_accept(ssl_); const int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: - RTC_LOG(LS_VERBOSE) << " -- success"; + RTC_DLOG(LS_VERBOSE) << " -- success"; // By this point, OpenSSL should have given us a certificate, or errored // out if one was missing. RTC_DCHECK(peer_cert_chain_ || !GetClientAuthEnabled()); @@ -865,18 +909,16 @@ int OpenSSLStreamAdapter::ContinueSSL() { break; case SSL_ERROR_WANT_READ: { - RTC_LOG(LS_VERBOSE) << " -- error want read"; + RTC_DLOG(LS_VERBOSE) << " -- error want read"; struct timeval timeout; if (DTLSv1_get_timeout(ssl_, &timeout)) { int delay = timeout.tv_sec * 1000 + timeout.tv_usec / 1000; - - Thread::Current()->PostDelayed(RTC_FROM_HERE, delay, this, MSG_TIMEOUT, - 0); + SetTimeout(delay); } } break; case SSL_ERROR_WANT_WRITE: - RTC_LOG(LS_VERBOSE) << " -- error want write"; + RTC_DLOG(LS_VERBOSE) << " -- error want write"; break; case SSL_ERROR_ZERO_RETURN: @@ -886,8 +928,8 @@ int OpenSSLStreamAdapter::ContinueSSL() { if (err_code != 0 && ERR_GET_REASON(err_code) == SSL_R_NO_SHARED_CIPHER) { ssl_handshake_err = SSLHandshakeError::INCOMPATIBLE_CIPHERSUITE; } - RTC_LOG(LS_VERBOSE) << " -- error " << code << ", " << err_code << ", " - << ERR_GET_REASON(err_code); + RTC_DLOG(LS_VERBOSE) << " -- error " << code << ", " << err_code << ", " + << 
ERR_GET_REASON(err_code); SignalSSLHandshakeError(ssl_handshake_err); return (ssl_error != 0) ? ssl_error : -1; } @@ -910,7 +952,7 @@ void OpenSSLStreamAdapter::Error(const char* context, } void OpenSSLStreamAdapter::Cleanup(uint8_t alert) { - RTC_LOG(LS_INFO) << "Cleanup"; + RTC_DLOG(LS_INFO) << "Cleanup"; if (state_ != SSL_ERROR) { state_ = SSL_CLOSED; @@ -948,18 +990,7 @@ void OpenSSLStreamAdapter::Cleanup(uint8_t alert) { peer_cert_chain_.reset(); // Clear the DTLS timer - Thread::Current()->Clear(this, MSG_TIMEOUT); -} - -void OpenSSLStreamAdapter::OnMessage(Message* msg) { - // Process our own messages and then pass others to the superclass - if (MSG_TIMEOUT == msg->message_id) { - RTC_LOG(LS_INFO) << "DTLS timeout expired"; - DTLSv1_handle_timeout(ssl_); - ContinueSSL(); - } else { - StreamInterface::OnMessage(msg); - } + timeout_task_.Stop(); } SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { @@ -1072,7 +1103,7 @@ bool OpenSSLStreamAdapter::VerifyPeerCertificate() { // Ignore any verification error if the digest matches, since there is no // value in checking the validity of a self-signed cert issued by untrusted // sources. - RTC_LOG(LS_INFO) << "Accepted peer certificate."; + RTC_DLOG(LS_INFO) << "Accepted peer certificate."; peer_certificate_verified_ = true; return true; } @@ -1107,7 +1138,7 @@ int OpenSSLStreamAdapter::SSLVerifyCallback(X509_STORE_CTX* store, void* arg) { // If the peer certificate digest isn't known yet, we'll wait to verify // until it's known, and for now just return a success status. 
if (stream->peer_certificate_digest_algorithm_.empty()) { - RTC_LOG(LS_INFO) << "Waiting to verify certificate until digest is known."; + RTC_DLOG(LS_INFO) << "Waiting to verify certificate until digest is known."; return 1; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h index 7ea324321..fbfccd684 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h @@ -19,11 +19,15 @@ #include #include +#include "absl/types/optional.h" #include "rtc_base/buffer.h" #include "rtc_base/openssl_identity.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/repeating_task.h" namespace rtc { @@ -55,6 +59,12 @@ class SSLCertChain; /////////////////////////////////////////////////////////////////////////////// +// If |allow| has a value, its value determines if legacy TLS protocols are +// allowed, overriding the default configuration. +// If |allow| has no value, any previous override is removed and the default +// configuration is restored. +RTC_EXPORT void SetAllowLegacyTLSProtocols(const absl::optional& allow); + class OpenSSLStreamAdapter final : public SSLStreamAdapter { public: explicit OpenSSLStreamAdapter(std::unique_ptr stream); @@ -137,7 +147,8 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { SSL_CLOSED // Clean close }; - enum { MSG_TIMEOUT = MSG_MAX + 1 }; + void PostEvent(int events, int err); + void SetTimeout(int delay_ms); // The following three methods return 0 on success and a negative // error code on failure. 
The error code may be from OpenSSL or -1 @@ -161,9 +172,6 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { void Error(const char* context, int err, uint8_t alert, bool signal); void Cleanup(uint8_t alert); - // Override MessageHandler - void OnMessage(Message* msg) override; - // Flush the input buffers by reading left bytes (for DTLS) void FlushInput(unsigned int left); @@ -184,6 +192,10 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { !peer_certificate_digest_value_.empty(); } + rtc::Thread* const owner_; + webrtc::ScopedTaskSafety task_safety_; + webrtc::RepeatingTaskHandle timeout_task_; + SSLState state_; SSLRole role_; int ssl_error_code_; // valid when state_ == SSL_ERROR or SSL_CLOSED diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc index 68ee20bab..f707d339b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc @@ -19,12 +19,14 @@ OperationsChain::CallbackHandle::CallbackHandle( : operations_chain_(std::move(operations_chain)) {} OperationsChain::CallbackHandle::~CallbackHandle() { +#if RTC_DCHECK_IS_ON RTC_DCHECK(has_run_); +#endif } void OperationsChain::CallbackHandle::OnOperationComplete() { +#if RTC_DCHECK_IS_ON RTC_DCHECK(!has_run_); -#ifdef RTC_DCHECK_IS_ON has_run_ = true; #endif // RTC_DCHECK_IS_ON operations_chain_->OnOperationComplete(); @@ -49,6 +51,17 @@ OperationsChain::~OperationsChain() { RTC_DCHECK(chained_operations_.empty()); } +void OperationsChain::SetOnChainEmptyCallback( + std::function on_chain_empty_callback) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + on_chain_empty_callback_ = std::move(on_chain_empty_callback); +} + +bool OperationsChain::IsEmpty() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return chained_operations_.empty(); +} + std::function OperationsChain::CreateOperationsChainCallback() { return [handle = 
rtc::scoped_refptr( new CallbackHandle(this))]() { handle->OnOperationComplete(); }; @@ -59,9 +72,12 @@ void OperationsChain::OnOperationComplete() { // The front element is the operation that just completed, remove it. RTC_DCHECK(!chained_operations_.empty()); chained_operations_.pop(); - // If there are any other operations chained, execute the next one. + // If there are any other operations chained, execute the next one. Otherwise, + // invoke the "on chain empty" callback if it has been set. if (!chained_operations_.empty()) { chained_operations_.front()->Run(); + } else if (on_chain_empty_callback_.has_value()) { + on_chain_empty_callback_.value()(); } } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h index b6ec46e04..44a3d9acb 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h @@ -18,6 +18,7 @@ #include #include +#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" @@ -49,11 +50,15 @@ class OperationWithFunctor final : public Operation { : functor_(std::forward(functor)), callback_(std::move(callback)) {} - ~OperationWithFunctor() override { RTC_DCHECK(has_run_); } + ~OperationWithFunctor() override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(has_run_); +#endif // RTC_DCHECK_IS_ON + } void Run() override { +#if RTC_DCHECK_IS_ON RTC_DCHECK(!has_run_); -#ifdef RTC_DCHECK_IS_ON has_run_ = true; #endif // RTC_DCHECK_IS_ON // The functor being executed may invoke the callback synchronously, @@ -69,7 +74,7 @@ class OperationWithFunctor final : public Operation { private: typename std::remove_reference::type functor_; std::function callback_; -#ifdef RTC_DCHECK_IS_ON +#if RTC_DCHECK_IS_ON bool has_run_ = false; #endif // RTC_DCHECK_IS_ON }; @@ -112,6 +117,9 @@ class OperationsChain final : public RefCountedObject { static 
scoped_refptr Create(); ~OperationsChain(); + void SetOnChainEmptyCallback(std::function on_chain_empty_callback); + bool IsEmpty() const; + // Chains an operation. Chained operations are executed in FIFO order. The // operation starts when |functor| is executed by the OperationsChain and is // contractually obligated to invoke the callback passed to it when the @@ -163,7 +171,7 @@ class OperationsChain final : public RefCountedObject { private: scoped_refptr operations_chain_; -#ifdef RTC_DCHECK_IS_ON +#if RTC_DCHECK_IS_ON bool has_run_ = false; #endif // RTC_DCHECK_IS_ON @@ -181,6 +189,8 @@ class OperationsChain final : public RefCountedObject { // to it. std::queue> chained_operations_ RTC_GUARDED_BY(sequence_checker_); + absl::optional> on_chain_empty_callback_ + RTC_GUARDED_BY(sequence_checker_); RTC_DISALLOW_COPY_AND_ASSIGN(OperationsChain); }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc index 05b32557b..cf6e79279 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc @@ -103,6 +103,20 @@ typedef char* SockOptArg; #endif #endif +namespace { +class ScopedSetTrue { + public: + ScopedSetTrue(bool* value) : value_(value) { + RTC_DCHECK(!*value_); + *value_ = true; + } + ~ScopedSetTrue() { *value_ = false; } + + private: + bool* value_; +}; +} // namespace + namespace rtc { std::unique_ptr SocketServer::CreateDefault() { @@ -835,7 +849,7 @@ void SocketDispatcher::OnEvent(uint32_t ff, int err) { #if defined(WEBRTC_USE_EPOLL) -static int GetEpollEvents(uint32_t ff) { +inline static int GetEpollEvents(uint32_t ff) { int events = 0; if (ff & (DE_READ | DE_ACCEPT)) { events |= EPOLLIN; @@ -1061,7 +1075,8 @@ PhysicalSocketServer::~PhysicalSocketServer() { close(epoll_fd_); } #endif - RTC_DCHECK(dispatchers_.empty()); + RTC_DCHECK(dispatcher_by_key_.empty()); + 
RTC_DCHECK(key_by_dispatcher_.empty()); } void PhysicalSocketServer::WakeUp() { @@ -1100,45 +1115,32 @@ AsyncSocket* PhysicalSocketServer::WrapSocket(SOCKET s) { void PhysicalSocketServer::Add(Dispatcher* pdispatcher) { CritScope cs(&crit_); - if (processing_dispatchers_) { - // A dispatcher is being added while a "Wait" call is processing the - // list of socket events. - // Defer adding to "dispatchers_" set until processing is done to avoid - // invalidating the iterator in "Wait". - pending_remove_dispatchers_.erase(pdispatcher); - pending_add_dispatchers_.insert(pdispatcher); - } else { - dispatchers_.insert(pdispatcher); + if (key_by_dispatcher_.count(pdispatcher)) { + RTC_LOG(LS_WARNING) + << "PhysicalSocketServer asked to add a duplicate dispatcher."; + return; } + uint64_t key = next_dispatcher_key_++; + dispatcher_by_key_.emplace(key, pdispatcher); + key_by_dispatcher_.emplace(pdispatcher, key); #if defined(WEBRTC_USE_EPOLL) if (epoll_fd_ != INVALID_SOCKET) { - AddEpoll(pdispatcher); + AddEpoll(pdispatcher, key); } #endif // WEBRTC_USE_EPOLL } void PhysicalSocketServer::Remove(Dispatcher* pdispatcher) { CritScope cs(&crit_); - if (processing_dispatchers_) { - // A dispatcher is being removed while a "Wait" call is processing the - // list of socket events. - // Defer removal from "dispatchers_" set until processing is done to avoid - // invalidating the iterator in "Wait". 
- if (!pending_add_dispatchers_.erase(pdispatcher) && - dispatchers_.find(pdispatcher) == dispatchers_.end()) { - RTC_LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown " - "dispatcher, potentially from a duplicate call to " - "Add."; - return; - } - - pending_remove_dispatchers_.insert(pdispatcher); - } else if (!dispatchers_.erase(pdispatcher)) { + if (!key_by_dispatcher_.count(pdispatcher)) { RTC_LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown " "dispatcher, potentially from a duplicate call to Add."; return; } + uint64_t key = key_by_dispatcher_.at(pdispatcher); + key_by_dispatcher_.erase(pdispatcher); + dispatcher_by_key_.erase(key); #if defined(WEBRTC_USE_EPOLL) if (epoll_fd_ != INVALID_SOCKET) { RemoveEpoll(pdispatcher); @@ -1152,34 +1154,22 @@ void PhysicalSocketServer::Update(Dispatcher* pdispatcher) { return; } + // Don't update dispatchers that haven't yet been added. CritScope cs(&crit_); - if (dispatchers_.find(pdispatcher) == dispatchers_.end()) { + if (!key_by_dispatcher_.count(pdispatcher)) { return; } - UpdateEpoll(pdispatcher); + UpdateEpoll(pdispatcher, key_by_dispatcher_.at(pdispatcher)); #endif } -void PhysicalSocketServer::AddRemovePendingDispatchers() { - if (!pending_add_dispatchers_.empty()) { - for (Dispatcher* pdispatcher : pending_add_dispatchers_) { - dispatchers_.insert(pdispatcher); - } - pending_add_dispatchers_.clear(); - } - - if (!pending_remove_dispatchers_.empty()) { - for (Dispatcher* pdispatcher : pending_remove_dispatchers_) { - dispatchers_.erase(pdispatcher); - } - pending_remove_dispatchers_.clear(); - } -} - #if defined(WEBRTC_POSIX) bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { + // We don't support reentrant waiting. + RTC_DCHECK(!waiting_); + ScopedSetTrue s(&waiting_); #if defined(WEBRTC_USE_EPOLL) // We don't keep a dedicated "epoll" descriptor containing only the non-IO // (i.e. 
signaling) dispatcher, so "poll" will be used instead of the default @@ -1205,6 +1195,9 @@ static void ProcessEvents(Dispatcher* dispatcher, &len); } + // Most often the socket is writable or readable or both, so make a single + // virtual call to get requested events + const uint32_t requested_events = dispatcher->GetRequestedEvents(); uint32_t ff = 0; // Check readable descriptors. If we're waiting on an accept, signal @@ -1212,7 +1205,7 @@ static void ProcessEvents(Dispatcher* dispatcher, // readable or really closed. // TODO(pthatcher): Only peek at TCP descriptors. if (readable) { - if (dispatcher->GetRequestedEvents() & DE_ACCEPT) { + if (requested_events & DE_ACCEPT) { ff |= DE_ACCEPT; } else if (errcode || dispatcher->IsDescriptorClosed()) { ff |= DE_CLOSE; @@ -1224,7 +1217,7 @@ static void ProcessEvents(Dispatcher* dispatcher, // Check writable descriptors. If we're waiting on a connect, detect // success versus failure by the reaped error code. if (writable) { - if (dispatcher->GetRequestedEvents() & DE_CONNECT) { + if (requested_events & DE_CONNECT) { if (!errcode) { ff |= DE_CONNECT; } else { @@ -1258,13 +1251,9 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { stop_us = rtc::TimeMicros() + cmsWait * 1000; } - // Zero all fd_sets. Don't need to do this inside the loop since - // select() zeros the descriptors not signaled fd_set fdsRead; - FD_ZERO(&fdsRead); fd_set fdsWrite; - FD_ZERO(&fdsWrite); // Explicitly unpoison these FDs on MemorySanitizer which doesn't handle the // inline assembly in FD_ZERO. // http://crbug.com/344505 @@ -1276,16 +1265,22 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { fWait_ = true; while (fWait_) { + // Zero all fd_sets. Although select() zeros the descriptors not signaled, + // we may need to do this for dispatchers that were deleted while + // iterating. 
+ FD_ZERO(&fdsRead); + FD_ZERO(&fdsWrite); int fdmax = -1; { CritScope cr(&crit_); - // TODO(jbauch): Support re-entrant waiting. - RTC_DCHECK(!processing_dispatchers_); - for (Dispatcher* pdispatcher : dispatchers_) { + current_dispatcher_keys_.clear(); + for (auto const& kv : dispatcher_by_key_) { + uint64_t key = kv.first; + Dispatcher* pdispatcher = kv.second; // Query dispatchers for read and write wait state - RTC_DCHECK(pdispatcher); if (!process_io && (pdispatcher != signal_wakeup_)) continue; + current_dispatcher_keys_.push_back(key); int fd = pdispatcher->GetDescriptor(); // "select"ing a file descriptor that is equal to or larger than // FD_SETSIZE will result in undefined behavior. @@ -1323,8 +1318,14 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { } else { // We have signaled descriptors CritScope cr(&crit_); - processing_dispatchers_ = true; - for (Dispatcher* pdispatcher : dispatchers_) { + // Iterate only on the dispatchers whose sockets were passed into + // WSAEventSelect; this avoids the ABA problem (a socket being + // destroyed and a new one created with the same file descriptor). + for (uint64_t key : current_dispatcher_keys_) { + if (!dispatcher_by_key_.count(key)) + continue; + Dispatcher* pdispatcher = dispatcher_by_key_.at(key); + int fd = pdispatcher->GetDescriptor(); bool readable = FD_ISSET(fd, &fdsRead); @@ -1340,11 +1341,6 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { // The error code can be signaled through reads or writes. ProcessEvents(pdispatcher, readable, writable, readable || writable); } - - processing_dispatchers_ = false; - // Process deferred dispatchers that have been added/removed while the - // events were handled above. - AddRemovePendingDispatchers(); } // Recalc the time remaining to wait. 
Doing it here means it doesn't get @@ -1365,7 +1361,7 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { #if defined(WEBRTC_USE_EPOLL) -void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher) { +void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher, uint64_t key) { RTC_DCHECK(epoll_fd_ != INVALID_SOCKET); int fd = pdispatcher->GetDescriptor(); RTC_DCHECK(fd != INVALID_SOCKET); @@ -1375,7 +1371,7 @@ void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher) { struct epoll_event event = {0}; event.events = GetEpollEvents(pdispatcher->GetRequestedEvents()); - event.data.ptr = pdispatcher; + event.data.u64 = key; int err = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, fd, &event); RTC_DCHECK_EQ(err, 0); if (err == -1) { @@ -1404,7 +1400,7 @@ void PhysicalSocketServer::RemoveEpoll(Dispatcher* pdispatcher) { } } -void PhysicalSocketServer::UpdateEpoll(Dispatcher* pdispatcher) { +void PhysicalSocketServer::UpdateEpoll(Dispatcher* pdispatcher, uint64_t key) { RTC_DCHECK(epoll_fd_ != INVALID_SOCKET); int fd = pdispatcher->GetDescriptor(); RTC_DCHECK(fd != INVALID_SOCKET); @@ -1414,7 +1410,7 @@ void PhysicalSocketServer::UpdateEpoll(Dispatcher* pdispatcher) { struct epoll_event event = {0}; event.events = GetEpollEvents(pdispatcher->GetRequestedEvents()); - event.data.ptr = pdispatcher; + event.data.u64 = key; int err = epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, fd, &event); RTC_DCHECK_EQ(err, 0); if (err == -1) { @@ -1456,11 +1452,12 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) { CritScope cr(&crit_); for (int i = 0; i < n; ++i) { const epoll_event& event = epoll_events_[i]; - Dispatcher* pdispatcher = static_cast(event.data.ptr); - if (dispatchers_.find(pdispatcher) == dispatchers_.end()) { + uint64_t key = event.data.u64; + if (!dispatcher_by_key_.count(key)) { // The dispatcher for this socket no longer exists. 
continue; } + Dispatcher* pdispatcher = dispatcher_by_key_.at(key); bool readable = (event.events & (EPOLLIN | EPOLLPRI)); bool writable = (event.events & EPOLLOUT); @@ -1472,7 +1469,7 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) { if (cmsWait != kForever) { tvWait = TimeDiff(tvStop, TimeMillis()); - if (tvWait < 0) { + if (tvWait <= 0) { // Return success on timeout. return true; } @@ -1555,6 +1552,10 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, Dispatcher* dispatcher) { #if defined(WEBRTC_WIN) bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { + // We don't support reentrant waiting. + RTC_DCHECK(!waiting_); + ScopedSetTrue s(&waiting_); + int64_t cmsTotal = cmsWait; int64_t cmsElapsed = 0; int64_t msStart = Time(); @@ -1562,37 +1563,40 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { fWait_ = true; while (fWait_) { std::vector events; - std::vector event_owners; + std::vector event_owners; events.push_back(socket_ev_); { CritScope cr(&crit_); - // TODO(jbauch): Support re-entrant waiting. - RTC_DCHECK(!processing_dispatchers_); - - // Calling "CheckSignalClose" might remove a closed dispatcher from the - // set. This must be deferred to prevent invalidating the iterator. - processing_dispatchers_ = true; - for (Dispatcher* disp : dispatchers_) { + // Get a snapshot of all current dispatchers; this is used to avoid the + // ABA problem (see later comment) and avoids the dispatcher_by_key_ + // iterator being invalidated by calling CheckSignalClose, which may + // remove the dispatcher from the list. 
+ current_dispatcher_keys_.clear(); + for (auto const& kv : dispatcher_by_key_) { + current_dispatcher_keys_.push_back(kv.first); + } + for (uint64_t key : current_dispatcher_keys_) { + if (!dispatcher_by_key_.count(key)) { + continue; + } + Dispatcher* disp = dispatcher_by_key_.at(key); + if (!disp) + continue; if (!process_io && (disp != signal_wakeup_)) continue; SOCKET s = disp->GetSocket(); if (disp->CheckSignalClose()) { - // We just signalled close, don't poll this socket + // We just signalled close, don't poll this socket. } else if (s != INVALID_SOCKET) { WSAEventSelect(s, events[0], FlagsToEvents(disp->GetRequestedEvents())); } else { events.push_back(disp->GetWSAEvent()); - event_owners.push_back(disp); + event_owners.push_back(key); } } - - processing_dispatchers_ = false; - // Process deferred dispatchers that have been added/removed while the - // events were handled above. - AddRemovePendingDispatchers(); } // Which is shorter, the delay wait or the asked wait? @@ -1624,15 +1628,23 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { int index = dw - WSA_WAIT_EVENT_0; if (index > 0) { --index; // The first event is the socket event - Dispatcher* disp = event_owners[index]; - // The dispatcher could have been removed while waiting for events. - if (dispatchers_.find(disp) != dispatchers_.end()) { - disp->OnPreEvent(0); - disp->OnEvent(0, 0); + uint64_t key = event_owners[index]; + if (!dispatcher_by_key_.count(key)) { + // The dispatcher could have been removed while waiting for events. + continue; } + Dispatcher* disp = dispatcher_by_key_.at(key); + disp->OnPreEvent(0); + disp->OnEvent(0, 0); } else if (process_io) { - processing_dispatchers_ = true; - for (Dispatcher* disp : dispatchers_) { + // Iterate only on the dispatchers whose sockets were passed into + // WSAEventSelect; this avoids the ABA problem (a socket being + // destroyed and a new one created with the same SOCKET handle). 
+ for (uint64_t key : current_dispatcher_keys_) { + if (!dispatcher_by_key_.count(key)) { + continue; + } + Dispatcher* disp = dispatcher_by_key_.at(key); SOCKET s = disp->GetSocket(); if (s == INVALID_SOCKET) continue; @@ -1698,11 +1710,6 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { } } } - - processing_dispatchers_ = false; - // Process deferred dispatchers that have been added/removed while the - // events were handled above. - AddRemovePendingDispatchers(); } // Reset the network event until new activity occurs diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h index 7eaf590e3..cc21a67b1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h @@ -18,7 +18,7 @@ #include #include -#include +#include #include #include "rtc_base/deprecated/recursive_critical_section.h" @@ -85,17 +85,13 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { // The number of events to process with one call to "epoll_wait". 
static constexpr size_t kNumEpollEvents = 128; - typedef std::set DispatcherSet; - - void AddRemovePendingDispatchers() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); - #if defined(WEBRTC_POSIX) bool WaitSelect(int cms, bool process_io); #endif // WEBRTC_POSIX #if defined(WEBRTC_USE_EPOLL) - void AddEpoll(Dispatcher* dispatcher); + void AddEpoll(Dispatcher* dispatcher, uint64_t key); void RemoveEpoll(Dispatcher* dispatcher); - void UpdateEpoll(Dispatcher* dispatcher); + void UpdateEpoll(Dispatcher* dispatcher, uint64_t key); bool WaitEpoll(int cms); bool WaitPoll(int cms, Dispatcher* dispatcher); @@ -106,16 +102,31 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { std::array epoll_events_; const int epoll_fd_ = INVALID_SOCKET; #endif // WEBRTC_USE_EPOLL - DispatcherSet dispatchers_ RTC_GUARDED_BY(crit_); - DispatcherSet pending_add_dispatchers_ RTC_GUARDED_BY(crit_); - DispatcherSet pending_remove_dispatchers_ RTC_GUARDED_BY(crit_); - bool processing_dispatchers_ RTC_GUARDED_BY(crit_) = false; + // uint64_t keys are used to uniquely identify a dispatcher in order to avoid + // the ABA problem during the epoll loop (a dispatcher being destroyed and + // replaced by one with the same address). + uint64_t next_dispatcher_key_ RTC_GUARDED_BY(crit_) = 0; + std::unordered_map dispatcher_by_key_ + RTC_GUARDED_BY(crit_); + // Reverse lookup necessary for removals/updates. + std::unordered_map key_by_dispatcher_ + RTC_GUARDED_BY(crit_); + // A list of dispatcher keys that we're interested in for the current + // select() or WSAWaitForMultipleEvents() loop. Again, used to avoid the ABA + // problem (a socket being destroyed and a new one created with the same + // handle, erroneously receiving the events from the destroyed socket). + // + // Kept as a member variable just for efficiency. 
+ std::vector current_dispatcher_keys_; Signaler* signal_wakeup_; // Assigned in constructor only RecursiveCriticalSection crit_; #if defined(WEBRTC_WIN) const WSAEVENT socket_ev_; #endif bool fWait_; + // Are we currently in a select()/epoll()/WSAWaitForMultipleEvents loop? + // Used for a DCHECK, because we don't support reentrant waiting. + bool waiting_ = false; }; class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/random.h b/TMessagesProj/jni/voip/webrtc/rtc_base/random.h index 93241a3e9..0e2d103cb 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/random.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/random.h @@ -16,7 +16,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,6 +34,10 @@ class Random { // See also discussion here: https://codereview.webrtc.org/1623543002/ explicit Random(uint64_t seed); + Random() = delete; + Random(const Random&) = delete; + Random& operator=(const Random&) = delete; + // Return pseudo-random integer of the specified type. // We need to limit the size to 32 bits to keep the output close to uniform. template @@ -73,8 +76,6 @@ class Random { } uint64_t state_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Random); }; // Return pseudo-random number in the interval [0.0, 1.0). 
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rate_limiter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rate_limiter.h index 051ccf6aa..9bbe21f9c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rate_limiter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rate_limiter.h @@ -14,7 +14,6 @@ #include #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -29,6 +28,11 @@ class Clock; class RateLimiter { public: RateLimiter(Clock* clock, int64_t max_window_ms); + + RateLimiter() = delete; + RateLimiter(const RateLimiter&) = delete; + RateLimiter& operator=(const RateLimiter&) = delete; + ~RateLimiter(); // Try to use rate to send bytes. Returns true on success and if so updates @@ -49,8 +53,6 @@ class RateLimiter { RateStatistics current_rate_ RTC_GUARDED_BY(lock_); int64_t window_size_ms_ RTC_GUARDED_BY(lock_); uint32_t max_rate_bps_ RTC_GUARDED_BY(lock_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RateLimiter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h index 015229b04..241bd72a1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h @@ -40,7 +40,7 @@ class RollingAccumulator { size_t count() const { return static_cast(stats_.Size()); } void Reset() { - stats_ = webrtc::RunningStatistics(); + stats_ = webrtc::webrtc_impl::RunningStatistics(); next_index_ = 0U; max_ = T(); max_stale_ = false; @@ -129,7 +129,7 @@ class RollingAccumulator { double ComputeVariance() const { return stats_.GetVariance().value_or(0); } private: - webrtc::RunningStatistics stats_; + webrtc::webrtc_impl::RunningStatistics stats_; size_t next_index_; mutable T max_; mutable bool max_stale_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc 
b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc index 4c9d378dd..d95b64539 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc @@ -30,75 +30,6 @@ namespace { const char kIdentityName[] = "WebRTC"; const uint64_t kYearInSeconds = 365 * 24 * 60 * 60; -enum { - MSG_GENERATE, - MSG_GENERATE_DONE, -}; - -// Helper class for generating certificates asynchronously; a single task -// instance is responsible for a single asynchronous certificate generation -// request. We are using a separate helper class so that a generation request -// can outlive the |RTCCertificateGenerator| that spawned it. -class RTCCertificateGenerationTask : public RefCountInterface, - public MessageHandler { - public: - RTCCertificateGenerationTask( - Thread* signaling_thread, - Thread* worker_thread, - const KeyParams& key_params, - const absl::optional& expires_ms, - const scoped_refptr& callback) - : signaling_thread_(signaling_thread), - worker_thread_(worker_thread), - key_params_(key_params), - expires_ms_(expires_ms), - callback_(callback) { - RTC_DCHECK(signaling_thread_); - RTC_DCHECK(worker_thread_); - RTC_DCHECK(callback_); - } - ~RTCCertificateGenerationTask() override {} - - // Handles |MSG_GENERATE| and its follow-up |MSG_GENERATE_DONE|. - void OnMessage(Message* msg) override { - switch (msg->message_id) { - case MSG_GENERATE: - RTC_DCHECK(worker_thread_->IsCurrent()); - // Perform the certificate generation work here on the worker thread. - certificate_ = RTCCertificateGenerator::GenerateCertificate( - key_params_, expires_ms_); - // Handle callbacks on signaling thread. Pass on the |msg->pdata| - // (which references |this| with ref counting) to that thread. 
- signaling_thread_->Post(RTC_FROM_HERE, this, MSG_GENERATE_DONE, - msg->pdata); - break; - case MSG_GENERATE_DONE: - RTC_DCHECK(signaling_thread_->IsCurrent()); - // Perform callback with result here on the signaling thread. - if (certificate_) { - callback_->OnSuccess(certificate_); - } else { - callback_->OnFailure(); - } - // Destroy |msg->pdata| which references |this| with ref counting. This - // may result in |this| being deleted - do not touch member variables - // after this line. - delete msg->pdata; - return; - default: - RTC_NOTREACHED(); - } - } - - private: - Thread* const signaling_thread_; - Thread* const worker_thread_; - const KeyParams key_params_; - const absl::optional expires_ms_; - const scoped_refptr callback_; - scoped_refptr certificate_; -}; - } // namespace // static @@ -148,13 +79,16 @@ void RTCCertificateGenerator::GenerateCertificateAsync( // Create a new |RTCCertificateGenerationTask| for this generation request. It // is reference counted and referenced by the message data, ensuring it lives // until the task has completed (independent of |RTCCertificateGenerator|). - ScopedRefMessageData* msg_data = - new ScopedRefMessageData( - new RefCountedObject( - signaling_thread_, worker_thread_, key_params, expires_ms, - callback)); - worker_thread_->Post(RTC_FROM_HERE, msg_data->data().get(), MSG_GENERATE, - msg_data); + worker_thread_->PostTask(RTC_FROM_HERE, [key_params, expires_ms, + signaling_thread = signaling_thread_, + cb = callback]() { + scoped_refptr certificate = + RTCCertificateGenerator::GenerateCertificate(key_params, expires_ms); + signaling_thread->PostTask( + RTC_FROM_HERE, [cert = std::move(certificate), cb = std::move(cb)]() { + cert ? 
cb->OnSuccess(cert) : cb->OnFailure(); + }); + }); } } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h index 23a3836e6..8f869f4a9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h @@ -15,8 +15,8 @@ // Google. // It was generated with the following command line: -// > python tools/sslroots/generate_sslroots.py -// https://pki.google.com/roots.pem +// > python tools_webrtc/sslroots/generate_sslroots.py +// https://pki.goog/roots.pem // clang-format off // Don't bother formatting generated code, @@ -1699,82 +1699,6 @@ const unsigned char GlobalSign_ECC_Root_CA___R5_certificate[546]={ }; -/* subject:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */ -/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */ - - -const unsigned char AddTrust_External_Root_certificate[1082]={ -0x30,0x82,0x04,0x36,0x30,0x82,0x03,0x1E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14, -0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73, -0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,0x41, -0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C, -0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,0x20, -0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20, -0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74, -0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38, -0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,0x5A, -0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31, 
-0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75, -0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D, -0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61, -0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30, -0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74, -0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F, -0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01, -0x01,0x00,0xB7,0xF7,0x1A,0x33,0xE6,0xF2,0x00,0x04,0x2D,0x39,0xE0,0x4E,0x5B,0xED, -0x1F,0xBC,0x6C,0x0F,0xCD,0xB5,0xFA,0x23,0xB6,0xCE,0xDE,0x9B,0x11,0x33,0x97,0xA4, -0x29,0x4C,0x7D,0x93,0x9F,0xBD,0x4A,0xBC,0x93,0xED,0x03,0x1A,0xE3,0x8F,0xCF,0xE5, -0x6D,0x50,0x5A,0xD6,0x97,0x29,0x94,0x5A,0x80,0xB0,0x49,0x7A,0xDB,0x2E,0x95,0xFD, -0xB8,0xCA,0xBF,0x37,0x38,0x2D,0x1E,0x3E,0x91,0x41,0xAD,0x70,0x56,0xC7,0xF0,0x4F, -0x3F,0xE8,0x32,0x9E,0x74,0xCA,0xC8,0x90,0x54,0xE9,0xC6,0x5F,0x0F,0x78,0x9D,0x9A, -0x40,0x3C,0x0E,0xAC,0x61,0xAA,0x5E,0x14,0x8F,0x9E,0x87,0xA1,0x6A,0x50,0xDC,0xD7, -0x9A,0x4E,0xAF,0x05,0xB3,0xA6,0x71,0x94,0x9C,0x71,0xB3,0x50,0x60,0x0A,0xC7,0x13, -0x9D,0x38,0x07,0x86,0x02,0xA8,0xE9,0xA8,0x69,0x26,0x18,0x90,0xAB,0x4C,0xB0,0x4F, -0x23,0xAB,0x3A,0x4F,0x84,0xD8,0xDF,0xCE,0x9F,0xE1,0x69,0x6F,0xBB,0xD7,0x42,0xD7, -0x6B,0x44,0xE4,0xC7,0xAD,0xEE,0x6D,0x41,0x5F,0x72,0x5A,0x71,0x08,0x37,0xB3,0x79, -0x65,0xA4,0x59,0xA0,0x94,0x37,0xF7,0x00,0x2F,0x0D,0xC2,0x92,0x72,0xDA,0xD0,0x38, -0x72,0xDB,0x14,0xA8,0x45,0xC4,0x5D,0x2A,0x7D,0xB7,0xB4,0xD6,0xC4,0xEE,0xAC,0xCD, -0x13,0x44,0xB7,0xC9,0x2B,0xDD,0x43,0x00,0x25,0xFA,0x61,0xB9,0x69,0x6A,0x58,0x23, -0x11,0xB7,0xA7,0x33,0x8F,0x56,0x75,0x59,0xF5,0xCD,0x29,0xD7,0x46,0xB7,0x0A,0x2B, -0x65,0xB6,0xD3,0x42,0x6F,0x15,0xB2,0xB8,0x7B,0xFB,0xEF,0xE9,0x5D,0x53,0xD5,0x34, 
-0x5A,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xDC,0x30,0x81,0xD9,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,0x26,0xF7, -0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0x30,0x0B,0x06,0x03, -0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13, -0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x99,0x06,0x03,0x55, -0x1D,0x23,0x04,0x81,0x91,0x30,0x81,0x8E,0x80,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4, -0x26,0xF7,0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0xA1,0x73, -0xA4,0x71,0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53, -0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54, -0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B, -0x13,0x1D,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72, -0x6E,0x61,0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31, -0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75, -0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52, -0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xB0,0x9B,0xE0,0x85,0x25,0xC2, -0xD6,0x23,0xE2,0x0F,0x96,0x06,0x92,0x9D,0x41,0x98,0x9C,0xD9,0x84,0x79,0x81,0xD9, -0x1E,0x5B,0x14,0x07,0x23,0x36,0x65,0x8F,0xB0,0xD8,0x77,0xBB,0xAC,0x41,0x6C,0x47, -0x60,0x83,0x51,0xB0,0xF9,0x32,0x3D,0xE7,0xFC,0xF6,0x26,0x13,0xC7,0x80,0x16,0xA5, -0xBF,0x5A,0xFC,0x87,0xCF,0x78,0x79,0x89,0x21,0x9A,0xE2,0x4C,0x07,0x0A,0x86,0x35, -0xBC,0xF2,0xDE,0x51,0xC4,0xD2,0x96,0xB7,0xDC,0x7E,0x4E,0xEE,0x70,0xFD,0x1C,0x39, -0xEB,0x0C,0x02,0x51,0x14,0x2D,0x8E,0xBD,0x16,0xE0,0xC1,0xDF,0x46,0x75,0xE7,0x24, -0xAD,0xEC,0xF4,0x42,0xB4,0x85,0x93,0x70,0x10,0x67,0xBA,0x9D,0x06,0x35,0x4A,0x18, -0xD3,0x2B,0x7A,0xCC,0x51,0x42,0xA1,0x7A,0x63,0xD1,0xE6,0xBB,0xA1,0xC5,0x2B,0xC2, 
-0x36,0xBE,0x13,0x0D,0xE6,0xBD,0x63,0x7E,0x79,0x7B,0xA7,0x09,0x0D,0x40,0xAB,0x6A, -0xDD,0x8F,0x8A,0xC3,0xF6,0xF6,0x8C,0x1A,0x42,0x05,0x51,0xD4,0x45,0xF5,0x9F,0xA7, -0x62,0x21,0x68,0x15,0x20,0x43,0x3C,0x99,0xE7,0x7C,0xBD,0x24,0xD8,0xA9,0x91,0x17, -0x73,0x88,0x3F,0x56,0x1B,0x31,0x38,0x18,0xB4,0x71,0x0F,0x9A,0xCD,0xC8,0x0E,0x9E, -0x8E,0x2E,0x1B,0xE1,0x8C,0x98,0x83,0xCB,0x1F,0x31,0xF1,0x44,0x4C,0xC6,0x04,0x73, -0x49,0x76,0x60,0x0F,0xC7,0xF8,0xBD,0x17,0x80,0x6B,0x2E,0xE9,0xCC,0x4C,0x0E,0x5A, -0x9A,0x79,0x0F,0x20,0x0A,0x2E,0xD5,0x9E,0x63,0x26,0x1E,0x55,0x92,0x94,0xD8,0x82, -0x17,0x5A,0x7B,0xD0,0xBC,0xC7,0x8F,0x4E,0x86,0x04, -}; - - /* subject:/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */ /* issuer :/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */ @@ -2572,50 +2496,6 @@ const unsigned char Entrust_Root_Certification_Authority___EC1_certificate[765]= }; -/* subject:/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA - R8 */ -/* issuer :/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA - R8 */ - - -const unsigned char GlobalSign_Root_CA___R8_certificate[567]={ -0x30,0x82,0x02,0x33,0x30,0x82,0x01,0xB9,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x48, -0x1B,0x6A,0x09,0xF4,0xF9,0x60,0x71,0x3A,0xFE,0x81,0xCC,0x86,0xDD,0x30,0x0A,0x06, -0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x5C,0x31,0x0B,0x30,0x09,0x06, -0x03,0x55,0x04,0x06,0x13,0x02,0x42,0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04, -0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76, -0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F, -0x6F,0x74,0x20,0x43,0x41,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17, -0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20, -0x43,0x41,0x20,0x2D,0x20,0x52,0x38,0x30,0x1E,0x17,0x0D,0x31,0x36,0x30,0x36,0x31, 
-0x35,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x36,0x31,0x35, -0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x5C,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, -0x04,0x06,0x13,0x02,0x42,0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0A,0x13, -0x10,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,0x2D,0x73, -0x61,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,0x6F,0x74, -0x20,0x43,0x41,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x47,0x6C, -0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41, -0x20,0x2D,0x20,0x52,0x38,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D, -0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xB8,0xEE,0x7C, -0x30,0x87,0xD8,0x94,0x1F,0x54,0x6B,0x6D,0x98,0x9D,0xFC,0x75,0xFB,0x5B,0x88,0xAB, -0x42,0xBA,0x8D,0x7D,0x39,0x7E,0xDD,0x44,0x3D,0x39,0x3C,0xE1,0x05,0xA1,0x4A,0x64, -0x60,0xAC,0x37,0xA6,0x73,0xB0,0xF9,0xC9,0x45,0x4B,0x0B,0x06,0xD0,0x3A,0xE0,0xF1, -0x6D,0x5F,0xFA,0x5E,0x5B,0x5A,0x52,0xB5,0x76,0xE3,0x46,0xDB,0xD5,0x1E,0x8C,0x74, -0x7A,0x42,0xC9,0x41,0x35,0x4F,0xC6,0xD4,0xE2,0x28,0x60,0xAB,0x34,0x8A,0xCE,0xB1, -0x40,0x23,0x46,0xA5,0xAE,0x19,0x24,0x52,0x7C,0x90,0x55,0x44,0xCE,0xA3,0x42,0x30, -0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01, -0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01, -0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x2F,0x3A,0x12, -0x26,0x80,0xE8,0x8A,0xC2,0x50,0x78,0x6D,0x06,0xC4,0x34,0x7E,0xE2,0x49,0x39,0x57, -0x76,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00, -0x30,0x65,0x02,0x31,0x00,0xC7,0xA1,0x3D,0xB2,0x92,0x90,0xFA,0xCA,0x5D,0xE0,0x27, -0x84,0x82,0x3B,0x21,0xCC,0xF4,0x8D,0xF8,0x94,0x56,0xF2,0x20,0x5F,0x11,0xC0,0xAC, -0xBC,0x5F,0x15,0xA5,0x0B,0xC8,0x16,0x43,0xA7,0xF8,0xC5,0x7F,0x8D,0x20,0xA0,0x7F, -0x5E,0xFC,0x16,0x1C,0x27,0x02,0x30,0x1E,0x8C,0xF5,0x56,0xBF,0x38,0xDB,0x9C,0xE6, 
-0xA6,0xD7,0x84,0x29,0xE6,0xDF,0x0D,0x53,0x2E,0xE8,0x2B,0x01,0xB7,0x7D,0x09,0x3C, -0xB1,0x32,0x6A,0x1A,0x9A,0xB8,0x0A,0xEA,0xE8,0xAD,0x08,0xF2,0x74,0x39,0xD5,0x2B, -0x22,0x36,0xDC,0xEF,0x46,0x66,0xD8, -}; - - /* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */ /* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */ @@ -2817,7 +2697,6 @@ const unsigned char* const kSSLCertCertificateList[] = { COMODO_Certification_Authority_certificate, GlobalSign_ECC_Root_CA___R4_certificate, GlobalSign_ECC_Root_CA___R5_certificate, - AddTrust_External_Root_certificate, USERTrust_ECC_Certification_Authority_certificate, Entrust_net_Premium_2048_Secure_Server_CA_certificate, AffirmTrust_Premium_ECC_certificate, @@ -2830,7 +2709,6 @@ const unsigned char* const kSSLCertCertificateList[] = { DigiCert_Trusted_Root_G4_certificate, COMODO_ECC_Certification_Authority_certificate, Entrust_Root_Certification_Authority___EC1_certificate, - GlobalSign_Root_CA___R8_certificate, GeoTrust_Global_CA_certificate, DigiCert_Assured_ID_Root_G3_certificate, Go_Daddy_Root_Certificate_Authority___G2_certificate, @@ -2861,7 +2739,6 @@ const size_t kSSLCertCertificateSizeList[] = { 1057, 485, 546, - 1082, 659, 1070, 514, @@ -2874,7 +2751,6 @@ const size_t kSSLCertCertificateSizeList[] = { 1428, 653, 765, - 567, 856, 586, 969, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h index 3da0b0946..7bff72651 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h @@ -93,11 +93,11 @@ bool IsGcmCryptoSuiteName(const std::string& crypto_suite); enum SSLRole { SSL_CLIENT, SSL_SERVER }; enum SSLMode { SSL_MODE_TLS, SSL_MODE_DTLS }; -// Note: TLS_10, TLS_11, and DTLS_10 will all be ignored, and only -// DTLS1_2 will be accepted, if the trial flag -// WebRTC-LegacyTlsProtocols/Disabled/ is passed in. 
Support for these -// protocol versions will be completely removed in M84 or later. -// TODO(https://bugs.webrtc.org/10261). +// Note: TLS_10, TLS_11, and DTLS_10 will all be ignored, and only DTLS1_2 will +// be accepted unless the trial flag WebRTC-LegacyTlsProtocols/Enabled/ is +// passed in or an explicit override is used. Support for the legacy protocol +// versions will be completely removed in the future. +// See https://bugs.webrtc.org/10261. enum SSLProtocolVersion { SSL_PROTOCOL_NOT_GIVEN = -1, SSL_PROTOCOL_TLS_10 = 0, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc index 1b0a4d759..ee72f8d2b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc @@ -24,7 +24,6 @@ namespace rtc { /////////////////////////////////////////////////////////////////////////////// // StreamInterface /////////////////////////////////////////////////////////////////////////////// -StreamInterface::~StreamInterface() {} StreamResult StreamInterface::WriteAll(const void* data, size_t data_len, @@ -44,29 +43,12 @@ StreamResult StreamInterface::WriteAll(const void* data, return result; } -void StreamInterface::PostEvent(Thread* t, int events, int err) { - t->Post(RTC_FROM_HERE, this, MSG_POST_EVENT, - new StreamEventData(events, err)); -} - -void StreamInterface::PostEvent(int events, int err) { - PostEvent(Thread::Current(), events, err); -} - bool StreamInterface::Flush() { return false; } StreamInterface::StreamInterface() {} -void StreamInterface::OnMessage(Message* msg) { - if (MSG_POST_EVENT == msg->message_id) { - StreamEventData* pe = static_cast(msg->pdata); - SignalEvent(this, pe->events, pe->error); - delete msg->pdata; - } -} - /////////////////////////////////////////////////////////////////////////////// // StreamAdapterInterface /////////////////////////////////////////////////////////////////////////////// diff --git 
a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h index dc77a7111..9bf11a240 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h @@ -48,16 +48,9 @@ enum StreamResult { SR_ERROR, SR_SUCCESS, SR_BLOCK, SR_EOS }; // SE_WRITE: Data can be written, so Write is likely to not return SR_BLOCK enum StreamEvent { SE_OPEN = 1, SE_READ = 2, SE_WRITE = 4, SE_CLOSE = 8 }; -struct StreamEventData : public MessageData { - int events, error; - StreamEventData(int ev, int er) : events(ev), error(er) {} -}; - -class RTC_EXPORT StreamInterface : public MessageHandler { +class RTC_EXPORT StreamInterface { public: - enum { MSG_POST_EVENT = 0xF1F1, MSG_MAX = MSG_POST_EVENT }; - - ~StreamInterface() override; + virtual ~StreamInterface() {} virtual StreamState GetState() const = 0; @@ -96,13 +89,6 @@ class RTC_EXPORT StreamInterface : public MessageHandler { // certain events will be raised in the future. sigslot::signal3 SignalEvent; - // Like calling SignalEvent, but posts a message to the specified thread, - // which will call SignalEvent. This helps unroll the stack and prevent - // re-entrancy. - void PostEvent(Thread* t, int events, int err); - // Like the aforementioned method, but posts to the current thread. - void PostEvent(int events, int err); - // Return true if flush is successful. 
virtual bool Flush(); @@ -125,9 +111,6 @@ class RTC_EXPORT StreamInterface : public MessageHandler { protected: StreamInterface(); - // MessageHandler Interface - void OnMessage(Message* msg) override; - private: RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterface); }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc index dfbb54805..1720c62d5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc @@ -50,10 +50,4 @@ std::string ToHex(const int i) { return std::string(buffer); } -std::string LeftPad(char padding, unsigned length, std::string s) { - if (s.length() >= length) - return s; - return std::string(length - s.length(), padding) + s; -} - } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h index 3518702ec..23c55cb89 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h @@ -88,8 +88,6 @@ std::string string_trim(const std::string& s); // TODO(jonasolsson): replace with absl::Hex when that becomes available. std::string ToHex(const int i); -std::string LeftPad(char padding, unsigned length, std::string s); - } // namespace rtc #endif // RTC_BASE_STRING_UTILS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/stringize_macros.h b/TMessagesProj/jni/voip/webrtc/rtc_base/stringize_macros.h deleted file mode 100644 index aee8d1455..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stringize_macros.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Modified from the Chromium original: -// src/base/strings/stringize_macros.h - -// This file defines preprocessor macros for stringizing preprocessor -// symbols (or their output) and manipulating preprocessor symbols -// that define strings. - -#ifndef RTC_BASE_STRINGIZE_MACROS_H_ -#define RTC_BASE_STRINGIZE_MACROS_H_ - -// This is not very useful as it does not expand defined symbols if -// called directly. Use its counterpart without the _NO_EXPANSION -// suffix, below. -#define STRINGIZE_NO_EXPANSION(x) #x - -// Use this to quote the provided parameter, first expanding it if it -// is a preprocessor symbol. -// -// For example, if: -// #define A FOO -// #define B(x) myobj->FunctionCall(x) -// -// Then: -// STRINGIZE(A) produces "FOO" -// STRINGIZE(B(y)) produces "myobj->FunctionCall(y)" -#define STRINGIZE(x) STRINGIZE_NO_EXPANSION(x) - -#endif // RTC_BASE_STRINGIZE_MACROS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h index eb0b1fff0..9eac49a93 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h @@ -141,7 +141,8 @@ class SwapQueue { return false; } - std::swap(*input, queue_[next_write_index_]); + using std::swap; + swap(*input, queue_[next_write_index_]); // Increment the value of num_elements_ to account for the inserted element. // Release memory ordering prevents the reads and writes to @@ -181,7 +182,8 @@ class SwapQueue { return false; } - std::swap(*output, queue_[next_read_index_]); + using std::swap; + swap(*output, queue_[next_read_index_]); // Decrement the value of num_elements_ to account for the removed element. 
// Release memory ordering prevents the reads and writes to diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h index 1ccbbdcbd..620fe74e4 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h @@ -15,7 +15,6 @@ #include "absl/base/const_init.h" #include "rtc_base/checks.h" -#include "rtc_base/platform_thread_types.h" #include "rtc_base/system/unused.h" #include "rtc_base/thread_annotations.h" @@ -40,54 +39,17 @@ class RTC_LOCKABLE Mutex final { Mutex& operator=(const Mutex&) = delete; void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { - rtc::PlatformThreadRef current = CurrentThreadRefAssertingNotBeingHolder(); impl_.Lock(); - // |holder_| changes from 0 to CurrentThreadRef(). - holder_.store(current, std::memory_order_relaxed); } RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { - rtc::PlatformThreadRef current = CurrentThreadRefAssertingNotBeingHolder(); - if (impl_.TryLock()) { - // |holder_| changes from 0 to CurrentThreadRef(). - holder_.store(current, std::memory_order_relaxed); - return true; - } - return false; + return impl_.TryLock(); } void Unlock() RTC_UNLOCK_FUNCTION() { - // |holder_| changes from CurrentThreadRef() to 0. If something else than - // CurrentThreadRef() is stored in |holder_|, the Unlock results in - // undefined behavior as mutexes can't be unlocked from another thread than - // the one that locked it, or called while not being locked. - holder_.store(0, std::memory_order_relaxed); impl_.Unlock(); } private: - rtc::PlatformThreadRef CurrentThreadRefAssertingNotBeingHolder() { - rtc::PlatformThreadRef holder = holder_.load(std::memory_order_relaxed); - rtc::PlatformThreadRef current = rtc::CurrentThreadRef(); - // TODO(bugs.webrtc.org/11567): remove this temporary check after migrating - // fully to Mutex. 
- RTC_CHECK_NE(holder, current); - return current; - } - MutexImpl impl_; - // TODO(bugs.webrtc.org/11567): remove |holder_| after migrating fully to - // Mutex. - // |holder_| contains the PlatformThreadRef of the thread currently holding - // the lock, or 0. - // Remarks on the used memory orders: the atomic load in - // CurrentThreadRefAssertingNotBeingHolder() observes either of two things: - // 1. our own previous write to holder_ with our thread ID. - // 2. another thread (with ID y) writing y and then 0 from an initial value of - // 0. If we're observing case 1, our own stores are obviously ordered before - // the load, and hit the CHECK. If we're observing case 2, the value observed - // w.r.t |impl_| being locked depends on the memory order. Since we only care - // that it's different from CurrentThreadRef()), we use the more performant - // option, memory_order_relaxed. - std::atomic holder_ = {0}; }; // MutexLock, for serializing execution through a scope. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_posix.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_posix.cc deleted file mode 100644 index 15ef3d706..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_posix.cc +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/synchronization/rw_lock_posix.h" - -#include - -namespace webrtc { - -RWLockPosix::RWLockPosix() : lock_() {} - -RWLockPosix::~RWLockPosix() { - pthread_rwlock_destroy(&lock_); -} - -RWLockPosix* RWLockPosix::Create() { - RWLockPosix* ret_val = new RWLockPosix(); - if (!ret_val->Init()) { - delete ret_val; - return NULL; - } - return ret_val; -} - -bool RWLockPosix::Init() { - return pthread_rwlock_init(&lock_, 0) == 0; -} - -void RWLockPosix::AcquireLockExclusive() { - pthread_rwlock_wrlock(&lock_); -} - -void RWLockPosix::ReleaseLockExclusive() { - pthread_rwlock_unlock(&lock_); -} - -void RWLockPosix::AcquireLockShared() { - pthread_rwlock_rdlock(&lock_); -} - -void RWLockPosix::ReleaseLockShared() { - pthread_rwlock_unlock(&lock_); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_posix.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_posix.h deleted file mode 100644 index a103fe771..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_posix.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_SYNCHRONIZATION_RW_LOCK_POSIX_H_ -#define RTC_BASE_SYNCHRONIZATION_RW_LOCK_POSIX_H_ - -#include - -#include "rtc_base/synchronization/rw_lock_wrapper.h" - -namespace webrtc { - -class RWLockPosix : public RWLockWrapper { - public: - static RWLockPosix* Create(); - ~RWLockPosix() override; - - void AcquireLockExclusive() override; - void ReleaseLockExclusive() override; - - void AcquireLockShared() override; - void ReleaseLockShared() override; - - private: - RWLockPosix(); - bool Init(); - - pthread_rwlock_t lock_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_SYNCHRONIZATION_RW_LOCK_POSIX_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_win.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_win.cc deleted file mode 100644 index 3274c78a9..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_win.cc +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/synchronization/rw_lock_win.h" - -#include "rtc_base/logging.h" - -namespace webrtc { - -RWLockWin::RWLockWin() { - InitializeSRWLock(&lock_); -} - -RWLockWin* RWLockWin::Create() { - return new RWLockWin(); -} - -void RWLockWin::AcquireLockExclusive() { - AcquireSRWLockExclusive(&lock_); -} - -void RWLockWin::ReleaseLockExclusive() { - ReleaseSRWLockExclusive(&lock_); -} - -void RWLockWin::AcquireLockShared() { - AcquireSRWLockShared(&lock_); -} - -void RWLockWin::ReleaseLockShared() { - ReleaseSRWLockShared(&lock_); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_win.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_win.h deleted file mode 100644 index 43bde1da9..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_win.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_SYNCHRONIZATION_RW_LOCK_WIN_H_ -#define RTC_BASE_SYNCHRONIZATION_RW_LOCK_WIN_H_ - -#include - -#include "rtc_base/synchronization/rw_lock_wrapper.h" - -namespace webrtc { - -class RWLockWin : public RWLockWrapper { - public: - static RWLockWin* Create(); - - void AcquireLockExclusive() override; - void ReleaseLockExclusive() override; - - void AcquireLockShared() override; - void ReleaseLockShared() override; - - private: - RWLockWin(); - - SRWLOCK lock_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_SYNCHRONIZATION_RW_LOCK_WIN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_wrapper.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_wrapper.cc deleted file mode 100644 index fb464192a..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_wrapper.cc +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/synchronization/rw_lock_wrapper.h" - -#if defined(_WIN32) -#include "rtc_base/synchronization/rw_lock_win.h" -#else -#include "rtc_base/synchronization/rw_lock_posix.h" -#endif - -namespace webrtc { - -RWLockWrapper* RWLockWrapper::CreateRWLock() { -#ifdef _WIN32 - return RWLockWin::Create(); -#else - return RWLockPosix::Create(); -#endif -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_wrapper.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_wrapper.h deleted file mode 100644 index 39f52fca3..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/rw_lock_wrapper.h +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_SYNCHRONIZATION_RW_LOCK_WRAPPER_H_ -#define RTC_BASE_SYNCHRONIZATION_RW_LOCK_WRAPPER_H_ - -#include "rtc_base/thread_annotations.h" - -// Note, Windows pre-Vista version of RW locks are not supported natively. For -// these OSs regular critical sections have been used to approximate RW lock -// functionality and will therefore have worse performance. - -namespace webrtc { - -class RTC_LOCKABLE RWLockWrapper { - public: - static RWLockWrapper* CreateRWLock(); - virtual ~RWLockWrapper() {} - - virtual void AcquireLockExclusive() RTC_EXCLUSIVE_LOCK_FUNCTION() = 0; - virtual void ReleaseLockExclusive() RTC_UNLOCK_FUNCTION() = 0; - - virtual void AcquireLockShared() RTC_SHARED_LOCK_FUNCTION() = 0; - virtual void ReleaseLockShared() RTC_UNLOCK_FUNCTION() = 0; -}; - -// RAII extensions of the RW lock. 
Prevents Acquire/Release missmatches and -// provides more compact locking syntax. -class RTC_SCOPED_LOCKABLE ReadLockScoped { - public: - explicit ReadLockScoped(RWLockWrapper& rw_lock) - RTC_SHARED_LOCK_FUNCTION(rw_lock) - : rw_lock_(rw_lock) { - rw_lock_.AcquireLockShared(); - } - - ~ReadLockScoped() RTC_UNLOCK_FUNCTION() { rw_lock_.ReleaseLockShared(); } - - private: - RWLockWrapper& rw_lock_; -}; - -class RTC_SCOPED_LOCKABLE WriteLockScoped { - public: - explicit WriteLockScoped(RWLockWrapper& rw_lock) - RTC_EXCLUSIVE_LOCK_FUNCTION(rw_lock) - : rw_lock_(rw_lock) { - rw_lock_.AcquireLockExclusive(); - } - - ~WriteLockScoped() RTC_UNLOCK_FUNCTION() { rw_lock_.ReleaseLockExclusive(); } - - private: - RWLockWrapper& rw_lock_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_SYNCHRONIZATION_RW_LOCK_WRAPPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/assume.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/assume.h new file mode 100644 index 000000000..231c9e18a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/assume.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYSTEM_ASSUME_H_ +#define RTC_BASE_SYSTEM_ASSUME_H_ + +// Possibly evaluate `p`, promising the compiler that the result is true; the +// compiler is allowed (but not required) to use this information when +// optimizing the code. USE WITH CAUTION! If you promise the compiler things +// that aren't true, it will build a broken binary for you. 
+// +// As a simple example, the compiler is allowed to transform this +// +// RTC_ASSUME(x == 4); +// return x; +// +// into this +// +// return 4; +// +// It is even allowed to propagate the assumption "backwards in time", if it can +// prove that it must have held at some earlier time. For example, the compiler +// is allowed to transform this +// +// int Add(int x, int y) { +// if (x == 17) +// y += 1; +// RTC_ASSUME(x != 17); +// return x + y; +// } +// +// into this +// +// int Add(int x, int y) { +// return x + y; +// } +// +// since if `x` isn't 17 on the third line of the function body, the test of `x +// == 17` on the first line must fail since nothing can modify the local +// variable `x` in between. +// +// The intended use is to allow the compiler to optimize better. For example, +// here we allow the compiler to omit an instruction that ensures correct +// rounding of negative arguments: +// +// int DivBy2(int x) { +// RTC_ASSUME(x >= 0); +// return x / 2; +// } +// +// and here we allow the compiler to possibly omit a null check: +// +// void Delete(int* p) { +// RTC_ASSUME(p != nullptr); +// delete p; +// } +// +// clang-format off +#if defined(__GNUC__) +#define RTC_ASSUME(p) do { if (!(p)) __builtin_unreachable(); } while (0) +#else +#define RTC_ASSUME(p) do {} while (0) +#endif +// clang-format on + +#endif // RTC_BASE_SYSTEM_ASSUME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc index 2882f50da..32449020c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc @@ -32,6 +32,7 @@ #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/deprecated/recursive_critical_section.h" +#include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/null_socket_server.h" #include "rtc_base/synchronization/sequence_checker.h" @@ -73,7 +74,7 @@ const int kSlowDispatchLoggingThreshold = 50; // 50 
ms class MessageHandlerWithTask final : public MessageHandler { public: - MessageHandlerWithTask() = default; + MessageHandlerWithTask() {} void OnMessage(Message* msg) override { static_cast(msg->pdata)->Run(); @@ -164,6 +165,9 @@ void ThreadManager::RemoveFromSendGraph(Thread* thread) { void ThreadManager::RegisterSendAndCheckForCycles(Thread* source, Thread* target) { + RTC_DCHECK(source); + RTC_DCHECK(target); + CritScope cs(&crit_); std::deque all_targets({target}); // We check the pre-existing who-sends-to-who graph for any path from target @@ -890,46 +894,62 @@ void Thread::Send(const Location& posted_from, AssertBlockingIsAllowedOnCurrentThread(); - AutoThread thread; Thread* current_thread = Thread::Current(); - RTC_DCHECK(current_thread != nullptr); // AutoThread ensures this - RTC_DCHECK(current_thread->IsInvokeToThreadAllowed(this)); + #if RTC_DCHECK_IS_ON - ThreadManager::Instance()->RegisterSendAndCheckForCycles(current_thread, - this); -#endif - bool ready = false; - PostTask( - webrtc::ToQueuedTask([msg]() mutable { msg.phandler->OnMessage(&msg); }, - [this, &ready, current_thread] { - CritScope cs(&crit_); - ready = true; - current_thread->socketserver()->WakeUp(); - })); - - bool waited = false; - crit_.Enter(); - while (!ready) { - crit_.Leave(); - current_thread->socketserver()->Wait(kForever, false); - waited = true; - crit_.Enter(); + if (current_thread) { + RTC_DCHECK(current_thread->IsInvokeToThreadAllowed(this)); + ThreadManager::Instance()->RegisterSendAndCheckForCycles(current_thread, + this); } - crit_.Leave(); +#endif - // Our Wait loop above may have consumed some WakeUp events for this - // Thread, that weren't relevant to this Send. Losing these WakeUps can - // cause problems for some SocketServers. - // - // Concrete example: - // Win32SocketServer on thread A calls Send on thread B. While processing the - // message, thread B Posts a message to A. 
We consume the wakeup for that - // Post while waiting for the Send to complete, which means that when we exit - // this loop, we need to issue another WakeUp, or else the Posted message - // won't be processed in a timely manner. + // Perhaps down the line we can get rid of this workaround and always require + // current_thread to be valid when Send() is called. + std::unique_ptr done_event; + if (!current_thread) + done_event.reset(new rtc::Event()); - if (waited) { - current_thread->socketserver()->WakeUp(); + bool ready = false; + PostTask(webrtc::ToQueuedTask( + [&msg]() mutable { msg.phandler->OnMessage(&msg); }, + [this, &ready, current_thread, done = done_event.get()] { + if (current_thread) { + CritScope cs(&crit_); + ready = true; + current_thread->socketserver()->WakeUp(); + } else { + done->Set(); + } + })); + + if (current_thread) { + bool waited = false; + crit_.Enter(); + while (!ready) { + crit_.Leave(); + current_thread->socketserver()->Wait(kForever, false); + waited = true; + crit_.Enter(); + } + crit_.Leave(); + + // Our Wait loop above may have consumed some WakeUp events for this + // Thread, that weren't relevant to this Send. Losing these WakeUps can + // cause problems for some SocketServers. + // + // Concrete example: + // Win32SocketServer on thread A calls Send on thread B. While processing + // the message, thread B Posts a message to A. We consume the wakeup for + // that Post while waiting for the Send to complete, which means that when + // we exit this loop, we need to issue another WakeUp, or else the Posted + // message won't be processed in a timely manner. 
+ + if (waited) { + current_thread->socketserver()->WakeUp(); + } + } else { + done_event->Wait(rtc::Event::kForever); } } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h index 353d63032..ed19e9892 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h @@ -339,6 +339,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // will be used only for reference-based comparison, so instance can be safely // deleted. If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing. void AllowInvokesToThread(Thread* thread); + // If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing. void DisallowAllInvokes(); // Returns true if |target| was allowed by AllowInvokesToThread() or if no @@ -437,13 +438,6 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // irrevocable. Must be called on this thread. void DisallowBlockingCalls() { SetAllowBlockingCalls(false); } -#ifdef WEBRTC_ANDROID - // Sets the per-thread allow-blocking-calls flag to true, sidestepping the - // invariants upheld by DisallowBlockingCalls() and - // ScopedDisallowBlockingCalls. Must be called on this thread. - void DEPRECATED_AllowBlockingCalls() { SetAllowBlockingCalls(true); } -#endif - protected: class CurrentThreadSetter : CurrentTaskQueueSetter { public: @@ -533,6 +527,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { private: class QueuedTaskHandler final : public MessageHandler { public: + QueuedTaskHandler() {} void OnMessage(Message* msg) override; }; @@ -625,7 +620,9 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // AutoThread automatically installs itself at construction // uninstalls at destruction, if a Thread object is // _not already_ associated with the current OS thread. 
- +// +// NOTE: *** This class should only be used by tests *** +// class AutoThread : public Thread { public: AutoThread(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.cc index bf9f726c4..99445284d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.cc @@ -15,8 +15,7 @@ namespace webrtc { TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms) - : _rwLock(RWLockWrapper::CreateRWLock()), - _startMs(0), + : _startMs(0), _firstTimestamp(0), _wrapArounds(0), _prevUnwrappedTimestamp(-1), @@ -34,12 +33,7 @@ TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms) Reset(start_ms); } -TimestampExtrapolator::~TimestampExtrapolator() { - delete _rwLock; -} - void TimestampExtrapolator::Reset(int64_t start_ms) { - WriteLockScoped wl(*_rwLock); _startMs = start_ms; _prevMs = _startMs; _firstTimestamp = 0; @@ -58,13 +52,10 @@ void TimestampExtrapolator::Reset(int64_t start_ms) { } void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) { - _rwLock->AcquireLockExclusive(); if (tMs - _prevMs > 10e3) { // Ten seconds without a complete frame. // Reset the extrapolator - _rwLock->ReleaseLockExclusive(); Reset(tMs); - _rwLock->AcquireLockExclusive(); } else { _prevMs = tMs; } @@ -100,7 +91,6 @@ void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) { if (_prevUnwrappedTimestamp >= 0 && unwrapped_ts90khz < _prevUnwrappedTimestamp) { // Drop reordered frames. 
- _rwLock->ReleaseLockExclusive(); return; } @@ -131,11 +121,9 @@ void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) { if (_packetCount < _startUpFilterDelayInPackets) { _packetCount++; } - _rwLock->ReleaseLockExclusive(); } int64_t TimestampExtrapolator::ExtrapolateLocalTime(uint32_t timestamp90khz) { - ReadLockScoped rl(*_rwLock); int64_t localTimeMs = 0; CheckForWrapArounds(timestamp90khz); double unwrapped_ts90khz = diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.h index 63af57b22..b325d2cba 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.h @@ -13,14 +13,12 @@ #include -#include "rtc_base/synchronization/rw_lock_wrapper.h" - namespace webrtc { +// Not thread safe. class TimestampExtrapolator { public: explicit TimestampExtrapolator(int64_t start_ms); - ~TimestampExtrapolator(); void Update(int64_t tMs, uint32_t ts90khz); int64_t ExtrapolateLocalTime(uint32_t timestamp90khz); void Reset(int64_t start_ms); @@ -28,7 +26,6 @@ class TimestampExtrapolator { private: void CheckForWrapArounds(uint32_t ts90khz); bool DelayChangeDetection(double error); - RWLockWrapper* _rwLock; double _w[2]; double _pP[2][2]; int64_t _startMs; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/untyped_function.h b/TMessagesProj/jni/voip/webrtc/rtc_base/untyped_function.h new file mode 100644 index 000000000..c1f59458b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/untyped_function.h @@ -0,0 +1,324 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_UNTYPED_FUNCTION_H_ +#define RTC_BASE_UNTYPED_FUNCTION_H_ + +#include +#include +#include +#include +#include + +#include "rtc_base/system/assume.h" + +namespace webrtc { +namespace webrtc_function_impl { + +using FunVoid = void(); + +// Inline storage size is this many machine words. +enum : size_t { kInlineStorageWords = 4 }; + +union VoidUnion { + void* void_ptr; + FunVoid* fun_ptr; + typename std::aligned_storage::type + inline_storage; +}; + +// Returns the number of elements of the `inline_storage` array required to +// store an object of type T. +template +constexpr size_t InlineStorageSize() { + // sizeof(T) / sizeof(uintptr_t), but rounded up. + return (sizeof(T) + sizeof(uintptr_t) - 1) / sizeof(uintptr_t); +} + +template +struct CallHelpers; +template +struct CallHelpers { + // Return type of the three helpers below. + using return_type = RetT; + // Complete function type of the three helpers below. + using function_type = RetT(VoidUnion*, ArgT...); + // Helper for calling the `void_ptr` case of VoidUnion. + template + static RetT CallVoidPtr(VoidUnion* vu, ArgT... args) { + return (*static_cast(vu->void_ptr))(std::forward(args)...); + } + // Helper for calling the `fun_ptr` case of VoidUnion. + static RetT CallFunPtr(VoidUnion* vu, ArgT... args) { + return (reinterpret_cast(vu->fun_ptr))( + std::forward(args)...); + } + // Helper for calling the `inline_storage` case of VoidUnion. + template + static RetT CallInlineStorage(VoidUnion* vu, ArgT... args) { + return (*reinterpret_cast(&vu->inline_storage))( + std::forward(args)...); + } +}; + +} // namespace webrtc_function_impl + +// A class that holds (and owns) any callable. The same function call signature +// must be provided when constructing and calling the object. 
+// +// The point of not having the call signature as a class template parameter is +// to have one single concrete type for all signatures; this reduces binary +// size. +class UntypedFunction final { + public: + // Callables of at most this size can be stored inline, if they are trivial. + // (Useful in tests and benchmarks; avoid using this in production code.) + enum : size_t { + kInlineStorageSize = sizeof(webrtc_function_impl::VoidUnion::inline_storage) + }; + static_assert(kInlineStorageSize == + webrtc_function_impl::kInlineStorageWords * + sizeof(uintptr_t), + ""); + + // The *UntypedFunctionArgs structs are used to transfer arguments from + // PrepareArgs() to Create(). They are trivial, but may own heap allocations, + // so make sure to pass them to Create() exactly once! + // + // The point of doing Create(PrepareArgs(foo)) instead of just Create(foo) is + // to separate the code that has to be inlined (PrepareArgs) from the code + // that can be noninlined (Create); the *UntypedFunctionArgs types are + // designed to efficiently carry the required information from one to the + // other. + template + struct TrivialUntypedFunctionArgs { + static_assert(N >= 1, ""); + static_assert(N <= webrtc_function_impl::kInlineStorageWords, ""); + // We use an uintptr_t array here instead of std::aligned_storage, because + // the former can be efficiently passed in registers when using + // TrivialUntypedFunctionArgs as a function argument. (We can't do the same + // in VoidUnion, because std::aligned_storage but not uintptr_t can be + // legally reinterpret_casted to arbitrary types. + // TrivialUntypedFunctionArgs, on the other hand, only needs to handle + // placement new and memcpy.) 
+ alignas(std::max_align_t) uintptr_t inline_storage[N]; + webrtc_function_impl::FunVoid* call; + }; + struct NontrivialUntypedFunctionArgs { + void* void_ptr; + webrtc_function_impl::FunVoid* call; + void (*del)(webrtc_function_impl::VoidUnion*); + }; + struct FunctionPointerUntypedFunctionArgs { + webrtc_function_impl::FunVoid* fun_ptr; + webrtc_function_impl::FunVoid* call; + }; + + // Create function for lambdas and other callables that are trivial and small; + // it accepts every type of argument except those noted in its enable_if call. + template < + typename Signature, + typename F, + typename F_deref = typename std::remove_reference::type, + typename std::enable_if< + // Not for function pointers; we have another overload for that below. + !std::is_function< + typename std::remove_pointer::type>::value && + + // Not for nullptr; we have a constructor for that below. + !std::is_same::type>::value && + + // Not for UntypedFunction objects; use move construction or + // assignment. + !std::is_same::type>::value && + + // Only for trivial callables that will fit in inline storage. + std::is_trivially_move_constructible::value && + std::is_trivially_destructible::value && + sizeof(F_deref) <= kInlineStorageSize>::type* = nullptr, + size_t InlineSize = webrtc_function_impl::InlineStorageSize()> + static TrivialUntypedFunctionArgs PrepareArgs(F&& f) { + // The callable is trivial and small enough, so we just store its bytes + // in the inline storage. 
+ TrivialUntypedFunctionArgs args; + new (&args.inline_storage) F_deref(std::forward(f)); + args.call = reinterpret_cast( + webrtc_function_impl::CallHelpers< + Signature>::template CallInlineStorage); + return args; + } + template + static UntypedFunction Create(TrivialUntypedFunctionArgs args) { + webrtc_function_impl::VoidUnion vu; + std::memcpy(&vu.inline_storage, args.inline_storage, + sizeof(args.inline_storage)); + return UntypedFunction(vu, args.call, nullptr); + } + + // Create function for lambdas and other callables that are nontrivial or + // large; it accepts every type of argument except those noted in its + // enable_if call. + template ::type, + typename std::enable_if< + // Not for function pointers; we have another overload for that + // below. + !std::is_function< + typename std::remove_pointer::type>::value && + + // Not for nullptr; we have a constructor for that below. + !std::is_same::type>::value && + + // Not for UntypedFunction objects; use move construction or + // assignment. + !std::is_same::type>::value && + + // Only for nontrivial callables, or callables that won't fit in + // inline storage. + !(std::is_trivially_move_constructible::value && + std::is_trivially_destructible::value && + sizeof(F_deref) <= kInlineStorageSize)>::type* = nullptr> + static NontrivialUntypedFunctionArgs PrepareArgs(F&& f) { + // The callable is either nontrivial or too large, so we can't keep it + // in the inline storage; use the heap instead. + NontrivialUntypedFunctionArgs args; + args.void_ptr = new F_deref(std::forward(f)); + args.call = reinterpret_cast( + webrtc_function_impl::CallHelpers::template CallVoidPtr< + F_deref>); + args.del = static_cast( + [](webrtc_function_impl::VoidUnion* vu) { + // Assuming that this pointer isn't null allows the + // compiler to eliminate a null check in the (inlined) + // delete operation. 
+ RTC_ASSUME(vu->void_ptr != nullptr); + delete reinterpret_cast(vu->void_ptr); + }); + return args; + } + static UntypedFunction Create(NontrivialUntypedFunctionArgs args) { + webrtc_function_impl::VoidUnion vu; + vu.void_ptr = args.void_ptr; + return UntypedFunction(vu, args.call, args.del); + } + + // Create function that accepts function pointers. If the argument is null, + // the result is an empty UntypedFunction. + template + static FunctionPointerUntypedFunctionArgs PrepareArgs(Signature* f) { + FunctionPointerUntypedFunctionArgs args; + args.fun_ptr = reinterpret_cast(f); + args.call = reinterpret_cast( + webrtc_function_impl::CallHelpers::CallFunPtr); + return args; + } + static UntypedFunction Create(FunctionPointerUntypedFunctionArgs args) { + webrtc_function_impl::VoidUnion vu; + vu.fun_ptr = args.fun_ptr; + return UntypedFunction(vu, args.fun_ptr == nullptr ? nullptr : args.call, + nullptr); + } + + // Prepares arguments and creates an UntypedFunction in one go. + template + static UntypedFunction Create(F&& f) { + return Create(PrepareArgs(std::forward(f))); + } + + // Default constructor. Creates an empty UntypedFunction. + UntypedFunction() : call_(nullptr), delete_(nullptr) {} + + // Nullptr constructor and assignment. Creates an empty UntypedFunction. + UntypedFunction(std::nullptr_t) // NOLINT(runtime/explicit) + : call_(nullptr), delete_(nullptr) {} + UntypedFunction& operator=(std::nullptr_t) { + call_ = nullptr; + if (delete_) { + delete_(&f_); + delete_ = nullptr; + } + return *this; + } + + // Not copyable. + UntypedFunction(const UntypedFunction&) = delete; + UntypedFunction& operator=(const UntypedFunction&) = delete; + + // Move construction and assignment. 
+ UntypedFunction(UntypedFunction&& other) + : f_(other.f_), call_(other.call_), delete_(other.delete_) { + other.delete_ = nullptr; + } + UntypedFunction& operator=(UntypedFunction&& other) { + if (delete_) { + delete_(&f_); + } + f_ = other.f_; + call_ = other.call_; + delete_ = other.delete_; + other.delete_ = nullptr; + return *this; + } + + ~UntypedFunction() { + if (delete_) { + delete_(&f_); + } + } + + friend void swap(UntypedFunction& a, UntypedFunction& b) { + using std::swap; + swap(a.f_, b.f_); + swap(a.call_, b.call_); + swap(a.delete_, b.delete_); + } + + // Returns true if we have a function, false if we don't (i.e., we're null). + explicit operator bool() const { return call_ != nullptr; } + + template + typename webrtc_function_impl::CallHelpers::return_type Call( + ArgT&&... args) { + return reinterpret_cast< + typename webrtc_function_impl::CallHelpers::function_type*>( + call_)(&f_, std::forward(args)...); + } + + // Returns true iff we don't need to call a destructor. This is guaranteed + // to hold for a moved-from object. + bool IsTriviallyDestructible() { return delete_ == nullptr; } + + private: + UntypedFunction(webrtc_function_impl::VoidUnion f, + webrtc_function_impl::FunVoid* call, + void (*del)(webrtc_function_impl::VoidUnion*)) + : f_(f), call_(call), delete_(del) {} + + // The callable thing, or a pointer to it. + webrtc_function_impl::VoidUnion f_; + + // Pointer to a dispatch function that knows the type of the callable thing + // that's stored in f_, and how to call it. An UntypedFunction object is empty + // (null) iff call_ is null. + webrtc_function_impl::FunVoid* call_; + + // Pointer to a function that knows how to delete the callable thing that's + // stored in f_. Null if `f_` is trivially deletable. 
+ void (*delete_)(webrtc_function_impl::VoidUnion*); +}; + +} // namespace webrtc + +#endif // RTC_BASE_UNTYPED_FUNCTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h index 84f8fb1bd..f33ebccd3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h @@ -305,7 +305,7 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { // Implements the socket interface using the virtual network. Packets are // passed as messages using the message queue of the socket server. class VirtualSocket : public AsyncSocket, - public MessageHandler, + public MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: VirtualSocket(VirtualSocketServer* server, int family, int type, bool async); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h b/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h index 3e63a7587..8b2ba099c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h @@ -241,6 +241,10 @@ class WeakPtrFactory { public: explicit WeakPtrFactory(T* ptr) : ptr_(ptr) {} + WeakPtrFactory() = delete; + WeakPtrFactory(const WeakPtrFactory&) = delete; + WeakPtrFactory& operator=(const WeakPtrFactory&) = delete; + ~WeakPtrFactory() { ptr_ = nullptr; } WeakPtr GetWeakPtr() { @@ -263,7 +267,6 @@ class WeakPtrFactory { private: internal::WeakReferenceOwner weak_reference_owner_; T* ptr_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WeakPtrFactory); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.cc new file mode 100644 index 000000000..b3be9abfa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.cc @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2020 The WebRTC 
project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/win/get_activation_factory.h" + +#include +#include + +namespace { + +FARPROC LoadComBaseFunction(const char* function_name) { + static HMODULE const handle = + ::LoadLibraryExW(L"combase.dll", nullptr, LOAD_LIBRARY_SEARCH_SYSTEM32); + return handle ? ::GetProcAddress(handle, function_name) : nullptr; +} + +decltype(&::RoGetActivationFactory) GetRoGetActivationFactoryFunction() { + static decltype(&::RoGetActivationFactory) const function = + reinterpret_cast( + LoadComBaseFunction("RoGetActivationFactory")); + return function; +} + +} // namespace + +namespace webrtc { + +bool ResolveCoreWinRTDelayload() { + return GetRoGetActivationFactoryFunction() && + ResolveCoreWinRTStringDelayload(); +} + +HRESULT RoGetActivationFactoryProxy(HSTRING class_id, + const IID& iid, + void** out_factory) { + auto get_factory_func = GetRoGetActivationFactoryFunction(); + if (!get_factory_func) + return E_FAIL; + return get_factory_func(class_id, iid, out_factory); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.h new file mode 100644 index 000000000..801f39d31 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_WIN_GET_ACTIVATION_FACTORY_H_ +#define RTC_BASE_WIN_GET_ACTIVATION_FACTORY_H_ + +#include + +#include "rtc_base/win/hstring.h" + +namespace webrtc { + +// Provides access to Core WinRT functions which may not be available on +// Windows 7. Loads functions dynamically at runtime to prevent library +// dependencies. + +// Callers must check the return value of ResolveCoreWinRTDelayLoad() before +// using these functions. + +bool ResolveCoreWinRTDelayload(); + +HRESULT RoGetActivationFactoryProxy(HSTRING class_id, + const IID& iid, + void** out_factory); + +// Retrieves an activation factory for the type specified. +template +HRESULT GetActivationFactory(InterfaceType** factory) { + HSTRING class_id_hstring; + HRESULT hr = CreateHstring(runtime_class_id, wcslen(runtime_class_id), + &class_id_hstring); + if (FAILED(hr)) + return hr; + + hr = RoGetActivationFactoryProxy(class_id_hstring, IID_PPV_ARGS(factory)); + if (FAILED(hr)) + return hr; + + return DeleteHstring(class_id_hstring); +} + +} // namespace webrtc + +#endif // RTC_BASE_WIN_GET_ACTIVATION_FACTORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/hstring.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/win/hstring.cc new file mode 100644 index 000000000..5a362a97c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/hstring.cc @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/win/hstring.h" + +#include +#include + +namespace { + +FARPROC LoadComBaseFunction(const char* function_name) { + static HMODULE const handle = + ::LoadLibraryExW(L"combase.dll", nullptr, LOAD_LIBRARY_SEARCH_SYSTEM32); + return handle ? ::GetProcAddress(handle, function_name) : nullptr; +} + +decltype(&::WindowsCreateString) GetWindowsCreateString() { + static decltype(&::WindowsCreateString) const function = + reinterpret_cast( + LoadComBaseFunction("WindowsCreateString")); + return function; +} + +decltype(&::WindowsDeleteString) GetWindowsDeleteString() { + static decltype(&::WindowsDeleteString) const function = + reinterpret_cast( + LoadComBaseFunction("WindowsDeleteString")); + return function; +} + +} // namespace + +namespace webrtc { + +bool ResolveCoreWinRTStringDelayload() { + return GetWindowsDeleteString() && GetWindowsCreateString(); +} + +HRESULT CreateHstring(const wchar_t* src, uint32_t len, HSTRING* out_hstr) { + decltype(&::WindowsCreateString) create_string_func = + GetWindowsCreateString(); + if (!create_string_func) + return E_FAIL; + return create_string_func(src, len, out_hstr); +} + +HRESULT DeleteHstring(HSTRING hstr) { + decltype(&::WindowsDeleteString) delete_string_func = + GetWindowsDeleteString(); + if (!delete_string_func) + return E_FAIL; + return delete_string_func(hstr); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/hstring.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/hstring.h new file mode 100644 index 000000000..8fb119a9e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/hstring.h @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_WIN_HSTRING_H_ +#define RTC_BASE_WIN_HSTRING_H_ + +#include +#include +#include + +namespace webrtc { + +// Callers must check the return value of ResolveCoreWinRTStringDelayLoad() +// before using these functions. +bool ResolveCoreWinRTStringDelayload(); + +HRESULT CreateHstring(const wchar_t* src, uint32_t len, HSTRING* out_hstr); + +HRESULT DeleteHstring(HSTRING hstr); + +} // namespace webrtc + +#endif // RTC_BASE_WIN_HSTRING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.cc new file mode 100644 index 000000000..b83ad32a6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/win/scoped_com_initializer.h" + +namespace webrtc { + +ScopedCOMInitializer::ScopedCOMInitializer() { + RTC_DLOG(INFO) << "Single-Threaded Apartment (STA) COM thread"; + Initialize(COINIT_APARTMENTTHREADED); +} + +// Constructor for MTA initialization. 
+ScopedCOMInitializer::ScopedCOMInitializer(SelectMTA mta) { + RTC_DLOG(INFO) << "Multi-Threaded Apartment (MTA) COM thread"; + Initialize(COINIT_MULTITHREADED); +} + +ScopedCOMInitializer::~ScopedCOMInitializer() { + if (Succeeded()) { + CoUninitialize(); + } +} + +void ScopedCOMInitializer::Initialize(COINIT init) { + // Initializes the COM library for use by the calling thread, sets the + // thread's concurrency model, and creates a new apartment for the thread + // if one is required. CoInitializeEx must be called at least once, and is + // usually called only once, for each thread that uses the COM library. + hr_ = CoInitializeEx(NULL, init); + RTC_CHECK_NE(RPC_E_CHANGED_MODE, hr_) + << "Invalid COM thread model change (MTA->STA)"; + // Multiple calls to CoInitializeEx by the same thread are allowed as long + // as they pass the same concurrency flag, but subsequent valid calls + // return S_FALSE. To close the COM library gracefully on a thread, each + // successful call to CoInitializeEx, including any call that returns + // S_FALSE, must be balanced by a corresponding call to CoUninitialize. + if (hr_ == S_OK) { + RTC_DLOG(INFO) + << "The COM library was initialized successfully on this thread"; + } else if (hr_ == S_FALSE) { + RTC_DLOG(WARNING) + << "The COM library is already initialized on this thread"; + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.h new file mode 100644 index 000000000..918812fc7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_WIN_SCOPED_COM_INITIALIZER_H_ +#define RTC_BASE_WIN_SCOPED_COM_INITIALIZER_H_ + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +// Initializes COM in the constructor (STA or MTA), and uninitializes COM in the +// destructor. Taken from base::win::ScopedCOMInitializer. +// +// WARNING: This should only be used once per thread, ideally scoped to a +// similar lifetime as the thread itself. You should not be using this in +// random utility functions that make COM calls; instead ensure that these +// functions are running on a COM-supporting thread! +// See https://msdn.microsoft.com/en-us/library/ms809971.aspx for details. +class ScopedCOMInitializer { + public: + // Enum value provided to initialize the thread as an MTA instead of STA. + // There are two types of apartments, Single Threaded Apartments (STAs) + // and Multi Threaded Apartments (MTAs). Within a given process there can + // be multiple STA’s but there is only one MTA. STA is typically used by + // "GUI applications" and MTA by "worker threads" with no UI message loop. + enum SelectMTA { kMTA }; + + // Constructor for STA initialization. + ScopedCOMInitializer(); + + // Constructor for MTA initialization. 
+ explicit ScopedCOMInitializer(SelectMTA mta); + + ~ScopedCOMInitializer(); + + ScopedCOMInitializer(const ScopedCOMInitializer&) = delete; + ScopedCOMInitializer& operator=(const ScopedCOMInitializer&) = delete; + + bool Succeeded() { return SUCCEEDED(hr_); } + + private: + void Initialize(COINIT init); + + HRESULT hr_; +}; + +} // namespace webrtc + +#endif // RTC_BASE_WIN_SCOPED_COM_INITIALIZER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc index 2e6c1577c..42148adee 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc @@ -203,8 +203,12 @@ Version MajorMinorBuildToVersion(int major, int minor, int build) { return VERSION_WIN10_RS2; } else if (build < 17134) { return VERSION_WIN10_RS3; - } else { + } else if (build < 17763) { return VERSION_WIN10_RS4; + } else if (build < 18362) { + return VERSION_WIN10_RS5; + } else { + return VERSION_WIN10_19H1; } } else if (major > 6) { RTC_NOTREACHED(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h index 1ad319e4c..33449e2b3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h @@ -43,6 +43,8 @@ enum Version { VERSION_WIN10_RS2 = 10, // Redstone 2: Version 1703, Build 15063. VERSION_WIN10_RS3 = 11, // Redstone 3: Version 1709, Build 16299. VERSION_WIN10_RS4 = 12, // Redstone 4: Version 1803, Build 17134. + VERSION_WIN10_RS5 = 13, // Redstone 5: Version 1809, Build 17763. + VERSION_WIN10_19H1 = 14, // 19H1: Version 1903, Build 18362. // On edit, update tools\metrics\histograms\enums.xml "WindowsVersion" and // "GpuBlacklistFeatureTestResultsWindows2". VERSION_WIN_LAST, // Indicates error condition. 
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h index 63599ca54..e9bc7b40b 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h @@ -44,7 +44,8 @@ static base::android::ScopedJavaLocalRef Java_CandidatePairChangeEvent_ env, const base::android::JavaRef& local, const base::android::JavaRef& remote, JniIntWrapper lastDataReceivedMs, - const base::android::JavaRef& reason) { + const base::android::JavaRef& reason, + JniIntWrapper estimatedDisconnectedTimeMs) { jclass clazz = org_webrtc_CandidatePairChangeEvent_clazz(env); CHECK_CLAZZ(env, clazz, org_webrtc_CandidatePairChangeEvent_clazz(env), NULL); @@ -55,13 +56,13 @@ static base::android::ScopedJavaLocalRef Java_CandidatePairChangeEvent_ env, clazz, "", - "(Lorg/webrtc/IceCandidate;Lorg/webrtc/IceCandidate;ILjava/lang/String;)V", + "(Lorg/webrtc/IceCandidate;Lorg/webrtc/IceCandidate;ILjava/lang/String;I)V", &g_org_webrtc_CandidatePairChangeEvent_Constructor); jobject ret = env->NewObject(clazz, call_context.base.method_id, local.obj(), remote.obj(), as_jint(lastDataReceivedMs), - reason.obj()); + reason.obj(), as_jint(estimatedDisconnectedTimeMs)); return base::android::ScopedJavaLocalRef(env, ret); } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h index bd1b2e604..ca874d549 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h @@ -157,10 +157,10 @@ JNI_GENERATOR_EXPORT void 
Java_org_webrtc_RtpTransceiver_nativeStopStandard( return JNI_RtpTransceiver_StopStandard(env, rtpTransceiver); } -static void JNI_RtpTransceiver_SetDirection(JNIEnv* env, jlong rtpTransceiver, +static jboolean JNI_RtpTransceiver_SetDirection(JNIEnv* env, jlong rtpTransceiver, const base::android::JavaParamRef& rtpTransceiverDirection); -JNI_GENERATOR_EXPORT void Java_org_webrtc_RtpTransceiver_nativeSetDirection( +JNI_GENERATOR_EXPORT jboolean Java_org_webrtc_RtpTransceiver_nativeSetDirection( JNIEnv* env, jclass jcaller, jlong rtpTransceiver, diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/EncodedImage_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/EncodedImage_jni.h index da8a56926..738313039 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/EncodedImage_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/EncodedImage_jni.h @@ -83,7 +83,6 @@ static base::android::ScopedJavaLocalRef Java_EncodedImage_Constructor( jlong captureTimeNs, const base::android::JavaRef& frameType, JniIntWrapper rotation, - jboolean completeFrame, const base::android::JavaRef& qp) { jclass clazz = org_webrtc_EncodedImage_clazz(env); CHECK_CLAZZ(env, clazz, @@ -95,14 +94,13 @@ static base::android::ScopedJavaLocalRef Java_EncodedImage_Constructor( env, clazz, "", -"(Ljava/nio/ByteBuffer;Ljava/lang/Runnable;IIJLorg/webrtc/EncodedImage$FrameType;IZLjava/lang/Integer;)V", +"(Ljava/nio/ByteBuffer;Ljava/lang/Runnable;IIJLorg/webrtc/EncodedImage$FrameType;ILjava/lang/Integer;)V", &g_org_webrtc_EncodedImage_Constructor); jobject ret = env->NewObject(clazz, call_context.base.method_id, buffer.obj(), releaseCallback.obj(), as_jint(encodedWidth), - as_jint(encodedHeight), captureTimeNs, frameType.obj(), as_jint(rotation), - completeFrame, qp.obj()); + as_jint(encodedHeight), captureTimeNs, frameType.obj(), as_jint(rotation), qp.obj()); return base::android::ScopedJavaLocalRef(env, ret); } @@ -237,28 
+235,6 @@ static jint Java_EncodedImage_getRotation(JNIEnv* env, const base::android::Java return ret; } -static std::atomic g_org_webrtc_EncodedImage_getCompleteFrame(nullptr); -static jboolean Java_EncodedImage_getCompleteFrame(JNIEnv* env, const - base::android::JavaRef& obj) { - jclass clazz = org_webrtc_EncodedImage_clazz(env); - CHECK_CLAZZ(env, obj.obj(), - org_webrtc_EncodedImage_clazz(env), false); - - jni_generator::JniJavaCallContextChecked call_context; - call_context.Init< - base::android::MethodID::TYPE_INSTANCE>( - env, - clazz, - "getCompleteFrame", - "()Z", - &g_org_webrtc_EncodedImage_getCompleteFrame); - - jboolean ret = - env->CallBooleanMethod(obj.obj(), - call_context.base.method_id); - return ret; -} - static std::atomic g_org_webrtc_EncodedImage_getQp(nullptr); static base::android::ScopedJavaLocalRef Java_EncodedImage_getQp(JNIEnv* env, const base::android::JavaRef& obj) { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/base/network_monitor.h b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/base/network_monitor.h deleted file mode 100644 index fc7d3e54c..000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/base/network_monitor.h +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef SDK_ANDROID_NATIVE_API_BASE_NETWORK_MONITOR_H_ -#define SDK_ANDROID_NATIVE_API_BASE_NETWORK_MONITOR_H_ - -// TODO(deadbeef): Remove this forwarding header once clients switch to the -// new one. 
-#include "sdk/android/native_api/network_monitor/network_monitor.h" - -#endif // SDK_ANDROID_NATIVE_API_BASE_NETWORK_MONITOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/DEPS b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/DEPS deleted file mode 100644 index ae33fa683..000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/DEPS +++ /dev/null @@ -1,15 +0,0 @@ -include_rules = [ - "+third_party/libyuv", - "+call/callfactoryinterface.h", - "+common_video", - "+logging/rtc_event_log/rtc_event_log_factory.h", - "+media/base", - "+media/engine", - "+modules/audio_device/include/audio_device.h", - "+modules/audio_processing/include/audio_processing.h", - "+modules/include", - "+modules/utility/include/jvm_android.h", - "+modules/video_coding", - "+pc", - "+system_wrappers/include", -] diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc index e41fd035d..434e6d3af 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc @@ -21,6 +21,7 @@ #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "sdk/android/generated_base_jni/NetworkChangeDetector_jni.h" #include "sdk/android/generated_base_jni/NetworkMonitor_jni.h" #include "sdk/android/native_api/jni/java_types.h" @@ -30,6 +31,37 @@ namespace webrtc { namespace jni { +namespace { + +const char* NetworkTypeToString(NetworkType type) { + switch (type) { + case NETWORK_UNKNOWN: + return "UNKNOWN"; + case NETWORK_ETHERNET: + return "ETHERNET"; + case NETWORK_WIFI: + return "WIFI"; + case NETWORK_5G: + return "5G"; + case NETWORK_4G: + return "4G"; + case NETWORK_3G: + return "3G"; + case NETWORK_2G: + return "2G"; + case NETWORK_UNKNOWN_CELLULAR: + 
return "UNKNOWN_CELLULAR"; + case NETWORK_BLUETOOTH: + return "BLUETOOTH"; + case NETWORK_VPN: + return "VPN"; + case NETWORK_NONE: + return "NONE"; + } +} + +} // namespace + enum AndroidSdkVersion { SDK_VERSION_LOLLIPOP = 21, SDK_VERSION_MARSHMALLOW = 23 @@ -196,12 +228,13 @@ AndroidNetworkMonitor::AndroidNetworkMonitor( const JavaRef& j_application_context) : android_sdk_int_(Java_NetworkMonitor_androidSdkInt(env)), j_application_context_(env, j_application_context), - j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)) {} + j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)), + network_thread_(rtc::Thread::Current()) {} AndroidNetworkMonitor::~AndroidNetworkMonitor() = default; void AndroidNetworkMonitor::Start() { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); if (started_) { return; } @@ -215,7 +248,7 @@ void AndroidNetworkMonitor::Start() { // This is kind of magic behavior, but doing this allows the SocketServer to // use this as a NetworkBinder to bind sockets on a particular network when // it creates sockets. - worker_thread()->socketserver()->set_network_binder(this); + network_thread_->socketserver()->set_network_binder(this); JNIEnv* env = AttachCurrentThreadIfNeeded(); Java_NetworkMonitor_startMonitoring( @@ -223,7 +256,7 @@ void AndroidNetworkMonitor::Start() { } void AndroidNetworkMonitor::Stop() { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); if (!started_) { return; } @@ -232,8 +265,8 @@ void AndroidNetworkMonitor::Stop() { // Once the network monitor stops, it will clear all network information and // it won't find the network handle to bind anyway. 
- if (worker_thread()->socketserver()->network_binder() == this) { - worker_thread()->socketserver()->set_network_binder(nullptr); + if (network_thread_->socketserver()->network_binder() == this) { + network_thread_->socketserver()->set_network_binder(nullptr); } JNIEnv* env = AttachCurrentThreadIfNeeded(); @@ -249,7 +282,7 @@ void AndroidNetworkMonitor::Stop() { rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( int socket_fd, const rtc::IPAddress& address) { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); // Android prior to Lollipop didn't have support for binding sockets to // networks. This may also occur if there is no connectivity manager @@ -346,17 +379,9 @@ rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( return rtc::NetworkBindingResult::FAILURE; } -void AndroidNetworkMonitor::OnNetworkConnected( - const NetworkInformation& network_info) { - worker_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkConnected_w, - this, network_info)); - // Fire SignalNetworksChanged to update the list of networks. 
- OnNetworksChanged(); -} - -void AndroidNetworkMonitor::OnNetworkConnected_w( +void AndroidNetworkMonitor::OnNetworkConnected_n( const NetworkInformation& network_info) { + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString(); adapter_type_by_name_[network_info.interface_name] = AdapterTypeFromNetworkType(network_info.type, surface_cellular_types_); @@ -369,11 +394,13 @@ void AndroidNetworkMonitor::OnNetworkConnected_w( for (const rtc::IPAddress& address : network_info.ip_addresses) { network_handle_by_address_[address] = network_info.handle; } + SignalNetworksChanged(); } absl::optional AndroidNetworkMonitor::FindNetworkHandleFromAddress( const rtc::IPAddress& ip_address) const { + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Find network handle."; if (find_network_handle_without_ipv6_temporary_part_) { for (auto const& iter : network_info_by_handle_) { @@ -396,14 +423,9 @@ AndroidNetworkMonitor::FindNetworkHandleFromAddress( } } -void AndroidNetworkMonitor::OnNetworkDisconnected(NetworkHandle handle) { +void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) { + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Network disconnected for handle " << handle; - worker_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_w, this, handle)); -} - -void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) { auto iter = network_info_by_handle_.find(handle); if (iter != network_info_by_handle_.end()) { for (const rtc::IPAddress& address : iter->second.ip_addresses) { @@ -413,31 +435,33 @@ void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) { } } -void AndroidNetworkMonitor::OnNetworkPreference( +void AndroidNetworkMonitor::OnNetworkPreference_n( NetworkType type, rtc::NetworkPreference preference) { - worker_thread()->Invoke(RTC_FROM_HERE, [&] { - auto adapter_type = - AdapterTypeFromNetworkType(type, 
surface_cellular_types_); - network_preference_by_adapter_type_[adapter_type] = preference; - }); - OnNetworksChanged(); + RTC_DCHECK_RUN_ON(network_thread_); + RTC_LOG(LS_INFO) << "Android network monitor preference for " + << NetworkTypeToString(type) << " changed to " + << rtc::NetworkPreferenceToString(preference); + auto adapter_type = AdapterTypeFromNetworkType(type, surface_cellular_types_); + network_preference_by_adapter_type_[adapter_type] = preference; + SignalNetworksChanged(); } void AndroidNetworkMonitor::SetNetworkInfos( const std::vector& network_infos) { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); network_handle_by_address_.clear(); network_info_by_handle_.clear(); RTC_LOG(LS_INFO) << "Android network monitor found " << network_infos.size() << " networks"; for (const NetworkInformation& network : network_infos) { - OnNetworkConnected_w(network); + OnNetworkConnected_n(network); } } rtc::AdapterType AndroidNetworkMonitor::GetAdapterType( const std::string& if_name) { + RTC_DCHECK_RUN_ON(network_thread_); auto iter = adapter_type_by_name_.find(if_name); rtc::AdapterType type = (iter == adapter_type_by_name_.end()) ? rtc::ADAPTER_TYPE_UNKNOWN @@ -450,6 +474,7 @@ rtc::AdapterType AndroidNetworkMonitor::GetAdapterType( rtc::AdapterType AndroidNetworkMonitor::GetVpnUnderlyingAdapterType( const std::string& if_name) { + RTC_DCHECK_RUN_ON(network_thread_); auto iter = vpn_underlying_adapter_type_by_name_.find(if_name); rtc::AdapterType type = (iter == vpn_underlying_adapter_type_by_name_.end()) ? 
rtc::ADAPTER_TYPE_UNKNOWN @@ -459,6 +484,7 @@ rtc::AdapterType AndroidNetworkMonitor::GetVpnUnderlyingAdapterType( rtc::NetworkPreference AndroidNetworkMonitor::GetNetworkPreference( const std::string& if_name) { + RTC_DCHECK_RUN_ON(network_thread_); auto iter = adapter_type_by_name_.find(if_name); if (iter == adapter_type_by_name_.end()) { return rtc::NetworkPreference::NEUTRAL; @@ -499,7 +525,11 @@ AndroidNetworkMonitorFactory::CreateNetworkMonitor() { void AndroidNetworkMonitor::NotifyConnectionTypeChanged( JNIEnv* env, const JavaRef& j_caller) { - OnNetworksChanged(); + invoker_.AsyncInvoke(RTC_FROM_HERE, network_thread_, [this] { + RTC_LOG(LS_INFO) + << "Android network monitor detected connection type change."; + SignalNetworksChanged(); + }); } void AndroidNetworkMonitor::NotifyOfActiveNetworkList( @@ -518,14 +548,19 @@ void AndroidNetworkMonitor::NotifyOfNetworkConnect( const JavaRef& j_network_info) { NetworkInformation network_info = GetNetworkInformationFromJava(env, j_network_info); - OnNetworkConnected(network_info); + network_thread_->Invoke( + RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkConnected_n, + this, network_info)); } void AndroidNetworkMonitor::NotifyOfNetworkDisconnect( JNIEnv* env, const JavaRef& j_caller, jlong network_handle) { - OnNetworkDisconnected(static_cast(network_handle)); + network_thread_->Invoke( + RTC_FROM_HERE, + rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_n, this, + static_cast(network_handle))); } void AndroidNetworkMonitor::NotifyOfNetworkPreference( @@ -536,7 +571,10 @@ void AndroidNetworkMonitor::NotifyOfNetworkPreference( NetworkType type = GetNetworkTypeFromJava(env, j_connection_type); rtc::NetworkPreference preference = static_cast(jpreference); - OnNetworkPreference(type, preference); + + network_thread_->Invoke( + RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkPreference_n, + this, type, preference)); } } // namespace jni diff --git 
a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h index d1008f266..eff212254 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h @@ -17,9 +17,11 @@ #include #include "absl/types/optional.h" +#include "rtc_base/async_invoker.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "sdk/android/src/jni/jni_helpers.h" namespace webrtc { @@ -61,7 +63,7 @@ struct NetworkInformation { std::string ToString() const; }; -class AndroidNetworkMonitor : public rtc::NetworkMonitorBase, +class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface, public rtc::NetworkBinderInterface { public: AndroidNetworkMonitor(JNIEnv* env, @@ -83,10 +85,6 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorBase, rtc::NetworkPreference GetNetworkPreference( const std::string& if_name) override; - void OnNetworkConnected(const NetworkInformation& network_info); - void OnNetworkDisconnected(NetworkHandle network_handle); - void OnNetworkPreference(NetworkType type, rtc::NetworkPreference preference); - // Always expected to be called on the network thread. 
void SetNetworkInfos(const std::vector& network_infos); @@ -111,22 +109,30 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorBase, const rtc::IPAddress& address) const; private: - void OnNetworkConnected_w(const NetworkInformation& network_info); - void OnNetworkDisconnected_w(NetworkHandle network_handle); + void OnNetworkConnected_n(const NetworkInformation& network_info); + void OnNetworkDisconnected_n(NetworkHandle network_handle); + void OnNetworkPreference_n(NetworkType type, + rtc::NetworkPreference preference); const int android_sdk_int_; ScopedJavaGlobalRef j_application_context_; ScopedJavaGlobalRef j_network_monitor_; - rtc::ThreadChecker thread_checker_; - bool started_ = false; - std::map adapter_type_by_name_; - std::map vpn_underlying_adapter_type_by_name_; - std::map network_handle_by_address_; - std::map network_info_by_handle_; + rtc::Thread* network_thread_; + bool started_ RTC_GUARDED_BY(network_thread_) = false; + std::map adapter_type_by_name_ + RTC_GUARDED_BY(network_thread_); + std::map vpn_underlying_adapter_type_by_name_ + RTC_GUARDED_BY(network_thread_); + std::map network_handle_by_address_ + RTC_GUARDED_BY(network_thread_); + std::map network_info_by_handle_ + RTC_GUARDED_BY(network_thread_); std::map - network_preference_by_adapter_type_; - bool find_network_handle_without_ipv6_temporary_part_; - bool surface_cellular_types_; + network_preference_by_adapter_type_ RTC_GUARDED_BY(network_thread_); + bool find_network_handle_without_ipv6_temporary_part_ + RTC_GUARDED_BY(network_thread_) = false; + bool surface_cellular_types_ RTC_GUARDED_BY(network_thread_) = false; + rtc::AsyncInvoker invoker_; }; class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.h index 378d380a1..eeac48f1e 100644 --- 
a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.h @@ -13,10 +13,8 @@ #include -#include "common_video/include/i420_buffer_pool.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "media/base/adapted_video_track_source.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/checks.h" #include "rtc_base/thread.h" #include "rtc_base/timestamp_aligner.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/DEPS b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/DEPS deleted file mode 100644 index 9a3adee68..000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/DEPS +++ /dev/null @@ -1,4 +0,0 @@ -include_rules = [ - "+base/android/jni_android.h", - "+modules/audio_device", -] diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc index 65bef4b1b..34b14f450 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc @@ -18,8 +18,6 @@ #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/sleep.h" - namespace webrtc { namespace jni { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc index d77488ff4..eb5d93fa2 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc @@ -151,15 +151,13 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t PlayoutDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char 
guid[kAdmMaxGuidSize]) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t RecordingDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetPlayoutDevice(uint16_t index) override { @@ -171,8 +169,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetPlayoutDevice( AudioDeviceModule::WindowsDeviceType device) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetRecordingDevice(uint16_t index) override { @@ -184,8 +181,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetRecordingDevice( AudioDeviceModule::WindowsDeviceType device) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t PlayoutIsAvailable(bool* available) override { @@ -396,62 +392,52 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetMicrophoneVolume(uint32_t volume) override { RTC_LOG(INFO) << __FUNCTION__ << "(" << volume << ")"; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MicrophoneVolume(uint32_t* volume) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MinMicrophoneVolume(uint32_t* minVolume) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SpeakerMuteIsAvailable(bool* available) override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetSpeakerMute(bool enable) override { RTC_LOG(INFO) << 
__FUNCTION__ << "(" << enable << ")"; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SpeakerMute(bool* enabled) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MicrophoneMuteIsAvailable(bool* available) override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetMicrophoneMute(bool enable) override { RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MicrophoneMute(bool* enabled) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t StereoPlayoutIsAvailable(bool* available) const override { @@ -569,8 +555,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t EnableBuiltInAGC(bool enable) override { RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; - FATAL() << "HW AGC is not available"; - return -1; + RTC_CHECK_NOTREACHED(); } // TODO(henrika): add implementation for OpenSL ES based audio as well. 
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc index e13653ca3..839f6a8f6 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc @@ -70,7 +70,7 @@ ScopedJavaLocalRef NativeToJavaEncodedImage( static_cast(image._encodedWidth), static_cast(image._encodedHeight), image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec, frame_type, - static_cast(image.rotation_), image._completeFrame, qp); + static_cast(image.rotation_), qp); } ScopedJavaLocalRef NativeToJavaFrameTypeArray( @@ -98,8 +98,6 @@ EncodedImage JavaToNativeEncodedImage(JNIEnv* env, Java_EncodedImage_getEncodedHeight(env, j_encoded_image); frame.rotation_ = (VideoRotation)Java_EncodedImage_getRotation(env, j_encoded_image); - frame._completeFrame = - Java_EncodedImage_getCompleteFrame(env, j_encoded_image); frame.qp_ = JavaToNativeOptionalInt( env, Java_EncodedImage_getQp(env, j_encoded_image)) diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc index 116533364..6706782e3 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc @@ -128,7 +128,8 @@ ScopedJavaLocalRef NativeToJavaCandidatePairChange( env, NativeToJavaCandidate(env, selected_pair.local_candidate()), NativeToJavaCandidate(env, selected_pair.remote_candidate()), static_cast(event.last_data_received_ms), - NativeToJavaString(env, event.reason)); + NativeToJavaString(env, event.reason), + static_cast(event.estimated_disconnected_time_ms)); } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_transceiver.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_transceiver.cc index a0b3c20fd..1d468461f 100644 
--- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_transceiver.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_transceiver.cc @@ -151,18 +151,25 @@ void JNI_RtpTransceiver_StopStandard(JNIEnv* jni, ->StopStandard(); } -void JNI_RtpTransceiver_SetDirection( +jboolean JNI_RtpTransceiver_SetDirection( JNIEnv* jni, jlong j_rtp_transceiver_pointer, const base::android::JavaParamRef& j_rtp_transceiver_direction) { if (IsNull(jni, j_rtp_transceiver_direction)) { - return; + return false; } RtpTransceiverDirection direction = static_cast( Java_RtpTransceiverDirection_getNativeIndex(jni, j_rtp_transceiver_direction)); - reinterpret_cast(j_rtp_transceiver_pointer) - ->SetDirection(direction); + webrtc::RTCError error = + reinterpret_cast(j_rtp_transceiver_pointer) + ->SetDirectionWithError(direction); + if (!error.ok()) { + RTC_LOG(LS_WARNING) << "SetDirection failed, code " + << ToString(error.type()) << ", message " + << error.message(); + } + return error.ok(); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc index d45c6f1d8..3aa18abbd 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc @@ -257,17 +257,6 @@ absl::optional VideoDecoderWrapper::ParseQP( } break; } -#ifndef DISABLE_H265 - case kVideoCodecH265: { - h265_bitstream_parser_.ParseBitstream(input_image.data(), - input_image.size()); - int qp_int; - if (h265_bitstream_parser_.GetLastSliceQp(&qp_int)) { - qp = qp_int; - } - break; - } -#endif default: break; // Default is to not provide QP. 
} diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h index 1e02ba60a..f5c4787a6 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h @@ -18,7 +18,6 @@ #include "api/video_codecs/video_decoder.h" #include "common_video/h264/h264_bitstream_parser.h" -#include "common_video/h265/h265_bitstream_parser.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" @@ -102,9 +101,6 @@ class VideoDecoderWrapper : public VideoDecoder { H264BitstreamParser h264_bitstream_parser_ RTC_GUARDED_BY(decoder_thread_checker_); - H265BitstreamParser h265_bitstream_parser_ - RTC_GUARDED_BY(decoder_thread_checker_); - DecodedImageCallback* callback_ RTC_GUARDED_BY(callback_race_checker_); // Accessed both on the decoder thread and the callback thread. 
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc index ada2a5bbd..f64f1b466 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc @@ -116,7 +116,10 @@ int32_t VideoEncoderWrapper::Release() { int32_t status = JavaToNativeVideoCodecStatus( jni, Java_VideoEncoder_release(jni, encoder_)); RTC_LOG(LS_INFO) << "release: " << status; - frame_extra_infos_.clear(); + { + MutexLock lock(&frame_extra_infos_lock_); + frame_extra_infos_.clear(); + } initialized_ = false; return status; @@ -141,7 +144,10 @@ int32_t VideoEncoderWrapper::Encode( FrameExtraInfo info; info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec; info.timestamp_rtp = frame.timestamp(); - frame_extra_infos_.push_back(info); + { + MutexLock lock(&frame_extra_infos_lock_); + frame_extra_infos_.push_back(info); + } ScopedJavaLocalRef j_frame = NativeToJavaVideoFrame(jni, frame); ScopedJavaLocalRef ret = @@ -232,19 +238,23 @@ void VideoEncoderWrapper::OnEncodedFrame( // entries that don't belong to us, and we need to be careful not to // remove them. Removing only those entries older than the current frame // provides this guarantee. 
- while (!frame_extra_infos_.empty() && - frame_extra_infos_.front().capture_time_ns < capture_time_ns) { + FrameExtraInfo frame_extra_info; + { + MutexLock lock(&frame_extra_infos_lock_); + while (!frame_extra_infos_.empty() && + frame_extra_infos_.front().capture_time_ns < capture_time_ns) { + frame_extra_infos_.pop_front(); + } + if (frame_extra_infos_.empty() || + frame_extra_infos_.front().capture_time_ns != capture_time_ns) { + RTC_LOG(LS_WARNING) + << "Java encoder produced an unexpected frame with timestamp: " + << capture_time_ns; + return; + } + frame_extra_info = frame_extra_infos_.front(); frame_extra_infos_.pop_front(); } - if (frame_extra_infos_.empty() || - frame_extra_infos_.front().capture_time_ns != capture_time_ns) { - RTC_LOG(LS_WARNING) - << "Java encoder produced an unexpected frame with timestamp: " - << capture_time_ns; - return; - } - FrameExtraInfo frame_extra_info = std::move(frame_extra_infos_.front()); - frame_extra_infos_.pop_front(); // This is a bit subtle. The |frame| variable from the lambda capture is // const. 
Which implies that (i) we need to make a copy to be able to @@ -346,7 +356,6 @@ CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo( static_cast(gof_idx_++ % gof_.num_frames_in_gof); info.codecSpecific.VP9.num_spatial_layers = 1; info.codecSpecific.VP9.first_frame_in_picture = true; - info.codecSpecific.VP9.end_of_picture = true; info.codecSpecific.VP9.spatial_layer_resolution_present = false; if (info.codecSpecific.VP9.ss_data_available) { info.codecSpecific.VP9.spatial_layer_resolution_present = true; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h index dfde795e3..16eb1c2b8 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h @@ -24,6 +24,7 @@ #include "common_video/h265/h265_bitstream_parser.h" #endif #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/synchronization/mutex.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/video_frame.h" @@ -85,7 +86,10 @@ class VideoEncoderWrapper : public VideoEncoder { const ScopedJavaGlobalRef encoder_; const ScopedJavaGlobalRef int_array_class_; - std::deque frame_extra_infos_; + // Modified both on the encoder thread and the callback thread. 
+ Mutex frame_extra_infos_lock_; + std::deque frame_extra_infos_ + RTC_GUARDED_BY(frame_extra_infos_lock_); EncodedImageCallback* callback_; bool initialized_; int num_resets_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc index d57fe8f9b..860eebe5e 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc @@ -152,14 +152,14 @@ const ScopedJavaGlobalRef& AndroidVideoBuffer::video_frame_buffer() return j_video_frame_buffer_; } -rtc::scoped_refptr AndroidVideoBuffer::CropAndScale( - JNIEnv* jni, +rtc::scoped_refptr AndroidVideoBuffer::CropAndScale( int crop_x, int crop_y, int crop_width, int crop_height, int scale_width, int scale_height) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); return Adopt(jni, Java_Buffer_cropAndScale(jni, j_video_frame_buffer_, crop_x, crop_y, crop_width, crop_height, scale_width, scale_height)); diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h index f6b569a3e..5e39b8a77 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h @@ -42,13 +42,12 @@ class AndroidVideoBuffer : public VideoFrameBuffer { // Crops a region defined by |crop_x|, |crop_y|, |crop_width| and // |crop_height|. Scales it to size |scale_width| x |scale_height|. - rtc::scoped_refptr CropAndScale(JNIEnv* jni, - int crop_x, - int crop_y, - int crop_width, - int crop_height, - int scale_width, - int scale_height); + rtc::scoped_refptr CropAndScale(int crop_x, + int crop_y, + int crop_width, + int crop_height, + int scale_width, + int scale_height) override; protected: // Should not be called directly. Adopts the Java VideoFrame.Buffer. 
Use diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc index b8e9633f4..59de664c0 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc +++ b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc @@ -35,6 +35,20 @@ std::string VectorToString(const std::vector& vector) { return sb.Release(); } +// This overload is required because std::vector range loops don't +// return references but objects, causing -Wrange-loop-analysis diagnostics. +std::string VectorToString(const std::vector& vector) { + rtc::StringBuilder sb; + sb << "["; + const char* separator = ""; + for (bool element : vector) { + sb << separator << rtc::ToString(element); + separator = ","; + } + sb << "]"; + return sb.Release(); +} + // Produces "[\"a\",\"b\",\"c\"]". Works for vectors of both const char* and // std::string element types. template diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc index a56d30d3c..d29d819fc 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc +++ b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc @@ -98,13 +98,12 @@ std::unique_ptr RTCStatsReport::Take(const std::string& id) { return stats; } -void RTCStatsReport::TakeMembersFrom( - rtc::scoped_refptr victim) { - for (StatsMap::iterator it = victim->stats_.begin(); - it != victim->stats_.end(); ++it) { +void RTCStatsReport::TakeMembersFrom(rtc::scoped_refptr other) { + for (StatsMap::iterator it = other->stats_.begin(); it != other->stats_.end(); + ++it) { AddStats(std::unique_ptr(it->second.release())); } - victim->stats_.clear(); + other->stats_.clear(); } RTCStatsReport::ConstIterator RTCStatsReport::begin() const { diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc b/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc index 2fe85468c..5a803de07 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc +++ 
b/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc @@ -92,6 +92,7 @@ RTCCertificateStats::~RTCCertificateStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL(RTCCodecStats, RTCStats, "codec", + &transport_id, &payload_type, &mime_type, &clock_rate, @@ -104,6 +105,7 @@ RTCCodecStats::RTCCodecStats(const std::string& id, int64_t timestamp_us) RTCCodecStats::RTCCodecStats(std::string&& id, int64_t timestamp_us) : RTCStats(std::move(id), timestamp_us), + transport_id("transportId"), payload_type("payloadType"), mime_type("mimeType"), clock_rate("clockRate"), @@ -112,6 +114,7 @@ RTCCodecStats::RTCCodecStats(std::string&& id, int64_t timestamp_us) RTCCodecStats::RTCCodecStats(const RTCCodecStats& other) : RTCStats(other.id(), other.timestamp_us()), + transport_id(other.transport_id), payload_type(other.payload_type), mime_type(other.mime_type), clock_rate(other.clock_rate), diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h index 8eac3d18b..3c60f63da 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h @@ -13,10 +13,10 @@ #include +#include #include #include "api/units/timestamp.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" #include "rtc_base/system/rtc_export.h" #include "system_wrappers/include/ntp_time.h" @@ -78,8 +78,12 @@ class SimulatedClock : public Clock { void AdvanceTime(TimeDelta delta); private: - Timestamp time_; - std::unique_ptr lock_; + // The time is read and incremented with relaxed order. Each thread will see + // monotonically increasing time, and when threads post tasks or messages to + // one another, the synchronization done as part of the message passing should + // ensure that any causual chain of events on multiple threads also + // corresponds to monotonically increasing time. 
+ std::atomic time_us_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/cpu_features_wrapper.h b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/cpu_features_wrapper.h index 739161afc..612b4a5d6 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/cpu_features_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/cpu_features_wrapper.h @@ -13,12 +13,10 @@ #include -#if defined(__cplusplus) || defined(c_plusplus) -extern "C" { -#endif +namespace webrtc { // List of features in x86. -typedef enum { kSSE2, kSSE3 } CPUFeature; +typedef enum { kSSE2, kSSE3, kAVX2 } CPUFeature; // List of features in ARM. enum { @@ -28,21 +26,17 @@ enum { kCPUFeatureLDREXSTREX = (1 << 3) }; -typedef int (*WebRtc_CPUInfo)(CPUFeature feature); - // Returns true if the CPU supports the feature. -extern WebRtc_CPUInfo WebRtc_GetCPUInfo; +int GetCPUInfo(CPUFeature feature); // No CPU feature is available => straight C path. -extern WebRtc_CPUInfo WebRtc_GetCPUInfoNoASM; +int GetCPUInfoNoASM(CPUFeature feature); // Return the features in an ARM device. // It detects the features in the hardware platform, and returns supported // values in the above enum definition as a bitmask. 
-extern uint64_t WebRtc_GetCPUFeaturesARM(void); +uint64_t GetCPUFeaturesARM(void); -#if defined(__cplusplus) || defined(c_plusplus) -} // extern "C" -#endif +} // namespace webrtc #endif // SYSTEM_WRAPPERS_INCLUDE_CPU_FEATURES_WRAPPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc index e0f4b401e..0ae624d84 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc @@ -26,7 +26,6 @@ #endif // defined(WEBRTC_POSIX) #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -239,16 +238,15 @@ Clock* Clock::GetRealTimeClock() { } SimulatedClock::SimulatedClock(int64_t initial_time_us) - : SimulatedClock(Timestamp::Micros(initial_time_us)) {} + : time_us_(initial_time_us) {} SimulatedClock::SimulatedClock(Timestamp initial_time) - : time_(initial_time), lock_(RWLockWrapper::CreateRWLock()) {} + : SimulatedClock(initial_time.us()) {} SimulatedClock::~SimulatedClock() {} Timestamp SimulatedClock::CurrentTime() { - ReadLockScoped synchronize(*lock_); - return time_; + return Timestamp::Micros(time_us_.load(std::memory_order_relaxed)); } NtpTime SimulatedClock::CurrentNtpTime() { @@ -271,9 +269,13 @@ void SimulatedClock::AdvanceTimeMicroseconds(int64_t microseconds) { AdvanceTime(TimeDelta::Micros(microseconds)); } +// TODO(bugs.webrtc.org(12102): It's desirable to let a single thread own +// advancement of the clock. We could then replace this read-modify-write +// operation with just a thread checker. But currently, that breaks a couple of +// tests, in particular, RepeatingTaskTest.ClockIntegration and +// CallStatsTest.LastProcessedRtt. 
void SimulatedClock::AdvanceTime(TimeDelta delta) { - WriteLockScoped synchronize(*lock_); - time_ += delta; + time_us_.fetch_add(delta.us(), std::memory_order_relaxed); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc index ebcb48c15..0f8121289 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc @@ -12,11 +12,14 @@ #include "rtc_base/system/arch.h" #include "system_wrappers/include/cpu_features_wrapper.h" +#include "system_wrappers/include/field_trial.h" #if defined(WEBRTC_ARCH_X86_FAMILY) && defined(_MSC_VER) #include #endif +namespace webrtc { + // No CPU feature is available => straight C path. int GetCPUInfoNoASM(CPUFeature feature) { (void)feature; @@ -24,6 +27,22 @@ int GetCPUInfoNoASM(CPUFeature feature) { } #if defined(WEBRTC_ARCH_X86_FAMILY) + +#if defined(WEBRTC_ENABLE_AVX2) +// xgetbv returns the value of an Intel Extended Control Register (XCR). +// Currently only XCR0 is defined by Intel so |xcr| should always be zero. +static uint64_t xgetbv(uint32_t xcr) { +#if defined(_MSC_VER) + return _xgetbv(xcr); +#else + uint32_t eax, edx; + + __asm__ volatile("xgetbv" : "=a"(eax), "=d"(edx) : "c"(xcr)); + return (static_cast(edx) << 32) | eax; +#endif // _MSC_VER +} +#endif // WEBRTC_ENABLE_AVX2 + #ifndef _MSC_VER // Intrinsic for "cpuid". #if defined(__pic__) && defined(__i386__) @@ -41,7 +60,7 @@ static inline void __cpuid(int cpu_info[4], int info_type) { __asm__ volatile("cpuid\n" : "=a"(cpu_info[0]), "=b"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3]) - : "a"(info_type)); + : "a"(info_type), "c"(0)); } #endif #endif // _MSC_VER @@ -49,7 +68,7 @@ static inline void __cpuid(int cpu_info[4], int info_type) { #if defined(WEBRTC_ARCH_X86_FAMILY) // Actual feature detection for x86. 
-static int GetCPUInfo(CPUFeature feature) { +int GetCPUInfo(CPUFeature feature) { int cpu_info[4]; __cpuid(cpu_info, 1); if (feature == kSSE2) { @@ -58,15 +77,39 @@ static int GetCPUInfo(CPUFeature feature) { if (feature == kSSE3) { return 0 != (cpu_info[2] & 0x00000001); } +#if defined(WEBRTC_ENABLE_AVX2) + if (feature == kAVX2 && + !webrtc::field_trial::IsEnabled("WebRTC-Avx2SupportKillSwitch")) { + int cpu_info7[4]; + __cpuid(cpu_info7, 0); + int num_ids = cpu_info7[0]; + if (num_ids < 7) { + return 0; + } + // Interpret CPU feature information. + __cpuid(cpu_info7, 7); + + // AVX instructions can be used when + // a) AVX are supported by the CPU, + // b) XSAVE is supported by the CPU, + // c) XSAVE is enabled by the kernel. + // See http://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled + // AVX2 support needs (avx_support && (cpu_info7[1] & 0x00000020) != 0;). + return (cpu_info[2] & 0x10000000) != 0 && + (cpu_info[2] & 0x04000000) != 0 /* XSAVE */ && + (cpu_info[2] & 0x08000000) != 0 /* OSXSAVE */ && + (xgetbv(0) & 0x00000006) == 6 /* XSAVE enabled by kernel */ && + (cpu_info7[1] & 0x00000020) != 0; + } +#endif // WEBRTC_ENABLE_AVX2 return 0; } #else // Default to straight C for other platforms. 
-static int GetCPUInfo(CPUFeature feature) { +int GetCPUInfo(CPUFeature feature) { (void)feature; return 0; } #endif -WebRtc_CPUInfo WebRtc_GetCPUInfo = GetCPUInfo; -WebRtc_CPUInfo WebRtc_GetCPUInfoNoASM = GetCPUInfoNoASM; +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_android.c b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_android.cc similarity index 85% rename from TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_android.c rename to TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_android.cc index 0cb3a6c5e..95cc609b0 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_android.c +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_android.cc @@ -10,6 +10,10 @@ #include -uint64_t WebRtc_GetCPUFeaturesARM(void) { +namespace webrtc { + +uint64_t GetCPUFeaturesARM(void) { return android_getCpuFeatures(); } + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_linux.c b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_linux.cc similarity index 87% rename from TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_linux.c rename to TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_linux.cc index 004de5a6a..335bed4da 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_linux.c +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features_linux.cc @@ -8,32 +8,39 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include #include #include -#include -#ifndef __GLIBC_PREREQ -#define __GLIBC_PREREQ(a, b) 0 + +#ifdef __GLIBC_PREREQ +#define WEBRTC_GLIBC_PREREQ(a, b) __GLIBC_PREREQ(a, b) +#else +#define WEBRTC_GLIBC_PREREQ(a, b) 0 #endif -#if __GLIBC_PREREQ(2, 16) + +#if WEBRTC_GLIBC_PREREQ(2, 16) #include #else -#include -#include #include +#include #include +#include #endif + #include "rtc_base/system/arch.h" #include "system_wrappers/include/cpu_features_wrapper.h" #if defined(WEBRTC_ARCH_ARM_FAMILY) #include -uint64_t WebRtc_GetCPUFeaturesARM(void) { +namespace webrtc { + +uint64_t GetCPUFeaturesARM(void) { uint64_t result = 0; int architecture = 0; - unsigned long hwcap = 0; + uint64_t hwcap = 0; const char* platform = NULL; -#if __GLIBC_PREREQ(2, 16) +#if WEBRTC_GLIBC_PREREQ(2, 16) hwcap = getauxval(AT_HWCAP); platform = (const char*)getauxval(AT_PLATFORM); #else @@ -57,7 +64,7 @@ uint64_t WebRtc_GetCPUFeaturesARM(void) { } close(fd); } -#endif // __GLIBC_PREREQ(2,16) +#endif // WEBRTC_GLIBC_PREREQ(2, 16) #if defined(__aarch64__) architecture = 8; if ((hwcap & HWCAP_FP) != 0) @@ -84,4 +91,6 @@ uint64_t WebRtc_GetCPUFeaturesARM(void) { result |= kCPUFeatureLDREXSTREX; return result; } + +} // namespace webrtc #endif // WEBRTC_ARCH_ARM_FAMILY diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc new file mode 100644 index 000000000..fb455193f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc @@ -0,0 +1,58 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include + +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "video/adaptation/balanced_constraint.h" + +namespace webrtc { + +BalancedConstraint::BalancedConstraint( + DegradationPreferenceProvider* degradation_preference_provider) + : encoder_target_bitrate_bps_(absl::nullopt), + degradation_preference_provider_(degradation_preference_provider) { + RTC_DCHECK(degradation_preference_provider_); + sequence_checker_.Detach(); +} + +void BalancedConstraint::OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps); +} + +bool BalancedConstraint::IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // Don't adapt if BalancedDegradationSettings applies and determines this will + // exceed bitrate constraints. 
+ if (degradation_preference_provider_->degradation_preference() == + DegradationPreference::BALANCED && + !balanced_settings_.CanAdaptUp(input_state.video_codec_type(), + input_state.frame_size_pixels().value(), + encoder_target_bitrate_bps_.value_or(0))) { + return false; + } + if (DidIncreaseResolution(restrictions_before, restrictions_after) && + !balanced_settings_.CanAdaptUpResolution( + input_state.video_codec_type(), + input_state.frame_size_pixels().value(), + encoder_target_bitrate_bps_.value_or(0))) { + return false; + } + return true; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h new file mode 100644 index 000000000..5e0240802 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h @@ -0,0 +1,50 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ +#define VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ + +#include + +#include "absl/types/optional.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/degradation_preference_provider.h" +#include "rtc_base/experiments/balanced_degradation_settings.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +class BalancedConstraint : public AdaptationConstraint { + public: + explicit BalancedConstraint( + DegradationPreferenceProvider* degradation_preference_provider); + ~BalancedConstraint() override = default; + + void OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps); + + // AdaptationConstraint implementation. + std::string Name() const override { return "BalancedConstraint"; } + bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const override; + + private: + SequenceChecker sequence_checker_; + absl::optional encoder_target_bitrate_bps_ + RTC_GUARDED_BY(&sequence_checker_); + const BalancedDegradationSettings balanced_settings_; + const DegradationPreferenceProvider* degradation_preference_provider_; +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc new file mode 100644 index 000000000..1061c4557 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc @@ -0,0 +1,65 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "call/adaptation/video_stream_adapter.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "video/adaptation/bitrate_constraint.h" + +namespace webrtc { + +BitrateConstraint::BitrateConstraint() + : encoder_settings_(absl::nullopt), + encoder_target_bitrate_bps_(absl::nullopt) { + sequence_checker_.Detach(); +} + +void BitrateConstraint::OnEncoderSettingsUpdated( + absl::optional encoder_settings) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + encoder_settings_ = std::move(encoder_settings); +} + +void BitrateConstraint::OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps); +} + +bool BitrateConstraint::IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // Make sure bitrate limits are not violated. + if (DidIncreaseResolution(restrictions_before, restrictions_after)) { + uint32_t bitrate_bps = encoder_target_bitrate_bps_.value_or(0); + absl::optional bitrate_limits = + encoder_settings_.has_value() + ? encoder_settings_->encoder_info() + .GetEncoderBitrateLimitsForResolution( + // Need some sort of expected resulting pixels to be used + // instead of unrestricted. 
+ GetHigherResolutionThan( + input_state.frame_size_pixels().value())) + : absl::nullopt; + if (bitrate_limits.has_value() && bitrate_bps != 0) { + RTC_DCHECK_GE(bitrate_limits->frame_size_pixels, + input_state.frame_size_pixels().value()); + return bitrate_bps >= + static_cast(bitrate_limits->min_start_bitrate_bps); + } + } + return true; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.h new file mode 100644 index 000000000..015edcc13 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.h @@ -0,0 +1,52 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_ +#define VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_ + +#include + +#include "absl/types/optional.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/encoder_settings.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +class BitrateConstraint : public AdaptationConstraint { + public: + BitrateConstraint(); + ~BitrateConstraint() override = default; + + void OnEncoderSettingsUpdated( + absl::optional encoder_settings); + void OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps); + + // AdaptationConstraint implementation. 
+ std::string Name() const override { return "BitrateConstraint"; } + bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const override; + + private: + SequenceChecker sequence_checker_; + absl::optional encoder_settings_ + RTC_GUARDED_BY(&sequence_checker_); + absl::optional encoder_target_bitrate_bps_ + RTC_GUARDED_BY(&sequence_checker_); +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc index ff8f1712d..c43848818 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc @@ -12,6 +12,7 @@ #include +#include "rtc_base/checks.h" #include "rtc_base/experiments/balanced_degradation_settings.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/task_utils/to_queued_task.h" @@ -19,27 +20,14 @@ namespace webrtc { -namespace { - -const int64_t kUnderuseDueToDisabledCooldownMs = 1000; - -} // namespace - // static -rtc::scoped_refptr QualityScalerResource::Create( - DegradationPreferenceProvider* degradation_preference_provider) { - return new rtc::RefCountedObject( - degradation_preference_provider); +rtc::scoped_refptr QualityScalerResource::Create() { + return new rtc::RefCountedObject(); } -QualityScalerResource::QualityScalerResource( - DegradationPreferenceProvider* degradation_preference_provider) +QualityScalerResource::QualityScalerResource() : VideoStreamEncoderResource("QualityScalerResource"), - quality_scaler_(nullptr), - last_underuse_due_to_disabled_timestamp_ms_(absl::nullopt), - degradation_preference_provider_(degradation_preference_provider) { - RTC_CHECK(degradation_preference_provider_); -} + quality_scaler_(nullptr) {} 
QualityScalerResource::~QualityScalerResource() { RTC_DCHECK(!quality_scaler_); @@ -60,6 +48,7 @@ void QualityScalerResource::StartCheckForOveruse( void QualityScalerResource::StopCheckForOveruse() { RTC_DCHECK_RUN_ON(encoder_queue()); + RTC_DCHECK(is_started()); // Ensure we have no pending callbacks. This makes it safe to destroy the // QualityScaler and even task queues with tasks in-flight. quality_scaler_.reset(); @@ -83,21 +72,6 @@ void QualityScalerResource::OnEncodeCompleted(const EncodedImage& encoded_image, RTC_DCHECK_RUN_ON(encoder_queue()); if (quality_scaler_ && encoded_image.qp_ >= 0) { quality_scaler_->ReportQp(encoded_image.qp_, time_sent_in_us); - } else if (!quality_scaler_) { - // Reference counting guarantees that this object is still alive by the time - // the task is executed. - // TODO(webrtc:11553): this is a workaround to ensure that all quality - // scaler imposed limitations are removed once qualty scaler is disabled - // mid call. - // Instead it should be done at a higher layer in the same way for all - // resources. 
- int64_t timestamp_ms = rtc::TimeMillis(); - if (!last_underuse_due_to_disabled_timestamp_ms_.has_value() || - timestamp_ms - last_underuse_due_to_disabled_timestamp_ms_.value() >= - kUnderuseDueToDisabledCooldownMs) { - last_underuse_due_to_disabled_timestamp_ms_ = timestamp_ms; - OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse); - } } } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h index 27c255567..06c22ca3c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h @@ -32,18 +32,15 @@ namespace webrtc { class QualityScalerResource : public VideoStreamEncoderResource, public QualityScalerQpUsageHandlerInterface { public: - static rtc::scoped_refptr Create( - DegradationPreferenceProvider* degradation_preference_provider); + static rtc::scoped_refptr Create(); - explicit QualityScalerResource( - DegradationPreferenceProvider* degradation_preference_provider); + QualityScalerResource(); ~QualityScalerResource() override; bool is_started() const; void StartCheckForOveruse(VideoEncoder::QpThresholds qp_thresholds); void StopCheckForOveruse(); - void SetQpThresholds(VideoEncoder::QpThresholds qp_thresholds); bool QpFastFilterLow(); void OnEncodeCompleted(const EncodedImage& encoded_image, @@ -55,15 +52,8 @@ class QualityScalerResource : public VideoStreamEncoderResource, void OnReportQpUsageLow() override; private: - // Members accessed on the encoder queue. std::unique_ptr quality_scaler_ RTC_GUARDED_BY(encoder_queue()); - // The timestamp of the last time we reported underuse because this resource - // was disabled in order to prevent getting stuck with QP adaptations. Used to - // make sure underuse reporting is not too spammy. 
- absl::optional last_underuse_due_to_disabled_timestamp_ms_ - RTC_GUARDED_BY(encoder_queue()); - DegradationPreferenceProvider* const degradation_preference_provider_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc index 16c8b3d5b..c7ca4bccf 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc @@ -22,11 +22,14 @@ #include "api/video/video_adaptation_reason.h" #include "api/video/video_source_interface.h" #include "call/adaptation/video_source_restrictions.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/time_utils.h" +#include "video/adaptation/quality_scaler_resource.h" namespace webrtc { @@ -52,6 +55,7 @@ std::string ToString(VideoAdaptationReason reason) { case VideoAdaptationReason::kCpu: return "cpu"; } + RTC_CHECK_NOTREACHED(); } } // namespace @@ -128,137 +132,6 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { int initial_framedrop_; }; -VideoStreamEncoderResourceManager::BitrateConstraint::BitrateConstraint( - VideoStreamEncoderResourceManager* manager) - : manager_(manager), - resource_adaptation_queue_(nullptr), - encoder_settings_(absl::nullopt), - encoder_target_bitrate_bps_(absl::nullopt) {} - -void VideoStreamEncoderResourceManager::BitrateConstraint::SetAdaptationQueue( - TaskQueueBase* resource_adaptation_queue) { - resource_adaptation_queue_ = resource_adaptation_queue; -} - -void VideoStreamEncoderResourceManager::BitrateConstraint:: - OnEncoderSettingsUpdated(absl::optional encoder_settings) { - 
RTC_DCHECK_RUN_ON(manager_->encoder_queue_); - resource_adaptation_queue_->PostTask( - ToQueuedTask([this_ref = rtc::scoped_refptr(this), - encoder_settings] { - RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_); - this_ref->encoder_settings_ = std::move(encoder_settings); - })); -} - -void VideoStreamEncoderResourceManager::BitrateConstraint:: - OnEncoderTargetBitrateUpdated( - absl::optional encoder_target_bitrate_bps) { - RTC_DCHECK_RUN_ON(manager_->encoder_queue_); - resource_adaptation_queue_->PostTask( - ToQueuedTask([this_ref = rtc::scoped_refptr(this), - encoder_target_bitrate_bps] { - RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_); - this_ref->encoder_target_bitrate_bps_ = encoder_target_bitrate_bps; - })); -} - -bool VideoStreamEncoderResourceManager::BitrateConstraint:: - IsAdaptationUpAllowed(const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - rtc::scoped_refptr reason_resource) const { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); - VideoAdaptationReason reason = - manager_->GetReasonFromResource(reason_resource); - // If increasing resolution due to kQuality, make sure bitrate limits are not - // violated. - // TODO(https://crbug.com/webrtc/11771): Why are we allowing violating bitrate - // constraints if adapting due to CPU? Shouldn't this condition be checked - // regardless of reason? - if (reason == VideoAdaptationReason::kQuality && - DidIncreaseResolution(restrictions_before, restrictions_after)) { - uint32_t bitrate_bps = encoder_target_bitrate_bps_.value_or(0); - absl::optional bitrate_limits = - encoder_settings_.has_value() - ? encoder_settings_->encoder_info() - .GetEncoderBitrateLimitsForResolution( - // Need some sort of expected resulting pixels to be used - // instead of unrestricted. 
- GetHigherResolutionThan( - input_state.frame_size_pixels().value())) - : absl::nullopt; - if (bitrate_limits.has_value() && bitrate_bps != 0) { - RTC_DCHECK_GE(bitrate_limits->frame_size_pixels, - input_state.frame_size_pixels().value()); - return bitrate_bps >= - static_cast(bitrate_limits->min_start_bitrate_bps); - } - } - return true; -} - -VideoStreamEncoderResourceManager::BalancedConstraint::BalancedConstraint( - VideoStreamEncoderResourceManager* manager, - DegradationPreferenceProvider* degradation_preference_provider) - : manager_(manager), - resource_adaptation_queue_(nullptr), - encoder_target_bitrate_bps_(absl::nullopt), - degradation_preference_provider_(degradation_preference_provider) { - RTC_DCHECK(manager_); - RTC_DCHECK(degradation_preference_provider_); -} - -void VideoStreamEncoderResourceManager::BalancedConstraint::SetAdaptationQueue( - TaskQueueBase* resource_adaptation_queue) { - resource_adaptation_queue_ = resource_adaptation_queue; -} - -void VideoStreamEncoderResourceManager::BalancedConstraint:: - OnEncoderTargetBitrateUpdated( - absl::optional encoder_target_bitrate_bps) { - RTC_DCHECK_RUN_ON(manager_->encoder_queue_); - resource_adaptation_queue_->PostTask( - ToQueuedTask([this_ref = rtc::scoped_refptr(this), - encoder_target_bitrate_bps] { - RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_); - this_ref->encoder_target_bitrate_bps_ = encoder_target_bitrate_bps; - })); -} - -bool VideoStreamEncoderResourceManager::BalancedConstraint:: - IsAdaptationUpAllowed(const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - rtc::scoped_refptr reason_resource) const { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); - VideoAdaptationReason reason = - manager_->GetReasonFromResource(reason_resource); - // Don't adapt if BalancedDegradationSettings applies and determines this will - // exceed bitrate constraints. 
- // TODO(https://crbug.com/webrtc/11771): Why are we allowing violating - // balanced settings if adapting due CPU? Shouldn't this condition be checked - // regardless of reason? - if (reason == VideoAdaptationReason::kQuality && - degradation_preference_provider_->degradation_preference() == - DegradationPreference::BALANCED && - !manager_->balanced_settings_.CanAdaptUp( - input_state.video_codec_type(), - input_state.frame_size_pixels().value(), - encoder_target_bitrate_bps_.value_or(0))) { - return false; - } - if (reason == VideoAdaptationReason::kQuality && - DidIncreaseResolution(restrictions_before, restrictions_after) && - !manager_->balanced_settings_.CanAdaptUpResolution( - input_state.video_codec_type(), - input_state.frame_size_pixels().value(), - encoder_target_bitrate_bps_.value_or(0))) { - return false; - } - return true; -} - VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( VideoStreamInputStateProvider* input_state_provider, VideoStreamEncoderObserver* encoder_stats_observer, @@ -267,16 +140,13 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( std::unique_ptr overuse_detector, DegradationPreferenceProvider* degradation_preference_provider) : degradation_preference_provider_(degradation_preference_provider), - bitrate_constraint_(new rtc::RefCountedObject(this)), - balanced_constraint_(new rtc::RefCountedObject( - this, + bitrate_constraint_(std::make_unique()), + balanced_constraint_(std::make_unique( degradation_preference_provider_)), encode_usage_resource_( EncodeUsageResource::Create(std::move(overuse_detector))), - quality_scaler_resource_( - QualityScalerResource::Create(degradation_preference_provider_)), + quality_scaler_resource_(QualityScalerResource::Create()), encoder_queue_(nullptr), - resource_adaptation_queue_(nullptr), input_state_provider_(input_state_provider), adaptation_processor_(nullptr), encoder_stats_observer_(encoder_stats_observer), @@ -293,24 +163,16 @@ 
VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( encoder_settings_(absl::nullopt) { RTC_CHECK(degradation_preference_provider_); RTC_CHECK(encoder_stats_observer_); - MapResourceToReason(encode_usage_resource_, VideoAdaptationReason::kCpu); - MapResourceToReason(quality_scaler_resource_, - VideoAdaptationReason::kQuality); } -VideoStreamEncoderResourceManager::~VideoStreamEncoderResourceManager() {} +VideoStreamEncoderResourceManager::~VideoStreamEncoderResourceManager() = + default; void VideoStreamEncoderResourceManager::Initialize( - rtc::TaskQueue* encoder_queue, - rtc::TaskQueue* resource_adaptation_queue) { + rtc::TaskQueue* encoder_queue) { RTC_DCHECK(!encoder_queue_); RTC_DCHECK(encoder_queue); - RTC_DCHECK(!resource_adaptation_queue_); - RTC_DCHECK(resource_adaptation_queue); encoder_queue_ = encoder_queue; - resource_adaptation_queue_ = resource_adaptation_queue; - bitrate_constraint_->SetAdaptationQueue(resource_adaptation_queue_->Get()); - balanced_constraint_->SetAdaptationQueue(resource_adaptation_queue_->Get()); encode_usage_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get()); quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get()); } @@ -318,7 +180,7 @@ void VideoStreamEncoderResourceManager::Initialize( void VideoStreamEncoderResourceManager::SetAdaptationProcessor( ResourceAdaptationProcessorInterface* adaptation_processor, VideoStreamAdapter* stream_adapter) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_); adaptation_processor_ = adaptation_processor; stream_adapter_ = stream_adapter; } @@ -341,48 +203,55 @@ void VideoStreamEncoderResourceManager::EnsureEncodeUsageResourceStarted() { RTC_DCHECK(encoder_settings_.has_value()); if (encode_usage_resource_->is_started()) { encode_usage_resource_->StopCheckForOveruse(); + } else { + // If the resource has not yet started then it needs to be added. 
+ AddResource(encode_usage_resource_, VideoAdaptationReason::kCpu); } encode_usage_resource_->StartCheckForOveruse(GetCpuOveruseOptions()); } void VideoStreamEncoderResourceManager::StopManagedResources() { RTC_DCHECK_RUN_ON(encoder_queue_); - encode_usage_resource_->StopCheckForOveruse(); - quality_scaler_resource_->StopCheckForOveruse(); + RTC_DCHECK(adaptation_processor_); + if (encode_usage_resource_->is_started()) { + encode_usage_resource_->StopCheckForOveruse(); + RemoveResource(encode_usage_resource_); + } + if (quality_scaler_resource_->is_started()) { + quality_scaler_resource_->StopCheckForOveruse(); + RemoveResource(quality_scaler_resource_); + } } -void VideoStreamEncoderResourceManager::MapResourceToReason( +void VideoStreamEncoderResourceManager::AddResource( rtc::scoped_refptr resource, VideoAdaptationReason reason) { - MutexLock lock(&resource_lock_); + RTC_DCHECK_RUN_ON(encoder_queue_); RTC_DCHECK(resource); - RTC_DCHECK(absl::c_find_if(resources_, - [resource](const ResourceAndReason& r) { - return r.resource == resource; - }) == resources_.end()) - << "Resource " << resource->Name() << " already was inserted"; - resources_.emplace_back(resource, reason); + bool inserted; + std::tie(std::ignore, inserted) = resources_.emplace(resource, reason); + RTC_DCHECK(inserted) << "Resurce " << resource->Name() + << " already was inserted"; + adaptation_processor_->AddResource(resource); } -std::vector> -VideoStreamEncoderResourceManager::MappedResources() const { - MutexLock lock(&resource_lock_); - std::vector> resources; - for (auto const& resource_and_reason : resources_) { - resources.push_back(resource_and_reason.resource); +void VideoStreamEncoderResourceManager::RemoveResource( + rtc::scoped_refptr resource) { + { + RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK(resource); + const auto& it = resources_.find(resource); + RTC_DCHECK(it != resources_.end()) + << "Resource \"" << resource->Name() << "\" not found."; + resources_.erase(it); } - return 
resources; + adaptation_processor_->RemoveResource(resource); } std::vector VideoStreamEncoderResourceManager::AdaptationConstraints() const { - return {bitrate_constraint_, balanced_constraint_}; -} - -rtc::scoped_refptr -VideoStreamEncoderResourceManager::quality_scaler_resource_for_testing() { - MutexLock lock(&resource_lock_); - return quality_scaler_resource_; + RTC_DCHECK_RUN_ON(encoder_queue_); + return {bitrate_constraint_.get(), balanced_constraint_.get()}; } void VideoStreamEncoderResourceManager::SetEncoderSettings( @@ -429,24 +298,12 @@ void VideoStreamEncoderResourceManager::SetEncoderRates( void VideoStreamEncoderResourceManager::OnFrameDroppedDueToSize() { RTC_DCHECK_RUN_ON(encoder_queue_); - // The VideoStreamEncoder makes the manager outlive the adaptation queue. This - // means that if the task gets executed, |this| has not been freed yet. - // TODO(https://crbug.com/webrtc/11565): When the manager no longer outlives - // the adaptation queue, add logic to prevent use-after-free on |this|. - resource_adaptation_queue_->PostTask([this] { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); - if (!adaptation_processor_) { - // The processor nulled before this task had a chance to execute. This - // happens if the processor is destroyed. No action needed. 
- return; - } - Adaptation reduce_resolution = stream_adapter_->GetAdaptDownResolution(); - if (reduce_resolution.status() == Adaptation::Status::kValid) { - stream_adapter_->ApplyAdaptation(reduce_resolution, - quality_scaler_resource_); - } - }); initial_frame_dropper_->OnFrameDroppedDueToSize(); + Adaptation reduce_resolution = stream_adapter_->GetAdaptDownResolution(); + if (reduce_resolution.status() == Adaptation::Status::kValid) { + stream_adapter_->ApplyAdaptation(reduce_resolution, + quality_scaler_resource_); + } } void VideoStreamEncoderResourceManager::OnEncodeStarted( @@ -468,7 +325,6 @@ void VideoStreamEncoderResourceManager::OnEncodeCompleted( encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec; encode_usage_resource_->OnEncodeCompleted( timestamp, time_sent_in_us, capture_time_us, encode_duration_us); - // Inform |quality_scaler_resource_| of the encode completed event. quality_scaler_resource_->OnEncodeCompleted(encoded_image, time_sent_in_us); } @@ -486,7 +342,7 @@ bool VideoStreamEncoderResourceManager::DropInitialFrames() const { void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() { RTC_DCHECK_RUN_ON(encoder_queue_); initial_frame_dropper_->OnMaybeEncodeFrame(); - if (quality_rampup_experiment_) { + if (quality_rampup_experiment_ && quality_scaler_resource_->is_started()) { DataRate bandwidth = encoder_rates_.has_value() ? 
encoder_rates_->bandwidth_allocation : DataRate::Zero(); @@ -502,10 +358,15 @@ void VideoStreamEncoderResourceManager::UpdateQualityScalerSettings( absl::optional qp_thresholds) { RTC_DCHECK_RUN_ON(encoder_queue_); if (qp_thresholds.has_value()) { + if (quality_scaler_resource_->is_started()) { + quality_scaler_resource_->SetQpThresholds(qp_thresholds.value()); + } else { + quality_scaler_resource_->StartCheckForOveruse(qp_thresholds.value()); + AddResource(quality_scaler_resource_, VideoAdaptationReason::kQuality); + } + } else if (quality_scaler_resource_->is_started()) { quality_scaler_resource_->StopCheckForOveruse(); - quality_scaler_resource_->StartCheckForOveruse(qp_thresholds.value()); - } else { - quality_scaler_resource_->StopCheckForOveruse(); + RemoveResource(quality_scaler_resource_); } initial_frame_dropper_->OnQualityScalerSettingsUpdated(); } @@ -554,14 +415,11 @@ void VideoStreamEncoderResourceManager::ConfigureQualityScaler( VideoAdaptationReason VideoStreamEncoderResourceManager::GetReasonFromResource( rtc::scoped_refptr resource) const { - MutexLock lock(&resource_lock_); - const auto& registered_resource = - absl::c_find_if(resources_, [&resource](const ResourceAndReason& r) { - return r.resource == resource; - }); + RTC_DCHECK_RUN_ON(encoder_queue_); + const auto& registered_resource = resources_.find(resource); RTC_DCHECK(registered_resource != resources_.end()) << resource->Name() << " not found."; - return registered_resource->reason; + return registered_resource->second; } // TODO(pbos): Lower these thresholds (to closer to 100%) when we handle @@ -598,28 +456,23 @@ void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated( const VideoAdaptationCounters& adaptation_counters, rtc::scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_); // TODO(bugs.webrtc.org/11553) Remove reason parameter and add reset callback. 
if (!reason && adaptation_counters.Total() == 0) { // Adaptation was manually reset - clear the per-reason counters too. encoder_stats_observer_->ClearAdaptationStats(); } - // The VideoStreamEncoder makes the manager outlive the encoder queue. This - // means that if the task gets executed, |this| has not been freed yet. - encoder_queue_->PostTask([this, restrictions] { - RTC_DCHECK_RUN_ON(encoder_queue_); - video_source_restrictions_ = FilterRestrictionsByDegradationPreference( - restrictions, degradation_preference_); - MaybeUpdateTargetFrameRate(); - }); + video_source_restrictions_ = FilterRestrictionsByDegradationPreference( + restrictions, degradation_preference_); + MaybeUpdateTargetFrameRate(); } void VideoStreamEncoderResourceManager::OnResourceLimitationChanged( rtc::scoped_refptr resource, const std::map, VideoAdaptationCounters>& resource_limitations) { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_); if (!resource) { encoder_stats_observer_->ClearAdaptationStats(); return; @@ -641,19 +494,14 @@ void VideoStreamEncoderResourceManager::OnResourceLimitationChanged( adaptation_reason, limitations[VideoAdaptationReason::kCpu], limitations[VideoAdaptationReason::kQuality]); - encoder_queue_->PostTask(ToQueuedTask( - [cpu_limited = limitations.at(VideoAdaptationReason::kCpu).Total() > 0, - qp_resolution_adaptations = - limitations.at(VideoAdaptationReason::kQuality) - .resolution_adaptations, - this]() { - RTC_DCHECK_RUN_ON(encoder_queue_); - if (quality_rampup_experiment_) { - quality_rampup_experiment_->cpu_adapted(cpu_limited); - quality_rampup_experiment_->qp_resolution_adaptations( - qp_resolution_adaptations); - } - })); + if (quality_rampup_experiment_) { + bool cpu_limited = limitations.at(VideoAdaptationReason::kCpu).Total() > 0; + auto qp_resolution_adaptations = + limitations.at(VideoAdaptationReason::kQuality).resolution_adaptations; + quality_rampup_experiment_->cpu_adapted(cpu_limited); + 
quality_rampup_experiment_->qp_resolution_adaptations( + qp_resolution_adaptations); + } RTC_LOG(LS_INFO) << ActiveCountsToString(limitations); } @@ -716,19 +564,7 @@ std::string VideoStreamEncoderResourceManager::ActiveCountsToString( void VideoStreamEncoderResourceManager::OnQualityRampUp() { RTC_DCHECK_RUN_ON(encoder_queue_); - // The VideoStreamEncoder makes the manager outlive the adaptation queue. - // This means that if the task gets executed, |this| has not been freed yet. - // TODO(https://crbug.com/webrtc/11565): When the manager no longer outlives - // the adaptation queue, add logic to prevent use-after-free on |this|. - resource_adaptation_queue_->PostTask([this] { - RTC_DCHECK_RUN_ON(resource_adaptation_queue_); - if (!stream_adapter_) { - // The processor nulled before this task had a chance to execute. This - // happens if the processor is destroyed. No action needed. - return; - } - stream_adapter_->ClearRestrictions(); - }); + stream_adapter_->ClearRestrictions(); quality_rampup_experiment_.reset(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h index 10d0e66c1..932d90c20 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h @@ -40,7 +40,10 @@ #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" +#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" +#include "video/adaptation/balanced_constraint.h" +#include "video/adaptation/bitrate_constraint.h" #include "video/adaptation/encode_usage_resource.h" #include "video/adaptation/overuse_frame_detector.h" #include "video/adaptation/quality_rampup_experiment_helper.h" @@ -77,8 +80,7 @@ class 
VideoStreamEncoderResourceManager DegradationPreferenceProvider* degradation_preference_provider); ~VideoStreamEncoderResourceManager() override; - void Initialize(rtc::TaskQueue* encoder_queue, - rtc::TaskQueue* resource_adaptation_queue); + void Initialize(rtc::TaskQueue* encoder_queue); void SetAdaptationProcessor( ResourceAdaptationProcessorInterface* adaptation_processor, VideoStreamAdapter* stream_adapter); @@ -115,12 +117,10 @@ class VideoStreamEncoderResourceManager // Resources need to be mapped to an AdaptReason (kCpu or kQuality) in order // to update legacy getStats(). - void MapResourceToReason(rtc::scoped_refptr resource, - VideoAdaptationReason reason); - std::vector> MappedResources() const; + void AddResource(rtc::scoped_refptr resource, + VideoAdaptationReason reason); + void RemoveResource(rtc::scoped_refptr resource); std::vector AdaptationConstraints() const; - rtc::scoped_refptr - quality_scaler_resource_for_testing(); // If true, the VideoStreamEncoder should eexecute its logic to maybe drop // frames baseed on size and bitrate. bool DropInitialFrames() const; @@ -163,84 +163,19 @@ class VideoStreamEncoderResourceManager const std::map& active_counts); - // TODO(hbos): Add tests for manager's constraints. - // Does not trigger adaptations, only prevents adapting up resolution. - class BitrateConstraint : public rtc::RefCountInterface, - public AdaptationConstraint { - public: - explicit BitrateConstraint(VideoStreamEncoderResourceManager* manager); - ~BitrateConstraint() override = default; - - void SetAdaptationQueue(TaskQueueBase* resource_adaptation_queue); - void OnEncoderSettingsUpdated( - absl::optional encoder_settings); - void OnEncoderTargetBitrateUpdated( - absl::optional encoder_target_bitrate_bps); - - // AdaptationConstraint implementation. 
- std::string Name() const override { return "BitrateConstraint"; } - bool IsAdaptationUpAllowed( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - rtc::scoped_refptr reason_resource) const override; - - private: - // The |manager_| must be alive as long as this resource is added to the - // ResourceAdaptationProcessor, i.e. when IsAdaptationUpAllowed() is called. - VideoStreamEncoderResourceManager* const manager_; - TaskQueueBase* resource_adaptation_queue_; - absl::optional encoder_settings_ - RTC_GUARDED_BY(resource_adaptation_queue_); - absl::optional encoder_target_bitrate_bps_ - RTC_GUARDED_BY(resource_adaptation_queue_); - }; - - // Does not trigger adaptations, only prevents adapting up in BALANCED. - class BalancedConstraint : public rtc::RefCountInterface, - public AdaptationConstraint { - public: - BalancedConstraint( - VideoStreamEncoderResourceManager* manager, - DegradationPreferenceProvider* degradation_preference_provider); - ~BalancedConstraint() override = default; - - void SetAdaptationQueue(TaskQueueBase* resource_adaptation_queue); - void OnEncoderTargetBitrateUpdated( - absl::optional encoder_target_bitrate_bps); - - // AdaptationConstraint implementation. - std::string Name() const override { return "BalancedConstraint"; } - bool IsAdaptationUpAllowed( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - rtc::scoped_refptr reason_resource) const override; - - private: - // The |manager_| must be alive as long as this resource is added to the - // ResourceAdaptationProcessor, i.e. when IsAdaptationUpAllowed() is called. 
- VideoStreamEncoderResourceManager* const manager_; - TaskQueueBase* resource_adaptation_queue_; - absl::optional encoder_target_bitrate_bps_ - RTC_GUARDED_BY(resource_adaptation_queue_); - DegradationPreferenceProvider* degradation_preference_provider_; - }; - DegradationPreferenceProvider* const degradation_preference_provider_; - const rtc::scoped_refptr bitrate_constraint_; - const rtc::scoped_refptr balanced_constraint_; + std::unique_ptr bitrate_constraint_ + RTC_GUARDED_BY(encoder_queue_); + const std::unique_ptr balanced_constraint_ + RTC_GUARDED_BY(encoder_queue_); const rtc::scoped_refptr encode_usage_resource_; const rtc::scoped_refptr quality_scaler_resource_; rtc::TaskQueue* encoder_queue_; - rtc::TaskQueue* resource_adaptation_queue_; VideoStreamInputStateProvider* const input_state_provider_ RTC_GUARDED_BY(encoder_queue_); - ResourceAdaptationProcessorInterface* adaptation_processor_ - RTC_GUARDED_BY(resource_adaptation_queue_); - VideoStreamAdapter* stream_adapter_ - RTC_GUARDED_BY(resource_adaptation_queue_); + ResourceAdaptationProcessorInterface* adaptation_processor_; + VideoStreamAdapter* stream_adapter_ RTC_GUARDED_BY(encoder_queue_); // Thread-safe. VideoStreamEncoderObserver* const encoder_stats_observer_; @@ -265,17 +200,8 @@ class VideoStreamEncoderResourceManager // Ties a resource to a reason for statistical reporting. This AdaptReason is // also used by this module to make decisions about how to adapt up/down. 
- struct ResourceAndReason { - ResourceAndReason(rtc::scoped_refptr resource, - VideoAdaptationReason reason) - : resource(resource), reason(reason) {} - virtual ~ResourceAndReason() = default; - - const rtc::scoped_refptr resource; - const VideoAdaptationReason reason; - }; - mutable Mutex resource_lock_; - std::vector resources_ RTC_GUARDED_BY(&resource_lock_); + std::map, VideoAdaptationReason> resources_ + RTC_GUARDED_BY(encoder_queue_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.cc b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.cc new file mode 100644 index 000000000..b08f2f184 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.cc @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/alignment_adjuster.h" + +#include +#include + +#include "absl/algorithm/container.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +// Round each scale factor to the closest rational in form alignment/i where i +// is a multiple of |requested_alignment|. Each resolution divisible by +// |alignment| will be divisible by |requested_alignment| after the scale factor +// is applied. 
+double RoundToMultiple(int alignment, + int requested_alignment, + VideoEncoderConfig* config, + bool update_config) { + double diff = 0.0; + for (auto& layer : config->simulcast_layers) { + double min_dist = std::numeric_limits::max(); + double new_scale = 1.0; + for (int i = requested_alignment; i <= alignment; + i += requested_alignment) { + double dist = std::abs(layer.scale_resolution_down_by - + alignment / static_cast(i)); + if (dist <= min_dist) { + min_dist = dist; + new_scale = alignment / static_cast(i); + } + } + diff += std::abs(layer.scale_resolution_down_by - new_scale); + if (update_config) { + RTC_LOG(LS_INFO) << "scale_resolution_down_by " + << layer.scale_resolution_down_by << " -> " << new_scale; + layer.scale_resolution_down_by = new_scale; + } + } + return diff; +} +} // namespace + +// Input: encoder_info.requested_resolution_alignment (K) +// Input: encoder_info.apply_alignment_to_all_simulcast_layers (B) +// Input: vector config->simulcast_layers.scale_resolution_down_by (S[i]) +// Output: +// If B is false, returns K and does not adjust scaling factors. +// Otherwise, returns adjusted alignment (A), adjusted scaling factors (S'[i]) +// are written in |config| such that: +// +// A / S'[i] are integers divisible by K +// sum abs(S'[i] - S[i]) -> min +// A integer <= 16 +// +// Solution chooses closest S'[i] in a form A / j where j is a multiple of K. + +int AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( + const VideoEncoder::EncoderInfo& encoder_info, + VideoEncoderConfig* config) { + const int requested_alignment = encoder_info.requested_resolution_alignment; + if (!encoder_info.apply_alignment_to_all_simulcast_layers) { + return requested_alignment; + } + + if (requested_alignment < 1 || config->number_of_streams <= 1 || + config->simulcast_layers.size() <= 1) { + return requested_alignment; + } + + // Update alignment to also apply to simulcast layers. 
+ const bool has_scale_resolution_down_by = absl::c_any_of( + config->simulcast_layers, [](const webrtc::VideoStream& layer) { + return layer.scale_resolution_down_by >= 1.0; + }); + + if (!has_scale_resolution_down_by) { + // Default resolution downscaling used (scale factors: 1, 2, 4, ...). + return requested_alignment * (1 << (config->simulcast_layers.size() - 1)); + } + + // Get alignment for downscaled layers. + // Adjust |scale_resolution_down_by| to a common multiple to limit the + // alignment value (to avoid largely cropped frames and possibly with an + // aspect ratio far from the original). + const int kMaxAlignment = 16; + + for (auto& layer : config->simulcast_layers) { + layer.scale_resolution_down_by = + std::max(layer.scale_resolution_down_by, 1.0); + layer.scale_resolution_down_by = + std::min(layer.scale_resolution_down_by, 10000.0); + } + + // Decide on common multiple to use. + double min_diff = std::numeric_limits::max(); + int best_alignment = 1; + for (int alignment = requested_alignment; alignment <= kMaxAlignment; + ++alignment) { + double diff = RoundToMultiple(alignment, requested_alignment, config, + /*update_config=*/false); + if (diff < min_diff) { + min_diff = diff; + best_alignment = alignment; + } + } + RoundToMultiple(best_alignment, requested_alignment, config, + /*update_config=*/true); + + return std::max(best_alignment, requested_alignment); +} +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h new file mode 100644 index 000000000..53d792788 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ALIGNMENT_ADJUSTER_H_ +#define VIDEO_ALIGNMENT_ADJUSTER_H_ + +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_config.h" + +namespace webrtc { + +class AlignmentAdjuster { + public: + // Returns the resolution alignment requested by the encoder (i.e + // |EncoderInfo::requested_resolution_alignment| which ensures that delivered + // frames to the encoder are divisible by this alignment). + // + // If |EncoderInfo::apply_alignment_to_all_simulcast_layers| is enabled, the + // alignment will be adjusted to ensure that each simulcast layer also is + // divisible by |requested_resolution_alignment|. The configured scale factors + // |scale_resolution_down_by| may be adjusted to a common multiple to limit + // the alignment value to avoid largely cropped frames and possibly with an + // aspect ratio far from the original. + static int GetAlignmentAndMaybeAdjustScaleFactors( + const VideoEncoder::EncoderInfo& info, + VideoEncoderConfig* config); +}; + +} // namespace webrtc + +#endif // VIDEO_ALIGNMENT_ADJUSTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc index fc9dff5b0..187bac6ee 100644 --- a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc +++ b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc @@ -73,7 +73,7 @@ BufferedFrameDecryptor::FrameDecision BufferedFrameDecryptor::DecryptFrame( frame->size()); RTC_CHECK_LE(max_plaintext_byte_size, frame->size()); // Place the decrypted frame inline into the existing frame. 
- rtc::ArrayView inline_decrypted_bitstream(frame->data(), + rtc::ArrayView inline_decrypted_bitstream(frame->mutable_data(), max_plaintext_byte_size); // Enable authenticating the header if the field trial isn't disabled. diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc index b30eeeace..0e604cd76 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc @@ -213,7 +213,7 @@ void FrameEncodeMetadataWriter::UpdateBitstream( // Make sure that the data is not copied if owned by EncodedImage. const EncodedImage& buffer = *encoded_image; rtc::Buffer modified_buffer = - SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( + SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( buffer, encoded_image->ColorSpace()); encoded_image->SetEncodedData( diff --git a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc index a12b33423..ece756b2d 100644 --- a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc @@ -48,8 +48,6 @@ namespace webrtc { namespace { static const int kFullStackTestDurationSecs = 45; -const char kVp8TrustedRateControllerFieldTrial[] = - "WebRTC-LibvpxVp8TrustedRateController/Enabled/"; struct ParamsWithLogging : public VideoQualityTest::Params { public: @@ -91,7 +89,7 @@ std::string ClipNameToClipPath(const char* clip_name) { // }; #if defined(RTC_ENABLE_VP9) -TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) { +TEST(FullStackTest, Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -105,7 +103,8 @@ TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(GenericDescriptorTest, 
ForemanCifPlr5Vp9) { +TEST(GenericDescriptorTest, + Foreman_Cif_Delay_50_0_Plr_5_VP9_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -122,7 +121,7 @@ TEST(GenericDescriptorTest, ForemanCifPlr5Vp9) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, GeneratorWithoutPacketLossVp9Profile2) { +TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { // Profile 2 might not be available on some platforms until // https://bugs.chromium.org/p/webm/issues/detail?id=1544 is solved. bool profile_2_is_supported = false; @@ -148,7 +147,7 @@ TEST(FullStackTest, GeneratorWithoutPacketLossVp9Profile2) { fixture->RunWithAnalyzer(generator); } -TEST(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) { +TEST(FullStackTest, Foreman_Cif_Net_Delay_0_0_Plr_0_Multiplex) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -162,7 +161,7 @@ TEST(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, GeneratorWithoutPacketLossMultiplexI420AFrame) { +TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_Multiplex) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging generator; @@ -179,11 +178,11 @@ TEST(FullStackTest, GeneratorWithoutPacketLossMultiplexI420AFrame) { #if defined(WEBRTC_LINUX) // Crashes on the linux trusty perf bot: bugs.webrtc.org/9129. 
-#define MAYBE_ParisQcifWithoutPacketLoss DISABLED_ParisQcifWithoutPacketLoss +#define MAYBE_Net_Delay_0_0_Plr_0 DISABLED_Net_Delay_0_0_Plr_0 #else -#define MAYBE_ParisQcifWithoutPacketLoss ParisQcifWithoutPacketLoss +#define MAYBE_Net_Delay_0_0_Plr_0 Net_Delay_0_0_Plr_0 #endif -TEST(FullStackTest, MAYBE_ParisQcifWithoutPacketLoss) { +TEST(FullStackTest, MAYBE_Net_Delay_0_0_Plr_0) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging paris_qcif; paris_qcif.call.send_side_bwe = true; @@ -197,7 +196,8 @@ TEST(FullStackTest, MAYBE_ParisQcifWithoutPacketLoss) { fixture->RunWithAnalyzer(paris_qcif); } -TEST(GenericDescriptorTest, ForemanCifWithoutPacketLoss) { +TEST(GenericDescriptorTest, + Foreman_Cif_Net_Delay_0_0_Plr_0_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif. ParamsWithLogging foreman_cif; @@ -213,7 +213,8 @@ TEST(GenericDescriptorTest, ForemanCifWithoutPacketLoss) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) { +TEST(GenericDescriptorTest, + Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -229,30 +230,8 @@ TEST(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) { fixture->RunWithAnalyzer(foreman_cif); } -// TODO(webrtc:9722): Remove when experiment is cleaned up. 
-TEST(GenericDescriptorTest, - ForemanCif30kbpsWithoutPacketLossTrustedRateControl) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); - auto fixture = CreateVideoQualityTestFixture(); - - ParamsWithLogging foreman_cif; - foreman_cif.call.send_side_bwe = true; - foreman_cif.video[0] = { - true, 352, 288, 10, - 30000, 30000, 30000, false, - "VP8", 1, 0, 0, - false, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = { - "foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl_generic_" - "descriptor", - 0.0, 0.0, kFullStackTestDurationSecs}; - foreman_cif.call.generic_descriptor = true; - fixture->RunWithAnalyzer(foreman_cif); -} - // Link capacity below default start rate. -TEST(FullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) { +TEST(FullStackTest, Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -268,7 +247,8 @@ TEST(FullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) { } // Restricted network and encoder overproducing by 30%. -TEST(FullStackTest, ForemanCifLink150kbpsBadRateController) { +TEST(FullStackTest, + Foreman_Cif_Link_150kbps_Delay100ms_30pkts_Queue_Overshoot30) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -291,7 +271,7 @@ TEST(FullStackTest, ForemanCifLink150kbpsBadRateController) { // Packet rate and loss are low enough that loss will happen with ~3s interval. // This triggers protection overhead to toggle between zero and non-zero. // Link queue is restrictive enough to trigger loss on probes. 
-TEST(FullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { +TEST(FullStackTest, Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -310,7 +290,7 @@ TEST(FullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(GenericDescriptorTest, ForemanCifPlr5) { +TEST(GenericDescriptorTest, Foreman_Cif_Delay_50_0_Plr_5_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -327,7 +307,8 @@ TEST(GenericDescriptorTest, ForemanCifPlr5) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(GenericDescriptorTest, ForemanCifPlr5Ulpfec) { +TEST(GenericDescriptorTest, + Foreman_Cif_Delay_50_0_Plr_5_Ulpfec_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -345,7 +326,7 @@ TEST(GenericDescriptorTest, ForemanCifPlr5Ulpfec) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCifPlr5Flexfec) { +TEST(FullStackTest, Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -361,7 +342,7 @@ TEST(FullStackTest, ForemanCifPlr5Flexfec) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbpsPlr3Flexfec) { +TEST(FullStackTest, Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -378,7 +359,7 @@ TEST(FullStackTest, ForemanCif500kbpsPlr3Flexfec) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbpsPlr3Ulpfec) { +TEST(FullStackTest, Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; 
foreman_cif.call.send_side_bwe = true; @@ -396,7 +377,7 @@ TEST(FullStackTest, ForemanCif500kbpsPlr3Ulpfec) { } #if defined(WEBRTC_USE_H264) -TEST(FullStackTest, ForemanCifWithoutPacketlossH264) { +TEST(FullStackTest, Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { auto fixture = CreateVideoQualityTestFixture(); // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif. ParamsWithLogging foreman_cif; @@ -411,7 +392,7 @@ TEST(FullStackTest, ForemanCifWithoutPacketlossH264) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) { +TEST(FullStackTest, Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -425,7 +406,8 @@ TEST(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(GenericDescriptorTest, ForemanCifPlr5H264) { +TEST(GenericDescriptorTest, + Foreman_Cif_Delay_50_0_Plr_5_H264_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -443,7 +425,7 @@ TEST(GenericDescriptorTest, ForemanCifPlr5H264) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { +TEST(FullStackTest, Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { test::ScopedFieldTrials override_field_trials( AppendFieldTrials("WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -463,7 +445,7 @@ TEST(FullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { } // Verify that this is worth the bot time, before enabling. 
-TEST(FullStackTest, ForemanCifPlr5H264Flexfec) { +TEST(FullStackTest, Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -481,7 +463,7 @@ TEST(FullStackTest, ForemanCifPlr5H264Flexfec) { // Ulpfec with H264 is an unsupported combination, so this test is only useful // for debugging. It is therefore disabled by default. -TEST(FullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { +TEST(FullStackTest, DISABLED_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -498,7 +480,7 @@ TEST(FullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { } #endif // defined(WEBRTC_USE_H264) -TEST(FullStackTest, ForemanCif500kbps) { +TEST(FullStackTest, Foreman_Cif_500kbps) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -515,7 +497,7 @@ TEST(FullStackTest, ForemanCif500kbps) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbpsLimitedQueue) { +TEST(FullStackTest, Foreman_Cif_500kbps_32pkts_Queue) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -532,7 +514,7 @@ TEST(FullStackTest, ForemanCif500kbpsLimitedQueue) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbps100ms) { +TEST(FullStackTest, Foreman_Cif_500kbps_100ms) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -549,7 +531,8 @@ TEST(FullStackTest, ForemanCif500kbps100ms) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) { +TEST(GenericDescriptorTest, + Foreman_Cif_500kbps_100ms_32pkts_Queue_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging 
foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -568,7 +551,7 @@ TEST(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) { +TEST(FullStackTest, Foreman_Cif_500kbps_100ms_32pkts_Queue_Recv_Bwe) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = false; @@ -585,7 +568,7 @@ TEST(FullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif1000kbps100msLimitedQueue) { +TEST(FullStackTest, Foreman_Cif_1000kbps_100ms_32pkts_Queue) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -603,7 +586,7 @@ TEST(FullStackTest, ForemanCif1000kbps100msLimitedQueue) { } // TODO(sprang): Remove this if we have the similar ModerateLimits below? -TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { +TEST(FullStackTest, Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -624,34 +607,8 @@ TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { fixture->RunWithAnalyzer(conf_motion_hd); } -// TODO(webrtc:9722): Remove when experiment is cleaned up. 
-TEST(FullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); - auto fixture = CreateVideoQualityTestFixture(); - - ParamsWithLogging conf_motion_hd; - conf_motion_hd.call.send_side_bwe = true; - conf_motion_hd.video[0] = { - true, 1280, - 720, 50, - 30000, 3000000, - 3000000, false, - "VP8", 1, - -1, 0, - false, false, - false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; - conf_motion_hd.analyzer = { - "conference_motion_hd_1tl_moderate_limits_trusted_rate_ctrl", 0.0, 0.0, - kFullStackTestDurationSecs}; - conf_motion_hd.config->queue_length_packets = 50; - conf_motion_hd.config->loss_percent = 3; - conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; - fixture->RunWithAnalyzer(conf_motion_hd); -} - -TEST(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { +TEST(GenericDescriptorTest, + Conference_Motion_Hd_2tl_Moderate_Limits_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -675,7 +632,7 @@ TEST(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, ConferenceMotionHd3TLModerateLimits) { +TEST(FullStackTest, Conference_Motion_Hd_3tl_Moderate_Limits) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -697,7 +654,7 @@ TEST(FullStackTest, ConferenceMotionHd3TLModerateLimits) { fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, ConferenceMotionHd4TLModerateLimits) { +TEST(FullStackTest, Conference_Motion_Hd_4tl_Moderate_Limits) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -719,7 +676,7 @@ TEST(FullStackTest, ConferenceMotionHd4TLModerateLimits) { 
fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) { +TEST(FullStackTest, Conference_Motion_Hd_3tl_Alt_Moderate_Limits) { test::ScopedFieldTrials field_trial( AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -743,8 +700,7 @@ TEST(FullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) { fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, - ConferenceMotionHd3TLModerateLimitsAltTLPatternAndBaseHeavyTLAllocation) { +TEST(FullStackTest, Conference_Motion_Hd_3tl_Alt_Heavy_Moderate_Limits) { auto fixture = CreateVideoQualityTestFixture(); test::ScopedFieldTrials field_trial( AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/" @@ -771,7 +727,7 @@ TEST(FullStackTest, } #if defined(RTC_ENABLE_VP9) -TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { +TEST(FullStackTest, Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -794,7 +750,7 @@ TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { } #endif -TEST(FullStackTest, ScreenshareSlidesVP8_2TL) { +TEST(FullStackTest, Screenshare_Slides) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -809,7 +765,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL) { #if !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on Win/Mac. 
-TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { +TEST(FullStackTest, Screenshare_Slides_Simulcast) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -839,7 +795,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { #endif // !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) { +TEST(FullStackTest, Screenshare_Slides_Scrolling) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging config; config.call.send_side_bwe = true; @@ -852,7 +808,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) { fixture->RunWithAnalyzer(config); } -TEST(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) { +TEST(GenericDescriptorTest, Screenshare_Slides_Lossy_Net_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -869,7 +825,7 @@ TEST(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) { fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) { +TEST(FullStackTest, Screenshare_Slides_Very_Lossy) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -885,7 +841,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) { fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue) { +TEST(FullStackTest, Screenshare_Slides_Lossy_Limited) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -902,7 +858,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue) { fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_ModeratelyRestricted) { +TEST(FullStackTest, Screenshare_Slides_Moderately_Restricted) { auto fixture = 
CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -971,7 +927,7 @@ ParamsWithLogging::Video SimulcastVp8VideoLow() { #if defined(RTC_ENABLE_VP9) -TEST(FullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { +TEST(FullStackTest, Screenshare_Slides_Vp9_3sl_High_Fps) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -990,7 +946,7 @@ TEST(FullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { // TODO(http://bugs.webrtc.org/9506): investigate. #if !defined(WEBRTC_MAC) -TEST(FullStackTest, VP9KSVC_3SL_High) { +TEST(FullStackTest, Vp9ksvc_3sl_High) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -1005,7 +961,7 @@ TEST(FullStackTest, VP9KSVC_3SL_High) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, VP9KSVC_3SL_Low) { +TEST(FullStackTest, Vp9ksvc_3sl_Low) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -1020,7 +976,7 @@ TEST(FullStackTest, VP9KSVC_3SL_Low) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, VP9KSVC_3SL_Low_Bw_Limited) { +TEST(FullStackTest, Vp9ksvc_3sl_Low_Bw_Limited) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/" "WebRTC-Vp9ExternalRefCtrl/Enabled/")); @@ -1037,7 +993,7 @@ TEST(FullStackTest, VP9KSVC_3SL_Low_Bw_Limited) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted) { +TEST(FullStackTest, Vp9ksvc_3sl_Medium_Network_Restricted) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -1055,10 +1011,9 @@ 
TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted) { } // TODO(webrtc:9722): Remove when experiment is cleaned up. -TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { +TEST(FullStackTest, Vp9ksvc_3sl_Medium_Network_Restricted_Trusted_Rate) { webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/" - "WebRTC-LibvpxVp9TrustedRateController/Enabled/")); + AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1079,12 +1034,12 @@ TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { // Android bots can't handle FullHD, so disable the test. // TODO(bugs.webrtc.org/9220): Investigate source of flakiness on Mac. #if defined(WEBRTC_ANDROID) || defined(WEBRTC_MAC) -#define MAYBE_SimulcastFullHdOveruse DISABLED_SimulcastFullHdOveruse +#define MAYBE_Simulcast_HD_High DISABLED_Simulcast_HD_High #else -#define MAYBE_SimulcastFullHdOveruse SimulcastFullHdOveruse +#define MAYBE_Simulcast_HD_High Simulcast_HD_High #endif -TEST(FullStackTest, MAYBE_SimulcastFullHdOveruse) { +TEST(FullStackTest, MAYBE_Simulcast_HD_High) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1107,7 +1062,7 @@ TEST(FullStackTest, MAYBE_SimulcastFullHdOveruse) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, SimulcastVP8_3SL_High) { +TEST(FullStackTest, Simulcast_Vp8_3sl_High) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1133,7 +1088,7 @@ TEST(FullStackTest, SimulcastVP8_3SL_High) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, SimulcastVP8_3SL_Low) { +TEST(FullStackTest, Simulcast_Vp8_3sl_Low) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; 
simulcast.call.send_side_bwe = true; @@ -1163,11 +1118,11 @@ TEST(FullStackTest, SimulcastVP8_3SL_Low) { // available and exercises WebRTC calls with a high target bitrate(100 Mbps). // Android32 bots can't handle this high bitrate, so disable test for those. #if defined(WEBRTC_ANDROID) -#define MAYBE_HighBitrateWithFakeCodec DISABLED_HighBitrateWithFakeCodec +#define MAYBE_High_Bitrate_With_Fake_Codec DISABLED_High_Bitrate_With_Fake_Codec #else -#define MAYBE_HighBitrateWithFakeCodec HighBitrateWithFakeCodec +#define MAYBE_High_Bitrate_With_Fake_Codec High_Bitrate_With_Fake_Codec #endif // defined(WEBRTC_ANDROID) -TEST(FullStackTest, MAYBE_HighBitrateWithFakeCodec) { +TEST(FullStackTest, MAYBE_High_Bitrate_With_Fake_Codec) { auto fixture = CreateVideoQualityTestFixture(); const int target_bitrate = 100000000; ParamsWithLogging generator; @@ -1199,12 +1154,12 @@ TEST(FullStackTest, MAYBE_HighBitrateWithFakeCodec) { #if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) // Fails on mobile devices: // https://bugs.chromium.org/p/webrtc/issues/detail?id=7301 -#define MAYBE_LargeRoomVP8_50thumb DISABLED_LargeRoomVP8_50thumb +#define MAYBE_Largeroom_50thumb DISABLED_Largeroom_50thumb #else -#define MAYBE_LargeRoomVP8_50thumb LargeRoomVP8_50thumb +#define MAYBE_Largeroom_50thumb Largeroom_50thumb #endif -TEST(FullStackTest, MAYBE_LargeRoomVP8_50thumb) { +TEST(FullStackTest, MAYBE_Largeroom_50thumb) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging large_room; large_room.call.send_side_bwe = true; diff --git a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests_plot.py b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests_plot.py index f50c297b1..c195b72a5 100755 --- a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests_plot.py +++ b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests_plot.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. 
All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Generate graphs for data generated by loopback tests. Usage examples: @@ -34,14 +33,14 @@ import numpy # Fields DROPPED = 0 -INPUT_TIME = 1 # ms (timestamp) -SEND_TIME = 2 # ms (timestamp) -RECV_TIME = 3 # ms (timestamp) -RENDER_TIME = 4 # ms (timestamp) -ENCODED_FRAME_SIZE = 5 # bytes +INPUT_TIME = 1 # ms (timestamp) +SEND_TIME = 2 # ms (timestamp) +RECV_TIME = 3 # ms (timestamp) +RENDER_TIME = 4 # ms (timestamp) +ENCODED_FRAME_SIZE = 5 # bytes PSNR = 6 SSIM = 7 -ENCODE_TIME = 8 # ms (time interval) +ENCODE_TIME = 8 # ms (time interval) TOTAL_RAW_FIELDS = 9 @@ -78,111 +77,116 @@ _FIELDS = [ NAME_TO_ID = {field[1]: field[0] for field in _FIELDS} ID_TO_TITLE = {field[0]: field[2] for field in _FIELDS} + def FieldArgToId(arg): - if arg == "none": - return None - if arg in NAME_TO_ID: - return NAME_TO_ID[arg] - if arg + "_ms" in NAME_TO_ID: - return NAME_TO_ID[arg + "_ms"] - raise Exception("Unrecognized field name \"{}\"".format(arg)) + if arg == "none": + return None + if arg in NAME_TO_ID: + return NAME_TO_ID[arg] + if arg + "_ms" in NAME_TO_ID: + return NAME_TO_ID[arg + "_ms"] + raise Exception("Unrecognized field name \"{}\"".format(arg)) class PlotLine(object): - """Data for a single graph line.""" + """Data for a single graph line.""" - def __init__(self, label, values, flags): - self.label = label - self.values = values - self.flags = flags + def __init__(self, label, values, flags): + self.label = label + self.values = values + self.flags = flags class Data(object): - """Object representing one full stack test.""" + """Object representing one full stack test.""" - def __init__(self, filename): - self.title = "" - self.length = 0 - self.samples = defaultdict(list) + def __init__(self, filename): + self.title = "" + self.length = 0 + self.samples = defaultdict(list) - self._ReadSamples(filename) + self._ReadSamples(filename) - def _ReadSamples(self, 
filename): - """Reads graph data from the given file.""" - f = open(filename) - it = iter(f) + def _ReadSamples(self, filename): + """Reads graph data from the given file.""" + f = open(filename) + it = iter(f) - self.title = it.next().strip() - self.length = int(it.next()) - field_names = [name.strip() for name in it.next().split()] - field_ids = [NAME_TO_ID[name] for name in field_names] + self.title = it.next().strip() + self.length = int(it.next()) + field_names = [name.strip() for name in it.next().split()] + field_ids = [NAME_TO_ID[name] for name in field_names] - for field_id in field_ids: - self.samples[field_id] = [0.0] * self.length + for field_id in field_ids: + self.samples[field_id] = [0.0] * self.length - for sample_id in xrange(self.length): - for col, value in enumerate(it.next().split()): - self.samples[field_ids[col]][sample_id] = float(value) + for sample_id in xrange(self.length): + for col, value in enumerate(it.next().split()): + self.samples[field_ids[col]][sample_id] = float(value) - self._SubtractFirstInputTime() - self._GenerateAdditionalData() + self._SubtractFirstInputTime() + self._GenerateAdditionalData() - f.close() + f.close() - def _SubtractFirstInputTime(self): - offset = self.samples[INPUT_TIME][0] - for field in [INPUT_TIME, SEND_TIME, RECV_TIME, RENDER_TIME]: - if field in self.samples: - self.samples[field] = [x - offset for x in self.samples[field]] + def _SubtractFirstInputTime(self): + offset = self.samples[INPUT_TIME][0] + for field in [INPUT_TIME, SEND_TIME, RECV_TIME, RENDER_TIME]: + if field in self.samples: + self.samples[field] = [x - offset for x in self.samples[field]] - def _GenerateAdditionalData(self): - """Calculates sender time, receiver time etc. from the raw data.""" - s = self.samples - last_render_time = 0 - for field_id in [SENDER_TIME, RECEIVER_TIME, END_TO_END, RENDERED_DELTA]: - s[field_id] = [0] * self.length + def _GenerateAdditionalData(self): + """Calculates sender time, receiver time etc. 
from the raw data.""" + s = self.samples + last_render_time = 0 + for field_id in [ + SENDER_TIME, RECEIVER_TIME, END_TO_END, RENDERED_DELTA + ]: + s[field_id] = [0] * self.length - for k in range(self.length): - s[SENDER_TIME][k] = s[SEND_TIME][k] - s[INPUT_TIME][k] + for k in range(self.length): + s[SENDER_TIME][k] = s[SEND_TIME][k] - s[INPUT_TIME][k] - decoded_time = s[RENDER_TIME][k] - s[RECEIVER_TIME][k] = decoded_time - s[RECV_TIME][k] - s[END_TO_END][k] = decoded_time - s[INPUT_TIME][k] - if not s[DROPPED][k]: - if k > 0: - s[RENDERED_DELTA][k] = decoded_time - last_render_time - last_render_time = decoded_time + decoded_time = s[RENDER_TIME][k] + s[RECEIVER_TIME][k] = decoded_time - s[RECV_TIME][k] + s[END_TO_END][k] = decoded_time - s[INPUT_TIME][k] + if not s[DROPPED][k]: + if k > 0: + s[RENDERED_DELTA][k] = decoded_time - last_render_time + last_render_time = decoded_time - def _Hide(self, values): - """ + def _Hide(self, values): + """ Replaces values for dropped frames with None. These values are then skipped by the Plot() method. """ - return [None if self.samples[DROPPED][k] else values[k] - for k in range(len(values))] + return [ + None if self.samples[DROPPED][k] else values[k] + for k in range(len(values)) + ] - def AddSamples(self, config, target_lines_list): - """Creates graph lines from the current data set with given config.""" - for field in config.fields: - # field is None means the user wants just to skip the color. - if field is None: - target_lines_list.append(None) - continue + def AddSamples(self, config, target_lines_list): + """Creates graph lines from the current data set with given config.""" + for field in config.fields: + # field is None means the user wants just to skip the color. 
+ if field is None: + target_lines_list.append(None) + continue - field_id = field & FIELD_MASK - values = self.samples[field_id] + field_id = field & FIELD_MASK + values = self.samples[field_id] - if field & HIDE_DROPPED: - values = self._Hide(values) + if field & HIDE_DROPPED: + values = self._Hide(values) - target_lines_list.append(PlotLine( - self.title + " " + ID_TO_TITLE[field_id], - values, field & ~FIELD_MASK)) + target_lines_list.append( + PlotLine(self.title + " " + ID_TO_TITLE[field_id], values, + field & ~FIELD_MASK)) def AverageOverCycle(values, length): - """ + """ Returns the list: [ avg(values[0], values[length], ...), @@ -194,221 +198,272 @@ def AverageOverCycle(values, length): Skips None values when calculating the average value. """ - total = [0.0] * length - count = [0] * length - for k, val in enumerate(values): - if val is not None: - total[k % length] += val - count[k % length] += 1 + total = [0.0] * length + count = [0] * length + for k, val in enumerate(values): + if val is not None: + total[k % length] += val + count[k % length] += 1 - result = [0.0] * length - for k in range(length): - result[k] = total[k] / count[k] if count[k] else None - return result + result = [0.0] * length + for k in range(length): + result[k] = total[k] / count[k] if count[k] else None + return result class PlotConfig(object): - """Object representing a single graph.""" + """Object representing a single graph.""" - def __init__(self, fields, data_list, cycle_length=None, frames=None, - offset=0, output_filename=None, title="Graph"): - self.fields = fields - self.data_list = data_list - self.cycle_length = cycle_length - self.frames = frames - self.offset = offset - self.output_filename = output_filename - self.title = title + def __init__(self, + fields, + data_list, + cycle_length=None, + frames=None, + offset=0, + output_filename=None, + title="Graph"): + self.fields = fields + self.data_list = data_list + self.cycle_length = cycle_length + self.frames = frames 
+ self.offset = offset + self.output_filename = output_filename + self.title = title - def Plot(self, ax1): - lines = [] - for data in self.data_list: - if not data: - # Add None lines to skip the colors. - lines.extend([None] * len(self.fields)) - else: - data.AddSamples(self, lines) + def Plot(self, ax1): + lines = [] + for data in self.data_list: + if not data: + # Add None lines to skip the colors. + lines.extend([None] * len(self.fields)) + else: + data.AddSamples(self, lines) - def _SliceValues(values): - if self.offset: - values = values[self.offset:] - if self.frames: - values = values[:self.frames] - return values + def _SliceValues(values): + if self.offset: + values = values[self.offset:] + if self.frames: + values = values[:self.frames] + return values - length = None - for line in lines: - if line is None: - continue + length = None + for line in lines: + if line is None: + continue - line.values = _SliceValues(line.values) - if self.cycle_length: - line.values = AverageOverCycle(line.values, self.cycle_length) + line.values = _SliceValues(line.values) + if self.cycle_length: + line.values = AverageOverCycle(line.values, self.cycle_length) - if length is None: - length = len(line.values) - elif length != len(line.values): - raise Exception("All arrays should have the same length!") + if length is None: + length = len(line.values) + elif length != len(line.values): + raise Exception("All arrays should have the same length!") - ax1.set_xlabel("Frame", fontsize="large") - if any(line.flags & RIGHT_Y_AXIS for line in lines if line): - ax2 = ax1.twinx() - ax2.set_xlabel("Frame", fontsize="large") - else: - ax2 = None + ax1.set_xlabel("Frame", fontsize="large") + if any(line.flags & RIGHT_Y_AXIS for line in lines if line): + ax2 = ax1.twinx() + ax2.set_xlabel("Frame", fontsize="large") + else: + ax2 = None - # Have to implement color_cycle manually, due to two scales in a graph. 
- color_cycle = ["b", "r", "g", "c", "m", "y", "k"] - color_iter = itertools.cycle(color_cycle) + # Have to implement color_cycle manually, due to two scales in a graph. + color_cycle = ["b", "r", "g", "c", "m", "y", "k"] + color_iter = itertools.cycle(color_cycle) - for line in lines: - if not line: - color_iter.next() - continue + for line in lines: + if not line: + color_iter.next() + continue - if self.cycle_length: - x = numpy.array(range(self.cycle_length)) - else: - x = numpy.array(range(self.offset, self.offset + len(line.values))) - y = numpy.array(line.values) - ax = ax2 if line.flags & RIGHT_Y_AXIS else ax1 - ax.Plot(x, y, "o-", label=line.label, markersize=3.0, linewidth=1.0, - color=color_iter.next()) + if self.cycle_length: + x = numpy.array(range(self.cycle_length)) + else: + x = numpy.array( + range(self.offset, self.offset + len(line.values))) + y = numpy.array(line.values) + ax = ax2 if line.flags & RIGHT_Y_AXIS else ax1 + ax.Plot(x, + y, + "o-", + label=line.label, + markersize=3.0, + linewidth=1.0, + color=color_iter.next()) - ax1.grid(True) - if ax2: - ax1.legend(loc="upper left", shadow=True, fontsize="large") - ax2.legend(loc="upper right", shadow=True, fontsize="large") - else: - ax1.legend(loc="best", shadow=True, fontsize="large") + ax1.grid(True) + if ax2: + ax1.legend(loc="upper left", shadow=True, fontsize="large") + ax2.legend(loc="upper right", shadow=True, fontsize="large") + else: + ax1.legend(loc="best", shadow=True, fontsize="large") def LoadFiles(filenames): - result = [] - for filename in filenames: - if filename in LoadFiles.cache: - result.append(LoadFiles.cache[filename]) - else: - data = Data(filename) - LoadFiles.cache[filename] = data - result.append(data) - return result + result = [] + for filename in filenames: + if filename in LoadFiles.cache: + result.append(LoadFiles.cache[filename]) + else: + data = Data(filename) + LoadFiles.cache[filename] = data + result.append(data) + return result + + LoadFiles.cache = {} def 
GetParser(): - class CustomAction(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): - if "ordered_args" not in namespace: - namespace.ordered_args = [] - namespace.ordered_args.append((self.dest, values)) + class CustomAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + if "ordered_args" not in namespace: + namespace.ordered_args = [] + namespace.ordered_args.append((self.dest, values)) - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - "-c", "--cycle_length", nargs=1, action=CustomAction, - type=int, help="Cycle length over which to average the values.") - parser.add_argument( - "-f", "--field", nargs=1, action=CustomAction, - help="Name of the field to show. Use 'none' to skip a color.") - parser.add_argument("-r", "--right", nargs=0, action=CustomAction, - help="Use right Y axis for given field.") - parser.add_argument("-d", "--drop", nargs=0, action=CustomAction, - help="Hide values for dropped frames.") - parser.add_argument("-o", "--offset", nargs=1, action=CustomAction, type=int, - help="Frame offset.") - parser.add_argument("-n", "--next", nargs=0, action=CustomAction, - help="Separator for multiple graphs.") - parser.add_argument( - "--frames", nargs=1, action=CustomAction, type=int, - help="Frame count to show or take into account while averaging.") - parser.add_argument("-t", "--title", nargs=1, action=CustomAction, - help="Title of the graph.") - parser.add_argument( - "-O", "--output_filename", nargs=1, action=CustomAction, - help="Use to save the graph into a file. 
" - "Otherwise, a window will be shown.") - parser.add_argument( - "files", nargs="+", action=CustomAction, - help="List of text-based files generated by loopback tests.") - return parser + parser.add_argument("-c", + "--cycle_length", + nargs=1, + action=CustomAction, + type=int, + help="Cycle length over which to average the values.") + parser.add_argument( + "-f", + "--field", + nargs=1, + action=CustomAction, + help="Name of the field to show. Use 'none' to skip a color.") + parser.add_argument("-r", + "--right", + nargs=0, + action=CustomAction, + help="Use right Y axis for given field.") + parser.add_argument("-d", + "--drop", + nargs=0, + action=CustomAction, + help="Hide values for dropped frames.") + parser.add_argument("-o", + "--offset", + nargs=1, + action=CustomAction, + type=int, + help="Frame offset.") + parser.add_argument("-n", + "--next", + nargs=0, + action=CustomAction, + help="Separator for multiple graphs.") + parser.add_argument( + "--frames", + nargs=1, + action=CustomAction, + type=int, + help="Frame count to show or take into account while averaging.") + parser.add_argument("-t", + "--title", + nargs=1, + action=CustomAction, + help="Title of the graph.") + parser.add_argument("-O", + "--output_filename", + nargs=1, + action=CustomAction, + help="Use to save the graph into a file. " + "Otherwise, a window will be shown.") + parser.add_argument( + "files", + nargs="+", + action=CustomAction, + help="List of text-based files generated by loopback tests.") + return parser def _PlotConfigFromArgs(args, graph_num): - # Pylint complains about using kwargs, so have to do it this way. - cycle_length = None - frames = None - offset = 0 - output_filename = None - title = "Graph" + # Pylint complains about using kwargs, so have to do it this way. 
+ cycle_length = None + frames = None + offset = 0 + output_filename = None + title = "Graph" - fields = [] - files = [] - mask = 0 - for key, values in args: - if key == "cycle_length": - cycle_length = values[0] - elif key == "frames": - frames = values[0] - elif key == "offset": - offset = values[0] - elif key == "output_filename": - output_filename = values[0] - elif key == "title": - title = values[0] - elif key == "drop": - mask |= HIDE_DROPPED - elif key == "right": - mask |= RIGHT_Y_AXIS - elif key == "field": - field_id = FieldArgToId(values[0]) - fields.append(field_id | mask if field_id is not None else None) - mask = 0 # Reset mask after the field argument. - elif key == "files": - files.extend(values) + fields = [] + files = [] + mask = 0 + for key, values in args: + if key == "cycle_length": + cycle_length = values[0] + elif key == "frames": + frames = values[0] + elif key == "offset": + offset = values[0] + elif key == "output_filename": + output_filename = values[0] + elif key == "title": + title = values[0] + elif key == "drop": + mask |= HIDE_DROPPED + elif key == "right": + mask |= RIGHT_Y_AXIS + elif key == "field": + field_id = FieldArgToId(values[0]) + fields.append(field_id | mask if field_id is not None else None) + mask = 0 # Reset mask after the field argument. 
+ elif key == "files": + files.extend(values) - if not files: - raise Exception("Missing file argument(s) for graph #{}".format(graph_num)) - if not fields: - raise Exception("Missing field argument(s) for graph #{}".format(graph_num)) + if not files: + raise Exception( + "Missing file argument(s) for graph #{}".format(graph_num)) + if not fields: + raise Exception( + "Missing field argument(s) for graph #{}".format(graph_num)) - return PlotConfig(fields, LoadFiles(files), cycle_length=cycle_length, - frames=frames, offset=offset, output_filename=output_filename, - title=title) + return PlotConfig(fields, + LoadFiles(files), + cycle_length=cycle_length, + frames=frames, + offset=offset, + output_filename=output_filename, + title=title) def PlotConfigsFromArgs(args): - """Generates plot configs for given command line arguments.""" - # The way it works: - # First we detect separators -n/--next and split arguments into groups, one - # for each plot. For each group, we partially parse it with - # argparse.ArgumentParser, modified to remember the order of arguments. - # Then we traverse the argument list and fill the PlotConfig. - args = itertools.groupby(args, lambda x: x in ["-n", "--next"]) - prep_args = list(list(group) for match, group in args if not match) + """Generates plot configs for given command line arguments.""" + # The way it works: + # First we detect separators -n/--next and split arguments into groups, one + # for each plot. For each group, we partially parse it with + # argparse.ArgumentParser, modified to remember the order of arguments. + # Then we traverse the argument list and fill the PlotConfig. 
+ args = itertools.groupby(args, lambda x: x in ["-n", "--next"]) + prep_args = list(list(group) for match, group in args if not match) - parser = GetParser() - plot_configs = [] - for index, raw_args in enumerate(prep_args): - graph_args = parser.parse_args(raw_args).ordered_args - plot_configs.append(_PlotConfigFromArgs(graph_args, index)) - return plot_configs + parser = GetParser() + plot_configs = [] + for index, raw_args in enumerate(prep_args): + graph_args = parser.parse_args(raw_args).ordered_args + plot_configs.append(_PlotConfigFromArgs(graph_args, index)) + return plot_configs def ShowOrSavePlots(plot_configs): - for config in plot_configs: - fig = plt.figure(figsize=(14.0, 10.0)) - ax = fig.add_subPlot(1, 1, 1) + for config in plot_configs: + fig = plt.figure(figsize=(14.0, 10.0)) + ax = fig.add_subPlot(1, 1, 1) - plt.title(config.title) - config.Plot(ax) - if config.output_filename: - print "Saving to", config.output_filename - fig.savefig(config.output_filename) - plt.close(fig) + plt.title(config.title) + config.Plot(ax) + if config.output_filename: + print "Saving to", config.output_filename + fig.savefig(config.output_filename) + plt.close(fig) + + plt.show() - plt.show() if __name__ == "__main__": - ShowOrSavePlots(PlotConfigsFromArgs(sys.argv[1:])) + ShowOrSavePlots(PlotConfigsFromArgs(sys.argv[1:])) diff --git a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc index 7a4b44909..d515a5271 100644 --- a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc @@ -49,8 +49,6 @@ using VideoCodecConfig = namespace { constexpr int kTestDurationSec = 45; -constexpr char kVp8TrustedRateControllerFieldTrial[] = - "WebRTC-LibvpxVp8TrustedRateController/Enabled/"; EmulatedNetworkNode* CreateEmulatedNodeWithConfig( NetworkEmulationManager* emulation, @@ -109,7 +107,7 @@ std::string ClipNameToClipPath(const char* clip_name) 
{ } // namespace #if defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -134,7 +132,8 @@ TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) { fixture->Run(std::move(run_params)); } -TEST(PCGenericDescriptorTest, ForemanCifPlr5Vp9) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_VP9_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -165,13 +164,13 @@ TEST(PCGenericDescriptorTest, ForemanCifPlr5Vp9) { #if (defined(WEBRTC_ANDROID) && \ (defined(WEBRTC_ARCH_ARM64) || defined(WEBRTC_ARCH_ARM))) || \ (defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64)) -#define MAYBE_GeneratorWithoutPacketLossVp9Profile2 \ - DISABLED_GeneratorWithoutPacketLossVp9Profile2 +#define MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 \ + DISABLED_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 #else -#define MAYBE_GeneratorWithoutPacketLossVp9Profile2 \ - GeneratorWithoutPacketLossVp9Profile2 +#define MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 \ + Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 #endif -TEST(PCFullStackTest, MAYBE_GeneratorWithoutPacketLossVp9Profile2) { +TEST(PCFullStackTest, MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -228,7 +227,7 @@ TEST(PCFullStackTest, GeneratorWithoutPacketLossMultiplexI420AFrame) { */ #endif // defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ParisQcifWithoutPacketLoss) { +TEST(PCFullStackTest, Pc_Net_Delay_0_0_Plr_0) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -250,7 +249,8 @@ TEST(PCFullStackTest, ParisQcifWithoutPacketLoss) { 
fixture->Run(std::move(run_params)); } -TEST(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -273,7 +273,8 @@ TEST(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) { fixture->Run(std::move(run_params)); } -TEST(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -302,42 +303,8 @@ TEST(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) { fixture->Run(std::move(run_params)); } -// TODO(webrtc:9722): Remove when experiment is cleaned up. -TEST(PCGenericDescriptorTest, - ForemanCif30kbpsWithoutPacketLossTrustedRateControl) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - BuiltInNetworkBehaviorConfig config; - auto fixture = CreateTestFixture( - "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl_generic_" - "descriptor", - *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), - [](PeerConfigurer* alice) { - VideoConfig video(352, 288, 10); - video.stream_label = "alice-video"; - auto frame_generator = CreateFromYuvFileFrameGenerator( - video, ClipNameToClipPath("foreman_cif")); - alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - - BitrateSettings bitrate_settings; - bitrate_settings.min_bitrate_bps = 30000; - bitrate_settings.start_bitrate_bps = 30000; - bitrate_settings.max_bitrate_bps = 30000; - alice->SetBitrateSettings(bitrate_settings); - }, - [](PeerConfigurer* bob) {}); - RunParams 
run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); -} - // Link capacity below default start rate. -TEST(PCFullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -361,7 +328,7 @@ TEST(PCFullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifLink130kbps100msDelay1PercentPacketLossUlpfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_130kbps_Delay100ms_Loss1_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -387,7 +354,7 @@ TEST(PCFullStackTest, ForemanCifLink130kbps100msDelay1PercentPacketLossUlpfec) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifLink50kbps100msDelay1PercentPacketLossUlpfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_50kbps_Delay100ms_Loss1_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -414,7 +381,8 @@ TEST(PCFullStackTest, ForemanCifLink50kbps100msDelay1PercentPacketLossUlpfec) { } // Restricted network and encoder overproducing by 30%. -TEST(PCFullStackTest, ForemanCifLink150kbpsBadRateController) { +TEST(PCFullStackTest, + Pc_Foreman_Cif_Link_150kbps_Delay100ms_30pkts_Queue_Overshoot30) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -445,7 +413,7 @@ TEST(PCFullStackTest, ForemanCifLink150kbpsBadRateController) { // Packet rate and loss are low enough that loss will happen with ~3s interval. 
// This triggers protection overhead to toggle between zero and non-zero. // Link queue is restrictive enough to trigger loss on probes. -TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -473,7 +441,8 @@ TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { fixture->Run(std::move(run_params)); } -TEST(PCGenericDescriptorTest, ForemanCifPlr5) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -498,7 +467,8 @@ TEST(PCGenericDescriptorTest, ForemanCifPlr5) { fixture->Run(std::move(run_params)); } -TEST(PCGenericDescriptorTest, ForemanCifPlr5Ulpfec) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_Ulpfec_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -523,7 +493,7 @@ TEST(PCGenericDescriptorTest, ForemanCifPlr5Ulpfec) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifPlr5Flexfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -548,7 +518,7 @@ TEST(PCFullStackTest, ForemanCifPlr5Flexfec) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbpsPlr3Flexfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -574,7 +544,7 @@ TEST(PCFullStackTest, ForemanCif500kbpsPlr3Flexfec) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbpsPlr3Ulpfec) { 
+TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -601,7 +571,7 @@ TEST(PCFullStackTest, ForemanCif500kbpsPlr3Ulpfec) { } #if defined(WEBRTC_USE_H264) -TEST(PCFullStackTest, ForemanCifWithoutPacketlossH264) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -624,7 +594,7 @@ TEST(PCFullStackTest, ForemanCifWithoutPacketlossH264) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) { +TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -653,7 +623,8 @@ TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) { fixture->Run(std::move(run_params)); } -TEST(PCGenericDescriptorTest, ForemanCifPlr5H264) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -678,7 +649,7 @@ TEST(PCGenericDescriptorTest, ForemanCifPlr5H264) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { test::ScopedFieldTrials override_field_trials( AppendFieldTrials("WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/")); @@ -706,7 +677,7 @@ TEST(PCFullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifPlr5H264Flexfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ 
-733,7 +704,7 @@ TEST(PCFullStackTest, ForemanCifPlr5H264Flexfec) { // Ulpfec with H264 is an unsupported combination, so this test is only useful // for debugging. It is therefore disabled by default. -TEST(PCFullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { +TEST(PCFullStackTest, DISABLED_Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -759,7 +730,7 @@ TEST(PCFullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { } #endif // defined(WEBRTC_USE_H264) -TEST(PCFullStackTest, ForemanCif500kbps) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -784,7 +755,7 @@ TEST(PCFullStackTest, ForemanCif500kbps) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbpsLimitedQueue) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -810,7 +781,7 @@ TEST(PCFullStackTest, ForemanCif500kbpsLimitedQueue) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbps100ms) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -836,7 +807,8 @@ TEST(PCFullStackTest, ForemanCif500kbps100ms) { fixture->Run(std::move(run_params)); } -TEST(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_500kbps_100ms_32pkts_Queue_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -883,7 +855,7 @@ TEST(PCFullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) { } */ -TEST(PCFullStackTest, ForemanCif1000kbps100msLimitedQueue) { 
+TEST(PCFullStackTest, Pc_Foreman_Cif_1000kbps_100ms_32pkts_Queue) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -910,7 +882,7 @@ TEST(PCFullStackTest, ForemanCif1000kbps100msLimitedQueue) { } // TODO(sprang): Remove this if we have the similar ModerateLimits below? -TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { +TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -936,36 +908,6 @@ TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { fixture->Run(std::move(run_params)); } -// TODO(webrtc:9722): Remove when experiment is cleaned up. -TEST(PCFullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - BuiltInNetworkBehaviorConfig config; - config.queue_length_packets = 50; - config.loss_percent = 3; - config.queue_delay_ms = 100; - config.link_capacity_kbps = 2000; - auto fixture = CreateTestFixture( - "pc_conference_motion_hd_1tl_moderate_limits_trusted_rate_ctrl", - *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), - [](PeerConfigurer* alice) { - VideoConfig video(1280, 720, 50); - video.stream_label = "alice-video"; - auto frame_generator = CreateFromYuvFileFrameGenerator( - video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); - alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - 
fixture->Run(std::move(run_params)); -} - /* // TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework TEST(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { @@ -1093,7 +1035,7 @@ TEST(PCFullStackTest, */ #if defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { +TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -1122,7 +1064,7 @@ TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { } #endif -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_NoConferenceMode) { +TEST(PCFullStackTest, Pc_Screenshare_Slides_No_Conference_Mode) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -1146,7 +1088,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_NoConferenceMode) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL) { +TEST(PCFullStackTest, Pc_Screenshare_Slides) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -1172,7 +1114,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL) { // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on Win/Mac. 
#if !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_NoConferenceMode) { +TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast_No_Conference_Mode) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -1198,7 +1140,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_NoConferenceMode) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { +TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( @@ -1230,40 +1172,6 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { #if !defined(WEBRTC_MAC) // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on Win/Mac. #if !defined(WEBRTC_WIN) -const char kScreenshareSimulcastVariableFramerateExperiment[] = - "WebRTC-VP8VariableFramerateScreenshare/" - "Enabled,min_fps:5.0,min_qp:15,undershoot:30/"; -// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_Variable_Framerate) { - test::ScopedFieldTrials field_trial( - AppendFieldTrials(kScreenshareSimulcastVariableFramerateExperiment)); - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging screenshare; - screenshare.call.send_side_bwe = true; - screenshare.screenshare[0] = {true, false, 10}; - screenshare.video[0] = {true, 1850, 1110, 30, 800000, 2500000, - 2500000, false, "VP8", 2, 1, 400000, - false, false, false, ""}; - screenshare.analyzer = {"screenshare_slides_simulcast_variable_framerate", - 0.0, 0.0, kTestDurationSec}; - ParamsWithLogging screenshare_params_high; - screenshare_params_high.video[0] = { - true, 1850, 1110, 60, 600000, 1250000, 1250000, false, - "VP8", 2, 0, 400000, false, false, false, ""}; - VideoQualityTest::Params screenshare_params_low; - 
screenshare_params_low.video[0] = {true, 1850, 1110, 5, 30000, 200000, - 1000000, false, "VP8", 2, 0, 400000, - false, false, false, ""}; - - std::vector streams = { - VideoQualityTest::DefaultVideoStream(screenshare_params_low, 0), - VideoQualityTest::DefaultVideoStream(screenshare_params_high, 0)}; - screenshare.ss[0] = { - streams, 1, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - fixture->RunWithAnalyzer(screenshare); -} - // TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_low) { auto fixture = CreateVideoQualityTestFixture(); @@ -1311,7 +1219,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Scroll) { } // TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework -TEST(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) { +TEST(PCGenericDescriptorTest, Screenshare_Slides_Lossy_Net_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -1437,7 +1345,7 @@ ParamsWithLogging::Video SimulcastVp8VideoLow() { #if defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { +TEST(PCFullStackTest, Pc_Screenshare_Slides_Vp9_3sl_High_Fps) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9InterLayerPred/" "Enabled,inter_layer_pred_mode:on/")); @@ -1467,7 +1375,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, VP9SVC_3SL_High) { +TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9InterLayerPred/" "Enabled,inter_layer_pred_mode:on/")); @@ -1496,7 +1404,7 @@ TEST(PCFullStackTest, VP9SVC_3SL_High) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, VP9SVC_3SL_Low) { +TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { webrtc::test::ScopedFieldTrials override_trials( 
AppendFieldTrials("WebRTC-Vp9InterLayerPred/" "Enabled,inter_layer_pred_mode:on/")); @@ -1598,8 +1506,7 @@ TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted) { // TODO(webrtc:9722): Remove when experiment is cleaned up. TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/" - "WebRTC-LibvpxVp9TrustedRateController/Enabled/")); + AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1621,11 +1528,11 @@ TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { // Android bots can't handle FullHD, so disable the test. // TODO(bugs.webrtc.org/9220): Investigate source of flakiness on Mac. #if defined(WEBRTC_ANDROID) || defined(WEBRTC_MAC) -#define MAYBE_SimulcastFullHdOveruse DISABLED_SimulcastFullHdOveruse +#define MAYBE_Pc_Simulcast_HD_High DISABLED_Pc_Simulcast_HD_High #else -#define MAYBE_SimulcastFullHdOveruse SimulcastFullHdOveruse +#define MAYBE_Pc_Simulcast_HD_High Pc_Simulcast_HD_High #endif -TEST(PCFullStackTest, MAYBE_SimulcastFullHdOveruse) { +TEST(PCFullStackTest, MAYBE_Pc_Simulcast_HD_High) { webrtc::test::ScopedFieldTrials override_trials(AppendFieldTrials( "WebRTC-ForceSimulatedOveruseIntervalMs/1000-50000-300/")); std::unique_ptr network_emulation_manager = @@ -1651,7 +1558,7 @@ TEST(PCFullStackTest, MAYBE_SimulcastFullHdOveruse) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, SimulcastVP8_3SL_High) { +TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_High) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -1677,7 +1584,7 @@ TEST(PCFullStackTest, SimulcastVP8_3SL_High) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, SimulcastVP8_3SL_Low) { 
+TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_Low) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.cc index 28e9a0ba9..29ace9043 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.cc @@ -89,11 +89,18 @@ void RtpStreamsSynchronizer::Process() { log_stats = true; } + int64_t last_audio_receive_time_ms = + audio_measurement_.latest_receive_time_ms; absl::optional audio_info = syncable_audio_->GetInfo(); if (!audio_info || !UpdateMeasurements(&audio_measurement_, *audio_info)) { return; } + if (last_audio_receive_time_ms == audio_measurement_.latest_receive_time_ms) { + // No new audio packet has been received since last update. + return; + } + int64_t last_video_receive_ms = video_measurement_.latest_receive_time_ms; absl::optional video_info = syncable_video_->GetInfo(); if (!video_info || !UpdateMeasurements(&video_measurement_, *video_info)) { diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc index 49be355a3..4096fceb9 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc @@ -92,11 +92,18 @@ void RtpStreamsSynchronizer::UpdateDelay() { log_stats = true; } + int64_t last_audio_receive_time_ms = + audio_measurement_.latest_receive_time_ms; absl::optional audio_info = syncable_audio_->GetInfo(); if (!audio_info || !UpdateMeasurements(&audio_measurement_, *audio_info)) { return; } + if (last_audio_receive_time_ms == audio_measurement_.latest_receive_time_ms) { + // No new audio packet has been received since last update. 
+ return; + } + int64_t last_video_receive_ms = video_measurement_.latest_receive_time_ms; absl::optional video_info = syncable_video_->GetInfo(); if (!video_info || !UpdateMeasurements(&video_measurement_, *video_info)) { @@ -147,8 +154,12 @@ void RtpStreamsSynchronizer::UpdateDelay() { << "target_delay_ms: " << target_video_delay_ms << "} "; } - syncable_audio_->SetMinimumPlayoutDelay(target_audio_delay_ms); - syncable_video_->SetMinimumPlayoutDelay(target_video_delay_ms); + if (!syncable_audio_->SetMinimumPlayoutDelay(target_audio_delay_ms)) { + sync_->ReduceAudioDelay(); + } + if (!syncable_video_->SetMinimumPlayoutDelay(target_video_delay_ms)) { + sync_->ReduceVideoDelay(); + } } // TODO(https://bugs.webrtc.org/7065): Move RtpToNtpEstimator out of diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h index 0cdc26d40..2746295fc 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h @@ -146,8 +146,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, // Implements RtpPacketSinkInterface. void OnRtpPacket(const RtpPacketReceived& packet) override; - // TODO(philipel): Stop using VCMPacket in the new jitter buffer and then - // remove this function. Public only for tests. + // Public only for tests. 
void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, const RTPVideoHeader& video); diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc index db68706bc..d623e7a87 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc @@ -331,6 +331,10 @@ void RtpVideoStreamReceiver2::AddReceiveCodec( const std::map& codec_params, bool raw_payload) { RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) || + field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); + } payload_type_map_.emplace( payload_type, raw_payload ? std::make_unique() diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h index 69bfb532f..79e95b688 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h @@ -121,8 +121,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // Implements RtpPacketSinkInterface. void OnRtpPacket(const RtpPacketReceived& packet) override; - // TODO(philipel): Stop using VCMPacket in the new jitter buffer and then - // remove this function. Public only for tests. + // Public only for tests. 
void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, const RTPVideoHeader& video); diff --git a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc index b0ff9b517..92545ecf9 100644 --- a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc @@ -976,13 +976,11 @@ void SendStatisticsProxy::OnSendEncodedImage( stats->frames_encoded++; stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms - encoded_image.timing_.encode_start_ms; - // Report resolution of top spatial layer in case of VP9 SVC. - bool is_svc_low_spatial_layer = - (codec_info && codec_info->codecType == kVideoCodecVP9) - ? !codec_info->codecSpecific.VP9.end_of_picture - : false; + // Report resolution of the top spatial layer. + bool is_top_spatial_layer = + codec_info == nullptr || codec_info->end_of_picture; - if (!stats->width || !stats->height || !is_svc_low_spatial_layer) { + if (!stats->width || !stats->height || is_top_spatial_layer) { stats->width = encoded_image._encodedWidth; stats->height = encoded_image._encodedHeight; update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds(); diff --git a/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc b/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc index 8c808f13c..d5c77c1ec 100644 --- a/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc +++ b/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc @@ -184,4 +184,12 @@ void StreamSynchronization::SetTargetBufferingDelay(int target_delay_ms) { base_target_delay_ms_ = target_delay_ms; } +void StreamSynchronization::ReduceAudioDelay() { + audio_delay_.extra_ms *= 0.9f; +} + +void StreamSynchronization::ReduceVideoDelay() { + video_delay_.extra_ms *= 0.9f; +} + } // namespace webrtc diff --git 
a/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.h b/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.h index 1aba62d1e..2da6a49a1 100644 --- a/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.h +++ b/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.h @@ -44,6 +44,12 @@ class StreamSynchronization { // |target_delay_ms|. void SetTargetBufferingDelay(int target_delay_ms); + // Lowers the audio delay by 10%. Can be used to recover from errors. + void ReduceAudioDelay(); + + // Lowers the video delay by 10%. Can be used to recover from errors. + void ReduceVideoDelay(); + uint32_t audio_stream_id() const { return audio_stream_id_; } uint32_t video_stream_id() const { return video_stream_id_; } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc index a1174b519..c16c3b383 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc @@ -522,7 +522,8 @@ void VideoAnalyzer::PollStats() { } if (audio_receive_stream_ != nullptr) { - AudioReceiveStream::Stats receive_stats = audio_receive_stream_->GetStats(); + AudioReceiveStream::Stats receive_stats = + audio_receive_stream_->GetStats(/*get_and_clear_legacy_stats=*/true); audio_expand_rate_.AddSample(receive_stats.expand_rate); audio_accelerate_rate_.AddSample(receive_stats.accelerate_rate); audio_jitter_buffer_ms_.AddSample(receive_stats.jitter_buffer_ms); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h index c2401d282..18bacc16f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h @@ -35,7 +35,7 @@ class VideoAnalyzer : public PacketReceiver, public Transport, public rtc::VideoSinkInterface { public: - using Statistics = RunningStatistics; + using Statistics = webrtc_impl::RunningStatistics; 
VideoAnalyzer(test::LayerFilteringTransport* transport, const std::string& test_label, diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc index d2f3f9b5b..a58aa1f33 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc @@ -238,8 +238,7 @@ class QualityTestVideoEncoder : public VideoEncoder, private: // Implement EncodedImageCallback Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { + const CodecSpecificInfo* codec_specific_info) override { if (codec_specific_info) { int simulcast_index; if (codec_specific_info->codecType == kVideoCodecVP9) { @@ -258,8 +257,7 @@ class QualityTestVideoEncoder : public VideoEncoder, } } - return callback_->OnEncodedImage(encoded_image, codec_specific_info, - fragmentation); + return callback_->OnEncodedImage(encoded_image, codec_specific_info); } void OnDroppedFrame(DropReason reason) override { @@ -864,6 +862,10 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, video_encoder_configs_[video_idx].encoder_specific_settings = new rtc::RefCountedObject< VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(), + 1); + // Min bitrate will be enforced by spatial layer config instead. 
+ video_encoder_configs_[video_idx].simulcast_layers[0].min_bitrate_bps = 0; } else if (params_.video[video_idx].automatic_scaling) { if (params_.video[video_idx].codec == "VP8") { VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); @@ -1106,6 +1108,11 @@ void VideoQualityTest::CreateCapturers() { static_cast(params_.video[video_idx].width), static_cast(params_.video[video_idx].height), test::FrameGeneratorInterface::OutputType::kI010, absl::nullopt); + } else if (params_.video[video_idx].clip_path == "GeneratorNV12") { + frame_generator = test::CreateSquareFrameGenerator( + static_cast(params_.video[video_idx].width), + static_cast(params_.video[video_idx].height), + test::FrameGeneratorInterface::OutputType::kNV12, absl::nullopt); } else if (params_.video[video_idx].clip_path.empty()) { video_sources_[video_idx] = test::CreateVideoCapturer( params_.video[video_idx].width, params_.video[video_idx].height, @@ -1341,8 +1348,8 @@ rtc::scoped_refptr VideoQualityTest::CreateAudioDevice() { // CO_E_NOTINITIALIZED otherwise. The legacy ADM for Windows used internal // COM initialization but the new ADM requires COM to be initialized // externally. 
- com_initializer_ = std::make_unique( - webrtc_win::ScopedCOMInitializer::kMTA); + com_initializer_ = + std::make_unique(ScopedCOMInitializer::kMTA); RTC_CHECK(com_initializer_->Succeeded()); RTC_CHECK(webrtc_win::core_audio_utility::IsSupported()); RTC_CHECK(webrtc_win::core_audio_utility::IsMMCSSSupported()); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h index 217783079..f49ce385b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h @@ -30,6 +30,7 @@ #include "video/video_analyzer.h" #ifdef WEBRTC_WIN #include "modules/audio_device/win/core_audio_utility_win.h" +#include "rtc_base/win/scoped_com_initializer.h" #endif namespace webrtc { @@ -137,7 +138,7 @@ class VideoQualityTest : public test::CallTest, #ifdef WEBRTC_WIN // Windows Core Audio based ADM needs to run on a COM initialized thread. // Only referenced in combination with --audio --use_real_adm flags. 
- std::unique_ptr com_initializer_; + std::unique_ptr com_initializer_; #endif }; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc index c2769cc93..418901131 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc @@ -115,8 +115,6 @@ class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { VideoCodec codec; - memset(&codec, 0, sizeof(codec)); - codec.codecType = PayloadStringToCodecType(decoder.video_format.name); if (codec.codecType == kVideoCodecVP8) { @@ -567,7 +565,7 @@ void VideoReceiveStream::OnCompleteFrame( } last_complete_frame_time_ms_ = time_now_ms; - const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; + const VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; if (playout_delay.min_ms >= 0) { MutexLock lock(&playout_delay_lock_); frame_minimum_playout_delay_ms_ = playout_delay.min_ms; @@ -620,11 +618,12 @@ void VideoReceiveStream::SetEstimatedPlayoutNtpTimestampMs( RTC_NOTREACHED(); } -void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { +bool VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); MutexLock lock(&playout_delay_lock_); syncable_minimum_playout_delay_ms_ = delay_ms; UpdatePlayoutDelays(); + return true; } int64_t VideoReceiveStream::GetWaitMs() const { diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h index 57329f492..5fb9cf72d 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h @@ -37,7 +37,6 @@ namespace webrtc { class CallStats; class ProcessThread; -class RTPFragmentationHeader; class 
RtpStreamReceiverInterface; class RtpStreamReceiverControllerInterface; class RtxReceiveStream; @@ -127,7 +126,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, int64_t time_ms) override; // SetMinimumPlayoutDelay is only called by A/V sync. - void SetMinimumPlayoutDelay(int delay_ms) override; + bool SetMinimumPlayoutDelay(int delay_ms) override; std::vector GetSources() const override; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc index 9e4c2ad76..8cc14e57c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc @@ -113,8 +113,6 @@ class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { VideoCodec codec; - memset(&codec, 0, sizeof(codec)); - codec.codecType = PayloadStringToCodecType(decoder.video_format.name); if (codec.codecType == kVideoCodecVP8) { @@ -132,8 +130,19 @@ VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { return associated_codec; } - codec.width = 320; - codec.height = 180; + FieldTrialOptional width("w"); + FieldTrialOptional height("h"); + ParseFieldTrial( + {&width, &height}, + field_trial::FindFullName("WebRTC-Video-InitialDecoderResolution")); + if (width && height) { + codec.width = width.Value(); + codec.height = height.Value(); + } else { + codec.width = 320; + codec.height = 180; + } + const int kDefaultStartBitrate = 300; codec.startBitrate = codec.minBitrate = codec.maxBitrate = kDefaultStartBitrate; @@ -222,6 +231,9 @@ VideoReceiveStream2::VideoReceiveStream2( max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() .MaxWaitForFrameMs() .value_or(kMaxWaitForFrameMs)), + low_latency_renderer_enabled_("enabled", true), + low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", + true), 
decode_queue_(task_queue_factory_->CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { @@ -262,6 +274,10 @@ VideoReceiveStream2::VideoReceiveStream2( rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc, true); } + + ParseFieldTrial({&low_latency_renderer_enabled_, + &low_latency_renderer_include_predecode_buffer_}, + field_trial::FindFullName("WebRTC-LowLatencyRenderer")); } VideoReceiveStream2::~VideoReceiveStream2() { @@ -544,7 +560,7 @@ void VideoReceiveStream2::OnCompleteFrame( } last_complete_frame_time_ms_ = time_now_ms; - const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; + const VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; if (playout_delay.min_ms >= 0) { frame_minimum_playout_delay_ms_ = playout_delay.min_ms; UpdatePlayoutDelays(); @@ -596,10 +612,11 @@ void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs( RTC_NOTREACHED(); } -void VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) { +bool VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); syncable_minimum_playout_delay_ms_ = delay_ms; UpdatePlayoutDelays(); + return true; } int64_t VideoReceiveStream2::GetMaxWaitMs() const { @@ -768,6 +785,22 @@ void VideoReceiveStream2::UpdatePlayoutDelays() const { syncable_minimum_playout_delay_ms_}); if (minimum_delay_ms >= 0) { timing_->set_min_playout_delay(minimum_delay_ms); + if (frame_minimum_playout_delay_ms_ == 0 && + frame_maximum_playout_delay_ms_ > 0 && low_latency_renderer_enabled_) { + // TODO(kron): Estimate frame rate from video stream. + constexpr double kFrameRate = 60.0; + // Convert playout delay in ms to number of frames. + int max_composition_delay_in_frames = std::lrint( + static_cast(frame_maximum_playout_delay_ms_ * kFrameRate) / + rtc::kNumMillisecsPerSec); + if (low_latency_renderer_include_predecode_buffer_) { + // Subtract frames in buffer. 
+ max_composition_delay_in_frames = std::max( + max_composition_delay_in_frames - frame_buffer_->Size(), 0); + } + timing_->SetMaxCompositionDelayInFrames( + absl::make_optional(max_composition_delay_in_frames)); + } } const int maximum_delay_ms = frame_maximum_playout_delay_ms_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h index 71b336e58..e8e3edc3d 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h @@ -37,7 +37,6 @@ namespace webrtc { class ProcessThread; -class RTPFragmentationHeader; class RtpStreamReceiverInterface; class RtpStreamReceiverControllerInterface; class RtxReceiveStream; @@ -149,7 +148,7 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, int64_t time_ms) override; // SetMinimumPlayoutDelay is only called by A/V sync. - void SetMinimumPlayoutDelay(int delay_ms) override; + bool SetMinimumPlayoutDelay(int delay_ms) override; std::vector GetSources() const override; @@ -258,6 +257,16 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, bool keyframe_generation_requested_ RTC_GUARDED_BY(worker_sequence_checker_) = false; + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter |enabled| + // determines if the low-latency renderer algorithm should be used for the + // case min playout delay=0 and max playout delay>0. + FieldTrialParameter low_latency_renderer_enabled_; + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter + // |include_predecode_buffer| determines if the predecode buffer should be + // taken into account when calculating maximum number of frames in composition + // queue. + FieldTrialParameter low_latency_renderer_include_predecode_buffer_; + // Defined last so they are destroyed before all other members. 
rtc::TaskQueue decode_queue_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc index 30ed86dbd..d6e1b6bbf 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc @@ -29,8 +29,6 @@ namespace webrtc { namespace { -constexpr char kTargetBitrateRtcpFieldTrial[] = "WebRTC-Target-Bitrate-Rtcp"; - size_t CalculateMaxHeaderSize(const RtpConfig& config) { size_t header_size = kRtpHeaderSize; size_t extensions_size = 0; @@ -113,13 +111,6 @@ VideoSendStream::VideoSendStream( // it was created on. thread_sync_event_.Wait(rtc::Event::kForever); send_stream_->RegisterProcessThread(module_process_thread); - // TODO(sprang): Enable this also for regular video calls by default, if it - // works well. - if (encoder_config.content_type == VideoEncoderConfig::ContentType::kScreen || - field_trial::IsEnabled(kTargetBitrateRtcpFieldTrial)) { - video_stream_encoder_->SetBitrateAllocationObserver(send_stream_.get()); - } - ReconfigureVideoEncoder(std::move(encoder_config)); } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc index 712af87a0..ee4301862 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc @@ -49,6 +49,13 @@ static constexpr int64_t kMaxVbaThrottleTimeMs = 500; constexpr TimeDelta kEncoderTimeOut = TimeDelta::Seconds(2); +// When send-side BWE is used a stricter 1.1x pacing factor is used, rather than +// the 2.5x which is used with receive-side BWE. Provides a more careful +// bandwidth rampup with less risk of overshoots causing adverse effects like +// packet loss. Not used for receive side BWE, since there we lack the probing +// feature and so may result in too slow initial rampup. 
+static constexpr double kStrictPacingMultiplier = 1.1; + bool TransportSeqNumExtensionConfigured(const VideoSendStream::Config& config) { const std::vector& extensions = config.rtp.extensions; return absl::c_any_of(extensions, [](const RtpExtension& ext) { @@ -175,7 +182,7 @@ bool SameStreamsEnabled(const VideoBitrateAllocation& lhs, } // namespace PacingConfig::PacingConfig() - : pacing_factor("factor", PacedSender::kDefaultPaceMultiplier), + : pacing_factor("factor", kStrictPacingMultiplier), max_pacing_delay("max_delay", TimeDelta::Millis(PacedSender::kMaxQueueLengthMs)) { ParseFieldTrial({&pacing_factor, &max_pacing_delay}, @@ -300,17 +307,6 @@ VideoSendStreamImpl::VideoSendStreamImpl( video_stream_encoder_->SetStartBitrate( bitrate_allocator_->GetStartBitrate(this)); - - // Only request rotation at the source when we positively know that the remote - // side doesn't support the rotation extension. This allows us to prepare the - // encoder in the expectation that rotation is supported - which is the common - // case. - bool rotation_applied = absl::c_none_of( - config_->rtp.extensions, [](const RtpExtension& extension) { - return extension.uri == RtpExtension::kVideoRotationUri; - }); - - video_stream_encoder_->SetSink(this, rotation_applied); } VideoSendStreamImpl::~VideoSendStreamImpl() { @@ -323,6 +319,21 @@ VideoSendStreamImpl::~VideoSendStreamImpl() { void VideoSendStreamImpl::RegisterProcessThread( ProcessThread* module_process_thread) { + // Called on libjingle's worker thread (not worker_queue_), as part of the + // initialization steps. That's also the correct thread/queue for setting the + // state for |video_stream_encoder_|. + + // Only request rotation at the source when we positively know that the remote + // side doesn't support the rotation extension. This allows us to prepare the + // encoder in the expectation that rotation is supported - which is the common + // case. 
+ bool rotation_applied = absl::c_none_of( + config_->rtp.extensions, [](const RtpExtension& extension) { + return extension.uri == RtpExtension::kVideoRotationUri; + }); + + video_stream_encoder_->SetSink(this, rotation_applied); + rtp_video_sender_->RegisterProcessThread(module_process_thread); } @@ -468,6 +479,20 @@ void VideoSendStreamImpl::OnBitrateAllocationUpdated( } } +void VideoSendStreamImpl::OnVideoLayersAllocationUpdated( + VideoLayersAllocation allocation) { + if (!worker_queue_->IsCurrent()) { + auto ptr = weak_ptr_; + worker_queue_->PostTask([allocation = std::move(allocation), ptr] { + if (!ptr.get()) + return; + ptr->OnVideoLayersAllocationUpdated(allocation); + }); + return; + } + rtp_video_sender_->OnVideoLayersAllocationUpdated(allocation); +} + void VideoSendStreamImpl::SignalEncoderActive() { RTC_DCHECK_RUN_ON(worker_queue_); if (rtp_video_sender_->IsActive()) { @@ -558,8 +583,7 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codec_specific_info) { // Encoded is called on whatever thread the real encoder implementation run // on. In the case of hardware encoders, there might be several encoders // running in parallel on different threads. @@ -582,8 +606,8 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( } EncodedImageCallback::Result result(EncodedImageCallback::Result::OK); - result = rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info, - fragmentation); + result = + rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info); // Check if there's a throttled VideoBitrateAllocation that we should try // sending. 
rtc::WeakPtr send_stream = weak_ptr_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h index 834fed469..41a7859a7 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h @@ -67,8 +67,7 @@ struct PacingConfig { // An encoder may deliver frames through the EncodedImageCallback on an // arbitrary thread. class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, - public VideoStreamEncoderInterface::EncoderSink, - public VideoBitrateAllocationObserver { + public VideoStreamEncoderInterface::EncoderSink { public: VideoSendStreamImpl( Clock* clock, @@ -113,27 +112,28 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, // Implements BitrateAllocatorObserver. uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) override; + // Implements VideoStreamEncoderInterface::EncoderSink void OnEncoderConfigurationChanged( std::vector streams, bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps) override; + void OnBitrateAllocationUpdated( + const VideoBitrateAllocation& allocation) override; + void OnVideoLayersAllocationUpdated( + VideoLayersAllocation allocation) override; + // Implements EncodedImageCallback. The implementation routes encoded frames // to the |payload_router_| and |config.pre_encode_callback| if set. // Called on an arbitrary encoder callback thread. EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; + const CodecSpecificInfo* codec_specific_info) override; // Implements EncodedImageCallback. void OnDroppedFrame(EncodedImageCallback::DropReason reason) override; - // Implements VideoBitrateAllocationObserver. 
- void OnBitrateAllocationUpdated( - const VideoBitrateAllocation& allocation) override; - // Starts monitoring and sends a keyframe. void StartupVideoSendStream(); // Removes the bitrate observer, stops monitoring and notifies the video diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc index 64a1a5571..52e4ddbc4 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc @@ -1276,180 +1276,6 @@ TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSizeWithFec) { TestPacketFragmentationSize(kVP8, true); } -// The test will go through a number of phases. -// 1. Start sending packets. -// 2. As soon as the RTP stream has been detected, signal a low REMB value to -// suspend the stream. -// 3. Wait until |kSuspendTimeFrames| have been captured without seeing any RTP -// packets. -// 4. Signal a high REMB and then wait for the RTP stream to start again. -// When the stream is detected again, and the stats show that the stream -// is no longer suspended, the test ends. -TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) { - static const int kSuspendTimeFrames = 60; // Suspend for 2 seconds @ 30 fps. 
- - class RembObserver : public test::SendTest { - public: - class CaptureObserver : public rtc::VideoSinkInterface { - public: - explicit CaptureObserver(RembObserver* remb_observer) - : remb_observer_(remb_observer) {} - - void OnFrame(const VideoFrame&) { - MutexLock lock(&remb_observer_->mutex_); - if (remb_observer_->test_state_ == kDuringSuspend && - ++remb_observer_->suspended_frame_count_ > kSuspendTimeFrames) { - VideoSendStream::Stats stats = remb_observer_->stream_->GetStats(); - EXPECT_TRUE(stats.suspended); - remb_observer_->SendRtcpFeedback(remb_observer_->high_remb_bps_); - remb_observer_->test_state_ = kWaitingForPacket; - } - } - - private: - RembObserver* const remb_observer_; - }; - - RembObserver() - : SendTest(kDefaultTimeoutMs), - clock_(Clock::GetRealTimeClock()), - capture_observer_(this), - stream_(nullptr), - test_state_(kBeforeSuspend), - rtp_count_(0), - last_sequence_number_(0), - suspended_frame_count_(0), - low_remb_bps_(0), - high_remb_bps_(0) {} - - private: - Action OnSendRtp(const uint8_t* packet, size_t length) override { - MutexLock lock(&mutex_); - ++rtp_count_; - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet, length)); - last_sequence_number_ = rtp_packet.SequenceNumber(); - - if (test_state_ == kBeforeSuspend) { - // The stream has started. Try to suspend it. - SendRtcpFeedback(low_remb_bps_); - test_state_ = kDuringSuspend; - } else if (test_state_ == kDuringSuspend) { - if (rtp_packet.padding_size() == 0) { - // Received non-padding packet during suspension period. Reset the - // counter. - suspended_frame_count_ = 0; - } - SendRtcpFeedback(0); // REMB is only sent if value is > 0. - } else if (test_state_ == kWaitingForPacket) { - if (rtp_packet.padding_size() == 0) { - // Non-padding packet observed. Test is almost complete. Will just - // have to wait for the stats to change. - test_state_ = kWaitingForStats; - } - SendRtcpFeedback(0); // REMB is only sent if value is > 0. 
- } else if (test_state_ == kWaitingForStats) { - VideoSendStream::Stats stats = stream_->GetStats(); - if (stats.suspended == false) { - // Stats flipped to false. Test is complete. - observation_complete_.Set(); - } - SendRtcpFeedback(0); // REMB is only sent if value is > 0. - } - - return SEND_PACKET; - } - - void set_low_remb_bps(int value) { - MutexLock lock(&mutex_); - low_remb_bps_ = value; - } - - void set_high_remb_bps(int value) { - MutexLock lock(&mutex_); - high_remb_bps_ = value; - } - - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { - stream_ = send_stream; - } - - void OnFrameGeneratorCapturerCreated( - test::FrameGeneratorCapturer* frame_generator_capturer) override { - frame_generator_capturer->AddOrUpdateSink(&capture_observer_, - rtc::VideoSinkWants()); - } - - void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, - VideoEncoderConfig* encoder_config) override { - RTC_DCHECK_EQ(1, encoder_config->number_of_streams); - transport_adapter_.reset( - new internal::TransportAdapter(send_config->send_transport)); - transport_adapter_->Enable(); - send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs; - send_config->suspend_below_min_bitrate = true; - int min_bitrate_bps = - test::DefaultVideoStreamFactory::kDefaultMinBitratePerStream[0]; - set_low_remb_bps(min_bitrate_bps - 10000); - int threshold_window = std::max(min_bitrate_bps / 10, 20000); - ASSERT_GT(encoder_config->max_bitrate_bps, - min_bitrate_bps + threshold_window + 5000); - set_high_remb_bps(min_bitrate_bps + threshold_window + 5000); - } - - void PerformTest() override { - EXPECT_TRUE(Wait()) << "Timed out during suspend-below-min-bitrate test."; - } - - enum TestState { - kBeforeSuspend, - kDuringSuspend, - kWaitingForPacket, - kWaitingForStats - }; - - virtual void SendRtcpFeedback(int remb_value) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) { - FakeReceiveStatistics 
receive_stats(kVideoSendSsrcs[0], - last_sequence_number_, rtp_count_, 0); - RtpRtcpInterface::Configuration config; - config.clock = clock_; - config.receive_statistics = &receive_stats; - config.outgoing_transport = transport_adapter_.get(); - config.rtcp_report_interval_ms = kRtcpIntervalMs; - config.local_media_ssrc = kVideoSendSsrcs[0]; - RTCPSender rtcp_sender(config); - - rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize); - rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]); - if (remb_value > 0) { - rtcp_sender.SetRemb(remb_value, std::vector()); - } - RTCPSender::FeedbackState feedback_state; - EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr)); - } - - std::unique_ptr transport_adapter_; - Clock* const clock_; - CaptureObserver capture_observer_; - VideoSendStream* stream_; - - Mutex mutex_; - TestState test_state_ RTC_GUARDED_BY(mutex_); - int rtp_count_ RTC_GUARDED_BY(mutex_); - int last_sequence_number_ RTC_GUARDED_BY(mutex_); - int suspended_frame_count_ RTC_GUARDED_BY(mutex_); - int low_remb_bps_ RTC_GUARDED_BY(mutex_); - int high_remb_bps_ RTC_GUARDED_BY(mutex_); - } test; - - RunBaseTest(&test); -} - // This test that padding stops being send after a while if the Camera stops // producing video frames and that padding resumes if the camera restarts. TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { @@ -2935,12 +2761,13 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { static const int kMaxBitrateKbps = 413; static const int kIncreasedStartBitrateKbps = 451; static const int kIncreasedMaxBitrateKbps = 597; - // If these fields trial are on, we get lower bitrates than expected by this - // test, due to the packetization overhead and encoder pushback. + // TODO(bugs.webrtc.org/12058): If these fields trial are on, we get lower + // bitrates than expected by this test, due to encoder pushback and subtracted + // overhead. 
webrtc::test::ScopedFieldTrials field_trials( std::string(field_trial::GetFieldTrialString()) + - "WebRTC-SubtractPacketizationOverhead/Disabled/" - "WebRTC-VideoRateControl/bitrate_adjuster:false/"); + "WebRTC-VideoRateControl/bitrate_adjuster:false/" + "WebRTC-SendSideBwe-WithOverhead/Disabled/"); class EncoderBitrateThresholdObserver : public test::SendTest, public VideoBitrateAllocatorFactory, @@ -3150,8 +2977,10 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { CodecSpecificInfo specifics; specifics.codecType = kVideoCodecGeneric; - uint8_t buffer[16] = {0}; - EncodedImage encoded(buffer, sizeof(buffer), sizeof(buffer)); + EncodedImage encoded; + auto buffer = EncodedImageBuffer::Create(16); + memset(buffer->data(), 0, 16); + encoded.SetEncodedData(buffer); encoded.SetTimestamp(input_image.timestamp()); encoded.capture_time_ms_ = input_image.render_time_ms(); @@ -3166,7 +2995,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { callback = callback_; } RTC_DCHECK(callback); - if (callback->OnEncodedImage(encoded, &specifics, nullptr).error != + if (callback->OnEncodedImage(encoded, &specifics).error != EncodedImageCallback::Result::OK) { return -1; } @@ -4020,7 +3849,7 @@ class ContentSwitchTest : public test::SendTest { auto internal_send_peer = test::VideoSendStreamPeer(send_stream_); float pacing_factor = internal_send_peer.GetPacingFactorOverride().value_or(0.0f); - float expected_pacing_factor = PacedSender::kDefaultPaceMultiplier; + float expected_pacing_factor = 1.1; // Strict pacing factor. if (send_stream_->GetStats().content_type == webrtc::VideoContentType::SCREENSHARE) { expected_pacing_factor = 1.0f; // Currently used pacing factor in ALR. 
diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc index a5c0941e0..376eb85ea 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc @@ -43,25 +43,33 @@ VideoSourceSinkController::VideoSourceSinkController( RTC_DCHECK(sink_); } +VideoSourceSinkController::~VideoSourceSinkController() { + RTC_DCHECK_RUN_ON(&sequence_checker_); +} + void VideoSourceSinkController::SetSource( rtc::VideoSourceInterface* source) { - rtc::VideoSourceInterface* old_source; - rtc::VideoSinkWants wants; - { - MutexLock lock(&mutex_); - old_source = source_; - source_ = source; - wants = CurrentSettingsToSinkWants(); - } + RTC_DCHECK_RUN_ON(&sequence_checker_); + + rtc::VideoSourceInterface* old_source = source_; + source_ = source; + if (old_source != source && old_source) old_source->RemoveSink(sink_); + if (!source) return; - source->AddOrUpdateSink(sink_, wants); + + source->AddOrUpdateSink(sink_, CurrentSettingsToSinkWants()); +} + +bool VideoSourceSinkController::HasSource() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return source_ != nullptr; } void VideoSourceSinkController::PushSourceSinkSettings() { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); if (!source_) return; rtc::VideoSinkWants wants = CurrentSettingsToSinkWants(); @@ -70,62 +78,62 @@ void VideoSourceSinkController::PushSourceSinkSettings() { } VideoSourceRestrictions VideoSourceSinkController::restrictions() const { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return restrictions_; } absl::optional VideoSourceSinkController::pixels_per_frame_upper_limit() const { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return pixels_per_frame_upper_limit_; } absl::optional VideoSourceSinkController::frame_rate_upper_limit() const { - MutexLock lock(&mutex_); + 
RTC_DCHECK_RUN_ON(&sequence_checker_); return frame_rate_upper_limit_; } bool VideoSourceSinkController::rotation_applied() const { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return rotation_applied_; } int VideoSourceSinkController::resolution_alignment() const { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return resolution_alignment_; } void VideoSourceSinkController::SetRestrictions( VideoSourceRestrictions restrictions) { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); restrictions_ = std::move(restrictions); } void VideoSourceSinkController::SetPixelsPerFrameUpperLimit( absl::optional pixels_per_frame_upper_limit) { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); pixels_per_frame_upper_limit_ = std::move(pixels_per_frame_upper_limit); } void VideoSourceSinkController::SetFrameRateUpperLimit( absl::optional frame_rate_upper_limit) { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); frame_rate_upper_limit_ = std::move(frame_rate_upper_limit); } void VideoSourceSinkController::SetRotationApplied(bool rotation_applied) { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); rotation_applied_ = rotation_applied; } void VideoSourceSinkController::SetResolutionAlignment( int resolution_alignment) { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); resolution_alignment_ = resolution_alignment; } -// RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) +// RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_) rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() const { rtc::VideoSinkWants wants; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h index 877cf8590..ed8f99097 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h @@ -18,7 
+18,7 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "call/adaptation/video_source_restrictions.h" -#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { @@ -31,7 +31,11 @@ class VideoSourceSinkController { VideoSourceSinkController(rtc::VideoSinkInterface* sink, rtc::VideoSourceInterface* source); + ~VideoSourceSinkController(); + void SetSource(rtc::VideoSourceInterface* source); + bool HasSource() const; + // Must be called in order for changes to settings to have an effect. This // allows you to modify multiple properties in a single push to the sink. void PushSourceSinkSettings(); @@ -53,20 +57,27 @@ class VideoSourceSinkController { private: rtc::VideoSinkWants CurrentSettingsToSinkWants() const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_); + + // Used to ensure that this class is called on threads/sequences that it and + // downstream implementations were designed for. + // In practice, this represent's libjingle's worker thread. + SequenceChecker sequence_checker_; - mutable Mutex mutex_; rtc::VideoSinkInterface* const sink_; - rtc::VideoSourceInterface* source_ RTC_GUARDED_BY(&mutex_); + rtc::VideoSourceInterface* source_ + RTC_GUARDED_BY(&sequence_checker_); // Pixel and frame rate restrictions. - VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&mutex_); + VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_); // Ensures that even if we are not restricted, the sink is never configured // above this limit. Example: We are not CPU limited (no |restrictions_|) but // our encoder is capped at 30 fps (= |frame_rate_upper_limit_|). 
- absl::optional pixels_per_frame_upper_limit_ RTC_GUARDED_BY(&mutex_); - absl::optional frame_rate_upper_limit_ RTC_GUARDED_BY(&mutex_); - bool rotation_applied_ RTC_GUARDED_BY(&mutex_) = false; - int resolution_alignment_ RTC_GUARDED_BY(&mutex_) = 1; + absl::optional pixels_per_frame_upper_limit_ + RTC_GUARDED_BY(&sequence_checker_); + absl::optional frame_rate_upper_limit_ + RTC_GUARDED_BY(&sequence_checker_); + bool rotation_applied_ RTC_GUARDED_BY(&sequence_checker_) = false; + int resolution_alignment_ RTC_GUARDED_BY(&sequence_checker_) = 1; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc index 02ba45e25..f5b0f5f78 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc @@ -26,7 +26,7 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl( std::map> decoder_settings) : timing_(Clock::GetRealTimeClock()), decode_callbacks_(this), - next_frame_timestamps_index_(0), + next_frame_info_index_(0), callbacks_(callbacks), keyframe_required_(true), decoder_factory_(decoder_factory), @@ -39,7 +39,6 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl( decode_queue_(task_queue_factory->CreateTaskQueue( "video_stream_decoder_decode_queue", TaskQueueFactory::Priority::NORMAL)) { - frame_timestamps_.fill({-1, -1, -1}); bookkeeping_queue_.PostTask([this]() { RTC_DCHECK_RUN_ON(&bookkeeping_queue_); StartNextDecode(); @@ -125,16 +124,15 @@ VideoDecoder* VideoStreamDecoderImpl::GetDecoder(int payload_type) { return decoder_.get(); } -void VideoStreamDecoderImpl::SaveFrameTimestamps( +void VideoStreamDecoderImpl::SaveFrameInfo( const video_coding::EncodedFrame& frame) { - FrameTimestamps* frame_timestamps = - &frame_timestamps_[next_frame_timestamps_index_]; - frame_timestamps->timestamp = frame.Timestamp(); - frame_timestamps->decode_start_time_ms = rtc::TimeMillis(); - 
frame_timestamps->render_time_us = frame.RenderTimeMs() * 1000; + FrameInfo* frame_info = &frame_info_[next_frame_info_index_]; + frame_info->timestamp = frame.Timestamp(); + frame_info->decode_start_time_ms = rtc::TimeMillis(); + frame_info->render_time_us = frame.RenderTimeMs() * 1000; + frame_info->content_type = frame.EncodedImage().content_type_; - next_frame_timestamps_index_ = - Add(next_frame_timestamps_index_, 1); + next_frame_info_index_ = Add(next_frame_info_index_, 1); } void VideoStreamDecoderImpl::StartNextDecode() { @@ -155,7 +153,7 @@ void VideoStreamDecoderImpl::OnNextFrameCallback( switch (result) { case video_coding::FrameBuffer::kFrameFound: { RTC_DCHECK(frame); - SaveFrameTimestamps(*frame); + SaveFrameInfo(*frame); MutexLock lock(&shut_down_mutex_); if (shut_down_) { @@ -230,14 +228,14 @@ VideoStreamDecoderImpl::DecodeResult VideoStreamDecoderImpl::DecodeFrame( } } -VideoStreamDecoderImpl::FrameTimestamps* -VideoStreamDecoderImpl::GetFrameTimestamps(int64_t timestamp) { - int start_time_index = next_frame_timestamps_index_; - for (int i = 0; i < kFrameTimestampsMemory; ++i) { - start_time_index = Subtract(start_time_index, 1); +VideoStreamDecoderImpl::FrameInfo* VideoStreamDecoderImpl::GetFrameInfo( + int64_t timestamp) { + int start_time_index = next_frame_info_index_; + for (int i = 0; i < kFrameInfoMemory; ++i) { + start_time_index = Subtract(start_time_index, 1); - if (frame_timestamps_[start_time_index].timestamp == timestamp) - return &frame_timestamps_[start_time_index]; + if (frame_info_[start_time_index].timestamp == timestamp) + return &frame_info_[start_time_index]; } return nullptr; @@ -250,29 +248,33 @@ void VideoStreamDecoderImpl::OnDecodedFrameCallback( int64_t decode_stop_time_ms = rtc::TimeMillis(); bookkeeping_queue_.PostTask([this, decode_stop_time_ms, decoded_image, - decode_time_ms, qp]() { + decode_time_ms, qp]() mutable { RTC_DCHECK_RUN_ON(&bookkeeping_queue_); - FrameTimestamps* frame_timestamps = - 
GetFrameTimestamps(decoded_image.timestamp()); - if (!frame_timestamps) { + FrameInfo* frame_info = GetFrameInfo(decoded_image.timestamp()); + if (!frame_info) { RTC_LOG(LS_ERROR) << "No frame information found for frame with timestamp" << decoded_image.timestamp(); return; } - absl::optional casted_qp; + Callbacks::FrameInfo callback_info; + callback_info.content_type = frame_info->content_type; + if (qp) - casted_qp.emplace(*qp); + callback_info.qp.emplace(*qp); - absl::optional casted_decode_time_ms(decode_time_ms.value_or( - decode_stop_time_ms - frame_timestamps->decode_start_time_ms)); + if (!decode_time_ms) { + decode_time_ms = decode_stop_time_ms - frame_info->decode_start_time_ms; + } + decoded_image.set_processing_time( + {Timestamp::Millis(frame_info->decode_start_time_ms), + Timestamp::Millis(frame_info->decode_start_time_ms + + *decode_time_ms)}); + decoded_image.set_timestamp_us(frame_info->render_time_us); + timing_.StopDecodeTimer(*decode_time_ms, decode_stop_time_ms); - timing_.StopDecodeTimer(*casted_decode_time_ms, decode_stop_time_ms); - - VideoFrame copy = decoded_image; - copy.set_timestamp_us(frame_timestamps->render_time_us); - callbacks_->OnDecodedFrame(copy, casted_decode_time_ms, casted_qp); + callbacks_->OnDecodedFrame(decoded_image, callback_info); }); } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h index 2f33e9d34..69a819505 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h @@ -62,16 +62,16 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { kDecodeFailure, }; - struct FrameTimestamps { - int64_t timestamp; + struct FrameInfo { + int64_t timestamp = -1; int64_t decode_start_time_ms; int64_t render_time_us; + VideoContentType content_type; }; - void SaveFrameTimestamps(const video_coding::EncodedFrame& frame) - 
RTC_RUN_ON(bookkeeping_queue_); - FrameTimestamps* GetFrameTimestamps(int64_t timestamp) + void SaveFrameInfo(const video_coding::EncodedFrame& frame) RTC_RUN_ON(bookkeeping_queue_); + FrameInfo* GetFrameInfo(int64_t timestamp) RTC_RUN_ON(bookkeeping_queue_); void StartNextDecode() RTC_RUN_ON(bookkeeping_queue_); void OnNextFrameCallback(std::unique_ptr frame, video_coding::FrameBuffer::ReturnReason res) @@ -90,10 +90,10 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { // Some decoders are pipelined so it is not sufficient to save frame info // for the last frame only. - static constexpr int kFrameTimestampsMemory = 8; - std::array frame_timestamps_ + static constexpr int kFrameInfoMemory = 8; + std::array frame_info_ RTC_GUARDED_BY(bookkeeping_queue_); - int next_frame_timestamps_index_ RTC_GUARDED_BY(bookkeeping_queue_); + int next_frame_info_index_ RTC_GUARDED_BY(bookkeeping_queue_); VideoStreamDecoderInterface::Callbacks* const callbacks_ RTC_PT_GUARDED_BY(bookkeeping_queue_); video_coding::VideoLayerFrameId last_continuous_id_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc index dfd7bf49a..b3bef4901 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc @@ -26,6 +26,7 @@ #include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_constants.h" +#include "api/video/video_layers_allocation.h" #include "api/video_codecs/video_encoder.h" #include "call/adaptation/resource_adaptation_processor.h" #include "call/adaptation/video_stream_adapter.h" @@ -34,18 +35,18 @@ #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" +#include "rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/rate_control_settings.h" 
#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/field_trial.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" +#include "video/alignment_adjuster.h" namespace webrtc { @@ -122,21 +123,44 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, } for (unsigned char i = 0; i < new_send_codec.numberOfSimulcastStreams; ++i) { - if (new_send_codec.simulcastStream[i].width != + if (!new_send_codec.simulcastStream[i].active) { + // No need to reset when stream is inactive. + continue; + } + + if (!prev_send_codec.simulcastStream[i].active || + new_send_codec.simulcastStream[i].width != prev_send_codec.simulcastStream[i].width || new_send_codec.simulcastStream[i].height != prev_send_codec.simulcastStream[i].height || - new_send_codec.simulcastStream[i].maxFramerate != - prev_send_codec.simulcastStream[i].maxFramerate || new_send_codec.simulcastStream[i].numberOfTemporalLayers != prev_send_codec.simulcastStream[i].numberOfTemporalLayers || new_send_codec.simulcastStream[i].qpMax != - prev_send_codec.simulcastStream[i].qpMax || - new_send_codec.simulcastStream[i].active != - prev_send_codec.simulcastStream[i].active) { + prev_send_codec.simulcastStream[i].qpMax) { return true; } } + + if (new_send_codec.codecType == kVideoCodecVP9) { + size_t num_spatial_layers = new_send_codec.VP9().numberOfSpatialLayers; + for (unsigned char i = 0; i < num_spatial_layers; ++i) { + if (new_send_codec.spatialLayers[i].width != + prev_send_codec.spatialLayers[i].width || + new_send_codec.spatialLayers[i].height != + prev_send_codec.spatialLayers[i].height || + new_send_codec.spatialLayers[i].numberOfTemporalLayers != + 
prev_send_codec.spatialLayers[i].numberOfTemporalLayers || + new_send_codec.spatialLayers[i].qpMax != + prev_send_codec.spatialLayers[i].qpMax) { + return true; + } + } + } + + if (new_send_codec.ScalabilityMode() != prev_send_codec.ScalabilityMode()) { + return true; + } + return false; } @@ -186,6 +210,73 @@ VideoBitrateAllocation UpdateAllocationFromEncoderInfo( return new_allocation; } +// Converts a VideoBitrateAllocation that contains allocated bitrate per layer, +// and an EncoderInfo that contains information about the actual encoder +// structure used by a codec. Stream structures can be Ksvc, Full SVC, Simulcast +// etc. +VideoLayersAllocation CreateVideoLayersAllocation( + const VideoCodec& encoder_config, + const VideoEncoder::RateControlParameters& current_rate, + const VideoEncoder::EncoderInfo& encoder_info) { + const VideoBitrateAllocation& target_bitrate = current_rate.target_bitrate; + VideoLayersAllocation layers_allocation; + if (target_bitrate.get_sum_bps() == 0) { + return layers_allocation; + } + + if (encoder_config.numberOfSimulcastStreams > 0) { + layers_allocation.resolution_and_frame_rate_is_valid = true; + for (int si = 0; si < encoder_config.numberOfSimulcastStreams; ++si) { + if (!target_bitrate.IsSpatialLayerUsed(si) || + target_bitrate.GetSpatialLayerSum(si) == 0) { + break; + } + layers_allocation.active_spatial_layers.emplace_back(); + VideoLayersAllocation::SpatialLayer& spatial_layer = + layers_allocation.active_spatial_layers.back(); + spatial_layer.width = encoder_config.simulcastStream[si].width; + spatial_layer.height = encoder_config.simulcastStream[si].height; + spatial_layer.rtp_stream_index = si; + spatial_layer.spatial_id = 0; + auto frame_rate_fraction = + VideoEncoder::EncoderInfo::kMaxFramerateFraction; + if (encoder_info.fps_allocation[si].size() == 1) { + // One TL is signalled to be used by the encoder. Do not distribute + // bitrate allocation across TLs (use sum at tl:0). 
+ spatial_layer.target_bitrate_per_temporal_layer.push_back( + DataRate::BitsPerSec(target_bitrate.GetSpatialLayerSum(si))); + frame_rate_fraction = encoder_info.fps_allocation[si][0]; + } else { // Temporal layers are supported. + uint32_t temporal_layer_bitrate_bps = 0; + for (size_t ti = 0; + ti < encoder_config.simulcastStream[si].numberOfTemporalLayers; + ++ti) { + if (!target_bitrate.HasBitrate(si, ti)) { + break; + } + if (ti < encoder_info.fps_allocation[si].size()) { + // Use frame rate of the top used temporal layer. + frame_rate_fraction = encoder_info.fps_allocation[si][ti]; + } + temporal_layer_bitrate_bps += target_bitrate.GetBitrate(si, ti); + spatial_layer.target_bitrate_per_temporal_layer.push_back( + DataRate::BitsPerSec(temporal_layer_bitrate_bps)); + } + } + // Encoder may drop frames internally if `maxFramerate` is set. + spatial_layer.frame_rate_fps = std::min( + static_cast(encoder_config.simulcastStream[si].maxFramerate), + static_cast( + (current_rate.framerate_fps * frame_rate_fraction) / + VideoEncoder::EncoderInfo::kMaxFramerateFraction)); + } + } else { + // TODO(bugs.webrtc.org/12000): Implement support for kSVC and full SVC. 
+ } + + return layers_allocation; +} + } // namespace VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings() @@ -218,48 +309,38 @@ bool VideoStreamEncoder::EncoderRateSettings::operator!=( class VideoStreamEncoder::DegradationPreferenceManager : public DegradationPreferenceProvider { public: - DegradationPreferenceManager() + explicit DegradationPreferenceManager( + VideoStreamAdapter* video_stream_adapter) : degradation_preference_(DegradationPreference::DISABLED), is_screenshare_(false), - effective_degradation_preference_(DegradationPreference::DISABLED) {} - - ~DegradationPreferenceManager() override { - RTC_DCHECK(!video_stream_adapter_); + effective_degradation_preference_(DegradationPreference::DISABLED), + video_stream_adapter_(video_stream_adapter) { + RTC_DCHECK(video_stream_adapter_); + sequence_checker_.Detach(); } + ~DegradationPreferenceManager() override = default; + DegradationPreference degradation_preference() const override { - MutexLock lock(&lock_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return effective_degradation_preference_; } void SetDegradationPreference(DegradationPreference degradation_preference) { - MutexLock lock(&lock_); + RTC_DCHECK_RUN_ON(&sequence_checker_); degradation_preference_ = degradation_preference; MaybeUpdateEffectiveDegradationPreference(); } void SetIsScreenshare(bool is_screenshare) { - MutexLock lock(&lock_); + RTC_DCHECK_RUN_ON(&sequence_checker_); is_screenshare_ = is_screenshare; MaybeUpdateEffectiveDegradationPreference(); } - void SetVideoStreamAdapterQueue( - TaskQueueBase* video_stream_adapter_task_queue) { - RTC_DCHECK(!video_stream_adapter_task_queue_); - RTC_DCHECK(video_stream_adapter_task_queue); - RTC_DCHECK_RUN_ON(video_stream_adapter_task_queue); - video_stream_adapter_task_queue_ = video_stream_adapter_task_queue; - } - - void SetVideoStreamAdapter(VideoStreamAdapter* video_stream_adapter) { - RTC_DCHECK_RUN_ON(video_stream_adapter_task_queue_); - video_stream_adapter_ = 
video_stream_adapter; - } - private: void MaybeUpdateEffectiveDegradationPreference() - RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_) { + RTC_RUN_ON(&sequence_checker_) { DegradationPreference effective_degradation_preference = (is_screenshare_ && degradation_preference_ == DegradationPreference::BALANCED) @@ -268,27 +349,18 @@ class VideoStreamEncoder::DegradationPreferenceManager if (effective_degradation_preference != effective_degradation_preference_) { effective_degradation_preference_ = effective_degradation_preference; - if (video_stream_adapter_task_queue_) { - video_stream_adapter_task_queue_->PostTask( - ToQueuedTask([this, effective_degradation_preference]() { - RTC_DCHECK_RUN_ON(video_stream_adapter_task_queue_); - if (video_stream_adapter_) { - video_stream_adapter_->SetDegradationPreference( - effective_degradation_preference); - } - })); - } + video_stream_adapter_->SetDegradationPreference( + effective_degradation_preference); } } - mutable Mutex lock_; - DegradationPreference degradation_preference_ RTC_GUARDED_BY(&lock_); - bool is_screenshare_ RTC_GUARDED_BY(&lock_); + SequenceChecker sequence_checker_; + DegradationPreference degradation_preference_ + RTC_GUARDED_BY(&sequence_checker_); + bool is_screenshare_ RTC_GUARDED_BY(&sequence_checker_); DegradationPreference effective_degradation_preference_ - RTC_GUARDED_BY(&lock_); - TaskQueueBase* video_stream_adapter_task_queue_ = nullptr; - VideoStreamAdapter* video_stream_adapter_ - RTC_GUARDED_BY(&video_stream_adapter_task_queue_); + RTC_GUARDED_BY(&sequence_checker_); + VideoStreamAdapter* video_stream_adapter_ RTC_GUARDED_BY(&sequence_checker_); }; VideoStreamEncoder::VideoStreamEncoder( @@ -298,7 +370,7 @@ VideoStreamEncoder::VideoStreamEncoder( const VideoStreamEncoderSettings& settings, std::unique_ptr overuse_detector, TaskQueueFactory* task_queue_factory) - : shutdown_event_(true /* manual_reset */, false), + : main_queue_(TaskQueueBase::Current()), number_of_cores_(number_of_cores), 
quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), sink_(nullptr), @@ -332,7 +404,6 @@ VideoStreamEncoder::VideoStreamEncoder( animation_start_time_(Timestamp::PlusInfinity()), cap_resolution_due_to_video_content_(false), expect_resize_state_(ExpectResizeState::kNoResize), - bitrate_observer_(nullptr), fec_controller_override_(nullptr), force_disable_frame_dropper_(false), input_framerate_(kFrameRateAvergingWindowSizeMs, 1000), @@ -347,13 +418,14 @@ VideoStreamEncoder::VideoStreamEncoder( encoder_switch_requested_(false), input_state_provider_(encoder_stats_observer), video_stream_adapter_( - std::make_unique(&input_state_provider_)), + std::make_unique(&input_state_provider_, + encoder_stats_observer)), resource_adaptation_processor_( std::make_unique( - encoder_stats_observer, video_stream_adapter_.get())), degradation_preference_manager_( - std::make_unique()), + std::make_unique( + video_stream_adapter_.get())), adaptation_constraints_(), stream_resource_manager_(&input_state_provider_, encoder_stats_observer, @@ -363,39 +435,28 @@ VideoStreamEncoder::VideoStreamEncoder( degradation_preference_manager_.get()), video_source_sink_controller_(/*sink=*/this, /*source=*/nullptr), - resource_adaptation_queue_(task_queue_factory->CreateTaskQueue( - "ResourceAdaptationQueue", - TaskQueueFactory::Priority::NORMAL)), encoder_queue_(task_queue_factory->CreateTaskQueue( "EncoderQueue", TaskQueueFactory::Priority::NORMAL)) { + RTC_DCHECK(main_queue_); RTC_DCHECK(encoder_stats_observer); RTC_DCHECK_GE(number_of_cores, 1); - stream_resource_manager_.Initialize(&encoder_queue_, - &resource_adaptation_queue_); + stream_resource_manager_.Initialize(&encoder_queue_); rtc::Event initialize_processor_event; - resource_adaptation_queue_.PostTask([this, &initialize_processor_event] { - RTC_DCHECK_RUN_ON(&resource_adaptation_queue_); - resource_adaptation_processor_->SetResourceAdaptationQueue( - resource_adaptation_queue_.Get()); + 
encoder_queue_.PostTask([this, &initialize_processor_event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + resource_adaptation_processor_->SetTaskQueue(encoder_queue_.Get()); stream_resource_manager_.SetAdaptationProcessor( resource_adaptation_processor_.get(), video_stream_adapter_.get()); resource_adaptation_processor_->AddResourceLimitationsListener( &stream_resource_manager_); video_stream_adapter_->AddRestrictionsListener(&stream_resource_manager_); video_stream_adapter_->AddRestrictionsListener(this); - degradation_preference_manager_->SetVideoStreamAdapterQueue( - resource_adaptation_queue_.Get()); - degradation_preference_manager_->SetVideoStreamAdapter( - video_stream_adapter_.get()); // Add the stream resource manager's resources to the processor. adaptation_constraints_ = stream_resource_manager_.AdaptationConstraints(); - for (auto& resource : stream_resource_manager_.MappedResources()) { - resource_adaptation_processor_->AddResource(resource); - } for (auto* constraint : adaptation_constraints_) { video_stream_adapter_->AddAdaptationConstraint(constraint); } @@ -405,62 +466,42 @@ VideoStreamEncoder::VideoStreamEncoder( } VideoStreamEncoder::~VideoStreamEncoder() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(shutdown_event_.Wait(0)) + RTC_DCHECK_RUN_ON(main_queue_); + RTC_DCHECK(!video_source_sink_controller_.HasSource()) << "Must call ::Stop() before destruction."; } void VideoStreamEncoder::Stop() { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(main_queue_); video_source_sink_controller_.SetSource(nullptr); - if (resource_adaptation_processor_) { - for (auto& resource : stream_resource_manager_.MappedResources()) { - resource_adaptation_processor_->RemoveResource(resource); - } - } - rtc::Event shutdown_adaptation_processor_event; - resource_adaptation_queue_.PostTask([this, - &shutdown_adaptation_processor_event] { - RTC_DCHECK_RUN_ON(&resource_adaptation_queue_); + rtc::Event shutdown_event; + + encoder_queue_.PostTask([this, 
&shutdown_event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); if (resource_adaptation_processor_) { - // Removed on the resource_adaptaiton_processor_ queue because the - // adaptation_constraints_ and adaptation_listeners_ fields are guarded by - // this queue. + stream_resource_manager_.StopManagedResources(); for (auto* constraint : adaptation_constraints_) { video_stream_adapter_->RemoveAdaptationConstraint(constraint); } + for (auto& resource : additional_resources_) { + stream_resource_manager_.RemoveResource(resource); + } + additional_resources_.clear(); video_stream_adapter_->RemoveRestrictionsListener(this); video_stream_adapter_->RemoveRestrictionsListener( &stream_resource_manager_); resource_adaptation_processor_->RemoveResourceLimitationsListener( &stream_resource_manager_); stream_resource_manager_.SetAdaptationProcessor(nullptr, nullptr); - degradation_preference_manager_->SetVideoStreamAdapter(nullptr); resource_adaptation_processor_.reset(); } - shutdown_adaptation_processor_event.Set(); - }); - shutdown_adaptation_processor_event.Wait(rtc::Event::kForever); - encoder_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - stream_resource_manager_.StopManagedResources(); rate_allocator_ = nullptr; - bitrate_observer_ = nullptr; ReleaseEncoder(); - shutdown_event_.Set(); - }); - shutdown_event_.Wait(rtc::Event::kForever); -} - -void VideoStreamEncoder::SetBitrateAllocationObserver( - VideoBitrateAllocationObserver* bitrate_observer) { - RTC_DCHECK_RUN_ON(&thread_checker_); - encoder_queue_.PostTask([this, bitrate_observer] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - RTC_DCHECK(!bitrate_observer_); - bitrate_observer_ = bitrate_observer; + encoder_ = nullptr; + shutdown_event.Set(); }); + shutdown_event.Wait(rtc::Event::kForever); } void VideoStreamEncoder::SetFecControllerOverride( @@ -477,15 +518,15 @@ void VideoStreamEncoder::SetFecControllerOverride( void VideoStreamEncoder::AddAdaptationResource( rtc::scoped_refptr resource) { + 
RTC_DCHECK_RUN_ON(main_queue_); // Map any externally added resources as kCpu for the sake of stats reporting. // TODO(hbos): Make the manager map any unknown resources to kCpu and get rid // of this MapResourceToReason() call. rtc::Event map_resource_event; encoder_queue_.PostTask([this, resource, &map_resource_event] { RTC_DCHECK_RUN_ON(&encoder_queue_); - stream_resource_manager_.MapResourceToReason(resource, - VideoAdaptationReason::kCpu); - resource_adaptation_processor_->AddResource(resource); + additional_resources_.push_back(resource); + stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu); map_resource_event.Set(); }); map_resource_event.Wait(rtc::Event::kForever); @@ -493,21 +534,22 @@ void VideoStreamEncoder::AddAdaptationResource( std::vector> VideoStreamEncoder::GetAdaptationResources() { + RTC_DCHECK_RUN_ON(main_queue_); return resource_adaptation_processor_->GetResources(); } void VideoStreamEncoder::SetSource( rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(main_queue_); video_source_sink_controller_.SetSource(source); input_state_provider_.OnHasInputChanged(source); - degradation_preference_manager_->SetDegradationPreference( - degradation_preference); // This may trigger reconfiguring the QualityScaler on the encoder queue. 
encoder_queue_.PostTask([this, degradation_preference] { RTC_DCHECK_RUN_ON(&encoder_queue_); + degradation_preference_manager_->SetDegradationPreference( + degradation_preference); stream_resource_manager_.SetDegradationPreferences(degradation_preference); if (encoder_) { stream_resource_manager_.ConfigureQualityScaler( @@ -517,8 +559,10 @@ void VideoStreamEncoder::SetSource( } void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) { + RTC_DCHECK_RUN_ON(main_queue_); video_source_sink_controller_.SetRotationApplied(rotation_applied); video_source_sink_controller_.PushSourceSinkSettings(); + encoder_queue_.PostTask([this, sink] { RTC_DCHECK_RUN_ON(&encoder_queue_); sink_ = sink; @@ -575,6 +619,7 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, // the VideoBitrateAllocator and call OnEncoderConfigurationChanged with a // "soft" reconfiguration. void VideoStreamEncoder::ReconfigureEncoder() { + // Running on the encoder queue. RTC_DCHECK(pending_encoder_reconfiguration_); if (!encoder_selector_ && @@ -585,11 +630,41 @@ void VideoStreamEncoder::ReconfigureEncoder() { conf.codec_name = encoder_switch_experiment_.to_codec; conf.param = encoder_switch_experiment_.to_param; conf.value = encoder_switch_experiment_.to_value; - settings_.encoder_switch_request_callback->RequestEncoderSwitch(conf); + QueueRequestEncoderSwitch(conf); encoder_switch_requested_ = true; } + bool encoder_reset_required = false; + if (pending_encoder_creation_) { + // Destroy existing encoder instance before creating a new one. Otherwise + // attempt to create another instance will fail if encoder factory + // supports only single instance of encoder of given type. + encoder_.reset(); + + encoder_ = settings_.encoder_factory->CreateVideoEncoder( + encoder_config_.video_format); + // TODO(nisse): What to do if creating the encoder fails? Crash, + // or just discard incoming frames? 
+ RTC_CHECK(encoder_); + + if (encoder_selector_) { + encoder_selector_->OnCurrentEncoder(encoder_config_.video_format); + } + + encoder_->SetFecControllerOverride(fec_controller_override_); + + codec_info_ = settings_.encoder_factory->QueryVideoEncoder( + encoder_config_.video_format); + + encoder_reset_required = true; + } + + // Possibly adjusts scale_resolution_down_by in |encoder_config_| to limit the + // alignment value. + int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( + encoder_->GetEncoderInfo(), &encoder_config_); + std::vector streams = encoder_config_.video_stream_factory->CreateEncoderStreams( last_frame_info_->width, last_frame_info_->height, encoder_config_); @@ -622,31 +697,6 @@ void VideoStreamEncoder::ReconfigureEncoder() { crop_width_ = last_frame_info_->width - highest_stream_width; crop_height_ = last_frame_info_->height - highest_stream_height; - bool encoder_reset_required = false; - if (pending_encoder_creation_) { - // Destroy existing encoder instance before creating a new one. Otherwise - // attempt to create another instance will fail if encoder factory - // supports only single instance of encoder of given type. - encoder_.reset(); - - encoder_ = settings_.encoder_factory->CreateVideoEncoder( - encoder_config_.video_format); - // TODO(nisse): What to do if creating the encoder fails? Crash, - // or just discard incoming frames? 
- RTC_CHECK(encoder_); - - if (encoder_selector_) { - encoder_selector_->OnCurrentEncoder(encoder_config_.video_format); - } - - encoder_->SetFecControllerOverride(fec_controller_override_); - - codec_info_ = settings_.encoder_factory->QueryVideoEncoder( - encoder_config_.video_format); - - encoder_reset_required = true; - } - encoder_bitrate_limits_ = encoder_->GetEncoderInfo().GetEncoderBitrateLimitsForResolution( last_frame_info_->width * last_frame_info_->height); @@ -754,13 +804,18 @@ void VideoStreamEncoder::ReconfigureEncoder() { for (const auto& stream : streams) { max_framerate = std::max(stream.max_framerate, max_framerate); } - int alignment = encoder_->GetEncoderInfo().requested_resolution_alignment; - if (max_framerate != video_source_sink_controller_.frame_rate_upper_limit() || - alignment != video_source_sink_controller_.resolution_alignment()) { - video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); - video_source_sink_controller_.SetResolutionAlignment(alignment); - video_source_sink_controller_.PushSourceSinkSettings(); - } + + main_queue_->PostTask( + ToQueuedTask(task_safety_, [this, max_framerate, alignment]() { + RTC_DCHECK_RUN_ON(main_queue_); + if (max_framerate != + video_source_sink_controller_.frame_rate_upper_limit() || + alignment != video_source_sink_controller_.resolution_alignment()) { + video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); + video_source_sink_controller_.SetResolutionAlignment(alignment); + video_source_sink_controller_.PushSourceSinkSettings(); + } + })); if (codec.maxBitrate == 0) { // max is one bit per pixel @@ -788,7 +843,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { // start bitrate or max framerate has changed. 
if (!encoder_reset_required) { encoder_reset_required = RequiresEncoderReset( - codec, send_codec_, was_encode_called_since_last_initialization_); + send_codec_, codec, was_encode_called_since_last_initialization_); } send_codec_ = codec; @@ -819,6 +874,10 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_->RegisterEncodeCompleteCallback(this); frame_encode_metadata_writer_.OnEncoderInit(send_codec_, HasInternalSource()); + next_frame_types_.clear(); + next_frame_types_.resize( + std::max(static_cast(codec.numberOfSimulcastStreams), 1), + VideoFrameType::kVideoFrameKey); } frame_encode_metadata_writer_.Reset(); @@ -830,10 +889,6 @@ void VideoStreamEncoder::ReconfigureEncoder() { OnEncoderSettingsChanged(); if (success) { - next_frame_types_.clear(); - next_frame_types_.resize( - std::max(static_cast(codec.numberOfSimulcastStreams), 1), - VideoFrameType::kVideoFrameKey); RTC_LOG(LS_VERBOSE) << " max bitrate " << codec.maxBitrate << " start bitrate " << codec.startBitrate << " max frame rate " << codec.maxFramerate @@ -844,10 +899,6 @@ void VideoStreamEncoder::ReconfigureEncoder() { } if (pending_encoder_creation_) { - // TODO(hbos): Stopping and restarting for backwards compatibility reasons. - // We may be able to change this to "EnsureStarted()" if it took care of - // reconfiguring the QualityScaler as well. (ConfigureQualityScaler() is - // invoked later in this method.) 
stream_resource_manager_.EnsureEncodeUsageResourceStarted(); pending_encoder_creation_ = false; } @@ -892,7 +943,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { last_encoder_rate_settings_.reset(); rate_settings.rate_control.framerate_fps = GetInputFramerateFps(); - SetEncoderRates(UpdateBitrateAllocationAndNotifyObserver(rate_settings)); + SetEncoderRates(UpdateBitrateAllocation(rate_settings)); } encoder_stats_observer_->OnEncoderReconfigured(encoder_config_, streams); @@ -939,14 +990,14 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { VideoFrame incoming_frame = video_frame; // Local time in webrtc time base. - int64_t current_time_us = clock_->TimeInMicroseconds(); - int64_t current_time_ms = current_time_us / rtc::kNumMicrosecsPerMillisec; + Timestamp now = clock_->CurrentTime(); + // In some cases, e.g., when the frame from decoder is fed to encoder, // the timestamp may be set to the future. As the encoding pipeline assumes // capture time to be less than present time, we should reset the capture // timestamps here. Otherwise there may be issues with RTP send stream. - if (incoming_frame.timestamp_us() > current_time_us) - incoming_frame.set_timestamp_us(current_time_us); + if (incoming_frame.timestamp_us() > now.us()) + incoming_frame.set_timestamp_us(now.us()); // Capture time may come from clock with an offset and drift from clock_. 
int64_t capture_ntp_time_ms; @@ -955,7 +1006,7 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { } else if (video_frame.render_time_ms() != 0) { capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_; } else { - capture_ntp_time_ms = current_time_ms + delta_ntp_internal_ms_; + capture_ntp_time_ms = now.ms() + delta_ntp_internal_ms_; } incoming_frame.set_ntp_time_ms(capture_ntp_time_ms); @@ -979,14 +1030,14 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { } bool log_stats = false; - if (current_time_ms - last_frame_log_ms_ > kFrameLogIntervalMs) { - last_frame_log_ms_ = current_time_ms; + if (now.ms() - last_frame_log_ms_ > kFrameLogIntervalMs) { + last_frame_log_ms_ = now.ms(); log_stats = true; } last_captured_timestamp_ = incoming_frame.ntp_time_ms(); - int64_t post_time_us = rtc::TimeMicros(); + int64_t post_time_us = clock_->CurrentTime().us(); ++posted_frames_waiting_for_encode_; encoder_queue_.PostTask( @@ -1071,7 +1122,7 @@ void VideoStreamEncoder::TraceFrameDropEnd() { } VideoStreamEncoder::EncoderRateSettings -VideoStreamEncoder::UpdateBitrateAllocationAndNotifyObserver( +VideoStreamEncoder::UpdateBitrateAllocation( const EncoderRateSettings& rate_settings) { VideoBitrateAllocation new_allocation; // Only call allocators if bitrate > 0 (ie, not suspended), otherwise they @@ -1082,24 +1133,8 @@ VideoStreamEncoder::UpdateBitrateAllocationAndNotifyObserver( rate_settings.rate_control.framerate_fps)); } - if (bitrate_observer_ && new_allocation.get_sum_bps() > 0) { - if (encoder_ && encoder_initialized_) { - // Avoid too old encoder_info_. - const int64_t kMaxDiffMs = 100; - const bool updated_recently = - (last_encode_info_ms_ && ((clock_->TimeInMilliseconds() - - *last_encode_info_ms_) < kMaxDiffMs)); - // Update allocation according to info from encoder. - bitrate_observer_->OnBitrateAllocationUpdated( - UpdateAllocationFromEncoderInfo( - new_allocation, - updated_recently ? 
encoder_info_ : encoder_->GetEncoderInfo())); - } else { - bitrate_observer_->OnBitrateAllocationUpdated(new_allocation); - } - } - EncoderRateSettings new_rate_settings = rate_settings; + new_rate_settings.rate_control.target_bitrate = new_allocation; new_rate_settings.rate_control.bitrate = new_allocation; // VideoBitrateAllocator subclasses may allocate a bitrate higher than the // target in order to sustain the min bitrate of the video codec. In this @@ -1120,9 +1155,6 @@ VideoStreamEncoder::UpdateBitrateAllocationAndNotifyObserver( new_rate_settings.rate_control.bitrate = adjusted_allocation; } - encoder_stats_observer_->OnBitrateAllocationUpdated( - send_codec_, new_rate_settings.rate_control.bitrate); - return new_rate_settings; } @@ -1142,6 +1174,15 @@ void VideoStreamEncoder::SetEncoderRates( bool rate_control_changed = (!last_encoder_rate_settings_.has_value() || last_encoder_rate_settings_->rate_control != rate_settings.rate_control); + // For layer allocation signal we care only about the target bitrate (not the + // adjusted one) and the target fps. 
+ bool layer_allocation_changed = + !last_encoder_rate_settings_.has_value() || + last_encoder_rate_settings_->rate_control.target_bitrate != + rate_settings.rate_control.target_bitrate || + last_encoder_rate_settings_->rate_control.framerate_fps != + rate_settings.rate_control.framerate_fps; + if (last_encoder_rate_settings_ != rate_settings) { last_encoder_rate_settings_ = rate_settings; } @@ -1165,10 +1206,35 @@ void VideoStreamEncoder::SetEncoderRates( if (rate_control_changed) { encoder_->SetRates(rate_settings.rate_control); + + encoder_stats_observer_->OnBitrateAllocationUpdated( + send_codec_, rate_settings.rate_control.bitrate); frame_encode_metadata_writer_.OnSetRates( rate_settings.rate_control.bitrate, static_cast(rate_settings.rate_control.framerate_fps + 0.5)); stream_resource_manager_.SetEncoderRates(rate_settings.rate_control); + if (layer_allocation_changed && + settings_.allocation_cb_type == + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoLayersAllocation) { + sink_->OnVideoLayersAllocationUpdated(CreateVideoLayersAllocation( + send_codec_, rate_settings.rate_control, encoder_->GetEncoderInfo())); + } + } + if ((settings_.allocation_cb_type == + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation) || + (encoder_config_.content_type == + VideoEncoderConfig::ContentType::kScreen && + settings_.allocation_cb_type == + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocationWhenScreenSharing)) { + sink_->OnBitrateAllocationUpdated( + // Update allocation according to info from encoder. An encoder may + // choose to not use all layers due to for example HW. 
+ UpdateAllocationFromEncoderInfo( + rate_settings.rate_control.target_bitrate, + encoder_->GetEncoderInfo())); } } @@ -1220,8 +1286,7 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, EncoderRateSettings new_rate_settings = *last_encoder_rate_settings_; new_rate_settings.rate_control.framerate_fps = static_cast(framerate_fps); - SetEncoderRates( - UpdateBitrateAllocationAndNotifyObserver(new_rate_settings)); + SetEncoderRates(UpdateBitrateAllocation(new_rate_settings)); } last_parameters_update_ms_.emplace(now_ms); } @@ -1277,8 +1342,8 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, // Frame dropping is enabled iff frame dropping is not force-disabled, and // rate controller is not trusted. const bool frame_dropping_enabled = false; - /*!force_disable_frame_dropper_ && - !encoder_info_.has_trusted_rate_controller;*/ + //!force_disable_frame_dropper_ && + //!encoder_info_.has_trusted_rate_controller; frame_dropper_.Enable(frame_dropping_enabled); if (frame_dropping_enabled && frame_dropper_.DropFrame()) { RTC_LOG(LS_VERBOSE) @@ -1339,20 +1404,23 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, last_encode_info_ms_ = clock_->TimeInMilliseconds(); VideoFrame out_frame(video_frame); - - const VideoFrameBuffer::Type buffer_type = - out_frame.video_frame_buffer()->type(); - const bool is_buffer_type_supported = - buffer_type == VideoFrameBuffer::Type::kI420 || - (buffer_type == VideoFrameBuffer::Type::kNative && - info.supports_native_handle); - - if (!is_buffer_type_supported) { + if (out_frame.video_frame_buffer()->type() == + VideoFrameBuffer::Type::kNative && + !info.supports_native_handle) { // This module only supports software encoding. 
- rtc::scoped_refptr converted_buffer( - out_frame.video_frame_buffer()->ToI420()); - - if (!converted_buffer) { + rtc::scoped_refptr buffer = + out_frame.video_frame_buffer()->GetMappedFrameBuffer( + info.preferred_pixel_formats); + bool buffer_was_converted = false; + if (!buffer) { + buffer = out_frame.video_frame_buffer()->ToI420(); + // TODO(https://crbug.com/webrtc/12021): Once GetI420 is pure virtual, + // this just true as an I420 buffer would return from + // GetMappedFrameBuffer. + buffer_was_converted = + (out_frame.video_frame_buffer()->GetI420() == nullptr); + } + if (!buffer) { RTC_LOG(LS_ERROR) << "Frame conversion failed, dropping frame."; return; } @@ -1366,8 +1434,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, update_rect = VideoFrame::UpdateRect{0, 0, out_frame.width(), out_frame.height()}; } - - out_frame.set_video_frame_buffer(converted_buffer); + out_frame.set_video_frame_buffer(buffer); out_frame.set_update_rect(update_rect); } @@ -1376,29 +1443,24 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, out_frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) { // If the frame can't be converted to I420, drop it. - auto i420_buffer = video_frame.video_frame_buffer()->ToI420(); - if (!i420_buffer) { - RTC_LOG(LS_ERROR) << "Frame conversion for crop failed, dropping frame."; - return; - } int cropped_width = video_frame.width() - crop_width_; int cropped_height = video_frame.height() - crop_height_; - rtc::scoped_refptr cropped_buffer = - I420Buffer::Create(cropped_width, cropped_height); + rtc::scoped_refptr cropped_buffer; // TODO(ilnik): Remove scaling if cropping is too big, as it should never // happen after SinkWants signaled correctly from ReconfigureEncoder. 
VideoFrame::UpdateRect update_rect = video_frame.update_rect(); if (crop_width_ < 4 && crop_height_ < 4) { - cropped_buffer->CropAndScaleFrom(*i420_buffer, crop_width_ / 2, - crop_height_ / 2, cropped_width, - cropped_height); + cropped_buffer = video_frame.video_frame_buffer()->CropAndScale( + crop_width_ / 2, crop_height_ / 2, cropped_width, cropped_height, + cropped_width, cropped_height); update_rect.offset_x -= crop_width_ / 2; update_rect.offset_y -= crop_height_ / 2; update_rect.Intersect( VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height}); } else { - cropped_buffer->ScaleFrom(*i420_buffer); + cropped_buffer = video_frame.video_frame_buffer()->Scale(cropped_width, + cropped_height); if (!update_rect.IsEmpty()) { // Since we can't reason about pixels after scaling, we invalidate whole // picture, if anything changed. @@ -1406,6 +1468,11 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height}; } } + if (!cropped_buffer) { + RTC_LOG(LS_ERROR) << "Cropping and scaling frame failed, dropping frame."; + return; + } + out_frame.set_video_frame_buffer(cropped_buffer); out_frame.set_update_rect(update_rect); out_frame.set_ntp_time_ms(video_frame.ntp_time_ms()); @@ -1461,12 +1528,14 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, if (settings_.encoder_switch_request_callback) { if (encoder_selector_) { if (auto encoder = encoder_selector_->OnEncoderBroken()) { - settings_.encoder_switch_request_callback->RequestEncoderSwitch( - *encoder); + QueueRequestEncoderSwitch(*encoder); } } else { encoder_failed_ = true; - settings_.encoder_switch_request_callback->RequestEncoderFallback(); + main_queue_->PostTask(ToQueuedTask(task_safety_, [this]() { + RTC_DCHECK_RUN_ON(main_queue_); + settings_.encoder_switch_request_callback->RequestEncoderFallback(); + })); } } else { RTC_LOG(LS_ERROR) @@ -1538,8 +1607,7 @@ void VideoStreamEncoder::OnLossNotification( 
EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* /*fragmentation*/) { + const CodecSpecificInfo* codec_specific_info) { TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded", "timestamp", encoded_image.Timestamp()); const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0); @@ -1615,7 +1683,7 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( } EncodedImageCallback::Result result = - sink_->OnEncodedImage(image_copy, codec_specific_info, nullptr); + sink_->OnEncodedImage(image_copy, codec_specific_info); // We are only interested in propagating the meta-data about the image, not // encoded data itself, to the post encode function. Since we cannot be sure @@ -1635,7 +1703,8 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( temporal_index = 0; } - RunPostEncode(image_copy, rtc::TimeMicros(), temporal_index, frame_size); + RunPostEncode(image_copy, clock_->CurrentTime().us(), temporal_index, + frame_size); if (result.error == Result::OK) { // In case of an internal encoder running on a separate thread, the @@ -1726,8 +1795,7 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, if (encoder_selector_) { if (auto encoder = encoder_selector_->OnAvailableBitrate(link_allocation)) { - settings_.encoder_switch_request_callback->RequestEncoderSwitch( - *encoder); + QueueRequestEncoderSwitch(*encoder); } } else if (encoder_switch_experiment_.IsBitrateBelowThreshold( target_bitrate) && @@ -1736,7 +1804,7 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, conf.codec_name = encoder_switch_experiment_.to_codec; conf.param = encoder_switch_experiment_.to_param; conf.value = encoder_switch_experiment_.to_value; - settings_.encoder_switch_request_callback->RequestEncoderSwitch(conf); + QueueRequestEncoderSwitch(conf); encoder_switch_requested_ = true; } @@ -1761,7 
+1829,7 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, EncoderRateSettings new_rate_settings{ VideoBitrateAllocation(), static_cast(framerate_fps), link_allocation, target_bitrate, stable_target_bitrate}; - SetEncoderRates(UpdateBitrateAllocationAndNotifyObserver(new_rate_settings)); + SetEncoderRates(UpdateBitrateAllocation(new_rate_settings)); if (target_bitrate.bps() != 0) encoder_target_bitrate_bps_ = target_bitrate.bps(); @@ -1775,7 +1843,8 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, } if (video_suspension_changed && !video_is_suspended && pending_frame_ && !DropDueToSize(pending_frame_->size())) { - int64_t pending_time_us = rtc::TimeMicros() - pending_frame_post_time_us_; + int64_t pending_time_us = + clock_->CurrentTime().us() - pending_frame_post_time_us_; if (pending_time_us < kPendingFrameTimeoutMs * 1000) EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_); pending_frame_.reset(); @@ -1817,12 +1886,16 @@ void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated( const VideoAdaptationCounters& adaptation_counters, rtc::scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { - RTC_DCHECK_RUN_ON(&resource_adaptation_queue_); - std::string resource_name = reason ? reason->Name() : ""; - RTC_LOG(INFO) << "Updating sink restrictions from " << resource_name << " to " + RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_LOG(INFO) << "Updating sink restrictions from " + << (reason ? 
reason->Name() : std::string("")) << " to " << restrictions.ToString(); - video_source_sink_controller_.SetRestrictions(std::move(restrictions)); - video_source_sink_controller_.PushSourceSinkSettings(); + main_queue_->PostTask(ToQueuedTask( + task_safety_, [this, restrictions = std::move(restrictions)]() { + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetRestrictions(std::move(restrictions)); + video_source_sink_controller_.PushSourceSinkSettings(); + })); } void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, @@ -1843,9 +1916,9 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) { encode_duration_us = // TODO(nisse): Maybe use capture_time_ms_ rather than encode_start_ms_? - rtc::kNumMicrosecsPerMillisec * - (encoded_image.timing_.encode_finish_ms - - encoded_image.timing_.encode_start_ms); + TimeDelta::Millis(encoded_image.timing_.encode_finish_ms - + encoded_image.timing_.encode_start_ms) + .us(); } // Run post encode tasks, such as overuse detection and frame rate/drop @@ -2081,20 +2154,43 @@ void VideoStreamEncoder::CheckForAnimatedContent( RTC_LOG(LS_INFO) << "Removing resolution cap due to no consistent " "animation detection."; } - video_source_sink_controller_.SetPixelsPerFrameUpperLimit( - should_cap_resolution ? absl::optional(kMaxAnimationPixels) - : absl::nullopt); - video_source_sink_controller_.PushSourceSinkSettings(); + main_queue_->PostTask(ToQueuedTask(task_safety_, [this, + should_cap_resolution]() { + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetPixelsPerFrameUpperLimit( + should_cap_resolution ? 
absl::optional(kMaxAnimationPixels) + : absl::nullopt); + video_source_sink_controller_.PushSourceSinkSettings(); + })); } } + +// RTC_RUN_ON(&encoder_queue_) +void VideoStreamEncoder::QueueRequestEncoderSwitch( + const EncoderSwitchRequestCallback::Config& conf) { + main_queue_->PostTask(ToQueuedTask(task_safety_, [this, conf]() { + RTC_DCHECK_RUN_ON(main_queue_); + settings_.encoder_switch_request_callback->RequestEncoderSwitch(conf); + })); +} + +// RTC_RUN_ON(&encoder_queue_) +void VideoStreamEncoder::QueueRequestEncoderSwitch( + const webrtc::SdpVideoFormat& format) { + main_queue_->PostTask(ToQueuedTask(task_safety_, [this, format]() { + RTC_DCHECK_RUN_ON(main_queue_); + settings_.encoder_switch_request_callback->RequestEncoderSwitch(format); + })); +} + void VideoStreamEncoder::InjectAdaptationResource( rtc::scoped_refptr resource, VideoAdaptationReason reason) { rtc::Event map_resource_event; encoder_queue_.PostTask([this, resource, reason, &map_resource_event] { RTC_DCHECK_RUN_ON(&encoder_queue_); - stream_resource_manager_.MapResourceToReason(resource, reason); - resource_adaptation_processor_->AddResource(resource); + additional_resources_.push_back(resource); + stream_resource_manager_.AddResource(resource, reason); map_resource_event.Set(); }); map_resource_event.Wait(rtc::Event::kForever); @@ -2103,8 +2199,8 @@ void VideoStreamEncoder::InjectAdaptationResource( void VideoStreamEncoder::InjectAdaptationConstraint( AdaptationConstraint* adaptation_constraint) { rtc::Event event; - resource_adaptation_queue_.PostTask([this, adaptation_constraint, &event] { - RTC_DCHECK_RUN_ON(&resource_adaptation_queue_); + encoder_queue_.PostTask([this, adaptation_constraint, &event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); if (!resource_adaptation_processor_) { // The VideoStreamEncoder was stopped and the processor destroyed before // this task had a chance to execute. No action needed. 
@@ -2117,17 +2213,11 @@ void VideoStreamEncoder::InjectAdaptationConstraint( event.Wait(rtc::Event::kForever); } -rtc::scoped_refptr -VideoStreamEncoder::quality_scaler_resource_for_testing() { - RTC_DCHECK_RUN_ON(&encoder_queue_); - return stream_resource_manager_.quality_scaler_resource_for_testing(); -} - void VideoStreamEncoder::AddRestrictionsListenerForTesting( VideoSourceRestrictionsListener* restrictions_listener) { rtc::Event event; - resource_adaptation_queue_.PostTask([this, restrictions_listener, &event] { - RTC_DCHECK_RUN_ON(&resource_adaptation_queue_); + encoder_queue_.PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); RTC_DCHECK(resource_adaptation_processor_); video_stream_adapter_->AddRestrictionsListener(restrictions_listener); event.Set(); @@ -2138,8 +2228,8 @@ void VideoStreamEncoder::AddRestrictionsListenerForTesting( void VideoStreamEncoder::RemoveRestrictionsListenerForTesting( VideoSourceRestrictionsListener* restrictions_listener) { rtc::Event event; - resource_adaptation_queue_.PostTask([this, restrictions_listener, &event] { - RTC_DCHECK_RUN_ON(&resource_adaptation_queue_); + encoder_queue_.PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); RTC_DCHECK(resource_adaptation_processor_); video_stream_adapter_->RemoveRestrictionsListener(restrictions_listener); event.Set(); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h index 5761896e1..7dfc99084 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h @@ -33,13 +33,13 @@ #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state_provider.h" #include "modules/video_coding/utility/frame_dropper.h" -#include "rtc_base/event.h" #include "rtc_base/experiments/rate_control_settings.h" #include 
"rtc_base/numerics/exp_filter.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" @@ -80,9 +80,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // TODO(perkj): Can we remove VideoCodec.startBitrate ? void SetStartBitrate(int start_bitrate_bps) override; - void SetBitrateAllocationObserver( - VideoBitrateAllocationObserver* bitrate_observer) override; - void SetFecControllerOverride( FecControllerOverride* fec_controller_override) override; @@ -112,9 +109,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Used for testing. For example the |ScalingObserverInterface| methods must // be called on |encoder_queue_|. rtc::TaskQueue* encoder_queue() { return &encoder_queue_; } - rtc::TaskQueue* resource_adaptation_queue() { - return &resource_adaptation_queue_; - } void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, @@ -128,9 +122,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, VideoAdaptationReason reason); void InjectAdaptationConstraint(AdaptationConstraint* adaptation_constraint); - rtc::scoped_refptr - quality_scaler_resource_for_testing(); - void AddRestrictionsListenerForTesting( VideoSourceRestrictionsListener* restrictions_listener); void RemoveRestrictionsListenerForTesting( @@ -189,8 +180,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Implements EncodedImageCallback. 
EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; + const CodecSpecificInfo* codec_specific_info) override; void OnDroppedFrame(EncodedImageCallback::DropReason reason) override; @@ -199,9 +189,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void TraceFrameDropEnd(); // Returns a copy of |rate_settings| with the |bitrate| field updated using - // the current VideoBitrateAllocator, and notifies any listeners of the new - // allocation. - EncoderRateSettings UpdateBitrateAllocationAndNotifyObserver( + // the current VideoBitrateAllocator. + EncoderRateSettings UpdateBitrateAllocation( const EncoderRateSettings& rate_settings) RTC_RUN_ON(&encoder_queue_); uint32_t GetInputFramerateFps() RTC_RUN_ON(&encoder_queue_); @@ -214,12 +203,21 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, DataSize frame_size); bool HasInternalSource() const RTC_RUN_ON(&encoder_queue_); void ReleaseEncoder() RTC_RUN_ON(&encoder_queue_); + // After calling this function |resource_adaptation_processor_| will be null. + void ShutdownResourceAdaptationQueue(); void CheckForAnimatedContent(const VideoFrame& frame, int64_t time_when_posted_in_ms) RTC_RUN_ON(&encoder_queue_); - rtc::Event shutdown_event_; + // TODO(bugs.webrtc.org/11341) : Remove this version of RequestEncoderSwitch. 
+ void QueueRequestEncoderSwitch( + const EncoderSwitchRequestCallback::Config& conf) + RTC_RUN_ON(&encoder_queue_); + void QueueRequestEncoderSwitch(const webrtc::SdpVideoFormat& format) + RTC_RUN_ON(&encoder_queue_); + + TaskQueueBase* const main_queue_; const uint32_t number_of_cores_; @@ -232,9 +230,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, std::unique_ptr const encoder_selector_; VideoStreamEncoderObserver* const encoder_stats_observer_; - // |thread_checker_| checks that public methods that are related to lifetime - // of VideoStreamEncoder are called on the same thread. - rtc::ThreadChecker thread_checker_; VideoEncoderConfig encoder_config_ RTC_GUARDED_BY(&encoder_queue_); std::unique_ptr encoder_ RTC_GUARDED_BY(&encoder_queue_) @@ -303,8 +298,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, kFirstFrameAfterResize // Resize observed. } expect_resize_state_ RTC_GUARDED_BY(&encoder_queue_); - VideoBitrateAllocationObserver* bitrate_observer_ - RTC_GUARDED_BY(&encoder_queue_); FecControllerOverride* fec_controller_override_ RTC_GUARDED_BY(&encoder_queue_); absl::optional last_parameters_update_ms_ @@ -407,20 +400,19 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, bool encoder_switch_requested_ RTC_GUARDED_BY(&encoder_queue_); // Provies video stream input states: current resolution and frame rate. - // This class is thread-safe. VideoStreamInputStateProvider input_state_provider_; std::unique_ptr video_stream_adapter_ - RTC_GUARDED_BY(&resource_adaptation_queue_); + RTC_GUARDED_BY(&encoder_queue_); // Responsible for adapting input resolution or frame rate to ensure resources - // (e.g. CPU or bandwidth) are not overused. - // Adding resources can occur on any thread, but all other methods need to be - // called on the adaptation thread. + // (e.g. CPU or bandwidth) are not overused. Adding resources can occur on any + // thread. 
std::unique_ptr resource_adaptation_processor_; - std::unique_ptr degradation_preference_manager_; + std::unique_ptr degradation_preference_manager_ + RTC_GUARDED_BY(&encoder_queue_); std::vector adaptation_constraints_ - RTC_GUARDED_BY(&resource_adaptation_queue_); + RTC_GUARDED_BY(&encoder_queue_); // Handles input, output and stats reporting related to VideoStreamEncoder // specific resources, such as "encode usage percent" measurements and "QP // scaling". Also involved with various mitigations such as inital frame @@ -428,21 +420,24 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // The manager primarily operates on the |encoder_queue_| but its lifetime is // tied to the VideoStreamEncoder (which is destroyed off the encoder queue) // and its resource list is accessible from any thread. - VideoStreamEncoderResourceManager stream_resource_manager_; + VideoStreamEncoderResourceManager stream_resource_manager_ + RTC_GUARDED_BY(&encoder_queue_); + std::vector> additional_resources_ + RTC_GUARDED_BY(&encoder_queue_); // Carries out the VideoSourceRestrictions provided by the // ResourceAdaptationProcessor, i.e. reconfigures the source of video frames // to provide us with different resolution or frame rate. // This class is thread-safe. - VideoSourceSinkController video_source_sink_controller_; + VideoSourceSinkController video_source_sink_controller_ + RTC_GUARDED_BY(main_queue_); // Public methods are proxied to the task queues. The queues must be destroyed // first to make sure no tasks run that use other members. - // TODO(https://crbug.com/webrtc/11172): Move ownership of the - // ResourceAdaptationProcessor and its task queue to Call when processors are - // multi-stream aware. - rtc::TaskQueue resource_adaptation_queue_; rtc::TaskQueue encoder_queue_; + // Used to cancel any potentially pending tasks to the main thread. 
+ ScopedTaskSafety task_safety_; + RTC_DISALLOW_COPY_AND_ASSIGN(VideoStreamEncoder); }; diff --git a/TMessagesProj/src/main/AndroidManifest.xml b/TMessagesProj/src/main/AndroidManifest.xml index bb9aace5c..6ce638f81 100644 --- a/TMessagesProj/src/main/AndroidManifest.xml +++ b/TMessagesProj/src/main/AndroidManifest.xml @@ -80,6 +80,7 @@ android:theme="@style/Theme.TMessages.Start" android:manageSpaceActivity="org.telegram.ui.ExternalActionActivity" android:supportsRtl="false" + android:requestLegacyExternalStorage="true" tools:replace="android:supportsRtl"> + + + + + + diff --git a/TMessagesProj/src/main/assets/darkblue.attheme b/TMessagesProj/src/main/assets/darkblue.attheme index 0a3bf5b13..d7d8af39e 100644 --- a/TMessagesProj/src/main/assets/darkblue.attheme +++ b/TMessagesProj/src/main/assets/darkblue.attheme @@ -462,3 +462,4 @@ chat_outSentClock=-8213557 dialogBackgroundGray=-14932431 chat_searchPanelText=-8796932 chat_inContactIcon=-1 +voipgroup_topPanelGray=-10521727 diff --git a/TMessagesProj/src/main/assets/emoji/0_0.png b/TMessagesProj/src/main/assets/emoji/0_0.png index 425de9ac3..c2ce490de 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_0.png and b/TMessagesProj/src/main/assets/emoji/0_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1.png b/TMessagesProj/src/main/assets/emoji/0_1.png index 146ede33d..08c79072a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1.png and b/TMessagesProj/src/main/assets/emoji/0_1.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_10.png b/TMessagesProj/src/main/assets/emoji/0_10.png index 5f974a971..9589ec712 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_10.png and b/TMessagesProj/src/main/assets/emoji/0_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_100.png b/TMessagesProj/src/main/assets/emoji/0_100.png index 318e3f0ab..56c8d2545 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_100.png and 
b/TMessagesProj/src/main/assets/emoji/0_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1000.png b/TMessagesProj/src/main/assets/emoji/0_1000.png index 49e6653b1..620c9c5ff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1000.png and b/TMessagesProj/src/main/assets/emoji/0_1000.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1001.png b/TMessagesProj/src/main/assets/emoji/0_1001.png index d817337dd..b62966b89 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1001.png and b/TMessagesProj/src/main/assets/emoji/0_1001.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1002.png b/TMessagesProj/src/main/assets/emoji/0_1002.png index ea5c3d4ee..781fb81dd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1002.png and b/TMessagesProj/src/main/assets/emoji/0_1002.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1003.png b/TMessagesProj/src/main/assets/emoji/0_1003.png index 18cfdb7ac..2341d7ccc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1003.png and b/TMessagesProj/src/main/assets/emoji/0_1003.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1004.png b/TMessagesProj/src/main/assets/emoji/0_1004.png index 161c13d44..b5b8e9e5d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1004.png and b/TMessagesProj/src/main/assets/emoji/0_1004.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1005.png b/TMessagesProj/src/main/assets/emoji/0_1005.png index 758abb256..b61898ace 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1005.png and b/TMessagesProj/src/main/assets/emoji/0_1005.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1006.png b/TMessagesProj/src/main/assets/emoji/0_1006.png index 7dd16897c..3a23727a8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1006.png and b/TMessagesProj/src/main/assets/emoji/0_1006.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1007.png 
b/TMessagesProj/src/main/assets/emoji/0_1007.png index e1a5090c8..c4cdf2bdf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1007.png and b/TMessagesProj/src/main/assets/emoji/0_1007.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1008.png b/TMessagesProj/src/main/assets/emoji/0_1008.png index 16de2c3a1..83481d6b1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1008.png and b/TMessagesProj/src/main/assets/emoji/0_1008.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1009.png b/TMessagesProj/src/main/assets/emoji/0_1009.png index 2375d8a39..6d6aa56d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1009.png and b/TMessagesProj/src/main/assets/emoji/0_1009.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_101.png b/TMessagesProj/src/main/assets/emoji/0_101.png index 7f5bd4f5f..455ce7487 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_101.png and b/TMessagesProj/src/main/assets/emoji/0_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1010.png b/TMessagesProj/src/main/assets/emoji/0_1010.png index 64fa233cb..27de03583 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1010.png and b/TMessagesProj/src/main/assets/emoji/0_1010.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1011.png b/TMessagesProj/src/main/assets/emoji/0_1011.png index 5f933fb4f..49e8a0ce5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1011.png and b/TMessagesProj/src/main/assets/emoji/0_1011.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1012.png b/TMessagesProj/src/main/assets/emoji/0_1012.png index 50abb1ff9..76e55a1e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1012.png and b/TMessagesProj/src/main/assets/emoji/0_1012.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1013.png b/TMessagesProj/src/main/assets/emoji/0_1013.png index 60608b132..714838bfe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1013.png and 
b/TMessagesProj/src/main/assets/emoji/0_1013.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1014.png b/TMessagesProj/src/main/assets/emoji/0_1014.png index 8e91f62a6..ea87733d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1014.png and b/TMessagesProj/src/main/assets/emoji/0_1014.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1015.png b/TMessagesProj/src/main/assets/emoji/0_1015.png index 47fe878ac..083837c4a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1015.png and b/TMessagesProj/src/main/assets/emoji/0_1015.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1016.png b/TMessagesProj/src/main/assets/emoji/0_1016.png index 87c2b4943..8056dcb1f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1016.png and b/TMessagesProj/src/main/assets/emoji/0_1016.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1017.png b/TMessagesProj/src/main/assets/emoji/0_1017.png index bfed2547f..c4a09a9ea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1017.png and b/TMessagesProj/src/main/assets/emoji/0_1017.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1018.png b/TMessagesProj/src/main/assets/emoji/0_1018.png index 78823e80a..1bda6e19a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1018.png and b/TMessagesProj/src/main/assets/emoji/0_1018.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1019.png b/TMessagesProj/src/main/assets/emoji/0_1019.png index 6c49b0ac9..4edf8beb4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1019.png and b/TMessagesProj/src/main/assets/emoji/0_1019.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_102.png b/TMessagesProj/src/main/assets/emoji/0_102.png index 66ba27339..3c483755e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_102.png and b/TMessagesProj/src/main/assets/emoji/0_102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1020.png 
b/TMessagesProj/src/main/assets/emoji/0_1020.png index 2826605e3..c121d9a75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1020.png and b/TMessagesProj/src/main/assets/emoji/0_1020.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1021.png b/TMessagesProj/src/main/assets/emoji/0_1021.png index 567cf2b51..5ea08d65a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1021.png and b/TMessagesProj/src/main/assets/emoji/0_1021.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1022.png b/TMessagesProj/src/main/assets/emoji/0_1022.png index 35cb5f24a..d743ab7fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1022.png and b/TMessagesProj/src/main/assets/emoji/0_1022.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1023.png b/TMessagesProj/src/main/assets/emoji/0_1023.png index 33b80c962..ce6c575e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1023.png and b/TMessagesProj/src/main/assets/emoji/0_1023.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1024.png b/TMessagesProj/src/main/assets/emoji/0_1024.png index 28e888545..48ead8a0e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1024.png and b/TMessagesProj/src/main/assets/emoji/0_1024.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1025.png b/TMessagesProj/src/main/assets/emoji/0_1025.png index 77f4c03c8..e075e6154 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1025.png and b/TMessagesProj/src/main/assets/emoji/0_1025.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1026.png b/TMessagesProj/src/main/assets/emoji/0_1026.png index 573a744d7..9c8a62576 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1026.png and b/TMessagesProj/src/main/assets/emoji/0_1026.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1027.png b/TMessagesProj/src/main/assets/emoji/0_1027.png index 30ec5841d..ef92c1e48 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1027.png 
and b/TMessagesProj/src/main/assets/emoji/0_1027.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1028.png b/TMessagesProj/src/main/assets/emoji/0_1028.png index 0a5e9fe45..c4ab53b3a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1028.png and b/TMessagesProj/src/main/assets/emoji/0_1028.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1029.png b/TMessagesProj/src/main/assets/emoji/0_1029.png index f7f56f667..8f804acb3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1029.png and b/TMessagesProj/src/main/assets/emoji/0_1029.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_103.png b/TMessagesProj/src/main/assets/emoji/0_103.png index 7f8234782..6b379a8ee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_103.png and b/TMessagesProj/src/main/assets/emoji/0_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1030.png b/TMessagesProj/src/main/assets/emoji/0_1030.png index ef139ca55..24a4dc464 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1030.png and b/TMessagesProj/src/main/assets/emoji/0_1030.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1031.png b/TMessagesProj/src/main/assets/emoji/0_1031.png index 4c5b3dc5f..f27fb08fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1031.png and b/TMessagesProj/src/main/assets/emoji/0_1031.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1032.png b/TMessagesProj/src/main/assets/emoji/0_1032.png index 734739774..868d0b3b0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1032.png and b/TMessagesProj/src/main/assets/emoji/0_1032.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1033.png b/TMessagesProj/src/main/assets/emoji/0_1033.png index 8f6648dcc..dc4f339ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1033.png and b/TMessagesProj/src/main/assets/emoji/0_1033.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1034.png 
b/TMessagesProj/src/main/assets/emoji/0_1034.png index 40219a65f..f2d9c2dbd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1034.png and b/TMessagesProj/src/main/assets/emoji/0_1034.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1035.png b/TMessagesProj/src/main/assets/emoji/0_1035.png index bb264afaa..43761db1d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1035.png and b/TMessagesProj/src/main/assets/emoji/0_1035.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1036.png b/TMessagesProj/src/main/assets/emoji/0_1036.png index 961ae4a76..9a6ca2f11 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1036.png and b/TMessagesProj/src/main/assets/emoji/0_1036.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1037.png b/TMessagesProj/src/main/assets/emoji/0_1037.png index df0fba699..1c95ebb8c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1037.png and b/TMessagesProj/src/main/assets/emoji/0_1037.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1038.png b/TMessagesProj/src/main/assets/emoji/0_1038.png index 7c65de157..362c1cf1e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1038.png and b/TMessagesProj/src/main/assets/emoji/0_1038.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1039.png b/TMessagesProj/src/main/assets/emoji/0_1039.png index ccffef60b..1a4c9b82e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1039.png and b/TMessagesProj/src/main/assets/emoji/0_1039.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_104.png b/TMessagesProj/src/main/assets/emoji/0_104.png index 554a2ee47..66ba27339 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_104.png and b/TMessagesProj/src/main/assets/emoji/0_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1040.png b/TMessagesProj/src/main/assets/emoji/0_1040.png index 5975cd855..6239d309a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1040.png and 
b/TMessagesProj/src/main/assets/emoji/0_1040.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1041.png b/TMessagesProj/src/main/assets/emoji/0_1041.png index a1ae15a8c..69371fd2a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1041.png and b/TMessagesProj/src/main/assets/emoji/0_1041.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1042.png b/TMessagesProj/src/main/assets/emoji/0_1042.png index 6a6aba16b..49018fdfd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1042.png and b/TMessagesProj/src/main/assets/emoji/0_1042.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1043.png b/TMessagesProj/src/main/assets/emoji/0_1043.png index 3b7ea012c..801aad265 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1043.png and b/TMessagesProj/src/main/assets/emoji/0_1043.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1044.png b/TMessagesProj/src/main/assets/emoji/0_1044.png index 89ebc459d..2ec6461c0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1044.png and b/TMessagesProj/src/main/assets/emoji/0_1044.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1045.png b/TMessagesProj/src/main/assets/emoji/0_1045.png index 05b372c78..0060ff09b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1045.png and b/TMessagesProj/src/main/assets/emoji/0_1045.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1046.png b/TMessagesProj/src/main/assets/emoji/0_1046.png index 58cc9f996..877adb494 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1046.png and b/TMessagesProj/src/main/assets/emoji/0_1046.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1047.png b/TMessagesProj/src/main/assets/emoji/0_1047.png index 1ec06bd02..3e4aaa92f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1047.png and b/TMessagesProj/src/main/assets/emoji/0_1047.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1048.png 
b/TMessagesProj/src/main/assets/emoji/0_1048.png index 3e4394fed..06e972918 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1048.png and b/TMessagesProj/src/main/assets/emoji/0_1048.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1049.png b/TMessagesProj/src/main/assets/emoji/0_1049.png index 4562ee82f..f1485300f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1049.png and b/TMessagesProj/src/main/assets/emoji/0_1049.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_105.png b/TMessagesProj/src/main/assets/emoji/0_105.png index 971707caa..bfc26e726 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_105.png and b/TMessagesProj/src/main/assets/emoji/0_105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1050.png b/TMessagesProj/src/main/assets/emoji/0_1050.png index 4985e11f2..bb0224ff9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1050.png and b/TMessagesProj/src/main/assets/emoji/0_1050.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1051.png b/TMessagesProj/src/main/assets/emoji/0_1051.png index 504192bbf..0b290daaa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1051.png and b/TMessagesProj/src/main/assets/emoji/0_1051.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1052.png b/TMessagesProj/src/main/assets/emoji/0_1052.png index b67c531ea..ed1565e61 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1052.png and b/TMessagesProj/src/main/assets/emoji/0_1052.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1053.png b/TMessagesProj/src/main/assets/emoji/0_1053.png index 4ab42d07e..840bf140b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1053.png and b/TMessagesProj/src/main/assets/emoji/0_1053.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1054.png b/TMessagesProj/src/main/assets/emoji/0_1054.png index 7e95f396b..0168e2492 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1054.png and 
b/TMessagesProj/src/main/assets/emoji/0_1054.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1055.png b/TMessagesProj/src/main/assets/emoji/0_1055.png index 25b1b73ef..d88f068f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1055.png and b/TMessagesProj/src/main/assets/emoji/0_1055.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1056.png b/TMessagesProj/src/main/assets/emoji/0_1056.png index 19374ea0a..fed2a7291 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1056.png and b/TMessagesProj/src/main/assets/emoji/0_1056.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1057.png b/TMessagesProj/src/main/assets/emoji/0_1057.png index 2b204583e..95d4a0601 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1057.png and b/TMessagesProj/src/main/assets/emoji/0_1057.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1058.png b/TMessagesProj/src/main/assets/emoji/0_1058.png index 6ea4f945a..009b17b2e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1058.png and b/TMessagesProj/src/main/assets/emoji/0_1058.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1059.png b/TMessagesProj/src/main/assets/emoji/0_1059.png index 3d9bb7765..01284ecbd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1059.png and b/TMessagesProj/src/main/assets/emoji/0_1059.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_106.png b/TMessagesProj/src/main/assets/emoji/0_106.png index eb7cc0ee3..f9e183e56 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_106.png and b/TMessagesProj/src/main/assets/emoji/0_106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1060.png b/TMessagesProj/src/main/assets/emoji/0_1060.png index defcdc546..bbc7434eb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1060.png and b/TMessagesProj/src/main/assets/emoji/0_1060.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1061.png 
b/TMessagesProj/src/main/assets/emoji/0_1061.png index f603611cd..b5150c515 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1061.png and b/TMessagesProj/src/main/assets/emoji/0_1061.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1062.png b/TMessagesProj/src/main/assets/emoji/0_1062.png index a9c58f950..730461c3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1062.png and b/TMessagesProj/src/main/assets/emoji/0_1062.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1063.png b/TMessagesProj/src/main/assets/emoji/0_1063.png index adbe405af..9b227e284 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1063.png and b/TMessagesProj/src/main/assets/emoji/0_1063.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1064.png b/TMessagesProj/src/main/assets/emoji/0_1064.png index ff72aa4f4..4c482ee89 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1064.png and b/TMessagesProj/src/main/assets/emoji/0_1064.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1065.png b/TMessagesProj/src/main/assets/emoji/0_1065.png index 05924db6f..777e93899 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1065.png and b/TMessagesProj/src/main/assets/emoji/0_1065.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1066.png b/TMessagesProj/src/main/assets/emoji/0_1066.png index 0cef21bdd..6eb354095 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1066.png and b/TMessagesProj/src/main/assets/emoji/0_1066.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1067.png b/TMessagesProj/src/main/assets/emoji/0_1067.png index 5ba3b85a0..1b3d5ba4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1067.png and b/TMessagesProj/src/main/assets/emoji/0_1067.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1068.png b/TMessagesProj/src/main/assets/emoji/0_1068.png index c67877482..80787bf3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1068.png 
and b/TMessagesProj/src/main/assets/emoji/0_1068.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1069.png b/TMessagesProj/src/main/assets/emoji/0_1069.png index 77452722e..2ba96f87b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1069.png and b/TMessagesProj/src/main/assets/emoji/0_1069.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_107.png b/TMessagesProj/src/main/assets/emoji/0_107.png index e188c3d93..a9ea544e7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_107.png and b/TMessagesProj/src/main/assets/emoji/0_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1070.png b/TMessagesProj/src/main/assets/emoji/0_1070.png index 85f6b3f37..c748cf61d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1070.png and b/TMessagesProj/src/main/assets/emoji/0_1070.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1071.png b/TMessagesProj/src/main/assets/emoji/0_1071.png index 6f766a77c..2106b438e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1071.png and b/TMessagesProj/src/main/assets/emoji/0_1071.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1072.png b/TMessagesProj/src/main/assets/emoji/0_1072.png index 9a1ea2355..908a67846 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1072.png and b/TMessagesProj/src/main/assets/emoji/0_1072.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1073.png b/TMessagesProj/src/main/assets/emoji/0_1073.png index 9845c134e..7d10c9d0d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1073.png and b/TMessagesProj/src/main/assets/emoji/0_1073.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1074.png b/TMessagesProj/src/main/assets/emoji/0_1074.png index 1f8b45420..09da2fa07 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1074.png and b/TMessagesProj/src/main/assets/emoji/0_1074.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1075.png 
b/TMessagesProj/src/main/assets/emoji/0_1075.png index b0c4cd7e4..520e4648e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1075.png and b/TMessagesProj/src/main/assets/emoji/0_1075.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1076.png b/TMessagesProj/src/main/assets/emoji/0_1076.png index b17141677..573fd0375 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1076.png and b/TMessagesProj/src/main/assets/emoji/0_1076.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1077.png b/TMessagesProj/src/main/assets/emoji/0_1077.png index 4f6b11c59..52cd3a020 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1077.png and b/TMessagesProj/src/main/assets/emoji/0_1077.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1078.png b/TMessagesProj/src/main/assets/emoji/0_1078.png index 5f6022915..461ebca6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1078.png and b/TMessagesProj/src/main/assets/emoji/0_1078.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1079.png b/TMessagesProj/src/main/assets/emoji/0_1079.png index 6cfe38744..c7887ec07 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1079.png and b/TMessagesProj/src/main/assets/emoji/0_1079.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_108.png b/TMessagesProj/src/main/assets/emoji/0_108.png index 932c1a5e0..78e5012ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_108.png and b/TMessagesProj/src/main/assets/emoji/0_108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1080.png b/TMessagesProj/src/main/assets/emoji/0_1080.png index ff48799b2..34b37a25a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1080.png and b/TMessagesProj/src/main/assets/emoji/0_1080.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1081.png b/TMessagesProj/src/main/assets/emoji/0_1081.png index 2fcfdd518..4f47a9101 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1081.png and 
b/TMessagesProj/src/main/assets/emoji/0_1081.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1082.png b/TMessagesProj/src/main/assets/emoji/0_1082.png index 18aa902bd..d877b916f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1082.png and b/TMessagesProj/src/main/assets/emoji/0_1082.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1083.png b/TMessagesProj/src/main/assets/emoji/0_1083.png index b32117970..392532a53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1083.png and b/TMessagesProj/src/main/assets/emoji/0_1083.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1084.png b/TMessagesProj/src/main/assets/emoji/0_1084.png index 79b73f386..ecb6bec46 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1084.png and b/TMessagesProj/src/main/assets/emoji/0_1084.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1085.png b/TMessagesProj/src/main/assets/emoji/0_1085.png index b65f24a80..20ab57b4c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1085.png and b/TMessagesProj/src/main/assets/emoji/0_1085.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1086.png b/TMessagesProj/src/main/assets/emoji/0_1086.png index 996f19e1f..66e38b558 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1086.png and b/TMessagesProj/src/main/assets/emoji/0_1086.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1087.png b/TMessagesProj/src/main/assets/emoji/0_1087.png index b9de8cfa4..2a3aaad75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1087.png and b/TMessagesProj/src/main/assets/emoji/0_1087.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1088.png b/TMessagesProj/src/main/assets/emoji/0_1088.png index 414559a4b..430d91665 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1088.png and b/TMessagesProj/src/main/assets/emoji/0_1088.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1089.png 
b/TMessagesProj/src/main/assets/emoji/0_1089.png index 61f90db55..59a856f0d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1089.png and b/TMessagesProj/src/main/assets/emoji/0_1089.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_109.png b/TMessagesProj/src/main/assets/emoji/0_109.png index 0b71783e6..30eec6309 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_109.png and b/TMessagesProj/src/main/assets/emoji/0_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1090.png b/TMessagesProj/src/main/assets/emoji/0_1090.png index d0e877272..f41392bff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1090.png and b/TMessagesProj/src/main/assets/emoji/0_1090.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1091.png b/TMessagesProj/src/main/assets/emoji/0_1091.png index 14c62fb9d..acf8f978f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1091.png and b/TMessagesProj/src/main/assets/emoji/0_1091.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1092.png b/TMessagesProj/src/main/assets/emoji/0_1092.png index c001fe645..bbc2d12c9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1092.png and b/TMessagesProj/src/main/assets/emoji/0_1092.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1093.png b/TMessagesProj/src/main/assets/emoji/0_1093.png index 6032ba10e..6a90a9297 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1093.png and b/TMessagesProj/src/main/assets/emoji/0_1093.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1094.png b/TMessagesProj/src/main/assets/emoji/0_1094.png index 4851f8f4e..968164193 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1094.png and b/TMessagesProj/src/main/assets/emoji/0_1094.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1095.png b/TMessagesProj/src/main/assets/emoji/0_1095.png index a8a1cfbca..697f4285e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1095.png and 
b/TMessagesProj/src/main/assets/emoji/0_1095.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1096.png b/TMessagesProj/src/main/assets/emoji/0_1096.png index 34f231f7f..319ec82f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1096.png and b/TMessagesProj/src/main/assets/emoji/0_1096.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1097.png b/TMessagesProj/src/main/assets/emoji/0_1097.png index f47a5e6ea..ea5b6753e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1097.png and b/TMessagesProj/src/main/assets/emoji/0_1097.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1098.png b/TMessagesProj/src/main/assets/emoji/0_1098.png index d33d2784d..5325d4ce3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1098.png and b/TMessagesProj/src/main/assets/emoji/0_1098.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1099.png b/TMessagesProj/src/main/assets/emoji/0_1099.png index 491022dfb..d85dadaa1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1099.png and b/TMessagesProj/src/main/assets/emoji/0_1099.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_11.png b/TMessagesProj/src/main/assets/emoji/0_11.png index e70d89f89..87f286323 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_11.png and b/TMessagesProj/src/main/assets/emoji/0_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_110.png b/TMessagesProj/src/main/assets/emoji/0_110.png index 108988500..be070e6ff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_110.png and b/TMessagesProj/src/main/assets/emoji/0_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1100.png b/TMessagesProj/src/main/assets/emoji/0_1100.png index a25aa9a4f..a89e5cf6e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1100.png and b/TMessagesProj/src/main/assets/emoji/0_1100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1101.png 
b/TMessagesProj/src/main/assets/emoji/0_1101.png index 9c12b1b78..6190d30a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1101.png and b/TMessagesProj/src/main/assets/emoji/0_1101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1102.png b/TMessagesProj/src/main/assets/emoji/0_1102.png index d98cf4546..56c74a4b5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1102.png and b/TMessagesProj/src/main/assets/emoji/0_1102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1103.png b/TMessagesProj/src/main/assets/emoji/0_1103.png index 61f0a161a..800659ed5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1103.png and b/TMessagesProj/src/main/assets/emoji/0_1103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1104.png b/TMessagesProj/src/main/assets/emoji/0_1104.png index 58ba03cfa..436250b13 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1104.png and b/TMessagesProj/src/main/assets/emoji/0_1104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1105.png b/TMessagesProj/src/main/assets/emoji/0_1105.png index 36dd15530..90bb12718 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1105.png and b/TMessagesProj/src/main/assets/emoji/0_1105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1106.png b/TMessagesProj/src/main/assets/emoji/0_1106.png index 0d4efd62a..54c90b715 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1106.png and b/TMessagesProj/src/main/assets/emoji/0_1106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1107.png b/TMessagesProj/src/main/assets/emoji/0_1107.png index 0e4b108d2..8638f305a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1107.png and b/TMessagesProj/src/main/assets/emoji/0_1107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1108.png b/TMessagesProj/src/main/assets/emoji/0_1108.png index 760d793ef..2f9beb211 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1108.png 
and b/TMessagesProj/src/main/assets/emoji/0_1108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1109.png b/TMessagesProj/src/main/assets/emoji/0_1109.png index a71f01442..4f486fa9c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1109.png and b/TMessagesProj/src/main/assets/emoji/0_1109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_111.png b/TMessagesProj/src/main/assets/emoji/0_111.png index 6194b2e38..664a51940 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_111.png and b/TMessagesProj/src/main/assets/emoji/0_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1110.png b/TMessagesProj/src/main/assets/emoji/0_1110.png index 6b10e77b5..2b204583e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1110.png and b/TMessagesProj/src/main/assets/emoji/0_1110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1111.png b/TMessagesProj/src/main/assets/emoji/0_1111.png index e116b1290..6ea4f945a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1111.png and b/TMessagesProj/src/main/assets/emoji/0_1111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1112.png b/TMessagesProj/src/main/assets/emoji/0_1112.png index 4d1677f3f..3d9bb7765 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1112.png and b/TMessagesProj/src/main/assets/emoji/0_1112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1113.png b/TMessagesProj/src/main/assets/emoji/0_1113.png index 65880deb3..defcdc546 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1113.png and b/TMessagesProj/src/main/assets/emoji/0_1113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1114.png b/TMessagesProj/src/main/assets/emoji/0_1114.png index 93762698d..f603611cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1114.png and b/TMessagesProj/src/main/assets/emoji/0_1114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1115.png 
b/TMessagesProj/src/main/assets/emoji/0_1115.png index 0ddafd83f..a9c58f950 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1115.png and b/TMessagesProj/src/main/assets/emoji/0_1115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1116.png b/TMessagesProj/src/main/assets/emoji/0_1116.png index 621142357..f4ccdd55a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1116.png and b/TMessagesProj/src/main/assets/emoji/0_1116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1117.png b/TMessagesProj/src/main/assets/emoji/0_1117.png index eac1b1c9a..914751a8e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1117.png and b/TMessagesProj/src/main/assets/emoji/0_1117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1118.png b/TMessagesProj/src/main/assets/emoji/0_1118.png index bfb592ad6..b0ec103fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1118.png and b/TMessagesProj/src/main/assets/emoji/0_1118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1119.png b/TMessagesProj/src/main/assets/emoji/0_1119.png index 824764fa6..7205a104f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1119.png and b/TMessagesProj/src/main/assets/emoji/0_1119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_112.png b/TMessagesProj/src/main/assets/emoji/0_112.png index 3b433433b..eedaad7a9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_112.png and b/TMessagesProj/src/main/assets/emoji/0_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1120.png b/TMessagesProj/src/main/assets/emoji/0_1120.png index b955f1b66..da872b5e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1120.png and b/TMessagesProj/src/main/assets/emoji/0_1120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1121.png b/TMessagesProj/src/main/assets/emoji/0_1121.png index b26a4765a..66bdf158e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1121.png and 
b/TMessagesProj/src/main/assets/emoji/0_1121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1122.png b/TMessagesProj/src/main/assets/emoji/0_1122.png index 621951619..0998a35d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1122.png and b/TMessagesProj/src/main/assets/emoji/0_1122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1123.png b/TMessagesProj/src/main/assets/emoji/0_1123.png index 17533b11f..833a59653 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1123.png and b/TMessagesProj/src/main/assets/emoji/0_1123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1124.png b/TMessagesProj/src/main/assets/emoji/0_1124.png index d293a2379..76dc3fb11 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1124.png and b/TMessagesProj/src/main/assets/emoji/0_1124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1125.png b/TMessagesProj/src/main/assets/emoji/0_1125.png index df37d3b80..55f9666a6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1125.png and b/TMessagesProj/src/main/assets/emoji/0_1125.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1126.png b/TMessagesProj/src/main/assets/emoji/0_1126.png index 9eec95e6a..d3189ff5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1126.png and b/TMessagesProj/src/main/assets/emoji/0_1126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1127.png b/TMessagesProj/src/main/assets/emoji/0_1127.png index f2f29d3c4..4668a82bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1127.png and b/TMessagesProj/src/main/assets/emoji/0_1127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1128.png b/TMessagesProj/src/main/assets/emoji/0_1128.png index b83bc8629..ff6706d96 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1128.png and b/TMessagesProj/src/main/assets/emoji/0_1128.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1129.png 
b/TMessagesProj/src/main/assets/emoji/0_1129.png index eb1b88cc5..b2552eb76 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1129.png and b/TMessagesProj/src/main/assets/emoji/0_1129.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_113.png b/TMessagesProj/src/main/assets/emoji/0_113.png index 6606291ab..02da31950 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_113.png and b/TMessagesProj/src/main/assets/emoji/0_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1130.png b/TMessagesProj/src/main/assets/emoji/0_1130.png index 20f0ab78d..772ba2051 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1130.png and b/TMessagesProj/src/main/assets/emoji/0_1130.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1131.png b/TMessagesProj/src/main/assets/emoji/0_1131.png index 6fcf34e26..849704974 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1131.png and b/TMessagesProj/src/main/assets/emoji/0_1131.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1132.png b/TMessagesProj/src/main/assets/emoji/0_1132.png index 3dc36cc01..7ad3e7141 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1132.png and b/TMessagesProj/src/main/assets/emoji/0_1132.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1133.png b/TMessagesProj/src/main/assets/emoji/0_1133.png index bff484e15..922a85147 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1133.png and b/TMessagesProj/src/main/assets/emoji/0_1133.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1134.png b/TMessagesProj/src/main/assets/emoji/0_1134.png index 331fd8c05..f814725a3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1134.png and b/TMessagesProj/src/main/assets/emoji/0_1134.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1135.png b/TMessagesProj/src/main/assets/emoji/0_1135.png index 66c985943..880f11e35 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1135.png and 
b/TMessagesProj/src/main/assets/emoji/0_1135.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1136.png b/TMessagesProj/src/main/assets/emoji/0_1136.png index b49cce840..0bdafa0bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1136.png and b/TMessagesProj/src/main/assets/emoji/0_1136.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1137.png b/TMessagesProj/src/main/assets/emoji/0_1137.png index d90468432..05dd289af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1137.png and b/TMessagesProj/src/main/assets/emoji/0_1137.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1138.png b/TMessagesProj/src/main/assets/emoji/0_1138.png index 2219ea4ab..879345672 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1138.png and b/TMessagesProj/src/main/assets/emoji/0_1138.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1139.png b/TMessagesProj/src/main/assets/emoji/0_1139.png index 4e2919c92..4eaf1d2c3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1139.png and b/TMessagesProj/src/main/assets/emoji/0_1139.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_114.png b/TMessagesProj/src/main/assets/emoji/0_114.png index bda566507..02e72808a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_114.png and b/TMessagesProj/src/main/assets/emoji/0_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1140.png b/TMessagesProj/src/main/assets/emoji/0_1140.png index c0417f596..5cd620d4f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1140.png and b/TMessagesProj/src/main/assets/emoji/0_1140.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1141.png b/TMessagesProj/src/main/assets/emoji/0_1141.png index bef7c56e6..8d7701e34 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1141.png and b/TMessagesProj/src/main/assets/emoji/0_1141.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1142.png 
b/TMessagesProj/src/main/assets/emoji/0_1142.png index f31a4810c..bc558c321 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1142.png and b/TMessagesProj/src/main/assets/emoji/0_1142.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1143.png b/TMessagesProj/src/main/assets/emoji/0_1143.png index afe08499f..2cee9e2d5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1143.png and b/TMessagesProj/src/main/assets/emoji/0_1143.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1144.png b/TMessagesProj/src/main/assets/emoji/0_1144.png index 81d394b44..ee0e7a615 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1144.png and b/TMessagesProj/src/main/assets/emoji/0_1144.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1145.png b/TMessagesProj/src/main/assets/emoji/0_1145.png index 2706bb6f3..71b190b3b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1145.png and b/TMessagesProj/src/main/assets/emoji/0_1145.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1146.png b/TMessagesProj/src/main/assets/emoji/0_1146.png index 306b99a5d..b0c4cd7e4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1146.png and b/TMessagesProj/src/main/assets/emoji/0_1146.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1147.png b/TMessagesProj/src/main/assets/emoji/0_1147.png index 97e86b62a..879475f57 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1147.png and b/TMessagesProj/src/main/assets/emoji/0_1147.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1148.png b/TMessagesProj/src/main/assets/emoji/0_1148.png index 4647edf7b..6bcc12a2e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1148.png and b/TMessagesProj/src/main/assets/emoji/0_1148.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1149.png b/TMessagesProj/src/main/assets/emoji/0_1149.png index 0d789bb5a..7d88ec7fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1149.png 
and b/TMessagesProj/src/main/assets/emoji/0_1149.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_115.png b/TMessagesProj/src/main/assets/emoji/0_115.png index 6a7ef2c2f..6606291ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_115.png and b/TMessagesProj/src/main/assets/emoji/0_115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1150.png b/TMessagesProj/src/main/assets/emoji/0_1150.png index ff80199a4..cb8ea3a6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1150.png and b/TMessagesProj/src/main/assets/emoji/0_1150.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1151.png b/TMessagesProj/src/main/assets/emoji/0_1151.png index 3e3e6546d..071e49973 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1151.png and b/TMessagesProj/src/main/assets/emoji/0_1151.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1152.png b/TMessagesProj/src/main/assets/emoji/0_1152.png index 63dff09ad..e0259af89 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1152.png and b/TMessagesProj/src/main/assets/emoji/0_1152.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1153.png b/TMessagesProj/src/main/assets/emoji/0_1153.png index 72031105a..5170a79a9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1153.png and b/TMessagesProj/src/main/assets/emoji/0_1153.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1154.png b/TMessagesProj/src/main/assets/emoji/0_1154.png index a60eb7326..9292a60c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1154.png and b/TMessagesProj/src/main/assets/emoji/0_1154.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1155.png b/TMessagesProj/src/main/assets/emoji/0_1155.png index f3aa398f0..577de8d22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1155.png and b/TMessagesProj/src/main/assets/emoji/0_1155.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1156.png 
b/TMessagesProj/src/main/assets/emoji/0_1156.png index bd7b33789..26d29ea39 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1156.png and b/TMessagesProj/src/main/assets/emoji/0_1156.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1157.png b/TMessagesProj/src/main/assets/emoji/0_1157.png index 1d7db08d9..1b03ef682 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1157.png and b/TMessagesProj/src/main/assets/emoji/0_1157.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1158.png b/TMessagesProj/src/main/assets/emoji/0_1158.png index c36aeab74..c49a38f42 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1158.png and b/TMessagesProj/src/main/assets/emoji/0_1158.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1159.png b/TMessagesProj/src/main/assets/emoji/0_1159.png index b855fd091..3659b0496 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1159.png and b/TMessagesProj/src/main/assets/emoji/0_1159.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_116.png b/TMessagesProj/src/main/assets/emoji/0_116.png index 2e99c63fe..bda566507 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_116.png and b/TMessagesProj/src/main/assets/emoji/0_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1160.png b/TMessagesProj/src/main/assets/emoji/0_1160.png index 7d3eb76ad..ed0c938e9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1160.png and b/TMessagesProj/src/main/assets/emoji/0_1160.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1161.png b/TMessagesProj/src/main/assets/emoji/0_1161.png index 7191452a9..0623051f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1161.png and b/TMessagesProj/src/main/assets/emoji/0_1161.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1162.png b/TMessagesProj/src/main/assets/emoji/0_1162.png index e6505776c..9a4946115 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1162.png and 
b/TMessagesProj/src/main/assets/emoji/0_1162.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1163.png b/TMessagesProj/src/main/assets/emoji/0_1163.png index cc0857c56..a0fdf5eeb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1163.png and b/TMessagesProj/src/main/assets/emoji/0_1163.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1164.png b/TMessagesProj/src/main/assets/emoji/0_1164.png index 3de6583d0..bde137fb5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1164.png and b/TMessagesProj/src/main/assets/emoji/0_1164.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1165.png b/TMessagesProj/src/main/assets/emoji/0_1165.png index 86b03467a..49be64899 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1165.png and b/TMessagesProj/src/main/assets/emoji/0_1165.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1166.png b/TMessagesProj/src/main/assets/emoji/0_1166.png index fd0f07969..6af9f7f54 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1166.png and b/TMessagesProj/src/main/assets/emoji/0_1166.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1167.png b/TMessagesProj/src/main/assets/emoji/0_1167.png index cc0604611..28c9a1cad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1167.png and b/TMessagesProj/src/main/assets/emoji/0_1167.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1168.png b/TMessagesProj/src/main/assets/emoji/0_1168.png index 974a879ee..f73d35354 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1168.png and b/TMessagesProj/src/main/assets/emoji/0_1168.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1169.png b/TMessagesProj/src/main/assets/emoji/0_1169.png index c3004c0f3..5a53d6ef1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1169.png and b/TMessagesProj/src/main/assets/emoji/0_1169.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_117.png 
b/TMessagesProj/src/main/assets/emoji/0_117.png index ddf194828..6a7ef2c2f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_117.png and b/TMessagesProj/src/main/assets/emoji/0_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1170.png b/TMessagesProj/src/main/assets/emoji/0_1170.png index e340aa3b5..7d9e7f49d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1170.png and b/TMessagesProj/src/main/assets/emoji/0_1170.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1171.png b/TMessagesProj/src/main/assets/emoji/0_1171.png index c6b967463..576020056 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1171.png and b/TMessagesProj/src/main/assets/emoji/0_1171.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1172.png b/TMessagesProj/src/main/assets/emoji/0_1172.png index f4bbb326d..601615b36 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1172.png and b/TMessagesProj/src/main/assets/emoji/0_1172.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1173.png b/TMessagesProj/src/main/assets/emoji/0_1173.png index 34f622dbe..3aab6ee22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1173.png and b/TMessagesProj/src/main/assets/emoji/0_1173.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1174.png b/TMessagesProj/src/main/assets/emoji/0_1174.png index d0be5d456..2143086ce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1174.png and b/TMessagesProj/src/main/assets/emoji/0_1174.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1175.png b/TMessagesProj/src/main/assets/emoji/0_1175.png index f618ab439..0d79218e1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1175.png and b/TMessagesProj/src/main/assets/emoji/0_1175.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1176.png b/TMessagesProj/src/main/assets/emoji/0_1176.png index df2d75606..3ecdcd172 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1176.png 
and b/TMessagesProj/src/main/assets/emoji/0_1176.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1177.png b/TMessagesProj/src/main/assets/emoji/0_1177.png index c8bfa8bf9..afdbe09e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1177.png and b/TMessagesProj/src/main/assets/emoji/0_1177.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1178.png b/TMessagesProj/src/main/assets/emoji/0_1178.png index 3c671f2bd..80fa98c09 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1178.png and b/TMessagesProj/src/main/assets/emoji/0_1178.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1179.png b/TMessagesProj/src/main/assets/emoji/0_1179.png index 8368bf0e8..9c5e80ee8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1179.png and b/TMessagesProj/src/main/assets/emoji/0_1179.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_118.png b/TMessagesProj/src/main/assets/emoji/0_118.png index ac66284ff..2e99c63fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_118.png and b/TMessagesProj/src/main/assets/emoji/0_118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1180.png b/TMessagesProj/src/main/assets/emoji/0_1180.png index fa07a84c6..2ab3f04bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1180.png and b/TMessagesProj/src/main/assets/emoji/0_1180.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1181.png b/TMessagesProj/src/main/assets/emoji/0_1181.png index 446b1fd4d..9ffec2ec5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1181.png and b/TMessagesProj/src/main/assets/emoji/0_1181.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1182.png b/TMessagesProj/src/main/assets/emoji/0_1182.png index ed887e769..83115e2cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1182.png and b/TMessagesProj/src/main/assets/emoji/0_1182.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1183.png 
b/TMessagesProj/src/main/assets/emoji/0_1183.png index b8ae9983a..5ab6aad32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1183.png and b/TMessagesProj/src/main/assets/emoji/0_1183.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1184.png b/TMessagesProj/src/main/assets/emoji/0_1184.png index 7291aaa00..e2c49325b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1184.png and b/TMessagesProj/src/main/assets/emoji/0_1184.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1185.png b/TMessagesProj/src/main/assets/emoji/0_1185.png index 647b4259a..2b5b99b44 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1185.png and b/TMessagesProj/src/main/assets/emoji/0_1185.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1186.png b/TMessagesProj/src/main/assets/emoji/0_1186.png index 0ae1d8e4a..a752e4dfc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1186.png and b/TMessagesProj/src/main/assets/emoji/0_1186.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1187.png b/TMessagesProj/src/main/assets/emoji/0_1187.png index b2985ccb4..1b4cdb397 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1187.png and b/TMessagesProj/src/main/assets/emoji/0_1187.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1188.png b/TMessagesProj/src/main/assets/emoji/0_1188.png index e32bda907..349657abf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1188.png and b/TMessagesProj/src/main/assets/emoji/0_1188.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1189.png b/TMessagesProj/src/main/assets/emoji/0_1189.png index 9b3722942..a33b46bbf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1189.png and b/TMessagesProj/src/main/assets/emoji/0_1189.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_119.png b/TMessagesProj/src/main/assets/emoji/0_119.png index f9824f6f5..ddf194828 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_119.png 
and b/TMessagesProj/src/main/assets/emoji/0_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1190.png b/TMessagesProj/src/main/assets/emoji/0_1190.png index 59f541a02..6766de6fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1190.png and b/TMessagesProj/src/main/assets/emoji/0_1190.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1191.png b/TMessagesProj/src/main/assets/emoji/0_1191.png index d9cb1491d..40c625dc5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1191.png and b/TMessagesProj/src/main/assets/emoji/0_1191.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1192.png b/TMessagesProj/src/main/assets/emoji/0_1192.png index 014bb272d..512f95bf0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1192.png and b/TMessagesProj/src/main/assets/emoji/0_1192.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1193.png b/TMessagesProj/src/main/assets/emoji/0_1193.png index b4b6e4bc4..2be5d5459 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1193.png and b/TMessagesProj/src/main/assets/emoji/0_1193.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1194.png b/TMessagesProj/src/main/assets/emoji/0_1194.png index ca384b121..a2b7a498d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1194.png and b/TMessagesProj/src/main/assets/emoji/0_1194.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1195.png b/TMessagesProj/src/main/assets/emoji/0_1195.png index 8cd05a42b..40eebb96b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1195.png and b/TMessagesProj/src/main/assets/emoji/0_1195.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1196.png b/TMessagesProj/src/main/assets/emoji/0_1196.png index 3daaaa8a0..d36d6fda2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1196.png and b/TMessagesProj/src/main/assets/emoji/0_1196.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1197.png 
b/TMessagesProj/src/main/assets/emoji/0_1197.png index 29d4e6589..6829d800a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1197.png and b/TMessagesProj/src/main/assets/emoji/0_1197.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1198.png b/TMessagesProj/src/main/assets/emoji/0_1198.png index a65f34a5a..c13e8ab53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1198.png and b/TMessagesProj/src/main/assets/emoji/0_1198.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1199.png b/TMessagesProj/src/main/assets/emoji/0_1199.png index 78360f924..8507d9753 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1199.png and b/TMessagesProj/src/main/assets/emoji/0_1199.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_12.png b/TMessagesProj/src/main/assets/emoji/0_12.png index d06b712c8..2386b45ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_12.png and b/TMessagesProj/src/main/assets/emoji/0_12.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_120.png b/TMessagesProj/src/main/assets/emoji/0_120.png index b84474354..2ab289436 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_120.png and b/TMessagesProj/src/main/assets/emoji/0_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1200.png b/TMessagesProj/src/main/assets/emoji/0_1200.png index 382e18b05..9dde1f708 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1200.png and b/TMessagesProj/src/main/assets/emoji/0_1200.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1201.png b/TMessagesProj/src/main/assets/emoji/0_1201.png index 4d96d4539..cdbb0f680 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1201.png and b/TMessagesProj/src/main/assets/emoji/0_1201.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1202.png b/TMessagesProj/src/main/assets/emoji/0_1202.png index 155f3963a..cc2a256f3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1202.png and 
b/TMessagesProj/src/main/assets/emoji/0_1202.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1203.png b/TMessagesProj/src/main/assets/emoji/0_1203.png index 6265cd7d4..1b8d97218 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1203.png and b/TMessagesProj/src/main/assets/emoji/0_1203.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1204.png b/TMessagesProj/src/main/assets/emoji/0_1204.png index 3c1eb3b71..01c44a9d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1204.png and b/TMessagesProj/src/main/assets/emoji/0_1204.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1205.png b/TMessagesProj/src/main/assets/emoji/0_1205.png index 4b6ca3c7a..89d95ae84 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1205.png and b/TMessagesProj/src/main/assets/emoji/0_1205.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1206.png b/TMessagesProj/src/main/assets/emoji/0_1206.png index dc1fa3ed4..b6bd328ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1206.png and b/TMessagesProj/src/main/assets/emoji/0_1206.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1207.png b/TMessagesProj/src/main/assets/emoji/0_1207.png index 9de6243e1..46cf47c25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1207.png and b/TMessagesProj/src/main/assets/emoji/0_1207.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1208.png b/TMessagesProj/src/main/assets/emoji/0_1208.png index 6b957fda1..7906f603e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1208.png and b/TMessagesProj/src/main/assets/emoji/0_1208.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1209.png b/TMessagesProj/src/main/assets/emoji/0_1209.png index cabb27264..e63359fc5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1209.png and b/TMessagesProj/src/main/assets/emoji/0_1209.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_121.png 
b/TMessagesProj/src/main/assets/emoji/0_121.png index da321b524..0cd022caf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_121.png and b/TMessagesProj/src/main/assets/emoji/0_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1210.png b/TMessagesProj/src/main/assets/emoji/0_1210.png index 755c3dfff..cc16c1a17 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1210.png and b/TMessagesProj/src/main/assets/emoji/0_1210.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1211.png b/TMessagesProj/src/main/assets/emoji/0_1211.png index 726162464..2b90fba17 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1211.png and b/TMessagesProj/src/main/assets/emoji/0_1211.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1212.png b/TMessagesProj/src/main/assets/emoji/0_1212.png index 7f3ed65fb..d81310eb3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1212.png and b/TMessagesProj/src/main/assets/emoji/0_1212.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1213.png b/TMessagesProj/src/main/assets/emoji/0_1213.png index 9538dff4f..4d2171600 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1213.png and b/TMessagesProj/src/main/assets/emoji/0_1213.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1214.png b/TMessagesProj/src/main/assets/emoji/0_1214.png index 56025bf33..f2fccffd1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1214.png and b/TMessagesProj/src/main/assets/emoji/0_1214.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1215.png b/TMessagesProj/src/main/assets/emoji/0_1215.png index 519c31c01..1df27b787 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1215.png and b/TMessagesProj/src/main/assets/emoji/0_1215.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1216.png b/TMessagesProj/src/main/assets/emoji/0_1216.png index ff126c57c..53b76e448 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1216.png 
and b/TMessagesProj/src/main/assets/emoji/0_1216.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1217.png b/TMessagesProj/src/main/assets/emoji/0_1217.png index bcc6e779f..7d19bc522 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1217.png and b/TMessagesProj/src/main/assets/emoji/0_1217.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1218.png b/TMessagesProj/src/main/assets/emoji/0_1218.png index ed7eff8ea..df7e4291a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1218.png and b/TMessagesProj/src/main/assets/emoji/0_1218.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1219.png b/TMessagesProj/src/main/assets/emoji/0_1219.png index d186695f1..af0d1dba9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1219.png and b/TMessagesProj/src/main/assets/emoji/0_1219.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_122.png b/TMessagesProj/src/main/assets/emoji/0_122.png index c20c97e42..afba71e97 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_122.png and b/TMessagesProj/src/main/assets/emoji/0_122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1220.png b/TMessagesProj/src/main/assets/emoji/0_1220.png index ee8de8cd9..d8f92458d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1220.png and b/TMessagesProj/src/main/assets/emoji/0_1220.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1221.png b/TMessagesProj/src/main/assets/emoji/0_1221.png index 44a929fdf..0ef27b2a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1221.png and b/TMessagesProj/src/main/assets/emoji/0_1221.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1222.png b/TMessagesProj/src/main/assets/emoji/0_1222.png index 5a29904d3..f1d61fb8f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1222.png and b/TMessagesProj/src/main/assets/emoji/0_1222.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1223.png 
b/TMessagesProj/src/main/assets/emoji/0_1223.png index 482d0b5d0..de24db257 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1223.png and b/TMessagesProj/src/main/assets/emoji/0_1223.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1224.png b/TMessagesProj/src/main/assets/emoji/0_1224.png index 872f3b14e..d040b0e22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1224.png and b/TMessagesProj/src/main/assets/emoji/0_1224.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1225.png b/TMessagesProj/src/main/assets/emoji/0_1225.png index 10fcc5bbe..b677467e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1225.png and b/TMessagesProj/src/main/assets/emoji/0_1225.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1226.png b/TMessagesProj/src/main/assets/emoji/0_1226.png index 9f60f0bc5..70ac7cedf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1226.png and b/TMessagesProj/src/main/assets/emoji/0_1226.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1227.png b/TMessagesProj/src/main/assets/emoji/0_1227.png index e5d8cce48..609b9b073 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1227.png and b/TMessagesProj/src/main/assets/emoji/0_1227.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1228.png b/TMessagesProj/src/main/assets/emoji/0_1228.png index 2c0edf9d5..c4ff3f723 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1228.png and b/TMessagesProj/src/main/assets/emoji/0_1228.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1229.png b/TMessagesProj/src/main/assets/emoji/0_1229.png index c64609eea..70873eba1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1229.png and b/TMessagesProj/src/main/assets/emoji/0_1229.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_123.png b/TMessagesProj/src/main/assets/emoji/0_123.png index bf0969f48..9ebc627b7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_123.png 
and b/TMessagesProj/src/main/assets/emoji/0_123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1230.png b/TMessagesProj/src/main/assets/emoji/0_1230.png index 1907ee906..a578409e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1230.png and b/TMessagesProj/src/main/assets/emoji/0_1230.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1231.png b/TMessagesProj/src/main/assets/emoji/0_1231.png index 944baf0a4..3a21031be 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1231.png and b/TMessagesProj/src/main/assets/emoji/0_1231.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1232.png b/TMessagesProj/src/main/assets/emoji/0_1232.png index eaf47e41e..d5013ccfe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1232.png and b/TMessagesProj/src/main/assets/emoji/0_1232.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1233.png b/TMessagesProj/src/main/assets/emoji/0_1233.png index bb45663fa..445642b25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1233.png and b/TMessagesProj/src/main/assets/emoji/0_1233.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1234.png b/TMessagesProj/src/main/assets/emoji/0_1234.png index 54bf0f0db..730839b6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1234.png and b/TMessagesProj/src/main/assets/emoji/0_1234.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1235.png b/TMessagesProj/src/main/assets/emoji/0_1235.png index f2189b052..a747c00ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1235.png and b/TMessagesProj/src/main/assets/emoji/0_1235.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1236.png b/TMessagesProj/src/main/assets/emoji/0_1236.png index 7eb8917eb..b80c357b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1236.png and b/TMessagesProj/src/main/assets/emoji/0_1236.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1237.png 
b/TMessagesProj/src/main/assets/emoji/0_1237.png index f16e4e0f9..ef8d9a3ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1237.png and b/TMessagesProj/src/main/assets/emoji/0_1237.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1238.png b/TMessagesProj/src/main/assets/emoji/0_1238.png index 816eea8bd..a18d78f18 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1238.png and b/TMessagesProj/src/main/assets/emoji/0_1238.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1239.png b/TMessagesProj/src/main/assets/emoji/0_1239.png index fa4b1266e..441d16017 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1239.png and b/TMessagesProj/src/main/assets/emoji/0_1239.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_124.png b/TMessagesProj/src/main/assets/emoji/0_124.png index dbad8f10e..c20c97e42 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_124.png and b/TMessagesProj/src/main/assets/emoji/0_124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1240.png b/TMessagesProj/src/main/assets/emoji/0_1240.png index d2876f066..59e4a7f29 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1240.png and b/TMessagesProj/src/main/assets/emoji/0_1240.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1241.png b/TMessagesProj/src/main/assets/emoji/0_1241.png index 583c6eddf..6ab69962a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1241.png and b/TMessagesProj/src/main/assets/emoji/0_1241.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1242.png b/TMessagesProj/src/main/assets/emoji/0_1242.png index 2cf0c6110..b894a19e9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1242.png and b/TMessagesProj/src/main/assets/emoji/0_1242.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1243.png b/TMessagesProj/src/main/assets/emoji/0_1243.png index 8c7d5a9c9..791cb717e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1243.png and 
b/TMessagesProj/src/main/assets/emoji/0_1243.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1244.png b/TMessagesProj/src/main/assets/emoji/0_1244.png index d744161ec..fccf69f3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1244.png and b/TMessagesProj/src/main/assets/emoji/0_1244.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1245.png b/TMessagesProj/src/main/assets/emoji/0_1245.png index 77227f6df..acb87db09 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1245.png and b/TMessagesProj/src/main/assets/emoji/0_1245.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1246.png b/TMessagesProj/src/main/assets/emoji/0_1246.png index b461917c5..05416303b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1246.png and b/TMessagesProj/src/main/assets/emoji/0_1246.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1247.png b/TMessagesProj/src/main/assets/emoji/0_1247.png index 68ef2bb7d..aa550346c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1247.png and b/TMessagesProj/src/main/assets/emoji/0_1247.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1248.png b/TMessagesProj/src/main/assets/emoji/0_1248.png index 40143df24..f784efd40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1248.png and b/TMessagesProj/src/main/assets/emoji/0_1248.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1249.png b/TMessagesProj/src/main/assets/emoji/0_1249.png index 5ce00b748..d760c3628 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1249.png and b/TMessagesProj/src/main/assets/emoji/0_1249.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_125.png b/TMessagesProj/src/main/assets/emoji/0_125.png index d67acc3d9..bf0969f48 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_125.png and b/TMessagesProj/src/main/assets/emoji/0_125.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1250.png 
b/TMessagesProj/src/main/assets/emoji/0_1250.png index 55406b642..bf1a7d41d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1250.png and b/TMessagesProj/src/main/assets/emoji/0_1250.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1251.png b/TMessagesProj/src/main/assets/emoji/0_1251.png index a7522bdb5..d4ccc8b79 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1251.png and b/TMessagesProj/src/main/assets/emoji/0_1251.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1252.png b/TMessagesProj/src/main/assets/emoji/0_1252.png index fca6cf7fb..5247013e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1252.png and b/TMessagesProj/src/main/assets/emoji/0_1252.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1253.png b/TMessagesProj/src/main/assets/emoji/0_1253.png index e7b436279..23428890b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1253.png and b/TMessagesProj/src/main/assets/emoji/0_1253.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1254.png b/TMessagesProj/src/main/assets/emoji/0_1254.png index 468439731..c5fdfcad2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1254.png and b/TMessagesProj/src/main/assets/emoji/0_1254.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1255.png b/TMessagesProj/src/main/assets/emoji/0_1255.png index ebea8901c..25c453e9f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1255.png and b/TMessagesProj/src/main/assets/emoji/0_1255.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1256.png b/TMessagesProj/src/main/assets/emoji/0_1256.png index d2afbc6d0..535c8712d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1256.png and b/TMessagesProj/src/main/assets/emoji/0_1256.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1257.png b/TMessagesProj/src/main/assets/emoji/0_1257.png index 2323a406a..a9d1051cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1257.png 
and b/TMessagesProj/src/main/assets/emoji/0_1257.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1258.png b/TMessagesProj/src/main/assets/emoji/0_1258.png index 43cb2264d..a4fa13e30 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1258.png and b/TMessagesProj/src/main/assets/emoji/0_1258.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1259.png b/TMessagesProj/src/main/assets/emoji/0_1259.png index be04f1779..fe8dd0e53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1259.png and b/TMessagesProj/src/main/assets/emoji/0_1259.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_126.png b/TMessagesProj/src/main/assets/emoji/0_126.png index d00275884..dbad8f10e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_126.png and b/TMessagesProj/src/main/assets/emoji/0_126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1260.png b/TMessagesProj/src/main/assets/emoji/0_1260.png index 93cfc4c84..de8985381 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1260.png and b/TMessagesProj/src/main/assets/emoji/0_1260.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1261.png b/TMessagesProj/src/main/assets/emoji/0_1261.png index 32948faee..5847a060b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1261.png and b/TMessagesProj/src/main/assets/emoji/0_1261.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1262.png b/TMessagesProj/src/main/assets/emoji/0_1262.png index d9509a910..a4dec7db9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1262.png and b/TMessagesProj/src/main/assets/emoji/0_1262.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1263.png b/TMessagesProj/src/main/assets/emoji/0_1263.png index e5261b7db..5189a54e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1263.png and b/TMessagesProj/src/main/assets/emoji/0_1263.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1264.png 
b/TMessagesProj/src/main/assets/emoji/0_1264.png index 71b862662..7de4ce883 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1264.png and b/TMessagesProj/src/main/assets/emoji/0_1264.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1265.png b/TMessagesProj/src/main/assets/emoji/0_1265.png index f6031248b..0307b599f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1265.png and b/TMessagesProj/src/main/assets/emoji/0_1265.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1266.png b/TMessagesProj/src/main/assets/emoji/0_1266.png index 6f69527dc..fc413dd77 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1266.png and b/TMessagesProj/src/main/assets/emoji/0_1266.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1267.png b/TMessagesProj/src/main/assets/emoji/0_1267.png index 7b60bed5e..fbf7cbe29 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1267.png and b/TMessagesProj/src/main/assets/emoji/0_1267.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1268.png b/TMessagesProj/src/main/assets/emoji/0_1268.png index bb334fbf9..1ae7556bb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1268.png and b/TMessagesProj/src/main/assets/emoji/0_1268.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1269.png b/TMessagesProj/src/main/assets/emoji/0_1269.png index a960455b7..07e2aa59a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1269.png and b/TMessagesProj/src/main/assets/emoji/0_1269.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_127.png b/TMessagesProj/src/main/assets/emoji/0_127.png index 5a7a3764d..d67acc3d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_127.png and b/TMessagesProj/src/main/assets/emoji/0_127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1270.png b/TMessagesProj/src/main/assets/emoji/0_1270.png index 41f6c26a1..e1973c0d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1270.png and 
b/TMessagesProj/src/main/assets/emoji/0_1270.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1271.png b/TMessagesProj/src/main/assets/emoji/0_1271.png index adab1cc43..615cc1438 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1271.png and b/TMessagesProj/src/main/assets/emoji/0_1271.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1272.png b/TMessagesProj/src/main/assets/emoji/0_1272.png index 4884a62e1..c0a9aa4af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1272.png and b/TMessagesProj/src/main/assets/emoji/0_1272.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1273.png b/TMessagesProj/src/main/assets/emoji/0_1273.png index eace9bd73..b536d85a3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1273.png and b/TMessagesProj/src/main/assets/emoji/0_1273.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1274.png b/TMessagesProj/src/main/assets/emoji/0_1274.png index 6652ee1ac..06b46668d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1274.png and b/TMessagesProj/src/main/assets/emoji/0_1274.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1275.png b/TMessagesProj/src/main/assets/emoji/0_1275.png index 6a3223bd7..e93494c62 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1275.png and b/TMessagesProj/src/main/assets/emoji/0_1275.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1276.png b/TMessagesProj/src/main/assets/emoji/0_1276.png index b9a389088..fa3f4c6a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1276.png and b/TMessagesProj/src/main/assets/emoji/0_1276.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1277.png b/TMessagesProj/src/main/assets/emoji/0_1277.png index c59743d25..50a330ee2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1277.png and b/TMessagesProj/src/main/assets/emoji/0_1277.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1278.png 
b/TMessagesProj/src/main/assets/emoji/0_1278.png index c1fb4b413..7abe58a9d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1278.png and b/TMessagesProj/src/main/assets/emoji/0_1278.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1279.png b/TMessagesProj/src/main/assets/emoji/0_1279.png index 550db9aaf..eef7a486b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1279.png and b/TMessagesProj/src/main/assets/emoji/0_1279.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_128.png b/TMessagesProj/src/main/assets/emoji/0_128.png index 1156536ec..d00275884 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_128.png and b/TMessagesProj/src/main/assets/emoji/0_128.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1280.png b/TMessagesProj/src/main/assets/emoji/0_1280.png index 99d7ea8a3..d9051abea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1280.png and b/TMessagesProj/src/main/assets/emoji/0_1280.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1281.png b/TMessagesProj/src/main/assets/emoji/0_1281.png index 817e92c6f..f839ba983 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1281.png and b/TMessagesProj/src/main/assets/emoji/0_1281.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1282.png b/TMessagesProj/src/main/assets/emoji/0_1282.png index cf639f5ad..726162464 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1282.png and b/TMessagesProj/src/main/assets/emoji/0_1282.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1283.png b/TMessagesProj/src/main/assets/emoji/0_1283.png index 3c8ce7f0f..d7fbe6322 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1283.png and b/TMessagesProj/src/main/assets/emoji/0_1283.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1284.png b/TMessagesProj/src/main/assets/emoji/0_1284.png index 0bac6aaaf..f99c23885 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1284.png and 
b/TMessagesProj/src/main/assets/emoji/0_1284.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1285.png b/TMessagesProj/src/main/assets/emoji/0_1285.png index 902888efb..511288d7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1285.png and b/TMessagesProj/src/main/assets/emoji/0_1285.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1286.png b/TMessagesProj/src/main/assets/emoji/0_1286.png index 7dbcd56c9..24168e4ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1286.png and b/TMessagesProj/src/main/assets/emoji/0_1286.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1287.png b/TMessagesProj/src/main/assets/emoji/0_1287.png index fd58ed62b..ff126c57c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1287.png and b/TMessagesProj/src/main/assets/emoji/0_1287.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1288.png b/TMessagesProj/src/main/assets/emoji/0_1288.png index f258aaa63..b72eb098d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1288.png and b/TMessagesProj/src/main/assets/emoji/0_1288.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1289.png b/TMessagesProj/src/main/assets/emoji/0_1289.png index c78bc8e5a..2068dd0a8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1289.png and b/TMessagesProj/src/main/assets/emoji/0_1289.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_129.png b/TMessagesProj/src/main/assets/emoji/0_129.png index b39ae5b0f..5a7a3764d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_129.png and b/TMessagesProj/src/main/assets/emoji/0_129.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1290.png b/TMessagesProj/src/main/assets/emoji/0_1290.png index 049dc3e28..198d05b48 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1290.png and b/TMessagesProj/src/main/assets/emoji/0_1290.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1291.png 
b/TMessagesProj/src/main/assets/emoji/0_1291.png index c286b9d32..47e6fd05f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1291.png and b/TMessagesProj/src/main/assets/emoji/0_1291.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1292.png b/TMessagesProj/src/main/assets/emoji/0_1292.png index be68838c7..41eff289b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1292.png and b/TMessagesProj/src/main/assets/emoji/0_1292.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1293.png b/TMessagesProj/src/main/assets/emoji/0_1293.png index a9019f726..9c866c947 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1293.png and b/TMessagesProj/src/main/assets/emoji/0_1293.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1294.png b/TMessagesProj/src/main/assets/emoji/0_1294.png index 7b691dd39..8757cb58f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1294.png and b/TMessagesProj/src/main/assets/emoji/0_1294.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1295.png b/TMessagesProj/src/main/assets/emoji/0_1295.png index d01916bae..a040b20df 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1295.png and b/TMessagesProj/src/main/assets/emoji/0_1295.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1296.png b/TMessagesProj/src/main/assets/emoji/0_1296.png index 75804b5b9..10fcc5bbe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1296.png and b/TMessagesProj/src/main/assets/emoji/0_1296.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1297.png b/TMessagesProj/src/main/assets/emoji/0_1297.png index b4f2295e8..a1f2b9d2b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1297.png and b/TMessagesProj/src/main/assets/emoji/0_1297.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1298.png b/TMessagesProj/src/main/assets/emoji/0_1298.png index b22572428..e5d8cce48 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1298.png 
and b/TMessagesProj/src/main/assets/emoji/0_1298.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1299.png b/TMessagesProj/src/main/assets/emoji/0_1299.png index bac2cdbde..56d132a8b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1299.png and b/TMessagesProj/src/main/assets/emoji/0_1299.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_13.png b/TMessagesProj/src/main/assets/emoji/0_13.png index 73b2bf243..71fd0d794 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_13.png and b/TMessagesProj/src/main/assets/emoji/0_13.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_130.png b/TMessagesProj/src/main/assets/emoji/0_130.png index d506f4006..1156536ec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_130.png and b/TMessagesProj/src/main/assets/emoji/0_130.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1300.png b/TMessagesProj/src/main/assets/emoji/0_1300.png index 18337cd1f..4e3fe342c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1300.png and b/TMessagesProj/src/main/assets/emoji/0_1300.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1301.png b/TMessagesProj/src/main/assets/emoji/0_1301.png index d41b51735..0ea504835 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1301.png and b/TMessagesProj/src/main/assets/emoji/0_1301.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1302.png b/TMessagesProj/src/main/assets/emoji/0_1302.png index c3599a7a3..b441cab92 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1302.png and b/TMessagesProj/src/main/assets/emoji/0_1302.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1303.png b/TMessagesProj/src/main/assets/emoji/0_1303.png index af6c333b5..712048043 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1303.png and b/TMessagesProj/src/main/assets/emoji/0_1303.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1304.png 
b/TMessagesProj/src/main/assets/emoji/0_1304.png index 8ed3f8755..5102be1a6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1304.png and b/TMessagesProj/src/main/assets/emoji/0_1304.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1305.png b/TMessagesProj/src/main/assets/emoji/0_1305.png index 716657d2e..0a7dc18af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1305.png and b/TMessagesProj/src/main/assets/emoji/0_1305.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1306.png b/TMessagesProj/src/main/assets/emoji/0_1306.png index cd11a77a5..c4991d771 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1306.png and b/TMessagesProj/src/main/assets/emoji/0_1306.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1307.png b/TMessagesProj/src/main/assets/emoji/0_1307.png index ba99a0663..3b216c296 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1307.png and b/TMessagesProj/src/main/assets/emoji/0_1307.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1308.png b/TMessagesProj/src/main/assets/emoji/0_1308.png index da7bec463..03dacacd7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1308.png and b/TMessagesProj/src/main/assets/emoji/0_1308.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1309.png b/TMessagesProj/src/main/assets/emoji/0_1309.png index b2bd09d78..6b21d0917 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1309.png and b/TMessagesProj/src/main/assets/emoji/0_1309.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_131.png b/TMessagesProj/src/main/assets/emoji/0_131.png index 6d3b269c9..b39ae5b0f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_131.png and b/TMessagesProj/src/main/assets/emoji/0_131.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1310.png b/TMessagesProj/src/main/assets/emoji/0_1310.png index a5b61a8b0..7d519cb1e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1310.png and 
b/TMessagesProj/src/main/assets/emoji/0_1310.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1311.png b/TMessagesProj/src/main/assets/emoji/0_1311.png index 772267860..1d7f2c703 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1311.png and b/TMessagesProj/src/main/assets/emoji/0_1311.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1312.png b/TMessagesProj/src/main/assets/emoji/0_1312.png index ef0429813..346834c6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1312.png and b/TMessagesProj/src/main/assets/emoji/0_1312.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1313.png b/TMessagesProj/src/main/assets/emoji/0_1313.png index af21ff9f4..c78ec9cec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1313.png and b/TMessagesProj/src/main/assets/emoji/0_1313.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1314.png b/TMessagesProj/src/main/assets/emoji/0_1314.png index 1527cdd73..e2c627255 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1314.png and b/TMessagesProj/src/main/assets/emoji/0_1314.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1315.png b/TMessagesProj/src/main/assets/emoji/0_1315.png index 13238b226..fe8a8bf7f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1315.png and b/TMessagesProj/src/main/assets/emoji/0_1315.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1316.png b/TMessagesProj/src/main/assets/emoji/0_1316.png index 5c4b1ccf5..df5c9cbc6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1316.png and b/TMessagesProj/src/main/assets/emoji/0_1316.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1317.png b/TMessagesProj/src/main/assets/emoji/0_1317.png index 409a8ff98..92df3d8f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1317.png and b/TMessagesProj/src/main/assets/emoji/0_1317.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1318.png 
b/TMessagesProj/src/main/assets/emoji/0_1318.png index ccd6db1e2..0319f24cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1318.png and b/TMessagesProj/src/main/assets/emoji/0_1318.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1319.png b/TMessagesProj/src/main/assets/emoji/0_1319.png index 5b9617471..698aaa203 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1319.png and b/TMessagesProj/src/main/assets/emoji/0_1319.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_132.png b/TMessagesProj/src/main/assets/emoji/0_132.png index abf3e254b..734b91063 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_132.png and b/TMessagesProj/src/main/assets/emoji/0_132.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1320.png b/TMessagesProj/src/main/assets/emoji/0_1320.png index 2511e4914..3da50dfb4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1320.png and b/TMessagesProj/src/main/assets/emoji/0_1320.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1321.png b/TMessagesProj/src/main/assets/emoji/0_1321.png index b5ea36642..034473034 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1321.png and b/TMessagesProj/src/main/assets/emoji/0_1321.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1322.png b/TMessagesProj/src/main/assets/emoji/0_1322.png index 55f4593f4..3df6c6373 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1322.png and b/TMessagesProj/src/main/assets/emoji/0_1322.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1323.png b/TMessagesProj/src/main/assets/emoji/0_1323.png index b79dc67a9..aa982b92c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1323.png and b/TMessagesProj/src/main/assets/emoji/0_1323.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1324.png b/TMessagesProj/src/main/assets/emoji/0_1324.png index 26fbd5d28..36271fe81 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1324.png and 
b/TMessagesProj/src/main/assets/emoji/0_1324.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1325.png b/TMessagesProj/src/main/assets/emoji/0_1325.png index 963b9b36a..f35d05a47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1325.png and b/TMessagesProj/src/main/assets/emoji/0_1325.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1326.png b/TMessagesProj/src/main/assets/emoji/0_1326.png index 321648cff..0ebbb2442 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1326.png and b/TMessagesProj/src/main/assets/emoji/0_1326.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1327.png b/TMessagesProj/src/main/assets/emoji/0_1327.png index 463c896e1..57ba595b2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1327.png and b/TMessagesProj/src/main/assets/emoji/0_1327.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1328.png b/TMessagesProj/src/main/assets/emoji/0_1328.png index 90cd30d70..70bee5abe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1328.png and b/TMessagesProj/src/main/assets/emoji/0_1328.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1329.png b/TMessagesProj/src/main/assets/emoji/0_1329.png index 2ecdb6498..503c5ca42 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1329.png and b/TMessagesProj/src/main/assets/emoji/0_1329.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_133.png b/TMessagesProj/src/main/assets/emoji/0_133.png index abfb0a9f8..ccdabf572 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_133.png and b/TMessagesProj/src/main/assets/emoji/0_133.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1330.png b/TMessagesProj/src/main/assets/emoji/0_1330.png index c55aa923f..c846fb17a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1330.png and b/TMessagesProj/src/main/assets/emoji/0_1330.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1331.png 
b/TMessagesProj/src/main/assets/emoji/0_1331.png index c5788146c..0aee496d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1331.png and b/TMessagesProj/src/main/assets/emoji/0_1331.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1332.png b/TMessagesProj/src/main/assets/emoji/0_1332.png index bd3df561f..0ae8c3215 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1332.png and b/TMessagesProj/src/main/assets/emoji/0_1332.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1333.png b/TMessagesProj/src/main/assets/emoji/0_1333.png index 3a58e7a54..f87352411 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1333.png and b/TMessagesProj/src/main/assets/emoji/0_1333.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1334.png b/TMessagesProj/src/main/assets/emoji/0_1334.png index 4e824cd0b..72e4765c6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1334.png and b/TMessagesProj/src/main/assets/emoji/0_1334.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1335.png b/TMessagesProj/src/main/assets/emoji/0_1335.png index de1dee078..96b6c772a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1335.png and b/TMessagesProj/src/main/assets/emoji/0_1335.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1336.png b/TMessagesProj/src/main/assets/emoji/0_1336.png index 05322b559..1a94dfb65 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1336.png and b/TMessagesProj/src/main/assets/emoji/0_1336.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1337.png b/TMessagesProj/src/main/assets/emoji/0_1337.png index ed7688958..9a7c7bd90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1337.png and b/TMessagesProj/src/main/assets/emoji/0_1337.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1338.png b/TMessagesProj/src/main/assets/emoji/0_1338.png index 5128b3572..f0378ffe0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1338.png 
and b/TMessagesProj/src/main/assets/emoji/0_1338.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1339.png b/TMessagesProj/src/main/assets/emoji/0_1339.png index 62fa9875a..af0904555 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1339.png and b/TMessagesProj/src/main/assets/emoji/0_1339.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_134.png b/TMessagesProj/src/main/assets/emoji/0_134.png index 5a975e9d2..127c84f71 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_134.png and b/TMessagesProj/src/main/assets/emoji/0_134.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1340.png b/TMessagesProj/src/main/assets/emoji/0_1340.png index fdb8b0deb..11927d4d7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1340.png and b/TMessagesProj/src/main/assets/emoji/0_1340.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1341.png b/TMessagesProj/src/main/assets/emoji/0_1341.png index 3e4a41451..15ec7b3a4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1341.png and b/TMessagesProj/src/main/assets/emoji/0_1341.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1342.png b/TMessagesProj/src/main/assets/emoji/0_1342.png index e2c51a1c1..c900476ce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1342.png and b/TMessagesProj/src/main/assets/emoji/0_1342.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1343.png b/TMessagesProj/src/main/assets/emoji/0_1343.png index 2762ecfb1..134d28985 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1343.png and b/TMessagesProj/src/main/assets/emoji/0_1343.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1344.png b/TMessagesProj/src/main/assets/emoji/0_1344.png index befbf22fb..04dae5019 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1344.png and b/TMessagesProj/src/main/assets/emoji/0_1344.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1345.png 
b/TMessagesProj/src/main/assets/emoji/0_1345.png index a36e0e3d8..7533beda1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1345.png and b/TMessagesProj/src/main/assets/emoji/0_1345.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1346.png b/TMessagesProj/src/main/assets/emoji/0_1346.png index aa3ad5d5f..69a10c1ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1346.png and b/TMessagesProj/src/main/assets/emoji/0_1346.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1347.png b/TMessagesProj/src/main/assets/emoji/0_1347.png index a64dfa62d..6a2b5f467 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1347.png and b/TMessagesProj/src/main/assets/emoji/0_1347.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1348.png b/TMessagesProj/src/main/assets/emoji/0_1348.png index 1d0add3cc..b4707899a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1348.png and b/TMessagesProj/src/main/assets/emoji/0_1348.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1349.png b/TMessagesProj/src/main/assets/emoji/0_1349.png index c56ff5cae..8c7b41587 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1349.png and b/TMessagesProj/src/main/assets/emoji/0_1349.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_135.png b/TMessagesProj/src/main/assets/emoji/0_135.png index 0696ca806..b50f831d8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_135.png and b/TMessagesProj/src/main/assets/emoji/0_135.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1350.png b/TMessagesProj/src/main/assets/emoji/0_1350.png index cd083c8d4..9d29b5054 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1350.png and b/TMessagesProj/src/main/assets/emoji/0_1350.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1351.png b/TMessagesProj/src/main/assets/emoji/0_1351.png index 0e5928bb2..a7fd7f65f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1351.png and 
b/TMessagesProj/src/main/assets/emoji/0_1351.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1352.png b/TMessagesProj/src/main/assets/emoji/0_1352.png index 454196ccf..72c15063e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1352.png and b/TMessagesProj/src/main/assets/emoji/0_1352.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1353.png b/TMessagesProj/src/main/assets/emoji/0_1353.png index 4d8f23f54..06a9f6ba3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1353.png and b/TMessagesProj/src/main/assets/emoji/0_1353.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1354.png b/TMessagesProj/src/main/assets/emoji/0_1354.png index af39eae98..76a4784b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1354.png and b/TMessagesProj/src/main/assets/emoji/0_1354.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1355.png b/TMessagesProj/src/main/assets/emoji/0_1355.png index 3023b7db6..fff442814 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1355.png and b/TMessagesProj/src/main/assets/emoji/0_1355.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1356.png b/TMessagesProj/src/main/assets/emoji/0_1356.png index dfc929978..b4b1fc1db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1356.png and b/TMessagesProj/src/main/assets/emoji/0_1356.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1357.png b/TMessagesProj/src/main/assets/emoji/0_1357.png index 505e31e5a..28d08c545 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1357.png and b/TMessagesProj/src/main/assets/emoji/0_1357.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1358.png b/TMessagesProj/src/main/assets/emoji/0_1358.png index 36a5d5c27..ab282a660 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1358.png and b/TMessagesProj/src/main/assets/emoji/0_1358.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1359.png 
b/TMessagesProj/src/main/assets/emoji/0_1359.png index e801f7053..328ed7f7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1359.png and b/TMessagesProj/src/main/assets/emoji/0_1359.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_136.png b/TMessagesProj/src/main/assets/emoji/0_136.png index 24d4231b3..78abd71b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_136.png and b/TMessagesProj/src/main/assets/emoji/0_136.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1360.png b/TMessagesProj/src/main/assets/emoji/0_1360.png index 7879022fe..e374be955 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1360.png and b/TMessagesProj/src/main/assets/emoji/0_1360.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1361.png b/TMessagesProj/src/main/assets/emoji/0_1361.png index c24e9495a..f1c376d17 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1361.png and b/TMessagesProj/src/main/assets/emoji/0_1361.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1362.png b/TMessagesProj/src/main/assets/emoji/0_1362.png index d1896d019..4680da424 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1362.png and b/TMessagesProj/src/main/assets/emoji/0_1362.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1363.png b/TMessagesProj/src/main/assets/emoji/0_1363.png index c9bc4c8e9..a0e5333cc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1363.png and b/TMessagesProj/src/main/assets/emoji/0_1363.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1364.png b/TMessagesProj/src/main/assets/emoji/0_1364.png index 66a3a5865..11c6cd61f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1364.png and b/TMessagesProj/src/main/assets/emoji/0_1364.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1365.png b/TMessagesProj/src/main/assets/emoji/0_1365.png index 39d1bedd8..ca3a87f00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1365.png and 
b/TMessagesProj/src/main/assets/emoji/0_1365.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1366.png b/TMessagesProj/src/main/assets/emoji/0_1366.png index 31c58413e..01380e0da 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1366.png and b/TMessagesProj/src/main/assets/emoji/0_1366.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1367.png b/TMessagesProj/src/main/assets/emoji/0_1367.png index d9283736f..678feca63 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1367.png and b/TMessagesProj/src/main/assets/emoji/0_1367.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1368.png b/TMessagesProj/src/main/assets/emoji/0_1368.png index d38755efc..b4f2295e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1368.png and b/TMessagesProj/src/main/assets/emoji/0_1368.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1369.png b/TMessagesProj/src/main/assets/emoji/0_1369.png index 08d60c435..b22572428 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1369.png and b/TMessagesProj/src/main/assets/emoji/0_1369.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_137.png b/TMessagesProj/src/main/assets/emoji/0_137.png index c5f9d4747..7c93b94e6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_137.png and b/TMessagesProj/src/main/assets/emoji/0_137.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1370.png b/TMessagesProj/src/main/assets/emoji/0_1370.png index 76d1e67f3..bac2cdbde 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1370.png and b/TMessagesProj/src/main/assets/emoji/0_1370.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1371.png b/TMessagesProj/src/main/assets/emoji/0_1371.png index e0fb693d1..18337cd1f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1371.png and b/TMessagesProj/src/main/assets/emoji/0_1371.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1372.png 
b/TMessagesProj/src/main/assets/emoji/0_1372.png index 664670e7e..d41b51735 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1372.png and b/TMessagesProj/src/main/assets/emoji/0_1372.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1373.png b/TMessagesProj/src/main/assets/emoji/0_1373.png index 9a19e5213..c3599a7a3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1373.png and b/TMessagesProj/src/main/assets/emoji/0_1373.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1374.png b/TMessagesProj/src/main/assets/emoji/0_1374.png index 376d27ff7..af6c333b5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1374.png and b/TMessagesProj/src/main/assets/emoji/0_1374.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1375.png b/TMessagesProj/src/main/assets/emoji/0_1375.png index ccc31ef50..8ed3f8755 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1375.png and b/TMessagesProj/src/main/assets/emoji/0_1375.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1376.png b/TMessagesProj/src/main/assets/emoji/0_1376.png index 6cdc44f05..716657d2e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1376.png and b/TMessagesProj/src/main/assets/emoji/0_1376.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1377.png b/TMessagesProj/src/main/assets/emoji/0_1377.png index cbbd4509a..cd11a77a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1377.png and b/TMessagesProj/src/main/assets/emoji/0_1377.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1378.png b/TMessagesProj/src/main/assets/emoji/0_1378.png index 4e6c4b890..ba99a0663 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1378.png and b/TMessagesProj/src/main/assets/emoji/0_1378.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1379.png b/TMessagesProj/src/main/assets/emoji/0_1379.png index c8f721ab1..da7bec463 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1379.png 
and b/TMessagesProj/src/main/assets/emoji/0_1379.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_138.png b/TMessagesProj/src/main/assets/emoji/0_138.png index f9d52b5cb..e0705ef60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_138.png and b/TMessagesProj/src/main/assets/emoji/0_138.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1380.png b/TMessagesProj/src/main/assets/emoji/0_1380.png index 7eae36d44..687e9cf91 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1380.png and b/TMessagesProj/src/main/assets/emoji/0_1380.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1381.png b/TMessagesProj/src/main/assets/emoji/0_1381.png index 5f871774a..e76d6d78b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1381.png and b/TMessagesProj/src/main/assets/emoji/0_1381.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1382.png b/TMessagesProj/src/main/assets/emoji/0_1382.png index 6035c56e7..725b27168 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1382.png and b/TMessagesProj/src/main/assets/emoji/0_1382.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1383.png b/TMessagesProj/src/main/assets/emoji/0_1383.png index e57846d9f..14c5cc47a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1383.png and b/TMessagesProj/src/main/assets/emoji/0_1383.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1384.png b/TMessagesProj/src/main/assets/emoji/0_1384.png index 2e3546784..e95c04487 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1384.png and b/TMessagesProj/src/main/assets/emoji/0_1384.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1385.png b/TMessagesProj/src/main/assets/emoji/0_1385.png index 4b0723c8c..62628146d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1385.png and b/TMessagesProj/src/main/assets/emoji/0_1385.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1386.png 
b/TMessagesProj/src/main/assets/emoji/0_1386.png index f8f6727d6..f8ae16cc2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1386.png and b/TMessagesProj/src/main/assets/emoji/0_1386.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1387.png b/TMessagesProj/src/main/assets/emoji/0_1387.png index cfea26a6c..68756a4ef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1387.png and b/TMessagesProj/src/main/assets/emoji/0_1387.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1388.png b/TMessagesProj/src/main/assets/emoji/0_1388.png index 1faa6f9e0..0de56ee8d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1388.png and b/TMessagesProj/src/main/assets/emoji/0_1388.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1389.png b/TMessagesProj/src/main/assets/emoji/0_1389.png index 630b09a94..650519f48 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1389.png and b/TMessagesProj/src/main/assets/emoji/0_1389.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_139.png b/TMessagesProj/src/main/assets/emoji/0_139.png index cad7d40fd..ed690da24 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_139.png and b/TMessagesProj/src/main/assets/emoji/0_139.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1390.png b/TMessagesProj/src/main/assets/emoji/0_1390.png index 006ba31d2..e5e86f18d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1390.png and b/TMessagesProj/src/main/assets/emoji/0_1390.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1391.png b/TMessagesProj/src/main/assets/emoji/0_1391.png index 43cd78ae1..1bb274568 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1391.png and b/TMessagesProj/src/main/assets/emoji/0_1391.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1392.png b/TMessagesProj/src/main/assets/emoji/0_1392.png index 0037eb0f1..4cf18fc55 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1392.png and 
b/TMessagesProj/src/main/assets/emoji/0_1392.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1393.png b/TMessagesProj/src/main/assets/emoji/0_1393.png index e61bb4a5e..f9486d6ff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1393.png and b/TMessagesProj/src/main/assets/emoji/0_1393.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1394.png b/TMessagesProj/src/main/assets/emoji/0_1394.png index 76fdb0de0..7fcb8731e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1394.png and b/TMessagesProj/src/main/assets/emoji/0_1394.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1395.png b/TMessagesProj/src/main/assets/emoji/0_1395.png index ee12adaed..ce8e88832 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1395.png and b/TMessagesProj/src/main/assets/emoji/0_1395.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1396.png b/TMessagesProj/src/main/assets/emoji/0_1396.png index f452d9881..7bb628e43 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1396.png and b/TMessagesProj/src/main/assets/emoji/0_1396.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1397.png b/TMessagesProj/src/main/assets/emoji/0_1397.png index bdaf705c4..7e9e70b4b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1397.png and b/TMessagesProj/src/main/assets/emoji/0_1397.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1398.png b/TMessagesProj/src/main/assets/emoji/0_1398.png index 6703c4797..a2af8b2f8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1398.png and b/TMessagesProj/src/main/assets/emoji/0_1398.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1399.png b/TMessagesProj/src/main/assets/emoji/0_1399.png index 10da24fa3..d057ede9e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1399.png and b/TMessagesProj/src/main/assets/emoji/0_1399.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_14.png 
b/TMessagesProj/src/main/assets/emoji/0_14.png index 992a2e000..074bb4aa7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_14.png and b/TMessagesProj/src/main/assets/emoji/0_14.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_140.png b/TMessagesProj/src/main/assets/emoji/0_140.png index abd2218bd..a507fdb40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_140.png and b/TMessagesProj/src/main/assets/emoji/0_140.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1400.png b/TMessagesProj/src/main/assets/emoji/0_1400.png index 5df809ff7..61d56c44c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1400.png and b/TMessagesProj/src/main/assets/emoji/0_1400.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1401.png b/TMessagesProj/src/main/assets/emoji/0_1401.png index 640575b7f..320618853 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1401.png and b/TMessagesProj/src/main/assets/emoji/0_1401.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1402.png b/TMessagesProj/src/main/assets/emoji/0_1402.png index ffdabf3b5..18caaa236 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1402.png and b/TMessagesProj/src/main/assets/emoji/0_1402.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1403.png b/TMessagesProj/src/main/assets/emoji/0_1403.png index d82d4044b..ddd2bfc09 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1403.png and b/TMessagesProj/src/main/assets/emoji/0_1403.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1404.png b/TMessagesProj/src/main/assets/emoji/0_1404.png index a07826ada..1d55ce634 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1404.png and b/TMessagesProj/src/main/assets/emoji/0_1404.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1405.png b/TMessagesProj/src/main/assets/emoji/0_1405.png index e28fdf72d..5b2fef9c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1405.png and 
b/TMessagesProj/src/main/assets/emoji/0_1405.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1406.png b/TMessagesProj/src/main/assets/emoji/0_1406.png index 9636651f0..4ff0a480f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1406.png and b/TMessagesProj/src/main/assets/emoji/0_1406.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1407.png b/TMessagesProj/src/main/assets/emoji/0_1407.png index 6f9cd2e0c..d7f34510d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1407.png and b/TMessagesProj/src/main/assets/emoji/0_1407.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1408.png b/TMessagesProj/src/main/assets/emoji/0_1408.png index 5156471c9..4c7262a57 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1408.png and b/TMessagesProj/src/main/assets/emoji/0_1408.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1409.png b/TMessagesProj/src/main/assets/emoji/0_1409.png index 7a28844a9..faffadf86 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1409.png and b/TMessagesProj/src/main/assets/emoji/0_1409.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_141.png b/TMessagesProj/src/main/assets/emoji/0_141.png index b9225fdfb..5c2ea5a9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_141.png and b/TMessagesProj/src/main/assets/emoji/0_141.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1410.png b/TMessagesProj/src/main/assets/emoji/0_1410.png index 835d92be0..bbffb1729 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1410.png and b/TMessagesProj/src/main/assets/emoji/0_1410.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1411.png b/TMessagesProj/src/main/assets/emoji/0_1411.png index b28a14d07..ac503a64a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1411.png and b/TMessagesProj/src/main/assets/emoji/0_1411.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1412.png 
b/TMessagesProj/src/main/assets/emoji/0_1412.png index 8035f466e..2d8ed5b67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1412.png and b/TMessagesProj/src/main/assets/emoji/0_1412.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1413.png b/TMessagesProj/src/main/assets/emoji/0_1413.png index 7de84d34f..033ad3118 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1413.png and b/TMessagesProj/src/main/assets/emoji/0_1413.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1414.png b/TMessagesProj/src/main/assets/emoji/0_1414.png index cffa2450f..8cd01f31c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1414.png and b/TMessagesProj/src/main/assets/emoji/0_1414.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1415.png b/TMessagesProj/src/main/assets/emoji/0_1415.png index 619d9b1f2..2150cab35 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1415.png and b/TMessagesProj/src/main/assets/emoji/0_1415.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1416.png b/TMessagesProj/src/main/assets/emoji/0_1416.png index a624a4bb8..91d81cbc2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1416.png and b/TMessagesProj/src/main/assets/emoji/0_1416.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1417.png b/TMessagesProj/src/main/assets/emoji/0_1417.png index 3d6b414a6..0914935b0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1417.png and b/TMessagesProj/src/main/assets/emoji/0_1417.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1418.png b/TMessagesProj/src/main/assets/emoji/0_1418.png index e14bf5445..976b29070 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1418.png and b/TMessagesProj/src/main/assets/emoji/0_1418.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1419.png b/TMessagesProj/src/main/assets/emoji/0_1419.png index 84f3c5d81..94de0bea8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1419.png 
and b/TMessagesProj/src/main/assets/emoji/0_1419.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_142.png b/TMessagesProj/src/main/assets/emoji/0_142.png index 03ad5c8cd..656d55d86 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_142.png and b/TMessagesProj/src/main/assets/emoji/0_142.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1420.png b/TMessagesProj/src/main/assets/emoji/0_1420.png index d557834d6..e3732d5ea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1420.png and b/TMessagesProj/src/main/assets/emoji/0_1420.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1421.png b/TMessagesProj/src/main/assets/emoji/0_1421.png index 633cce2f0..d3abacbc8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1421.png and b/TMessagesProj/src/main/assets/emoji/0_1421.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1422.png b/TMessagesProj/src/main/assets/emoji/0_1422.png index e61469530..0ea9c4114 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1422.png and b/TMessagesProj/src/main/assets/emoji/0_1422.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1423.png b/TMessagesProj/src/main/assets/emoji/0_1423.png index a3c0f09ea..a54ec1bf3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1423.png and b/TMessagesProj/src/main/assets/emoji/0_1423.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1424.png b/TMessagesProj/src/main/assets/emoji/0_1424.png index 2b4ac2890..3493953b0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1424.png and b/TMessagesProj/src/main/assets/emoji/0_1424.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1425.png b/TMessagesProj/src/main/assets/emoji/0_1425.png index 04d7688b9..ef2b635f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1425.png and b/TMessagesProj/src/main/assets/emoji/0_1425.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1426.png 
b/TMessagesProj/src/main/assets/emoji/0_1426.png index 53571305b..2e052f8b0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1426.png and b/TMessagesProj/src/main/assets/emoji/0_1426.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1427.png b/TMessagesProj/src/main/assets/emoji/0_1427.png index 14a1ccfbf..c98d2b249 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1427.png and b/TMessagesProj/src/main/assets/emoji/0_1427.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1428.png b/TMessagesProj/src/main/assets/emoji/0_1428.png index 07266ec9c..e9e8367ee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1428.png and b/TMessagesProj/src/main/assets/emoji/0_1428.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1429.png b/TMessagesProj/src/main/assets/emoji/0_1429.png index 1f5018bd1..dbb4c71ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1429.png and b/TMessagesProj/src/main/assets/emoji/0_1429.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_143.png b/TMessagesProj/src/main/assets/emoji/0_143.png index a2d5ddba6..b9225fdfb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_143.png and b/TMessagesProj/src/main/assets/emoji/0_143.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1430.png b/TMessagesProj/src/main/assets/emoji/0_1430.png index d10315fea..e3b247360 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1430.png and b/TMessagesProj/src/main/assets/emoji/0_1430.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1431.png b/TMessagesProj/src/main/assets/emoji/0_1431.png index 2dcc034b3..50e0454f5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1431.png and b/TMessagesProj/src/main/assets/emoji/0_1431.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1432.png b/TMessagesProj/src/main/assets/emoji/0_1432.png index ceffb3199..fb3477b30 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1432.png and 
b/TMessagesProj/src/main/assets/emoji/0_1432.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1433.png b/TMessagesProj/src/main/assets/emoji/0_1433.png index 56a070140..4279399d3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1433.png and b/TMessagesProj/src/main/assets/emoji/0_1433.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1434.png b/TMessagesProj/src/main/assets/emoji/0_1434.png index 46d2b72f0..eeaf6e8fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1434.png and b/TMessagesProj/src/main/assets/emoji/0_1434.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1435.png b/TMessagesProj/src/main/assets/emoji/0_1435.png index d45e98989..af07b5150 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1435.png and b/TMessagesProj/src/main/assets/emoji/0_1435.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1436.png b/TMessagesProj/src/main/assets/emoji/0_1436.png index ec3339d79..384d06841 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1436.png and b/TMessagesProj/src/main/assets/emoji/0_1436.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1437.png b/TMessagesProj/src/main/assets/emoji/0_1437.png index 520dd6cb0..a5f08d30e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1437.png and b/TMessagesProj/src/main/assets/emoji/0_1437.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1438.png b/TMessagesProj/src/main/assets/emoji/0_1438.png index e9e980bf4..ddff5ab9e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1438.png and b/TMessagesProj/src/main/assets/emoji/0_1438.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1439.png b/TMessagesProj/src/main/assets/emoji/0_1439.png index 81d9df0b8..8a8d9f25c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1439.png and b/TMessagesProj/src/main/assets/emoji/0_1439.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_144.png 
b/TMessagesProj/src/main/assets/emoji/0_144.png index 227fd0945..03ad5c8cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_144.png and b/TMessagesProj/src/main/assets/emoji/0_144.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1440.png b/TMessagesProj/src/main/assets/emoji/0_1440.png index e3139454a..0d5bb8d23 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1440.png and b/TMessagesProj/src/main/assets/emoji/0_1440.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1441.png b/TMessagesProj/src/main/assets/emoji/0_1441.png index 42cef1b56..21266df5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1441.png and b/TMessagesProj/src/main/assets/emoji/0_1441.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1442.png b/TMessagesProj/src/main/assets/emoji/0_1442.png index eef5cae0c..a66e4cfb2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1442.png and b/TMessagesProj/src/main/assets/emoji/0_1442.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1443.png b/TMessagesProj/src/main/assets/emoji/0_1443.png index d492db687..73020b5f5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1443.png and b/TMessagesProj/src/main/assets/emoji/0_1443.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1444.png b/TMessagesProj/src/main/assets/emoji/0_1444.png index 4bc100957..b0da7de8e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1444.png and b/TMessagesProj/src/main/assets/emoji/0_1444.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1445.png b/TMessagesProj/src/main/assets/emoji/0_1445.png index 7e67077b7..5ad7d9e41 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1445.png and b/TMessagesProj/src/main/assets/emoji/0_1445.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1446.png b/TMessagesProj/src/main/assets/emoji/0_1446.png index e7012639f..7d41c9b60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1446.png 
and b/TMessagesProj/src/main/assets/emoji/0_1446.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1447.png b/TMessagesProj/src/main/assets/emoji/0_1447.png index 6f74d3baf..c3c73c30c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1447.png and b/TMessagesProj/src/main/assets/emoji/0_1447.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1448.png b/TMessagesProj/src/main/assets/emoji/0_1448.png index 0c37f9dc5..0f64798ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1448.png and b/TMessagesProj/src/main/assets/emoji/0_1448.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1449.png b/TMessagesProj/src/main/assets/emoji/0_1449.png index fbc6c89f6..251999353 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1449.png and b/TMessagesProj/src/main/assets/emoji/0_1449.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_145.png b/TMessagesProj/src/main/assets/emoji/0_145.png index 1021051f6..210d5876b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_145.png and b/TMessagesProj/src/main/assets/emoji/0_145.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1450.png b/TMessagesProj/src/main/assets/emoji/0_1450.png index 437029ef2..f5e5a1532 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1450.png and b/TMessagesProj/src/main/assets/emoji/0_1450.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1451.png b/TMessagesProj/src/main/assets/emoji/0_1451.png index dbd723f69..19beffa8c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1451.png and b/TMessagesProj/src/main/assets/emoji/0_1451.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1452.png b/TMessagesProj/src/main/assets/emoji/0_1452.png index 1f83809fe..3351fc935 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1452.png and b/TMessagesProj/src/main/assets/emoji/0_1452.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1453.png 
b/TMessagesProj/src/main/assets/emoji/0_1453.png index 2fac69bc4..f63c39f5c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1453.png and b/TMessagesProj/src/main/assets/emoji/0_1453.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1454.png b/TMessagesProj/src/main/assets/emoji/0_1454.png index fcb34c73a..60f188e7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1454.png and b/TMessagesProj/src/main/assets/emoji/0_1454.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1455.png b/TMessagesProj/src/main/assets/emoji/0_1455.png index 8a11b2571..20ae5df7c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1455.png and b/TMessagesProj/src/main/assets/emoji/0_1455.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1456.png b/TMessagesProj/src/main/assets/emoji/0_1456.png index 3cbafc38d..b12c8dd68 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1456.png and b/TMessagesProj/src/main/assets/emoji/0_1456.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1457.png b/TMessagesProj/src/main/assets/emoji/0_1457.png index 42b76d1bf..82823b9fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1457.png and b/TMessagesProj/src/main/assets/emoji/0_1457.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1458.png b/TMessagesProj/src/main/assets/emoji/0_1458.png index c49a2f7ab..8826f52af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1458.png and b/TMessagesProj/src/main/assets/emoji/0_1458.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1459.png b/TMessagesProj/src/main/assets/emoji/0_1459.png index 86a323260..071431845 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1459.png and b/TMessagesProj/src/main/assets/emoji/0_1459.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_146.png b/TMessagesProj/src/main/assets/emoji/0_146.png index 93ebac3d0..8be5811f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_146.png 
and b/TMessagesProj/src/main/assets/emoji/0_146.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1460.png b/TMessagesProj/src/main/assets/emoji/0_1460.png index e8c8dd3cc..ed8d618ff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1460.png and b/TMessagesProj/src/main/assets/emoji/0_1460.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1461.png b/TMessagesProj/src/main/assets/emoji/0_1461.png index 68f7dff47..d0708ac52 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1461.png and b/TMessagesProj/src/main/assets/emoji/0_1461.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1462.png b/TMessagesProj/src/main/assets/emoji/0_1462.png index c62e4f118..5b326b67e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1462.png and b/TMessagesProj/src/main/assets/emoji/0_1462.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1463.png b/TMessagesProj/src/main/assets/emoji/0_1463.png index 7344d4a7b..57899d973 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1463.png and b/TMessagesProj/src/main/assets/emoji/0_1463.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1464.png b/TMessagesProj/src/main/assets/emoji/0_1464.png index 827d4ef6d..e72493975 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1464.png and b/TMessagesProj/src/main/assets/emoji/0_1464.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1465.png b/TMessagesProj/src/main/assets/emoji/0_1465.png index 1e5a6707e..a69c0a1d7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1465.png and b/TMessagesProj/src/main/assets/emoji/0_1465.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1466.png b/TMessagesProj/src/main/assets/emoji/0_1466.png index 1c30c8568..ffde49db6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1466.png and b/TMessagesProj/src/main/assets/emoji/0_1466.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1467.png 
b/TMessagesProj/src/main/assets/emoji/0_1467.png index b1273bb00..fe5f81297 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1467.png and b/TMessagesProj/src/main/assets/emoji/0_1467.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1468.png b/TMessagesProj/src/main/assets/emoji/0_1468.png index 4f2f02f37..e997ca3ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1468.png and b/TMessagesProj/src/main/assets/emoji/0_1468.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1469.png b/TMessagesProj/src/main/assets/emoji/0_1469.png index dc721e603..1425c7ed7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1469.png and b/TMessagesProj/src/main/assets/emoji/0_1469.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_147.png b/TMessagesProj/src/main/assets/emoji/0_147.png index b622def5f..e2fb675c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_147.png and b/TMessagesProj/src/main/assets/emoji/0_147.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1470.png b/TMessagesProj/src/main/assets/emoji/0_1470.png index 8309f8eb4..cbf56e629 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1470.png and b/TMessagesProj/src/main/assets/emoji/0_1470.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1471.png b/TMessagesProj/src/main/assets/emoji/0_1471.png index 63b066cef..0a78ff14c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1471.png and b/TMessagesProj/src/main/assets/emoji/0_1471.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1472.png b/TMessagesProj/src/main/assets/emoji/0_1472.png index e2e7c05ac..8099092e4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1472.png and b/TMessagesProj/src/main/assets/emoji/0_1472.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1473.png b/TMessagesProj/src/main/assets/emoji/0_1473.png index 6b4a6b702..5ca664bad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1473.png and 
b/TMessagesProj/src/main/assets/emoji/0_1473.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1474.png b/TMessagesProj/src/main/assets/emoji/0_1474.png index e68df2865..e4e88d890 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1474.png and b/TMessagesProj/src/main/assets/emoji/0_1474.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1475.png b/TMessagesProj/src/main/assets/emoji/0_1475.png index b9af05f3f..12a124dbe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1475.png and b/TMessagesProj/src/main/assets/emoji/0_1475.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1476.png b/TMessagesProj/src/main/assets/emoji/0_1476.png index 214f7dc1b..0f2addbe7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1476.png and b/TMessagesProj/src/main/assets/emoji/0_1476.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1477.png b/TMessagesProj/src/main/assets/emoji/0_1477.png index 40688eaca..abacebccf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1477.png and b/TMessagesProj/src/main/assets/emoji/0_1477.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1478.png b/TMessagesProj/src/main/assets/emoji/0_1478.png index e869b3334..e73e78da4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1478.png and b/TMessagesProj/src/main/assets/emoji/0_1478.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1479.png b/TMessagesProj/src/main/assets/emoji/0_1479.png index c661d1b9b..1c057c32e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1479.png and b/TMessagesProj/src/main/assets/emoji/0_1479.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_148.png b/TMessagesProj/src/main/assets/emoji/0_148.png index 19b824d0e..2695b24a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_148.png and b/TMessagesProj/src/main/assets/emoji/0_148.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1480.png 
b/TMessagesProj/src/main/assets/emoji/0_1480.png index 8f1988add..473043f20 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1480.png and b/TMessagesProj/src/main/assets/emoji/0_1480.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1481.png b/TMessagesProj/src/main/assets/emoji/0_1481.png index 28290b65e..ef724ca32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1481.png and b/TMessagesProj/src/main/assets/emoji/0_1481.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1482.png b/TMessagesProj/src/main/assets/emoji/0_1482.png index ac40dcdee..15e3d68d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1482.png and b/TMessagesProj/src/main/assets/emoji/0_1482.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1483.png b/TMessagesProj/src/main/assets/emoji/0_1483.png index ce0bb4756..837d1e24f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1483.png and b/TMessagesProj/src/main/assets/emoji/0_1483.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1484.png b/TMessagesProj/src/main/assets/emoji/0_1484.png index 79d24f943..4eafbb222 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1484.png and b/TMessagesProj/src/main/assets/emoji/0_1484.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1485.png b/TMessagesProj/src/main/assets/emoji/0_1485.png index f270e4b49..43a8dccf1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1485.png and b/TMessagesProj/src/main/assets/emoji/0_1485.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1486.png b/TMessagesProj/src/main/assets/emoji/0_1486.png index bfc127231..afa96f537 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1486.png and b/TMessagesProj/src/main/assets/emoji/0_1486.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1487.png b/TMessagesProj/src/main/assets/emoji/0_1487.png index 5b5bc38c3..028c0ae27 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1487.png 
and b/TMessagesProj/src/main/assets/emoji/0_1487.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1488.png b/TMessagesProj/src/main/assets/emoji/0_1488.png index 35b68c071..74dc391fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1488.png and b/TMessagesProj/src/main/assets/emoji/0_1488.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1489.png b/TMessagesProj/src/main/assets/emoji/0_1489.png index ac58f2709..35a05baec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1489.png and b/TMessagesProj/src/main/assets/emoji/0_1489.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_149.png b/TMessagesProj/src/main/assets/emoji/0_149.png index 33f1404cf..b622def5f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_149.png and b/TMessagesProj/src/main/assets/emoji/0_149.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1490.png b/TMessagesProj/src/main/assets/emoji/0_1490.png index 0b48351f0..87708c6b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1490.png and b/TMessagesProj/src/main/assets/emoji/0_1490.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1491.png b/TMessagesProj/src/main/assets/emoji/0_1491.png index a2a170458..c141e3cdb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1491.png and b/TMessagesProj/src/main/assets/emoji/0_1491.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1492.png b/TMessagesProj/src/main/assets/emoji/0_1492.png index 5ca430c10..f0f33ac2f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1492.png and b/TMessagesProj/src/main/assets/emoji/0_1492.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1493.png b/TMessagesProj/src/main/assets/emoji/0_1493.png index d3e5dc3ef..9f9177ba8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1493.png and b/TMessagesProj/src/main/assets/emoji/0_1493.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1494.png 
b/TMessagesProj/src/main/assets/emoji/0_1494.png index be464894a..6138e242c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1494.png and b/TMessagesProj/src/main/assets/emoji/0_1494.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1495.png b/TMessagesProj/src/main/assets/emoji/0_1495.png index e7b951013..dd44fb050 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1495.png and b/TMessagesProj/src/main/assets/emoji/0_1495.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1496.png b/TMessagesProj/src/main/assets/emoji/0_1496.png index 0a1d40746..2ce9993e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1496.png and b/TMessagesProj/src/main/assets/emoji/0_1496.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1497.png b/TMessagesProj/src/main/assets/emoji/0_1497.png index 4ce65d9f2..6a20c381f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1497.png and b/TMessagesProj/src/main/assets/emoji/0_1497.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1498.png b/TMessagesProj/src/main/assets/emoji/0_1498.png index bf78eb23d..a74c9901f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1498.png and b/TMessagesProj/src/main/assets/emoji/0_1498.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1499.png b/TMessagesProj/src/main/assets/emoji/0_1499.png index 9b13f83a5..610e5ad16 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1499.png and b/TMessagesProj/src/main/assets/emoji/0_1499.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_15.png b/TMessagesProj/src/main/assets/emoji/0_15.png index 9e5014710..d8c476968 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_15.png and b/TMessagesProj/src/main/assets/emoji/0_15.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_150.png b/TMessagesProj/src/main/assets/emoji/0_150.png index 3da8d3370..19b824d0e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_150.png and 
b/TMessagesProj/src/main/assets/emoji/0_150.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1500.png b/TMessagesProj/src/main/assets/emoji/0_1500.png index 1561181a3..c815379e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1500.png and b/TMessagesProj/src/main/assets/emoji/0_1500.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1501.png b/TMessagesProj/src/main/assets/emoji/0_1501.png index 45f4b3ad5..bc37bcdb2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1501.png and b/TMessagesProj/src/main/assets/emoji/0_1501.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1502.png b/TMessagesProj/src/main/assets/emoji/0_1502.png index 6c715dac1..635d68c13 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1502.png and b/TMessagesProj/src/main/assets/emoji/0_1502.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1503.png b/TMessagesProj/src/main/assets/emoji/0_1503.png index 4623c2a54..3a6b99068 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1503.png and b/TMessagesProj/src/main/assets/emoji/0_1503.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1504.png b/TMessagesProj/src/main/assets/emoji/0_1504.png index 0923ae4ac..e9edf09dd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1504.png and b/TMessagesProj/src/main/assets/emoji/0_1504.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1505.png b/TMessagesProj/src/main/assets/emoji/0_1505.png index 5450a2374..6c4b0c7f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1505.png and b/TMessagesProj/src/main/assets/emoji/0_1505.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1506.png b/TMessagesProj/src/main/assets/emoji/0_1506.png index 3672d1b7f..303ca2924 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1506.png and b/TMessagesProj/src/main/assets/emoji/0_1506.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1507.png 
b/TMessagesProj/src/main/assets/emoji/0_1507.png index a1491815f..87eee5f75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1507.png and b/TMessagesProj/src/main/assets/emoji/0_1507.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1508.png b/TMessagesProj/src/main/assets/emoji/0_1508.png index 5903af5f9..4251e0c95 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1508.png and b/TMessagesProj/src/main/assets/emoji/0_1508.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1509.png b/TMessagesProj/src/main/assets/emoji/0_1509.png index bb12e6155..99a52b8d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1509.png and b/TMessagesProj/src/main/assets/emoji/0_1509.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_151.png b/TMessagesProj/src/main/assets/emoji/0_151.png index a613bdb66..33f1404cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_151.png and b/TMessagesProj/src/main/assets/emoji/0_151.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1510.png b/TMessagesProj/src/main/assets/emoji/0_1510.png index 76da1edd4..e39ddab10 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1510.png and b/TMessagesProj/src/main/assets/emoji/0_1510.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1511.png b/TMessagesProj/src/main/assets/emoji/0_1511.png index 83efd52a0..6cddcf027 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1511.png and b/TMessagesProj/src/main/assets/emoji/0_1511.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1512.png b/TMessagesProj/src/main/assets/emoji/0_1512.png index feb9c7608..f0c6e5490 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1512.png and b/TMessagesProj/src/main/assets/emoji/0_1512.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1513.png b/TMessagesProj/src/main/assets/emoji/0_1513.png index 6a60c4e5e..383bfca10 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1513.png and 
b/TMessagesProj/src/main/assets/emoji/0_1513.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1514.png b/TMessagesProj/src/main/assets/emoji/0_1514.png index c862b3511..26bc7559c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1514.png and b/TMessagesProj/src/main/assets/emoji/0_1514.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1515.png b/TMessagesProj/src/main/assets/emoji/0_1515.png index 6b9222676..8cff7780c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1515.png and b/TMessagesProj/src/main/assets/emoji/0_1515.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1516.png b/TMessagesProj/src/main/assets/emoji/0_1516.png index 8ab7f2bdd..2b4e61202 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1516.png and b/TMessagesProj/src/main/assets/emoji/0_1516.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1517.png b/TMessagesProj/src/main/assets/emoji/0_1517.png index 59fbce01f..c915e21e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1517.png and b/TMessagesProj/src/main/assets/emoji/0_1517.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1518.png b/TMessagesProj/src/main/assets/emoji/0_1518.png index 28b5cfe7b..7cb789f32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1518.png and b/TMessagesProj/src/main/assets/emoji/0_1518.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1519.png b/TMessagesProj/src/main/assets/emoji/0_1519.png index 9e77d96b2..dc55c49da 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1519.png and b/TMessagesProj/src/main/assets/emoji/0_1519.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_152.png b/TMessagesProj/src/main/assets/emoji/0_152.png index 11852cf42..6467274f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_152.png and b/TMessagesProj/src/main/assets/emoji/0_152.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1520.png 
b/TMessagesProj/src/main/assets/emoji/0_1520.png index 42caae5f0..cb23dfee3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1520.png and b/TMessagesProj/src/main/assets/emoji/0_1520.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1521.png b/TMessagesProj/src/main/assets/emoji/0_1521.png index 89af54b5c..a43c87379 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1521.png and b/TMessagesProj/src/main/assets/emoji/0_1521.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1522.png b/TMessagesProj/src/main/assets/emoji/0_1522.png index 654027617..bf4412b1b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1522.png and b/TMessagesProj/src/main/assets/emoji/0_1522.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1523.png b/TMessagesProj/src/main/assets/emoji/0_1523.png index d586f0448..307f7035b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1523.png and b/TMessagesProj/src/main/assets/emoji/0_1523.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1524.png b/TMessagesProj/src/main/assets/emoji/0_1524.png index e420c0ade..101bab49e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1524.png and b/TMessagesProj/src/main/assets/emoji/0_1524.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1525.png b/TMessagesProj/src/main/assets/emoji/0_1525.png index bc0217b70..6f5965544 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1525.png and b/TMessagesProj/src/main/assets/emoji/0_1525.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1526.png b/TMessagesProj/src/main/assets/emoji/0_1526.png index e422f6a00..6d3c5e159 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1526.png and b/TMessagesProj/src/main/assets/emoji/0_1526.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1527.png b/TMessagesProj/src/main/assets/emoji/0_1527.png index ec60863d8..084a1e55e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1527.png 
and b/TMessagesProj/src/main/assets/emoji/0_1527.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1528.png b/TMessagesProj/src/main/assets/emoji/0_1528.png index aaab9679b..42b76d1bf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1528.png and b/TMessagesProj/src/main/assets/emoji/0_1528.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1529.png b/TMessagesProj/src/main/assets/emoji/0_1529.png index 07833b174..c49a2f7ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1529.png and b/TMessagesProj/src/main/assets/emoji/0_1529.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_153.png b/TMessagesProj/src/main/assets/emoji/0_153.png index edef6d59d..a613bdb66 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_153.png and b/TMessagesProj/src/main/assets/emoji/0_153.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1530.png b/TMessagesProj/src/main/assets/emoji/0_1530.png index 1cde415d2..86a323260 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1530.png and b/TMessagesProj/src/main/assets/emoji/0_1530.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1531.png b/TMessagesProj/src/main/assets/emoji/0_1531.png index 07fb549d9..193ba18c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1531.png and b/TMessagesProj/src/main/assets/emoji/0_1531.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1532.png b/TMessagesProj/src/main/assets/emoji/0_1532.png index 7bdaf02c7..248220885 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1532.png and b/TMessagesProj/src/main/assets/emoji/0_1532.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1533.png b/TMessagesProj/src/main/assets/emoji/0_1533.png index d1b877b1c..575fe5f5a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1533.png and b/TMessagesProj/src/main/assets/emoji/0_1533.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1534.png 
b/TMessagesProj/src/main/assets/emoji/0_1534.png index f9dd7b576..82c9771ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1534.png and b/TMessagesProj/src/main/assets/emoji/0_1534.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1535.png b/TMessagesProj/src/main/assets/emoji/0_1535.png index 6e6cd8df8..7c1629a57 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1535.png and b/TMessagesProj/src/main/assets/emoji/0_1535.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1536.png b/TMessagesProj/src/main/assets/emoji/0_1536.png index 78ae735d6..945b10b5b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1536.png and b/TMessagesProj/src/main/assets/emoji/0_1536.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1537.png b/TMessagesProj/src/main/assets/emoji/0_1537.png index dcc4c0153..b847498a4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1537.png and b/TMessagesProj/src/main/assets/emoji/0_1537.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1538.png b/TMessagesProj/src/main/assets/emoji/0_1538.png index ae315e06f..331143dd7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1538.png and b/TMessagesProj/src/main/assets/emoji/0_1538.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1539.png b/TMessagesProj/src/main/assets/emoji/0_1539.png index 566cf3168..184cf83c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1539.png and b/TMessagesProj/src/main/assets/emoji/0_1539.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_154.png b/TMessagesProj/src/main/assets/emoji/0_154.png index 81a25e15c..11852cf42 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_154.png and b/TMessagesProj/src/main/assets/emoji/0_154.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1540.png b/TMessagesProj/src/main/assets/emoji/0_1540.png index 46b34e639..c65ca4bd1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1540.png and 
b/TMessagesProj/src/main/assets/emoji/0_1540.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1541.png b/TMessagesProj/src/main/assets/emoji/0_1541.png index 93b3d0b4b..23fa7b0a4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1541.png and b/TMessagesProj/src/main/assets/emoji/0_1541.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1542.png b/TMessagesProj/src/main/assets/emoji/0_1542.png index a6d152fe5..c7c04a579 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1542.png and b/TMessagesProj/src/main/assets/emoji/0_1542.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1543.png b/TMessagesProj/src/main/assets/emoji/0_1543.png index 53490c2aa..f54b79bd6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1543.png and b/TMessagesProj/src/main/assets/emoji/0_1543.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1544.png b/TMessagesProj/src/main/assets/emoji/0_1544.png index dbddfc4f6..60e434473 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1544.png and b/TMessagesProj/src/main/assets/emoji/0_1544.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1545.png b/TMessagesProj/src/main/assets/emoji/0_1545.png index bf530a61e..fd5e1cf3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1545.png and b/TMessagesProj/src/main/assets/emoji/0_1545.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1546.png b/TMessagesProj/src/main/assets/emoji/0_1546.png index 901694403..b9af05f3f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1546.png and b/TMessagesProj/src/main/assets/emoji/0_1546.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1547.png b/TMessagesProj/src/main/assets/emoji/0_1547.png index c89311f63..5ce99ca53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1547.png and b/TMessagesProj/src/main/assets/emoji/0_1547.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1548.png 
b/TMessagesProj/src/main/assets/emoji/0_1548.png index b4aba9c7f..48dbd9eb4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1548.png and b/TMessagesProj/src/main/assets/emoji/0_1548.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1549.png b/TMessagesProj/src/main/assets/emoji/0_1549.png index fb9a69e68..d37561c90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1549.png and b/TMessagesProj/src/main/assets/emoji/0_1549.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_155.png b/TMessagesProj/src/main/assets/emoji/0_155.png index 92e56ffb1..edef6d59d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_155.png and b/TMessagesProj/src/main/assets/emoji/0_155.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1550.png b/TMessagesProj/src/main/assets/emoji/0_1550.png index 1627e40d6..a379cdfdd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1550.png and b/TMessagesProj/src/main/assets/emoji/0_1550.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1551.png b/TMessagesProj/src/main/assets/emoji/0_1551.png index 5f3d6cabf..735f067ce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1551.png and b/TMessagesProj/src/main/assets/emoji/0_1551.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1552.png b/TMessagesProj/src/main/assets/emoji/0_1552.png index 8893b3c6f..ee0c329a3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1552.png and b/TMessagesProj/src/main/assets/emoji/0_1552.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1553.png b/TMessagesProj/src/main/assets/emoji/0_1553.png index 83a4bcbca..7e973f51f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1553.png and b/TMessagesProj/src/main/assets/emoji/0_1553.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1554.png b/TMessagesProj/src/main/assets/emoji/0_1554.png index bec8ca515..f8f500963 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1554.png and 
b/TMessagesProj/src/main/assets/emoji/0_1554.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1555.png b/TMessagesProj/src/main/assets/emoji/0_1555.png index c1f647b7d..9d72e1570 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1555.png and b/TMessagesProj/src/main/assets/emoji/0_1555.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1556.png b/TMessagesProj/src/main/assets/emoji/0_1556.png index faccee097..87390bd47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1556.png and b/TMessagesProj/src/main/assets/emoji/0_1556.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1557.png b/TMessagesProj/src/main/assets/emoji/0_1557.png index ad96520aa..0eec7b58e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1557.png and b/TMessagesProj/src/main/assets/emoji/0_1557.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1558.png b/TMessagesProj/src/main/assets/emoji/0_1558.png index a1fcbeb37..308df0caf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1558.png and b/TMessagesProj/src/main/assets/emoji/0_1558.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1559.png b/TMessagesProj/src/main/assets/emoji/0_1559.png index 4b8a646dd..b5a3f73e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1559.png and b/TMessagesProj/src/main/assets/emoji/0_1559.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_156.png b/TMessagesProj/src/main/assets/emoji/0_156.png index c32079fe1..81a25e15c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_156.png and b/TMessagesProj/src/main/assets/emoji/0_156.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1560.png b/TMessagesProj/src/main/assets/emoji/0_1560.png index ac0a14419..df2becf50 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1560.png and b/TMessagesProj/src/main/assets/emoji/0_1560.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1561.png 
b/TMessagesProj/src/main/assets/emoji/0_1561.png index 101baf38f..4efb7776b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1561.png and b/TMessagesProj/src/main/assets/emoji/0_1561.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1562.png b/TMessagesProj/src/main/assets/emoji/0_1562.png index b0eb01322..ded83a31c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1562.png and b/TMessagesProj/src/main/assets/emoji/0_1562.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1563.png b/TMessagesProj/src/main/assets/emoji/0_1563.png index c177b8045..d5651506a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1563.png and b/TMessagesProj/src/main/assets/emoji/0_1563.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1564.png b/TMessagesProj/src/main/assets/emoji/0_1564.png index 80f3b9742..daa656875 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1564.png and b/TMessagesProj/src/main/assets/emoji/0_1564.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1565.png b/TMessagesProj/src/main/assets/emoji/0_1565.png index 3c1c2d1cc..79bd39c57 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1565.png and b/TMessagesProj/src/main/assets/emoji/0_1565.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1566.png b/TMessagesProj/src/main/assets/emoji/0_1566.png index 9c1d0d403..3487627d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1566.png and b/TMessagesProj/src/main/assets/emoji/0_1566.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1567.png b/TMessagesProj/src/main/assets/emoji/0_1567.png index f8482d6e4..a64685e7a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1567.png and b/TMessagesProj/src/main/assets/emoji/0_1567.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1568.png b/TMessagesProj/src/main/assets/emoji/0_1568.png index e4894b805..57d89bedd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1568.png 
and b/TMessagesProj/src/main/assets/emoji/0_1568.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1569.png b/TMessagesProj/src/main/assets/emoji/0_1569.png index 052963138..9f2346dc7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1569.png and b/TMessagesProj/src/main/assets/emoji/0_1569.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_157.png b/TMessagesProj/src/main/assets/emoji/0_157.png index 581beea0d..af5920c81 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_157.png and b/TMessagesProj/src/main/assets/emoji/0_157.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1570.png b/TMessagesProj/src/main/assets/emoji/0_1570.png index a2eefa727..b73933e80 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1570.png and b/TMessagesProj/src/main/assets/emoji/0_1570.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1571.png b/TMessagesProj/src/main/assets/emoji/0_1571.png index 52b2fb3c3..564ac0d9a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1571.png and b/TMessagesProj/src/main/assets/emoji/0_1571.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1572.png b/TMessagesProj/src/main/assets/emoji/0_1572.png index 48f8e6882..b5f4437d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1572.png and b/TMessagesProj/src/main/assets/emoji/0_1572.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1573.png b/TMessagesProj/src/main/assets/emoji/0_1573.png index 2d7ace8f6..3597a225a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1573.png and b/TMessagesProj/src/main/assets/emoji/0_1573.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1574.png b/TMessagesProj/src/main/assets/emoji/0_1574.png index 513acb56a..694c407cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1574.png and b/TMessagesProj/src/main/assets/emoji/0_1574.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1575.png 
b/TMessagesProj/src/main/assets/emoji/0_1575.png index eca862f02..0e0de5a00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1575.png and b/TMessagesProj/src/main/assets/emoji/0_1575.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1576.png b/TMessagesProj/src/main/assets/emoji/0_1576.png index f6ca6ed5e..7cd48029d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1576.png and b/TMessagesProj/src/main/assets/emoji/0_1576.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1577.png b/TMessagesProj/src/main/assets/emoji/0_1577.png index d26888fee..a267ec64c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1577.png and b/TMessagesProj/src/main/assets/emoji/0_1577.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1578.png b/TMessagesProj/src/main/assets/emoji/0_1578.png index 4a242c870..0290d5366 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1578.png and b/TMessagesProj/src/main/assets/emoji/0_1578.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1579.png b/TMessagesProj/src/main/assets/emoji/0_1579.png index 9ea76513a..4a6fbc35e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1579.png and b/TMessagesProj/src/main/assets/emoji/0_1579.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_158.png b/TMessagesProj/src/main/assets/emoji/0_158.png index 75d368376..4e4e519d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_158.png and b/TMessagesProj/src/main/assets/emoji/0_158.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1580.png b/TMessagesProj/src/main/assets/emoji/0_1580.png index 2603c404e..906b5de83 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1580.png and b/TMessagesProj/src/main/assets/emoji/0_1580.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1581.png b/TMessagesProj/src/main/assets/emoji/0_1581.png index f22dbf609..b1d5e33ea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1581.png and 
b/TMessagesProj/src/main/assets/emoji/0_1581.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1582.png b/TMessagesProj/src/main/assets/emoji/0_1582.png index 906623f80..e945a45bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1582.png and b/TMessagesProj/src/main/assets/emoji/0_1582.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1583.png b/TMessagesProj/src/main/assets/emoji/0_1583.png index b253a67b6..f75fa362b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1583.png and b/TMessagesProj/src/main/assets/emoji/0_1583.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1584.png b/TMessagesProj/src/main/assets/emoji/0_1584.png index 60cbfadc8..dcee8085e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1584.png and b/TMessagesProj/src/main/assets/emoji/0_1584.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1585.png b/TMessagesProj/src/main/assets/emoji/0_1585.png index 8a40a0ada..04f027cf4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1585.png and b/TMessagesProj/src/main/assets/emoji/0_1585.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1586.png b/TMessagesProj/src/main/assets/emoji/0_1586.png index f6c1a2c77..cb3c9211f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1586.png and b/TMessagesProj/src/main/assets/emoji/0_1586.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1587.png b/TMessagesProj/src/main/assets/emoji/0_1587.png index 3593e0d54..a8566dac4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1587.png and b/TMessagesProj/src/main/assets/emoji/0_1587.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1588.png b/TMessagesProj/src/main/assets/emoji/0_1588.png index c9d5089ed..2e738cdcb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1588.png and b/TMessagesProj/src/main/assets/emoji/0_1588.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1589.png 
b/TMessagesProj/src/main/assets/emoji/0_1589.png index d2e53a237..f0e3a2806 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1589.png and b/TMessagesProj/src/main/assets/emoji/0_1589.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_159.png b/TMessagesProj/src/main/assets/emoji/0_159.png index 5ce8c0ed6..1d9d1ae3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_159.png and b/TMessagesProj/src/main/assets/emoji/0_159.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1590.png b/TMessagesProj/src/main/assets/emoji/0_1590.png index fc8830422..b9fae137c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1590.png and b/TMessagesProj/src/main/assets/emoji/0_1590.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1591.png b/TMessagesProj/src/main/assets/emoji/0_1591.png index 24b1364fe..42caae5f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1591.png and b/TMessagesProj/src/main/assets/emoji/0_1591.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1592.png b/TMessagesProj/src/main/assets/emoji/0_1592.png index 839493114..89af54b5c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1592.png and b/TMessagesProj/src/main/assets/emoji/0_1592.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1593.png b/TMessagesProj/src/main/assets/emoji/0_1593.png index 445192925..cba825fcf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1593.png and b/TMessagesProj/src/main/assets/emoji/0_1593.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1594.png b/TMessagesProj/src/main/assets/emoji/0_1594.png index 2d6b4080a..1711ba153 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1594.png and b/TMessagesProj/src/main/assets/emoji/0_1594.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1595.png b/TMessagesProj/src/main/assets/emoji/0_1595.png index 3ea4c760b..db59963f8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1595.png and 
b/TMessagesProj/src/main/assets/emoji/0_1595.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1596.png b/TMessagesProj/src/main/assets/emoji/0_1596.png index 1a661ade4..898e9a71f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1596.png and b/TMessagesProj/src/main/assets/emoji/0_1596.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1597.png b/TMessagesProj/src/main/assets/emoji/0_1597.png index 115b53b17..45dcd4d21 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1597.png and b/TMessagesProj/src/main/assets/emoji/0_1597.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1598.png b/TMessagesProj/src/main/assets/emoji/0_1598.png index 58431f382..ec60863d8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1598.png and b/TMessagesProj/src/main/assets/emoji/0_1598.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1599.png b/TMessagesProj/src/main/assets/emoji/0_1599.png index 48679d00e..228b0446f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1599.png and b/TMessagesProj/src/main/assets/emoji/0_1599.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_16.png b/TMessagesProj/src/main/assets/emoji/0_16.png index f4135e4c1..713b5c32f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_16.png and b/TMessagesProj/src/main/assets/emoji/0_16.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_160.png b/TMessagesProj/src/main/assets/emoji/0_160.png index 0ad3bdd36..1f0750bd0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_160.png and b/TMessagesProj/src/main/assets/emoji/0_160.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1600.png b/TMessagesProj/src/main/assets/emoji/0_1600.png index 13c81df36..12a111e42 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1600.png and b/TMessagesProj/src/main/assets/emoji/0_1600.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1601.png 
b/TMessagesProj/src/main/assets/emoji/0_1601.png index 982a398c4..b9b532981 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1601.png and b/TMessagesProj/src/main/assets/emoji/0_1601.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1602.png b/TMessagesProj/src/main/assets/emoji/0_1602.png index b94386675..15d6c225b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1602.png and b/TMessagesProj/src/main/assets/emoji/0_1602.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1603.png b/TMessagesProj/src/main/assets/emoji/0_1603.png index 020df6796..74bd5ef8b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1603.png and b/TMessagesProj/src/main/assets/emoji/0_1603.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1604.png b/TMessagesProj/src/main/assets/emoji/0_1604.png index c15618a04..d1b877b1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1604.png and b/TMessagesProj/src/main/assets/emoji/0_1604.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1605.png b/TMessagesProj/src/main/assets/emoji/0_1605.png index 1a7b1bfe4..2ae9d8ed0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1605.png and b/TMessagesProj/src/main/assets/emoji/0_1605.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1606.png b/TMessagesProj/src/main/assets/emoji/0_1606.png index 5c52973ac..9731ab329 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1606.png and b/TMessagesProj/src/main/assets/emoji/0_1606.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1607.png b/TMessagesProj/src/main/assets/emoji/0_1607.png index 75b723260..4fe89e2ce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1607.png and b/TMessagesProj/src/main/assets/emoji/0_1607.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1608.png b/TMessagesProj/src/main/assets/emoji/0_1608.png index 250a93ea3..a2ae03780 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1608.png 
and b/TMessagesProj/src/main/assets/emoji/0_1608.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1609.png b/TMessagesProj/src/main/assets/emoji/0_1609.png index 1f5532a62..e776e0802 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1609.png and b/TMessagesProj/src/main/assets/emoji/0_1609.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_161.png b/TMessagesProj/src/main/assets/emoji/0_161.png index 82b2f768e..5ce8c0ed6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_161.png and b/TMessagesProj/src/main/assets/emoji/0_161.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1610.png b/TMessagesProj/src/main/assets/emoji/0_1610.png index 2f4749de2..566cf3168 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1610.png and b/TMessagesProj/src/main/assets/emoji/0_1610.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1611.png b/TMessagesProj/src/main/assets/emoji/0_1611.png index e9b656aa9..371006112 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1611.png and b/TMessagesProj/src/main/assets/emoji/0_1611.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1612.png b/TMessagesProj/src/main/assets/emoji/0_1612.png index a644b2d00..dce650230 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1612.png and b/TMessagesProj/src/main/assets/emoji/0_1612.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1613.png b/TMessagesProj/src/main/assets/emoji/0_1613.png index 221ea79d2..ce0b889b0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1613.png and b/TMessagesProj/src/main/assets/emoji/0_1613.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1614.png b/TMessagesProj/src/main/assets/emoji/0_1614.png index 81cc9b136..1d5178329 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1614.png and b/TMessagesProj/src/main/assets/emoji/0_1614.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1615.png 
b/TMessagesProj/src/main/assets/emoji/0_1615.png index 8dbb9701a..6449fc018 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1615.png and b/TMessagesProj/src/main/assets/emoji/0_1615.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1616.png b/TMessagesProj/src/main/assets/emoji/0_1616.png index 303059c57..7dce50e00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1616.png and b/TMessagesProj/src/main/assets/emoji/0_1616.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1617.png b/TMessagesProj/src/main/assets/emoji/0_1617.png index 2e691b9dc..66083d1d6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1617.png and b/TMessagesProj/src/main/assets/emoji/0_1617.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1618.png b/TMessagesProj/src/main/assets/emoji/0_1618.png index 9e7728be6..7283e8bee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1618.png and b/TMessagesProj/src/main/assets/emoji/0_1618.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1619.png b/TMessagesProj/src/main/assets/emoji/0_1619.png index c0e96e893..547687896 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_1619.png and b/TMessagesProj/src/main/assets/emoji/0_1619.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_162.png b/TMessagesProj/src/main/assets/emoji/0_162.png index a815fd145..0ad3bdd36 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_162.png and b/TMessagesProj/src/main/assets/emoji/0_162.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1620.png b/TMessagesProj/src/main/assets/emoji/0_1620.png new file mode 100644 index 000000000..fbe30dd5d Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1620.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1621.png b/TMessagesProj/src/main/assets/emoji/0_1621.png new file mode 100644 index 000000000..0c35a1252 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1621.png 
differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1622.png b/TMessagesProj/src/main/assets/emoji/0_1622.png new file mode 100644 index 000000000..652db8c7c Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1622.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1623.png b/TMessagesProj/src/main/assets/emoji/0_1623.png new file mode 100644 index 000000000..5f8bb0761 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1623.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1624.png b/TMessagesProj/src/main/assets/emoji/0_1624.png new file mode 100644 index 000000000..376988e3a Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1624.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1625.png b/TMessagesProj/src/main/assets/emoji/0_1625.png new file mode 100644 index 000000000..07b18291b Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1625.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1626.png b/TMessagesProj/src/main/assets/emoji/0_1626.png new file mode 100644 index 000000000..96fdb5e44 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1626.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1627.png b/TMessagesProj/src/main/assets/emoji/0_1627.png new file mode 100644 index 000000000..e638de1b2 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1627.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1628.png b/TMessagesProj/src/main/assets/emoji/0_1628.png new file mode 100644 index 000000000..05cc3f571 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1628.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1629.png b/TMessagesProj/src/main/assets/emoji/0_1629.png new file mode 100644 index 000000000..ec2412f14 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1629.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_163.png 
b/TMessagesProj/src/main/assets/emoji/0_163.png index 65596e301..db1ded72d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_163.png and b/TMessagesProj/src/main/assets/emoji/0_163.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1630.png b/TMessagesProj/src/main/assets/emoji/0_1630.png new file mode 100644 index 000000000..6a74350d5 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1630.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1631.png b/TMessagesProj/src/main/assets/emoji/0_1631.png new file mode 100644 index 000000000..27c87e41b Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1631.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1632.png b/TMessagesProj/src/main/assets/emoji/0_1632.png new file mode 100644 index 000000000..211e5849a Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1632.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1633.png b/TMessagesProj/src/main/assets/emoji/0_1633.png new file mode 100644 index 000000000..bac412242 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1633.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1634.png b/TMessagesProj/src/main/assets/emoji/0_1634.png new file mode 100644 index 000000000..d333b5ac5 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1634.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1635.png b/TMessagesProj/src/main/assets/emoji/0_1635.png new file mode 100644 index 000000000..ecedd0599 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1635.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1636.png b/TMessagesProj/src/main/assets/emoji/0_1636.png new file mode 100644 index 000000000..27c58fbe2 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1636.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1637.png b/TMessagesProj/src/main/assets/emoji/0_1637.png new file 
mode 100644 index 000000000..1f952ea69 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1637.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1638.png b/TMessagesProj/src/main/assets/emoji/0_1638.png new file mode 100644 index 000000000..97fbd3f31 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1638.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1639.png b/TMessagesProj/src/main/assets/emoji/0_1639.png new file mode 100644 index 000000000..1bf7e85c5 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1639.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_164.png b/TMessagesProj/src/main/assets/emoji/0_164.png index bf5c6e341..bef0db25c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_164.png and b/TMessagesProj/src/main/assets/emoji/0_164.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1640.png b/TMessagesProj/src/main/assets/emoji/0_1640.png new file mode 100644 index 000000000..86e3e0b89 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1640.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1641.png b/TMessagesProj/src/main/assets/emoji/0_1641.png new file mode 100644 index 000000000..9b5673603 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1641.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1642.png b/TMessagesProj/src/main/assets/emoji/0_1642.png new file mode 100644 index 000000000..65b58c6f6 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1642.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1643.png b/TMessagesProj/src/main/assets/emoji/0_1643.png new file mode 100644 index 000000000..e4a5b7dfb Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1643.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1644.png b/TMessagesProj/src/main/assets/emoji/0_1644.png new file mode 100644 index 000000000..bb204b587 Binary files 
/dev/null and b/TMessagesProj/src/main/assets/emoji/0_1644.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1645.png b/TMessagesProj/src/main/assets/emoji/0_1645.png new file mode 100644 index 000000000..f5bf2ef11 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1645.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1646.png b/TMessagesProj/src/main/assets/emoji/0_1646.png new file mode 100644 index 000000000..b49afc1f0 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1646.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1647.png b/TMessagesProj/src/main/assets/emoji/0_1647.png new file mode 100644 index 000000000..a9ba4df4f Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1647.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1648.png b/TMessagesProj/src/main/assets/emoji/0_1648.png new file mode 100644 index 000000000..f015180e1 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1648.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1649.png b/TMessagesProj/src/main/assets/emoji/0_1649.png new file mode 100644 index 000000000..c73615f4e Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1649.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_165.png b/TMessagesProj/src/main/assets/emoji/0_165.png index fc3c245b8..21aa0bd2c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_165.png and b/TMessagesProj/src/main/assets/emoji/0_165.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1650.png b/TMessagesProj/src/main/assets/emoji/0_1650.png new file mode 100644 index 000000000..4a242c870 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1650.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1651.png b/TMessagesProj/src/main/assets/emoji/0_1651.png new file mode 100644 index 000000000..13aba216a Binary files /dev/null and 
b/TMessagesProj/src/main/assets/emoji/0_1651.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1652.png b/TMessagesProj/src/main/assets/emoji/0_1652.png new file mode 100644 index 000000000..cf1f93e2e Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1652.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1653.png b/TMessagesProj/src/main/assets/emoji/0_1653.png new file mode 100644 index 000000000..2603c404e Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1653.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1654.png b/TMessagesProj/src/main/assets/emoji/0_1654.png new file mode 100644 index 000000000..139d8903e Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1654.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1655.png b/TMessagesProj/src/main/assets/emoji/0_1655.png new file mode 100644 index 000000000..085b3a643 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1655.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1656.png b/TMessagesProj/src/main/assets/emoji/0_1656.png new file mode 100644 index 000000000..d336958ef Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1656.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1657.png b/TMessagesProj/src/main/assets/emoji/0_1657.png new file mode 100644 index 000000000..ee988f480 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1657.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1658.png b/TMessagesProj/src/main/assets/emoji/0_1658.png new file mode 100644 index 000000000..a5e26dbff Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1658.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1659.png b/TMessagesProj/src/main/assets/emoji/0_1659.png new file mode 100644 index 000000000..f6c1a2c77 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1659.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_166.png b/TMessagesProj/src/main/assets/emoji/0_166.png index b92a8204f..29d2eb7d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_166.png and b/TMessagesProj/src/main/assets/emoji/0_166.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1660.png b/TMessagesProj/src/main/assets/emoji/0_1660.png new file mode 100644 index 000000000..3593e0d54 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1660.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1661.png b/TMessagesProj/src/main/assets/emoji/0_1661.png new file mode 100644 index 000000000..c41cf39f9 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1661.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1662.png b/TMessagesProj/src/main/assets/emoji/0_1662.png new file mode 100644 index 000000000..d274838fe Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1662.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1663.png b/TMessagesProj/src/main/assets/emoji/0_1663.png new file mode 100644 index 000000000..ea02492d2 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1663.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1664.png b/TMessagesProj/src/main/assets/emoji/0_1664.png new file mode 100644 index 000000000..58e79463d Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1664.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1665.png b/TMessagesProj/src/main/assets/emoji/0_1665.png new file mode 100644 index 000000000..41ac05153 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1665.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1666.png b/TMessagesProj/src/main/assets/emoji/0_1666.png new file mode 100644 index 000000000..fca10ff3b Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1666.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1667.png 
b/TMessagesProj/src/main/assets/emoji/0_1667.png new file mode 100644 index 000000000..7bc72ae24 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1667.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1668.png b/TMessagesProj/src/main/assets/emoji/0_1668.png new file mode 100644 index 000000000..12608a327 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1668.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1669.png b/TMessagesProj/src/main/assets/emoji/0_1669.png new file mode 100644 index 000000000..7a6154313 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1669.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_167.png b/TMessagesProj/src/main/assets/emoji/0_167.png index 18e60fa6f..fc3c245b8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_167.png and b/TMessagesProj/src/main/assets/emoji/0_167.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1670.png b/TMessagesProj/src/main/assets/emoji/0_1670.png new file mode 100644 index 000000000..358a00655 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1670.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1671.png b/TMessagesProj/src/main/assets/emoji/0_1671.png new file mode 100644 index 000000000..409bc747c Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1671.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1672.png b/TMessagesProj/src/main/assets/emoji/0_1672.png new file mode 100644 index 000000000..05a606ed2 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1672.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1673.png b/TMessagesProj/src/main/assets/emoji/0_1673.png new file mode 100644 index 000000000..d112a5f34 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1673.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1674.png b/TMessagesProj/src/main/assets/emoji/0_1674.png new file 
mode 100644 index 000000000..eef6659c1 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1674.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1675.png b/TMessagesProj/src/main/assets/emoji/0_1675.png new file mode 100644 index 000000000..74acf6d19 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1675.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1676.png b/TMessagesProj/src/main/assets/emoji/0_1676.png new file mode 100644 index 000000000..f79f52d44 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1676.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1677.png b/TMessagesProj/src/main/assets/emoji/0_1677.png new file mode 100644 index 000000000..020df6796 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1677.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1678.png b/TMessagesProj/src/main/assets/emoji/0_1678.png new file mode 100644 index 000000000..818231649 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1678.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1679.png b/TMessagesProj/src/main/assets/emoji/0_1679.png new file mode 100644 index 000000000..310687440 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1679.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_168.png b/TMessagesProj/src/main/assets/emoji/0_168.png index c56528f11..b92a8204f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_168.png and b/TMessagesProj/src/main/assets/emoji/0_168.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1680.png b/TMessagesProj/src/main/assets/emoji/0_1680.png new file mode 100644 index 000000000..9687c8752 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1680.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1681.png b/TMessagesProj/src/main/assets/emoji/0_1681.png new file mode 100644 index 000000000..75b723260 Binary files 
/dev/null and b/TMessagesProj/src/main/assets/emoji/0_1681.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1682.png b/TMessagesProj/src/main/assets/emoji/0_1682.png new file mode 100644 index 000000000..43bc9618c Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1682.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1683.png b/TMessagesProj/src/main/assets/emoji/0_1683.png new file mode 100644 index 000000000..1ee74d959 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1683.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1684.png b/TMessagesProj/src/main/assets/emoji/0_1684.png new file mode 100644 index 000000000..687a3eb9f Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1684.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1685.png b/TMessagesProj/src/main/assets/emoji/0_1685.png new file mode 100644 index 000000000..003a09978 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1685.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1686.png b/TMessagesProj/src/main/assets/emoji/0_1686.png new file mode 100644 index 000000000..51388f393 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1686.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1687.png b/TMessagesProj/src/main/assets/emoji/0_1687.png new file mode 100644 index 000000000..2caf68f71 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1687.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1688.png b/TMessagesProj/src/main/assets/emoji/0_1688.png new file mode 100644 index 000000000..4b674f5eb Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1688.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1689.png b/TMessagesProj/src/main/assets/emoji/0_1689.png new file mode 100644 index 000000000..acc1a2172 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1689.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/0_169.png b/TMessagesProj/src/main/assets/emoji/0_169.png index f89681bab..e8944d382 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_169.png and b/TMessagesProj/src/main/assets/emoji/0_169.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1690.png b/TMessagesProj/src/main/assets/emoji/0_1690.png new file mode 100644 index 000000000..8dbb9701a Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1690.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1691.png b/TMessagesProj/src/main/assets/emoji/0_1691.png new file mode 100644 index 000000000..964ceb545 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1691.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1692.png b/TMessagesProj/src/main/assets/emoji/0_1692.png new file mode 100644 index 000000000..2e691b9dc Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1692.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1693.png b/TMessagesProj/src/main/assets/emoji/0_1693.png new file mode 100644 index 000000000..9e7728be6 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1693.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_1694.png b/TMessagesProj/src/main/assets/emoji/0_1694.png new file mode 100644 index 000000000..d2c275397 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/0_1694.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_17.png b/TMessagesProj/src/main/assets/emoji/0_17.png index d3fe71d93..ef3f8953d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_17.png and b/TMessagesProj/src/main/assets/emoji/0_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_170.png b/TMessagesProj/src/main/assets/emoji/0_170.png index 123e24484..835f1f2b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_170.png and b/TMessagesProj/src/main/assets/emoji/0_170.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_171.png b/TMessagesProj/src/main/assets/emoji/0_171.png index 4ff6b12a2..57402a0b5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_171.png and b/TMessagesProj/src/main/assets/emoji/0_171.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_172.png b/TMessagesProj/src/main/assets/emoji/0_172.png index 651d49858..5d640ef0f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_172.png and b/TMessagesProj/src/main/assets/emoji/0_172.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_173.png b/TMessagesProj/src/main/assets/emoji/0_173.png index db0431fe1..4ff6b12a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_173.png and b/TMessagesProj/src/main/assets/emoji/0_173.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_174.png b/TMessagesProj/src/main/assets/emoji/0_174.png index 01ad0b6b9..651d49858 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_174.png and b/TMessagesProj/src/main/assets/emoji/0_174.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_175.png b/TMessagesProj/src/main/assets/emoji/0_175.png index 75e4dc5fc..0c7784b5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_175.png and b/TMessagesProj/src/main/assets/emoji/0_175.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_176.png b/TMessagesProj/src/main/assets/emoji/0_176.png index d45970a6d..36b7663d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_176.png and b/TMessagesProj/src/main/assets/emoji/0_176.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_177.png b/TMessagesProj/src/main/assets/emoji/0_177.png index 2d5423117..b8a6f7825 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_177.png and b/TMessagesProj/src/main/assets/emoji/0_177.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_178.png b/TMessagesProj/src/main/assets/emoji/0_178.png index 5f1381ea7..d45970a6d 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_178.png and b/TMessagesProj/src/main/assets/emoji/0_178.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_179.png b/TMessagesProj/src/main/assets/emoji/0_179.png index 442740029..2d5423117 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_179.png and b/TMessagesProj/src/main/assets/emoji/0_179.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_18.png b/TMessagesProj/src/main/assets/emoji/0_18.png index ef3c7c201..6d1045531 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_18.png and b/TMessagesProj/src/main/assets/emoji/0_18.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_180.png b/TMessagesProj/src/main/assets/emoji/0_180.png index 147a69559..5f1381ea7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_180.png and b/TMessagesProj/src/main/assets/emoji/0_180.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_181.png b/TMessagesProj/src/main/assets/emoji/0_181.png index eae0a604d..b3fe651fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_181.png and b/TMessagesProj/src/main/assets/emoji/0_181.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_182.png b/TMessagesProj/src/main/assets/emoji/0_182.png index b9a894260..0d69179eb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_182.png and b/TMessagesProj/src/main/assets/emoji/0_182.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_183.png b/TMessagesProj/src/main/assets/emoji/0_183.png index 0045a4d7b..1088431ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_183.png and b/TMessagesProj/src/main/assets/emoji/0_183.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_184.png b/TMessagesProj/src/main/assets/emoji/0_184.png index 027c5d8d9..dccd15fc5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_184.png and b/TMessagesProj/src/main/assets/emoji/0_184.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_185.png 
b/TMessagesProj/src/main/assets/emoji/0_185.png index bb24ff1d8..0045a4d7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_185.png and b/TMessagesProj/src/main/assets/emoji/0_185.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_186.png b/TMessagesProj/src/main/assets/emoji/0_186.png index 96b7a17d6..027c5d8d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_186.png and b/TMessagesProj/src/main/assets/emoji/0_186.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_187.png b/TMessagesProj/src/main/assets/emoji/0_187.png index 6036a26f4..77c90df49 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_187.png and b/TMessagesProj/src/main/assets/emoji/0_187.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_188.png b/TMessagesProj/src/main/assets/emoji/0_188.png index f529ba8af..56488e261 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_188.png and b/TMessagesProj/src/main/assets/emoji/0_188.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_189.png b/TMessagesProj/src/main/assets/emoji/0_189.png index 1440efe6d..d4f8b1492 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_189.png and b/TMessagesProj/src/main/assets/emoji/0_189.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_19.png b/TMessagesProj/src/main/assets/emoji/0_19.png index a11931eb1..5a480a526 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_19.png and b/TMessagesProj/src/main/assets/emoji/0_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_190.png b/TMessagesProj/src/main/assets/emoji/0_190.png index 7903af400..4795f5774 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_190.png and b/TMessagesProj/src/main/assets/emoji/0_190.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_191.png b/TMessagesProj/src/main/assets/emoji/0_191.png index 95517caf4..1440efe6d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_191.png and 
b/TMessagesProj/src/main/assets/emoji/0_191.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_192.png b/TMessagesProj/src/main/assets/emoji/0_192.png index 1ffefe489..ef8903589 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_192.png and b/TMessagesProj/src/main/assets/emoji/0_192.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_193.png b/TMessagesProj/src/main/assets/emoji/0_193.png index e795ad686..3867383fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_193.png and b/TMessagesProj/src/main/assets/emoji/0_193.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_194.png b/TMessagesProj/src/main/assets/emoji/0_194.png index 51194dcc4..b5c85f73a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_194.png and b/TMessagesProj/src/main/assets/emoji/0_194.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_195.png b/TMessagesProj/src/main/assets/emoji/0_195.png index 03698d4a8..7b893d122 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_195.png and b/TMessagesProj/src/main/assets/emoji/0_195.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_196.png b/TMessagesProj/src/main/assets/emoji/0_196.png index 5d89056c5..019d62fc6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_196.png and b/TMessagesProj/src/main/assets/emoji/0_196.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_197.png b/TMessagesProj/src/main/assets/emoji/0_197.png index ceff63f6b..03698d4a8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_197.png and b/TMessagesProj/src/main/assets/emoji/0_197.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_198.png b/TMessagesProj/src/main/assets/emoji/0_198.png index 0d87d7359..452d95cab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_198.png and b/TMessagesProj/src/main/assets/emoji/0_198.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_199.png b/TMessagesProj/src/main/assets/emoji/0_199.png index 
caf4870b6..1f0b7e635 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_199.png and b/TMessagesProj/src/main/assets/emoji/0_199.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_2.png b/TMessagesProj/src/main/assets/emoji/0_2.png index 0faabca1c..5ec2a6fbe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_2.png and b/TMessagesProj/src/main/assets/emoji/0_2.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_20.png b/TMessagesProj/src/main/assets/emoji/0_20.png index c0ecdcd8a..4e740e4a8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_20.png and b/TMessagesProj/src/main/assets/emoji/0_20.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_200.png b/TMessagesProj/src/main/assets/emoji/0_200.png index 73c766af4..0d6cb8a2e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_200.png and b/TMessagesProj/src/main/assets/emoji/0_200.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_201.png b/TMessagesProj/src/main/assets/emoji/0_201.png index f26f88a2f..5594bab49 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_201.png and b/TMessagesProj/src/main/assets/emoji/0_201.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_202.png b/TMessagesProj/src/main/assets/emoji/0_202.png index 95e36b30c..36ff424a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_202.png and b/TMessagesProj/src/main/assets/emoji/0_202.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_203.png b/TMessagesProj/src/main/assets/emoji/0_203.png index b7e22e0b0..be349d3bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_203.png and b/TMessagesProj/src/main/assets/emoji/0_203.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_204.png b/TMessagesProj/src/main/assets/emoji/0_204.png index 1adf77fb2..95e36b30c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_204.png and b/TMessagesProj/src/main/assets/emoji/0_204.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_205.png b/TMessagesProj/src/main/assets/emoji/0_205.png index 99ae801ce..cce1fc47e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_205.png and b/TMessagesProj/src/main/assets/emoji/0_205.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_206.png b/TMessagesProj/src/main/assets/emoji/0_206.png index f53361392..ac1a8a4c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_206.png and b/TMessagesProj/src/main/assets/emoji/0_206.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_207.png b/TMessagesProj/src/main/assets/emoji/0_207.png index 5f7c989f4..154e615bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_207.png and b/TMessagesProj/src/main/assets/emoji/0_207.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_208.png b/TMessagesProj/src/main/assets/emoji/0_208.png index d57ac8c6f..e856eb7a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_208.png and b/TMessagesProj/src/main/assets/emoji/0_208.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_209.png b/TMessagesProj/src/main/assets/emoji/0_209.png index ec24961de..4a4fb3e04 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_209.png and b/TMessagesProj/src/main/assets/emoji/0_209.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_21.png b/TMessagesProj/src/main/assets/emoji/0_21.png index aa9631b1c..d758f89a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_21.png and b/TMessagesProj/src/main/assets/emoji/0_21.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_210.png b/TMessagesProj/src/main/assets/emoji/0_210.png index cf6b3ef69..82d2cf159 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_210.png and b/TMessagesProj/src/main/assets/emoji/0_210.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_211.png b/TMessagesProj/src/main/assets/emoji/0_211.png index 6b6521010..25cfbc239 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_211.png and b/TMessagesProj/src/main/assets/emoji/0_211.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_212.png b/TMessagesProj/src/main/assets/emoji/0_212.png index 93c768348..f20a6d3d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_212.png and b/TMessagesProj/src/main/assets/emoji/0_212.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_213.png b/TMessagesProj/src/main/assets/emoji/0_213.png index 629f786f3..48c070695 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_213.png and b/TMessagesProj/src/main/assets/emoji/0_213.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_214.png b/TMessagesProj/src/main/assets/emoji/0_214.png index 0f9052381..cef419d80 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_214.png and b/TMessagesProj/src/main/assets/emoji/0_214.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_215.png b/TMessagesProj/src/main/assets/emoji/0_215.png index 5e95b5570..34d7c5184 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_215.png and b/TMessagesProj/src/main/assets/emoji/0_215.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_216.png b/TMessagesProj/src/main/assets/emoji/0_216.png index 3840d822b..d57ac8c6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_216.png and b/TMessagesProj/src/main/assets/emoji/0_216.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_217.png b/TMessagesProj/src/main/assets/emoji/0_217.png index b0bdadb2c..76ac7a3f5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_217.png and b/TMessagesProj/src/main/assets/emoji/0_217.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_218.png b/TMessagesProj/src/main/assets/emoji/0_218.png index 06d6d2038..f8b19bed5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_218.png and b/TMessagesProj/src/main/assets/emoji/0_218.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_219.png 
b/TMessagesProj/src/main/assets/emoji/0_219.png index 1d107917d..9445badf0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_219.png and b/TMessagesProj/src/main/assets/emoji/0_219.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_22.png b/TMessagesProj/src/main/assets/emoji/0_22.png index 9cb644417..ab7cc66ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_22.png and b/TMessagesProj/src/main/assets/emoji/0_22.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_220.png b/TMessagesProj/src/main/assets/emoji/0_220.png index 726d7570a..0794ad2f6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_220.png and b/TMessagesProj/src/main/assets/emoji/0_220.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_221.png b/TMessagesProj/src/main/assets/emoji/0_221.png index 782b8ca8a..8e2033710 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_221.png and b/TMessagesProj/src/main/assets/emoji/0_221.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_222.png b/TMessagesProj/src/main/assets/emoji/0_222.png index 0225b35c4..0f9052381 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_222.png and b/TMessagesProj/src/main/assets/emoji/0_222.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_223.png b/TMessagesProj/src/main/assets/emoji/0_223.png index c1a063882..20b9909d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_223.png and b/TMessagesProj/src/main/assets/emoji/0_223.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_224.png b/TMessagesProj/src/main/assets/emoji/0_224.png index 9df7e3179..71f28ea01 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_224.png and b/TMessagesProj/src/main/assets/emoji/0_224.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_225.png b/TMessagesProj/src/main/assets/emoji/0_225.png index 670abfc02..8bb40af98 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_225.png and 
b/TMessagesProj/src/main/assets/emoji/0_225.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_226.png b/TMessagesProj/src/main/assets/emoji/0_226.png index 7258faaa7..f749bebca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_226.png and b/TMessagesProj/src/main/assets/emoji/0_226.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_227.png b/TMessagesProj/src/main/assets/emoji/0_227.png index 58ddfd948..2f0e74819 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_227.png and b/TMessagesProj/src/main/assets/emoji/0_227.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_228.png b/TMessagesProj/src/main/assets/emoji/0_228.png index 03ff28f96..2d0b81e14 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_228.png and b/TMessagesProj/src/main/assets/emoji/0_228.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_229.png b/TMessagesProj/src/main/assets/emoji/0_229.png index 81c41d889..6486c1c20 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_229.png and b/TMessagesProj/src/main/assets/emoji/0_229.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_23.png b/TMessagesProj/src/main/assets/emoji/0_23.png index 9d8910ec8..b7b0860b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_23.png and b/TMessagesProj/src/main/assets/emoji/0_23.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_230.png b/TMessagesProj/src/main/assets/emoji/0_230.png index 8e3fe7e38..d87b43a1a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_230.png and b/TMessagesProj/src/main/assets/emoji/0_230.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_231.png b/TMessagesProj/src/main/assets/emoji/0_231.png index 60323361b..4e4ae0dc6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_231.png and b/TMessagesProj/src/main/assets/emoji/0_231.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_232.png b/TMessagesProj/src/main/assets/emoji/0_232.png index 
e0dc03266..5836d9d45 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_232.png and b/TMessagesProj/src/main/assets/emoji/0_232.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_233.png b/TMessagesProj/src/main/assets/emoji/0_233.png index 046b19ece..b8f30c2ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_233.png and b/TMessagesProj/src/main/assets/emoji/0_233.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_234.png b/TMessagesProj/src/main/assets/emoji/0_234.png index 37d57ffbc..7258faaa7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_234.png and b/TMessagesProj/src/main/assets/emoji/0_234.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_235.png b/TMessagesProj/src/main/assets/emoji/0_235.png index 79d2f9b84..127b636fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_235.png and b/TMessagesProj/src/main/assets/emoji/0_235.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_236.png b/TMessagesProj/src/main/assets/emoji/0_236.png index 8ef6eb194..1dc429809 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_236.png and b/TMessagesProj/src/main/assets/emoji/0_236.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_237.png b/TMessagesProj/src/main/assets/emoji/0_237.png index fff4bface..dbae79429 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_237.png and b/TMessagesProj/src/main/assets/emoji/0_237.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_238.png b/TMessagesProj/src/main/assets/emoji/0_238.png index 3aef5c8d2..f828203e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_238.png and b/TMessagesProj/src/main/assets/emoji/0_238.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_239.png b/TMessagesProj/src/main/assets/emoji/0_239.png index ad73d1abb..60323361b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_239.png and b/TMessagesProj/src/main/assets/emoji/0_239.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_24.png b/TMessagesProj/src/main/assets/emoji/0_24.png index 50a194a5b..0b098b0d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_24.png and b/TMessagesProj/src/main/assets/emoji/0_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_240.png b/TMessagesProj/src/main/assets/emoji/0_240.png index 74cabd43e..e0dc03266 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_240.png and b/TMessagesProj/src/main/assets/emoji/0_240.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_241.png b/TMessagesProj/src/main/assets/emoji/0_241.png index d1d0b1b0e..a4ce37688 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_241.png and b/TMessagesProj/src/main/assets/emoji/0_241.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_242.png b/TMessagesProj/src/main/assets/emoji/0_242.png index 900a91c45..cdfc4c597 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_242.png and b/TMessagesProj/src/main/assets/emoji/0_242.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_243.png b/TMessagesProj/src/main/assets/emoji/0_243.png index 92b87a3dc..2b7066d27 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_243.png and b/TMessagesProj/src/main/assets/emoji/0_243.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_244.png b/TMessagesProj/src/main/assets/emoji/0_244.png index 2f6b9dd77..5233b4b9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_244.png and b/TMessagesProj/src/main/assets/emoji/0_244.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_245.png b/TMessagesProj/src/main/assets/emoji/0_245.png index c01229fbc..fff4bface 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_245.png and b/TMessagesProj/src/main/assets/emoji/0_245.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_246.png b/TMessagesProj/src/main/assets/emoji/0_246.png index 9ef81ba0c..3aef5c8d2 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_246.png and b/TMessagesProj/src/main/assets/emoji/0_246.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_247.png b/TMessagesProj/src/main/assets/emoji/0_247.png index 15af12994..27795fc07 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_247.png and b/TMessagesProj/src/main/assets/emoji/0_247.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_248.png b/TMessagesProj/src/main/assets/emoji/0_248.png index f2f9ebe47..5f0bd617a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_248.png and b/TMessagesProj/src/main/assets/emoji/0_248.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_249.png b/TMessagesProj/src/main/assets/emoji/0_249.png index d245cdc6f..524c53ea7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_249.png and b/TMessagesProj/src/main/assets/emoji/0_249.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_25.png b/TMessagesProj/src/main/assets/emoji/0_25.png index 9f2faacca..fe9fec19b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_25.png and b/TMessagesProj/src/main/assets/emoji/0_25.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_250.png b/TMessagesProj/src/main/assets/emoji/0_250.png index 7cd379a5e..86e7cadba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_250.png and b/TMessagesProj/src/main/assets/emoji/0_250.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_251.png b/TMessagesProj/src/main/assets/emoji/0_251.png index 7875a59e4..c7264d45a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_251.png and b/TMessagesProj/src/main/assets/emoji/0_251.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_252.png b/TMessagesProj/src/main/assets/emoji/0_252.png index 4e512975c..7375a24e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_252.png and b/TMessagesProj/src/main/assets/emoji/0_252.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_253.png 
b/TMessagesProj/src/main/assets/emoji/0_253.png index 4a3ba38f2..96e58dd86 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_253.png and b/TMessagesProj/src/main/assets/emoji/0_253.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_254.png b/TMessagesProj/src/main/assets/emoji/0_254.png index c257c22af..f3141b3f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_254.png and b/TMessagesProj/src/main/assets/emoji/0_254.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_255.png b/TMessagesProj/src/main/assets/emoji/0_255.png index afb0b2e1d..15af12994 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_255.png and b/TMessagesProj/src/main/assets/emoji/0_255.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_256.png b/TMessagesProj/src/main/assets/emoji/0_256.png index f7b7f0249..f2f9ebe47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_256.png and b/TMessagesProj/src/main/assets/emoji/0_256.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_257.png b/TMessagesProj/src/main/assets/emoji/0_257.png index d04cbf677..d245cdc6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_257.png and b/TMessagesProj/src/main/assets/emoji/0_257.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_258.png b/TMessagesProj/src/main/assets/emoji/0_258.png index 638d16e2d..7cd379a5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_258.png and b/TMessagesProj/src/main/assets/emoji/0_258.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_259.png b/TMessagesProj/src/main/assets/emoji/0_259.png index 6ff941d4f..c50782a47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_259.png and b/TMessagesProj/src/main/assets/emoji/0_259.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_26.png b/TMessagesProj/src/main/assets/emoji/0_26.png index 6a7746d81..20848a01d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_26.png and 
b/TMessagesProj/src/main/assets/emoji/0_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_260.png b/TMessagesProj/src/main/assets/emoji/0_260.png index 9dfb0e9fa..de216d92c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_260.png and b/TMessagesProj/src/main/assets/emoji/0_260.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_261.png b/TMessagesProj/src/main/assets/emoji/0_261.png index 890cff0ff..6a8ae284a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_261.png and b/TMessagesProj/src/main/assets/emoji/0_261.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_262.png b/TMessagesProj/src/main/assets/emoji/0_262.png index f0111e0e1..5c6420d84 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_262.png and b/TMessagesProj/src/main/assets/emoji/0_262.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_263.png b/TMessagesProj/src/main/assets/emoji/0_263.png index 987999d50..242a31ef9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_263.png and b/TMessagesProj/src/main/assets/emoji/0_263.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_264.png b/TMessagesProj/src/main/assets/emoji/0_264.png index f47b28af8..f7b7f0249 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_264.png and b/TMessagesProj/src/main/assets/emoji/0_264.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_265.png b/TMessagesProj/src/main/assets/emoji/0_265.png index 71f61953d..9d7ba7291 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_265.png and b/TMessagesProj/src/main/assets/emoji/0_265.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_266.png b/TMessagesProj/src/main/assets/emoji/0_266.png index 42fc5bac2..dd392ff29 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_266.png and b/TMessagesProj/src/main/assets/emoji/0_266.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_267.png b/TMessagesProj/src/main/assets/emoji/0_267.png index 
f3d751737..2817909e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_267.png and b/TMessagesProj/src/main/assets/emoji/0_267.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_268.png b/TMessagesProj/src/main/assets/emoji/0_268.png index 508208a3d..0c9b1d3c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_268.png and b/TMessagesProj/src/main/assets/emoji/0_268.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_269.png b/TMessagesProj/src/main/assets/emoji/0_269.png index 4c9b15c8f..890cff0ff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_269.png and b/TMessagesProj/src/main/assets/emoji/0_269.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_27.png b/TMessagesProj/src/main/assets/emoji/0_27.png index 579d417db..3197ff0ee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_27.png and b/TMessagesProj/src/main/assets/emoji/0_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_270.png b/TMessagesProj/src/main/assets/emoji/0_270.png index 7d38db838..f0111e0e1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_270.png and b/TMessagesProj/src/main/assets/emoji/0_270.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_271.png b/TMessagesProj/src/main/assets/emoji/0_271.png index 2d66e2ed7..6e436f7d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_271.png and b/TMessagesProj/src/main/assets/emoji/0_271.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_272.png b/TMessagesProj/src/main/assets/emoji/0_272.png index 154bcfded..515f8710f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_272.png and b/TMessagesProj/src/main/assets/emoji/0_272.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_273.png b/TMessagesProj/src/main/assets/emoji/0_273.png index 142ce3fde..29e69bdc9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_273.png and b/TMessagesProj/src/main/assets/emoji/0_273.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_274.png b/TMessagesProj/src/main/assets/emoji/0_274.png index 0602cd7e3..f214bdf6e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_274.png and b/TMessagesProj/src/main/assets/emoji/0_274.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_275.png b/TMessagesProj/src/main/assets/emoji/0_275.png index 60c53f706..d8445d0f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_275.png and b/TMessagesProj/src/main/assets/emoji/0_275.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_276.png b/TMessagesProj/src/main/assets/emoji/0_276.png index 0ef37f8f2..f7f754324 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_276.png and b/TMessagesProj/src/main/assets/emoji/0_276.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_277.png b/TMessagesProj/src/main/assets/emoji/0_277.png index 1d42a6208..14f0dfe9a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_277.png and b/TMessagesProj/src/main/assets/emoji/0_277.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_278.png b/TMessagesProj/src/main/assets/emoji/0_278.png index afc59fac5..0f4ed5c93 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_278.png and b/TMessagesProj/src/main/assets/emoji/0_278.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_279.png b/TMessagesProj/src/main/assets/emoji/0_279.png index 044e42511..a6e3d234e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_279.png and b/TMessagesProj/src/main/assets/emoji/0_279.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_28.png b/TMessagesProj/src/main/assets/emoji/0_28.png index 1cd0353c7..87b6fa514 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_28.png and b/TMessagesProj/src/main/assets/emoji/0_28.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_280.png b/TMessagesProj/src/main/assets/emoji/0_280.png index 809a29f6e..294db2538 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_280.png and b/TMessagesProj/src/main/assets/emoji/0_280.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_281.png b/TMessagesProj/src/main/assets/emoji/0_281.png index bfd8e2649..142ce3fde 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_281.png and b/TMessagesProj/src/main/assets/emoji/0_281.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_282.png b/TMessagesProj/src/main/assets/emoji/0_282.png index ad89b67e3..0602cd7e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_282.png and b/TMessagesProj/src/main/assets/emoji/0_282.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_283.png b/TMessagesProj/src/main/assets/emoji/0_283.png index dd023d693..582b5b6a4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_283.png and b/TMessagesProj/src/main/assets/emoji/0_283.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_284.png b/TMessagesProj/src/main/assets/emoji/0_284.png index 4d523e5ac..986ffe2e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_284.png and b/TMessagesProj/src/main/assets/emoji/0_284.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_285.png b/TMessagesProj/src/main/assets/emoji/0_285.png index 768d06003..e82b1f195 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_285.png and b/TMessagesProj/src/main/assets/emoji/0_285.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_286.png b/TMessagesProj/src/main/assets/emoji/0_286.png index 9b5459c45..9153f2e8f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_286.png and b/TMessagesProj/src/main/assets/emoji/0_286.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_287.png b/TMessagesProj/src/main/assets/emoji/0_287.png index aa72d1907..903460b60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_287.png and b/TMessagesProj/src/main/assets/emoji/0_287.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_288.png 
b/TMessagesProj/src/main/assets/emoji/0_288.png index 791d4afce..809a29f6e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_288.png and b/TMessagesProj/src/main/assets/emoji/0_288.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_289.png b/TMessagesProj/src/main/assets/emoji/0_289.png index 32b4f3793..27d83bca3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_289.png and b/TMessagesProj/src/main/assets/emoji/0_289.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_29.png b/TMessagesProj/src/main/assets/emoji/0_29.png index 3210ddcd6..126f739d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_29.png and b/TMessagesProj/src/main/assets/emoji/0_29.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_290.png b/TMessagesProj/src/main/assets/emoji/0_290.png index e5f47685b..40e111be4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_290.png and b/TMessagesProj/src/main/assets/emoji/0_290.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_291.png b/TMessagesProj/src/main/assets/emoji/0_291.png index 2b116ddb3..80d7ecf3e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_291.png and b/TMessagesProj/src/main/assets/emoji/0_291.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_292.png b/TMessagesProj/src/main/assets/emoji/0_292.png index db732c97a..5d4cef47b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_292.png and b/TMessagesProj/src/main/assets/emoji/0_292.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_293.png b/TMessagesProj/src/main/assets/emoji/0_293.png index 51d2c8945..e50dd552b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_293.png and b/TMessagesProj/src/main/assets/emoji/0_293.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_294.png b/TMessagesProj/src/main/assets/emoji/0_294.png index 017a473bb..9b5459c45 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_294.png and 
b/TMessagesProj/src/main/assets/emoji/0_294.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_295.png b/TMessagesProj/src/main/assets/emoji/0_295.png index b8d58cae2..aa72d1907 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_295.png and b/TMessagesProj/src/main/assets/emoji/0_295.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_296.png b/TMessagesProj/src/main/assets/emoji/0_296.png index 277e81641..cb53bbbdd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_296.png and b/TMessagesProj/src/main/assets/emoji/0_296.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_297.png b/TMessagesProj/src/main/assets/emoji/0_297.png index 9296f1c6f..24730d074 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_297.png and b/TMessagesProj/src/main/assets/emoji/0_297.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_298.png b/TMessagesProj/src/main/assets/emoji/0_298.png index b936bf458..7c1cdab5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_298.png and b/TMessagesProj/src/main/assets/emoji/0_298.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_299.png b/TMessagesProj/src/main/assets/emoji/0_299.png index 9bb5c8166..1e71c592d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_299.png and b/TMessagesProj/src/main/assets/emoji/0_299.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_3.png b/TMessagesProj/src/main/assets/emoji/0_3.png index f49928553..71581decc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_3.png and b/TMessagesProj/src/main/assets/emoji/0_3.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_30.png b/TMessagesProj/src/main/assets/emoji/0_30.png index 133b23cfd..ebde1b692 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_30.png and b/TMessagesProj/src/main/assets/emoji/0_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_300.png b/TMessagesProj/src/main/assets/emoji/0_300.png index 
678bb1fef..da8c91b6c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_300.png and b/TMessagesProj/src/main/assets/emoji/0_300.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_301.png b/TMessagesProj/src/main/assets/emoji/0_301.png index 7111a7c67..e5bc5214d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_301.png and b/TMessagesProj/src/main/assets/emoji/0_301.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_302.png b/TMessagesProj/src/main/assets/emoji/0_302.png index b692c958e..4485a72e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_302.png and b/TMessagesProj/src/main/assets/emoji/0_302.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_303.png b/TMessagesProj/src/main/assets/emoji/0_303.png index d8eb81a32..c17125b68 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_303.png and b/TMessagesProj/src/main/assets/emoji/0_303.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_304.png b/TMessagesProj/src/main/assets/emoji/0_304.png index 69b5735c9..778a00372 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_304.png and b/TMessagesProj/src/main/assets/emoji/0_304.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_305.png b/TMessagesProj/src/main/assets/emoji/0_305.png index ac605edd3..0f6371ea4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_305.png and b/TMessagesProj/src/main/assets/emoji/0_305.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_306.png b/TMessagesProj/src/main/assets/emoji/0_306.png index 834b99caa..010eaf7f6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_306.png and b/TMessagesProj/src/main/assets/emoji/0_306.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_307.png b/TMessagesProj/src/main/assets/emoji/0_307.png index 83d366e41..d02d2960b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_307.png and b/TMessagesProj/src/main/assets/emoji/0_307.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_308.png b/TMessagesProj/src/main/assets/emoji/0_308.png index 710f9d0aa..a3e607f98 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_308.png and b/TMessagesProj/src/main/assets/emoji/0_308.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_309.png b/TMessagesProj/src/main/assets/emoji/0_309.png index 942347b9f..022d1ea82 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_309.png and b/TMessagesProj/src/main/assets/emoji/0_309.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_31.png b/TMessagesProj/src/main/assets/emoji/0_31.png index e96d5d884..fb961ca06 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_31.png and b/TMessagesProj/src/main/assets/emoji/0_31.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_310.png b/TMessagesProj/src/main/assets/emoji/0_310.png index 518d80029..2af4a8615 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_310.png and b/TMessagesProj/src/main/assets/emoji/0_310.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_311.png b/TMessagesProj/src/main/assets/emoji/0_311.png index 5bec9080e..866d697cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_311.png and b/TMessagesProj/src/main/assets/emoji/0_311.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_312.png b/TMessagesProj/src/main/assets/emoji/0_312.png index e1f8e6ab8..3389dae46 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_312.png and b/TMessagesProj/src/main/assets/emoji/0_312.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_313.png b/TMessagesProj/src/main/assets/emoji/0_313.png index f051f8e9c..ac605edd3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_313.png and b/TMessagesProj/src/main/assets/emoji/0_313.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_314.png b/TMessagesProj/src/main/assets/emoji/0_314.png index 95a4b4cba..417442243 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_314.png and b/TMessagesProj/src/main/assets/emoji/0_314.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_315.png b/TMessagesProj/src/main/assets/emoji/0_315.png index b3f207c1e..5498afdd2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_315.png and b/TMessagesProj/src/main/assets/emoji/0_315.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_316.png b/TMessagesProj/src/main/assets/emoji/0_316.png index 4f12dd173..98e7afc40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_316.png and b/TMessagesProj/src/main/assets/emoji/0_316.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_317.png b/TMessagesProj/src/main/assets/emoji/0_317.png index a7b154e2d..a336175b2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_317.png and b/TMessagesProj/src/main/assets/emoji/0_317.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_318.png b/TMessagesProj/src/main/assets/emoji/0_318.png index 681373d18..080e446e4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_318.png and b/TMessagesProj/src/main/assets/emoji/0_318.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_319.png b/TMessagesProj/src/main/assets/emoji/0_319.png index 6ca9826bb..5bec9080e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_319.png and b/TMessagesProj/src/main/assets/emoji/0_319.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_32.png b/TMessagesProj/src/main/assets/emoji/0_32.png index e7d9b5f0f..26e0893b8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_32.png and b/TMessagesProj/src/main/assets/emoji/0_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_320.png b/TMessagesProj/src/main/assets/emoji/0_320.png index 6ea1f126d..7addd44a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_320.png and b/TMessagesProj/src/main/assets/emoji/0_320.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_321.png 
b/TMessagesProj/src/main/assets/emoji/0_321.png index 75c42144e..47628b72e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_321.png and b/TMessagesProj/src/main/assets/emoji/0_321.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_322.png b/TMessagesProj/src/main/assets/emoji/0_322.png index 927804753..95a4b4cba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_322.png and b/TMessagesProj/src/main/assets/emoji/0_322.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_323.png b/TMessagesProj/src/main/assets/emoji/0_323.png index 4c815a44b..32b41c8c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_323.png and b/TMessagesProj/src/main/assets/emoji/0_323.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_324.png b/TMessagesProj/src/main/assets/emoji/0_324.png index 36f936da1..351bfe440 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_324.png and b/TMessagesProj/src/main/assets/emoji/0_324.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_325.png b/TMessagesProj/src/main/assets/emoji/0_325.png index daee7a36f..06f1d622c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_325.png and b/TMessagesProj/src/main/assets/emoji/0_325.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_326.png b/TMessagesProj/src/main/assets/emoji/0_326.png index 25b2ab6e6..681373d18 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_326.png and b/TMessagesProj/src/main/assets/emoji/0_326.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_327.png b/TMessagesProj/src/main/assets/emoji/0_327.png index a6a4a8f84..fcc76a07b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_327.png and b/TMessagesProj/src/main/assets/emoji/0_327.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_328.png b/TMessagesProj/src/main/assets/emoji/0_328.png index 6e8378a49..6ea1f126d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_328.png and 
b/TMessagesProj/src/main/assets/emoji/0_328.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_329.png b/TMessagesProj/src/main/assets/emoji/0_329.png index 03856bf5a..75c42144e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_329.png and b/TMessagesProj/src/main/assets/emoji/0_329.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_33.png b/TMessagesProj/src/main/assets/emoji/0_33.png index e74681c4d..b2442e5e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_33.png and b/TMessagesProj/src/main/assets/emoji/0_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_330.png b/TMessagesProj/src/main/assets/emoji/0_330.png index db527bd89..927804753 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_330.png and b/TMessagesProj/src/main/assets/emoji/0_330.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_331.png b/TMessagesProj/src/main/assets/emoji/0_331.png index 9c3c2a841..4c815a44b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_331.png and b/TMessagesProj/src/main/assets/emoji/0_331.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_332.png b/TMessagesProj/src/main/assets/emoji/0_332.png index 5d71d5992..b9741f4c0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_332.png and b/TMessagesProj/src/main/assets/emoji/0_332.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_333.png b/TMessagesProj/src/main/assets/emoji/0_333.png index 793961ef4..1180366a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_333.png and b/TMessagesProj/src/main/assets/emoji/0_333.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_334.png b/TMessagesProj/src/main/assets/emoji/0_334.png index bdb6c78b6..7e869df66 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_334.png and b/TMessagesProj/src/main/assets/emoji/0_334.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_335.png b/TMessagesProj/src/main/assets/emoji/0_335.png index 
d15057c4b..c1d6e4bef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_335.png and b/TMessagesProj/src/main/assets/emoji/0_335.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_336.png b/TMessagesProj/src/main/assets/emoji/0_336.png index 58196b092..62bc2ed9c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_336.png and b/TMessagesProj/src/main/assets/emoji/0_336.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_337.png b/TMessagesProj/src/main/assets/emoji/0_337.png index ddf8f8ace..a34c287fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_337.png and b/TMessagesProj/src/main/assets/emoji/0_337.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_338.png b/TMessagesProj/src/main/assets/emoji/0_338.png index 7dc2b105e..864565d1f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_338.png and b/TMessagesProj/src/main/assets/emoji/0_338.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_339.png b/TMessagesProj/src/main/assets/emoji/0_339.png index 57308268e..aeeaaced9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_339.png and b/TMessagesProj/src/main/assets/emoji/0_339.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_34.png b/TMessagesProj/src/main/assets/emoji/0_34.png index 084b172cd..74b8411c5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_34.png and b/TMessagesProj/src/main/assets/emoji/0_34.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_340.png b/TMessagesProj/src/main/assets/emoji/0_340.png index ee1a27d5a..a8ca0963c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_340.png and b/TMessagesProj/src/main/assets/emoji/0_340.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_341.png b/TMessagesProj/src/main/assets/emoji/0_341.png index a7069a58d..46b2324e6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_341.png and b/TMessagesProj/src/main/assets/emoji/0_341.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_342.png b/TMessagesProj/src/main/assets/emoji/0_342.png index 70595aa1f..0b7e02c88 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_342.png and b/TMessagesProj/src/main/assets/emoji/0_342.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_343.png b/TMessagesProj/src/main/assets/emoji/0_343.png index 378879118..ab9c3c418 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_343.png and b/TMessagesProj/src/main/assets/emoji/0_343.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_344.png b/TMessagesProj/src/main/assets/emoji/0_344.png index d4164c407..ee6f992db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_344.png and b/TMessagesProj/src/main/assets/emoji/0_344.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_345.png b/TMessagesProj/src/main/assets/emoji/0_345.png index 78bf39a13..056c04658 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_345.png and b/TMessagesProj/src/main/assets/emoji/0_345.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_346.png b/TMessagesProj/src/main/assets/emoji/0_346.png index ce70cb96f..a411ce26d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_346.png and b/TMessagesProj/src/main/assets/emoji/0_346.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_347.png b/TMessagesProj/src/main/assets/emoji/0_347.png index eafe3096d..9614a9f0b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_347.png and b/TMessagesProj/src/main/assets/emoji/0_347.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_348.png b/TMessagesProj/src/main/assets/emoji/0_348.png index be2a6060b..4acddc27d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_348.png and b/TMessagesProj/src/main/assets/emoji/0_348.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_349.png b/TMessagesProj/src/main/assets/emoji/0_349.png index 0d26f91e9..57308268e 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_349.png and b/TMessagesProj/src/main/assets/emoji/0_349.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_35.png b/TMessagesProj/src/main/assets/emoji/0_35.png index 37dc934e8..566ebcb5a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_35.png and b/TMessagesProj/src/main/assets/emoji/0_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_350.png b/TMessagesProj/src/main/assets/emoji/0_350.png index e6c28369d..2adc98424 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_350.png and b/TMessagesProj/src/main/assets/emoji/0_350.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_351.png b/TMessagesProj/src/main/assets/emoji/0_351.png index ed160139c..a7069a58d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_351.png and b/TMessagesProj/src/main/assets/emoji/0_351.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_352.png b/TMessagesProj/src/main/assets/emoji/0_352.png index 382e6b1e4..70595aa1f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_352.png and b/TMessagesProj/src/main/assets/emoji/0_352.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_353.png b/TMessagesProj/src/main/assets/emoji/0_353.png index 1cae04df3..22bce39e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_353.png and b/TMessagesProj/src/main/assets/emoji/0_353.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_354.png b/TMessagesProj/src/main/assets/emoji/0_354.png index 5946db554..5955d65c3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_354.png and b/TMessagesProj/src/main/assets/emoji/0_354.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_355.png b/TMessagesProj/src/main/assets/emoji/0_355.png index 053c3249b..49ae63cc4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_355.png and b/TMessagesProj/src/main/assets/emoji/0_355.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_356.png 
b/TMessagesProj/src/main/assets/emoji/0_356.png index 9c28697de..9fbf6c542 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_356.png and b/TMessagesProj/src/main/assets/emoji/0_356.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_357.png b/TMessagesProj/src/main/assets/emoji/0_357.png index 8a9544374..404cd9274 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_357.png and b/TMessagesProj/src/main/assets/emoji/0_357.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_358.png b/TMessagesProj/src/main/assets/emoji/0_358.png index 8d165e532..ebd097edd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_358.png and b/TMessagesProj/src/main/assets/emoji/0_358.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_359.png b/TMessagesProj/src/main/assets/emoji/0_359.png index 5863adcbe..79d8b9f22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_359.png and b/TMessagesProj/src/main/assets/emoji/0_359.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_36.png b/TMessagesProj/src/main/assets/emoji/0_36.png index a22b6e9ec..59533bedd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_36.png and b/TMessagesProj/src/main/assets/emoji/0_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_360.png b/TMessagesProj/src/main/assets/emoji/0_360.png index b54bee8b5..2f4999a25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_360.png and b/TMessagesProj/src/main/assets/emoji/0_360.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_361.png b/TMessagesProj/src/main/assets/emoji/0_361.png index 19293305b..d054137a8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_361.png and b/TMessagesProj/src/main/assets/emoji/0_361.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_362.png b/TMessagesProj/src/main/assets/emoji/0_362.png index 8b8f3e5bf..fda07dd22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_362.png and 
b/TMessagesProj/src/main/assets/emoji/0_362.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_363.png b/TMessagesProj/src/main/assets/emoji/0_363.png index c9f2935c6..6c6d52e83 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_363.png and b/TMessagesProj/src/main/assets/emoji/0_363.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_364.png b/TMessagesProj/src/main/assets/emoji/0_364.png index ddd4a0e58..a46896868 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_364.png and b/TMessagesProj/src/main/assets/emoji/0_364.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_365.png b/TMessagesProj/src/main/assets/emoji/0_365.png index 9d79a0b60..356ff71c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_365.png and b/TMessagesProj/src/main/assets/emoji/0_365.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_366.png b/TMessagesProj/src/main/assets/emoji/0_366.png index 2038d28fa..54618280f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_366.png and b/TMessagesProj/src/main/assets/emoji/0_366.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_367.png b/TMessagesProj/src/main/assets/emoji/0_367.png index c6b3df90a..fe5130929 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_367.png and b/TMessagesProj/src/main/assets/emoji/0_367.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_368.png b/TMessagesProj/src/main/assets/emoji/0_368.png index 7464b3b65..f2e1c3893 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_368.png and b/TMessagesProj/src/main/assets/emoji/0_368.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_369.png b/TMessagesProj/src/main/assets/emoji/0_369.png index 1ec1f8edc..cd5ac59fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_369.png and b/TMessagesProj/src/main/assets/emoji/0_369.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_37.png b/TMessagesProj/src/main/assets/emoji/0_37.png index 
7ee672192..178a10564 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_37.png and b/TMessagesProj/src/main/assets/emoji/0_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_370.png b/TMessagesProj/src/main/assets/emoji/0_370.png index 775b0107b..9e0862599 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_370.png and b/TMessagesProj/src/main/assets/emoji/0_370.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_371.png b/TMessagesProj/src/main/assets/emoji/0_371.png index b21602851..257d49884 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_371.png and b/TMessagesProj/src/main/assets/emoji/0_371.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_372.png b/TMessagesProj/src/main/assets/emoji/0_372.png index 8f5eb4c61..aecaa0739 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_372.png and b/TMessagesProj/src/main/assets/emoji/0_372.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_373.png b/TMessagesProj/src/main/assets/emoji/0_373.png index 529faf73b..b7eb45155 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_373.png and b/TMessagesProj/src/main/assets/emoji/0_373.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_374.png b/TMessagesProj/src/main/assets/emoji/0_374.png index 20ad62ba2..ae0053115 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_374.png and b/TMessagesProj/src/main/assets/emoji/0_374.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_375.png b/TMessagesProj/src/main/assets/emoji/0_375.png index af84a2341..66a9b0be4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_375.png and b/TMessagesProj/src/main/assets/emoji/0_375.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_376.png b/TMessagesProj/src/main/assets/emoji/0_376.png index 4d206c445..99930fb5c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_376.png and b/TMessagesProj/src/main/assets/emoji/0_376.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_377.png b/TMessagesProj/src/main/assets/emoji/0_377.png index ea376e607..2038d28fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_377.png and b/TMessagesProj/src/main/assets/emoji/0_377.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_378.png b/TMessagesProj/src/main/assets/emoji/0_378.png index e46917791..9f8d89eda 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_378.png and b/TMessagesProj/src/main/assets/emoji/0_378.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_379.png b/TMessagesProj/src/main/assets/emoji/0_379.png index a1a4976cc..227ccd181 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_379.png and b/TMessagesProj/src/main/assets/emoji/0_379.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_38.png b/TMessagesProj/src/main/assets/emoji/0_38.png index 71d50b09e..926ad8e02 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_38.png and b/TMessagesProj/src/main/assets/emoji/0_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_380.png b/TMessagesProj/src/main/assets/emoji/0_380.png index c89b628e2..128c7a507 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_380.png and b/TMessagesProj/src/main/assets/emoji/0_380.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_381.png b/TMessagesProj/src/main/assets/emoji/0_381.png index a327cb738..924fe6411 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_381.png and b/TMessagesProj/src/main/assets/emoji/0_381.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_382.png b/TMessagesProj/src/main/assets/emoji/0_382.png index e1d3dd4a0..90df715a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_382.png and b/TMessagesProj/src/main/assets/emoji/0_382.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_383.png b/TMessagesProj/src/main/assets/emoji/0_383.png index 10dc4d0fc..2a63e2f06 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_383.png and b/TMessagesProj/src/main/assets/emoji/0_383.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_384.png b/TMessagesProj/src/main/assets/emoji/0_384.png index 0cbe63a27..ea25c0b53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_384.png and b/TMessagesProj/src/main/assets/emoji/0_384.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_385.png b/TMessagesProj/src/main/assets/emoji/0_385.png index faa03b636..6eab0c868 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_385.png and b/TMessagesProj/src/main/assets/emoji/0_385.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_386.png b/TMessagesProj/src/main/assets/emoji/0_386.png index 305d58185..bffec8881 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_386.png and b/TMessagesProj/src/main/assets/emoji/0_386.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_387.png b/TMessagesProj/src/main/assets/emoji/0_387.png index 523a084a7..7cc2b8338 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_387.png and b/TMessagesProj/src/main/assets/emoji/0_387.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_388.png b/TMessagesProj/src/main/assets/emoji/0_388.png index 5dff56173..0cec8f7fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_388.png and b/TMessagesProj/src/main/assets/emoji/0_388.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_389.png b/TMessagesProj/src/main/assets/emoji/0_389.png index 5e510cc8a..11e56288b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_389.png and b/TMessagesProj/src/main/assets/emoji/0_389.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_39.png b/TMessagesProj/src/main/assets/emoji/0_39.png index 246a430be..d9a626d28 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_39.png and b/TMessagesProj/src/main/assets/emoji/0_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_390.png 
b/TMessagesProj/src/main/assets/emoji/0_390.png index ceed35e25..ee620ce95 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_390.png and b/TMessagesProj/src/main/assets/emoji/0_390.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_391.png b/TMessagesProj/src/main/assets/emoji/0_391.png index 6e06b0cfa..ce4ec7575 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_391.png and b/TMessagesProj/src/main/assets/emoji/0_391.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_392.png b/TMessagesProj/src/main/assets/emoji/0_392.png index 40618729e..d287c466c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_392.png and b/TMessagesProj/src/main/assets/emoji/0_392.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_393.png b/TMessagesProj/src/main/assets/emoji/0_393.png index d9af5857e..2165e8739 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_393.png and b/TMessagesProj/src/main/assets/emoji/0_393.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_394.png b/TMessagesProj/src/main/assets/emoji/0_394.png index a08bd8972..8f6c5120e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_394.png and b/TMessagesProj/src/main/assets/emoji/0_394.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_395.png b/TMessagesProj/src/main/assets/emoji/0_395.png index 7ba6b424e..0cbe63a27 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_395.png and b/TMessagesProj/src/main/assets/emoji/0_395.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_396.png b/TMessagesProj/src/main/assets/emoji/0_396.png index 8d0417ac0..b1402a5cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_396.png and b/TMessagesProj/src/main/assets/emoji/0_396.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_397.png b/TMessagesProj/src/main/assets/emoji/0_397.png index 7c13eab13..305d58185 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_397.png and 
b/TMessagesProj/src/main/assets/emoji/0_397.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_398.png b/TMessagesProj/src/main/assets/emoji/0_398.png index d29d38333..523a084a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_398.png and b/TMessagesProj/src/main/assets/emoji/0_398.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_399.png b/TMessagesProj/src/main/assets/emoji/0_399.png index aee34f389..a8e3be617 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_399.png and b/TMessagesProj/src/main/assets/emoji/0_399.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_4.png b/TMessagesProj/src/main/assets/emoji/0_4.png index 3a76baf2b..55817ac00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_4.png and b/TMessagesProj/src/main/assets/emoji/0_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_40.png b/TMessagesProj/src/main/assets/emoji/0_40.png index abd1b0c23..ea49bb5b2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_40.png and b/TMessagesProj/src/main/assets/emoji/0_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_400.png b/TMessagesProj/src/main/assets/emoji/0_400.png index a65ceeb92..5e510cc8a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_400.png and b/TMessagesProj/src/main/assets/emoji/0_400.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_401.png b/TMessagesProj/src/main/assets/emoji/0_401.png index 32a1902dd..ceed35e25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_401.png and b/TMessagesProj/src/main/assets/emoji/0_401.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_402.png b/TMessagesProj/src/main/assets/emoji/0_402.png index 86217429e..4b8f492c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_402.png and b/TMessagesProj/src/main/assets/emoji/0_402.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_403.png b/TMessagesProj/src/main/assets/emoji/0_403.png index 
1efec3142..40618729e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_403.png and b/TMessagesProj/src/main/assets/emoji/0_403.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_404.png b/TMessagesProj/src/main/assets/emoji/0_404.png index 573065dee..d9af5857e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_404.png and b/TMessagesProj/src/main/assets/emoji/0_404.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_405.png b/TMessagesProj/src/main/assets/emoji/0_405.png index cb7634f5b..a08bd8972 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_405.png and b/TMessagesProj/src/main/assets/emoji/0_405.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_406.png b/TMessagesProj/src/main/assets/emoji/0_406.png index e6ad1c68d..7ba6b424e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_406.png and b/TMessagesProj/src/main/assets/emoji/0_406.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_407.png b/TMessagesProj/src/main/assets/emoji/0_407.png index 52956d4df..3181226ee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_407.png and b/TMessagesProj/src/main/assets/emoji/0_407.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_408.png b/TMessagesProj/src/main/assets/emoji/0_408.png index fa5bd463e..50f8ffd21 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_408.png and b/TMessagesProj/src/main/assets/emoji/0_408.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_409.png b/TMessagesProj/src/main/assets/emoji/0_409.png index 8ce604902..d83444b53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_409.png and b/TMessagesProj/src/main/assets/emoji/0_409.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_41.png b/TMessagesProj/src/main/assets/emoji/0_41.png index 6e3ab8be4..415014269 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_41.png and b/TMessagesProj/src/main/assets/emoji/0_41.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_410.png b/TMessagesProj/src/main/assets/emoji/0_410.png index f091f6292..aee34f389 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_410.png and b/TMessagesProj/src/main/assets/emoji/0_410.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_411.png b/TMessagesProj/src/main/assets/emoji/0_411.png index 0104bfc51..79f9393ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_411.png and b/TMessagesProj/src/main/assets/emoji/0_411.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_412.png b/TMessagesProj/src/main/assets/emoji/0_412.png index ccf7d4531..32a1902dd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_412.png and b/TMessagesProj/src/main/assets/emoji/0_412.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_413.png b/TMessagesProj/src/main/assets/emoji/0_413.png index 178b0886d..86217429e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_413.png and b/TMessagesProj/src/main/assets/emoji/0_413.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_414.png b/TMessagesProj/src/main/assets/emoji/0_414.png index a5c7c9c3f..69166f604 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_414.png and b/TMessagesProj/src/main/assets/emoji/0_414.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_415.png b/TMessagesProj/src/main/assets/emoji/0_415.png index 0aa84417f..cd2a66ae1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_415.png and b/TMessagesProj/src/main/assets/emoji/0_415.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_416.png b/TMessagesProj/src/main/assets/emoji/0_416.png index d06be85ff..a71ecf86b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_416.png and b/TMessagesProj/src/main/assets/emoji/0_416.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_417.png b/TMessagesProj/src/main/assets/emoji/0_417.png index 1aaded53a..b5caac88b 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_417.png and b/TMessagesProj/src/main/assets/emoji/0_417.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_418.png b/TMessagesProj/src/main/assets/emoji/0_418.png index a6ebe9ba7..38ab80ddb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_418.png and b/TMessagesProj/src/main/assets/emoji/0_418.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_419.png b/TMessagesProj/src/main/assets/emoji/0_419.png index a8049db1b..fa60f4c21 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_419.png and b/TMessagesProj/src/main/assets/emoji/0_419.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_42.png b/TMessagesProj/src/main/assets/emoji/0_42.png index d8588c16e..5fac8df6d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_42.png and b/TMessagesProj/src/main/assets/emoji/0_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_420.png b/TMessagesProj/src/main/assets/emoji/0_420.png index a2a65c1ae..e320254c0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_420.png and b/TMessagesProj/src/main/assets/emoji/0_420.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_421.png b/TMessagesProj/src/main/assets/emoji/0_421.png index d2b7cbf9a..a9603b7eb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_421.png and b/TMessagesProj/src/main/assets/emoji/0_421.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_422.png b/TMessagesProj/src/main/assets/emoji/0_422.png index ff17d7d8e..1d6ced96b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_422.png and b/TMessagesProj/src/main/assets/emoji/0_422.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_423.png b/TMessagesProj/src/main/assets/emoji/0_423.png index c1bc45f0d..eb4ff8d0a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_423.png and b/TMessagesProj/src/main/assets/emoji/0_423.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_424.png 
b/TMessagesProj/src/main/assets/emoji/0_424.png index 75d9a8e85..e2035c734 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_424.png and b/TMessagesProj/src/main/assets/emoji/0_424.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_425.png b/TMessagesProj/src/main/assets/emoji/0_425.png index dacacd149..2e65a7f90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_425.png and b/TMessagesProj/src/main/assets/emoji/0_425.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_426.png b/TMessagesProj/src/main/assets/emoji/0_426.png index 339bcbd1c..b3f62faaf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_426.png and b/TMessagesProj/src/main/assets/emoji/0_426.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_427.png b/TMessagesProj/src/main/assets/emoji/0_427.png index 48c675b83..3b0c01019 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_427.png and b/TMessagesProj/src/main/assets/emoji/0_427.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_428.png b/TMessagesProj/src/main/assets/emoji/0_428.png index 156f07dc3..ee83b6a8e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_428.png and b/TMessagesProj/src/main/assets/emoji/0_428.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_429.png b/TMessagesProj/src/main/assets/emoji/0_429.png index ed500cf08..6c1ef37cb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_429.png and b/TMessagesProj/src/main/assets/emoji/0_429.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_43.png b/TMessagesProj/src/main/assets/emoji/0_43.png index 8fd315b4d..9aff0a035 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_43.png and b/TMessagesProj/src/main/assets/emoji/0_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_430.png b/TMessagesProj/src/main/assets/emoji/0_430.png index d3ea4092d..5d5ce72ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_430.png and 
b/TMessagesProj/src/main/assets/emoji/0_430.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_431.png b/TMessagesProj/src/main/assets/emoji/0_431.png index a5c0b43de..552ac813c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_431.png and b/TMessagesProj/src/main/assets/emoji/0_431.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_432.png b/TMessagesProj/src/main/assets/emoji/0_432.png index 855fad5db..d923eeb20 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_432.png and b/TMessagesProj/src/main/assets/emoji/0_432.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_433.png b/TMessagesProj/src/main/assets/emoji/0_433.png index ef137bf80..5e4454330 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_433.png and b/TMessagesProj/src/main/assets/emoji/0_433.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_434.png b/TMessagesProj/src/main/assets/emoji/0_434.png index 96de472da..fd0a8a9cb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_434.png and b/TMessagesProj/src/main/assets/emoji/0_434.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_435.png b/TMessagesProj/src/main/assets/emoji/0_435.png index 9484991cc..45e67889e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_435.png and b/TMessagesProj/src/main/assets/emoji/0_435.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_436.png b/TMessagesProj/src/main/assets/emoji/0_436.png index 3256884f8..13b6ccc4a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_436.png and b/TMessagesProj/src/main/assets/emoji/0_436.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_437.png b/TMessagesProj/src/main/assets/emoji/0_437.png index 4480b3ea9..29b9e35fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_437.png and b/TMessagesProj/src/main/assets/emoji/0_437.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_438.png b/TMessagesProj/src/main/assets/emoji/0_438.png index 
d2ba49660..550e00e2d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_438.png and b/TMessagesProj/src/main/assets/emoji/0_438.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_439.png b/TMessagesProj/src/main/assets/emoji/0_439.png index 4fe0f3029..bbe6db69a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_439.png and b/TMessagesProj/src/main/assets/emoji/0_439.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_44.png b/TMessagesProj/src/main/assets/emoji/0_44.png index 57e0d7996..1b315de4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_44.png and b/TMessagesProj/src/main/assets/emoji/0_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_440.png b/TMessagesProj/src/main/assets/emoji/0_440.png index 6514e12a2..da2eb87fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_440.png and b/TMessagesProj/src/main/assets/emoji/0_440.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_441.png b/TMessagesProj/src/main/assets/emoji/0_441.png index 6098b6acb..c11bf9954 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_441.png and b/TMessagesProj/src/main/assets/emoji/0_441.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_442.png b/TMessagesProj/src/main/assets/emoji/0_442.png index 50311f3d6..c8ffe494e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_442.png and b/TMessagesProj/src/main/assets/emoji/0_442.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_443.png b/TMessagesProj/src/main/assets/emoji/0_443.png index 0efec48db..5422bd43e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_443.png and b/TMessagesProj/src/main/assets/emoji/0_443.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_444.png b/TMessagesProj/src/main/assets/emoji/0_444.png index e90e0fec1..6f87251c4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_444.png and b/TMessagesProj/src/main/assets/emoji/0_444.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_445.png b/TMessagesProj/src/main/assets/emoji/0_445.png index 16cc3d60b..d6d4ce83f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_445.png and b/TMessagesProj/src/main/assets/emoji/0_445.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_446.png b/TMessagesProj/src/main/assets/emoji/0_446.png index 0016b88b6..e393b690c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_446.png and b/TMessagesProj/src/main/assets/emoji/0_446.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_447.png b/TMessagesProj/src/main/assets/emoji/0_447.png index 33c41398e..4dae93267 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_447.png and b/TMessagesProj/src/main/assets/emoji/0_447.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_448.png b/TMessagesProj/src/main/assets/emoji/0_448.png index 98b2aff52..ff4832953 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_448.png and b/TMessagesProj/src/main/assets/emoji/0_448.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_449.png b/TMessagesProj/src/main/assets/emoji/0_449.png index 39eda092a..19a45fef3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_449.png and b/TMessagesProj/src/main/assets/emoji/0_449.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_45.png b/TMessagesProj/src/main/assets/emoji/0_45.png index 7490b7b62..83f829f38 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_45.png and b/TMessagesProj/src/main/assets/emoji/0_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_450.png b/TMessagesProj/src/main/assets/emoji/0_450.png index b51caa94c..f8125f763 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_450.png and b/TMessagesProj/src/main/assets/emoji/0_450.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_451.png b/TMessagesProj/src/main/assets/emoji/0_451.png index 63a206aad..bb1fc2e2e 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_451.png and b/TMessagesProj/src/main/assets/emoji/0_451.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_452.png b/TMessagesProj/src/main/assets/emoji/0_452.png index 7a8bbadc7..67ac87472 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_452.png and b/TMessagesProj/src/main/assets/emoji/0_452.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_453.png b/TMessagesProj/src/main/assets/emoji/0_453.png index 1c49ccd7c..1dbd062c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_453.png and b/TMessagesProj/src/main/assets/emoji/0_453.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_454.png b/TMessagesProj/src/main/assets/emoji/0_454.png index df6df1cc2..4978fc4dd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_454.png and b/TMessagesProj/src/main/assets/emoji/0_454.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_455.png b/TMessagesProj/src/main/assets/emoji/0_455.png index 1f7910609..5e718c036 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_455.png and b/TMessagesProj/src/main/assets/emoji/0_455.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_456.png b/TMessagesProj/src/main/assets/emoji/0_456.png index 4eeb1fb04..5039ead6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_456.png and b/TMessagesProj/src/main/assets/emoji/0_456.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_457.png b/TMessagesProj/src/main/assets/emoji/0_457.png index 909c369a1..65d73707e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_457.png and b/TMessagesProj/src/main/assets/emoji/0_457.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_458.png b/TMessagesProj/src/main/assets/emoji/0_458.png index d4d0cbd04..689ff237c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_458.png and b/TMessagesProj/src/main/assets/emoji/0_458.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_459.png 
b/TMessagesProj/src/main/assets/emoji/0_459.png index 0ea20b3f3..57620ce67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_459.png and b/TMessagesProj/src/main/assets/emoji/0_459.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_46.png b/TMessagesProj/src/main/assets/emoji/0_46.png index 201f302e7..a9d851362 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_46.png and b/TMessagesProj/src/main/assets/emoji/0_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_460.png b/TMessagesProj/src/main/assets/emoji/0_460.png index e06abbb1b..9aa41419f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_460.png and b/TMessagesProj/src/main/assets/emoji/0_460.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_461.png b/TMessagesProj/src/main/assets/emoji/0_461.png index 8e4dd10bf..4f6ac2f40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_461.png and b/TMessagesProj/src/main/assets/emoji/0_461.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_462.png b/TMessagesProj/src/main/assets/emoji/0_462.png index a89d8c5ca..c2af8cd1f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_462.png and b/TMessagesProj/src/main/assets/emoji/0_462.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_463.png b/TMessagesProj/src/main/assets/emoji/0_463.png index 133b8d9ee..52d8be9bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_463.png and b/TMessagesProj/src/main/assets/emoji/0_463.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_464.png b/TMessagesProj/src/main/assets/emoji/0_464.png index d3efa7fca..dbb0f09bb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_464.png and b/TMessagesProj/src/main/assets/emoji/0_464.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_465.png b/TMessagesProj/src/main/assets/emoji/0_465.png index 8ed074285..0f78987f3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_465.png and 
b/TMessagesProj/src/main/assets/emoji/0_465.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_466.png b/TMessagesProj/src/main/assets/emoji/0_466.png index e1e02153a..aaba9a344 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_466.png and b/TMessagesProj/src/main/assets/emoji/0_466.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_467.png b/TMessagesProj/src/main/assets/emoji/0_467.png index 0ee746481..77f53ddf7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_467.png and b/TMessagesProj/src/main/assets/emoji/0_467.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_468.png b/TMessagesProj/src/main/assets/emoji/0_468.png index 8d8628878..55d5880fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_468.png and b/TMessagesProj/src/main/assets/emoji/0_468.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_469.png b/TMessagesProj/src/main/assets/emoji/0_469.png index 4b9eb8d6b..4f33eff7f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_469.png and b/TMessagesProj/src/main/assets/emoji/0_469.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_47.png b/TMessagesProj/src/main/assets/emoji/0_47.png index 0dfe716d7..8e1ad190e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_47.png and b/TMessagesProj/src/main/assets/emoji/0_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_470.png b/TMessagesProj/src/main/assets/emoji/0_470.png index 46f58314b..ad8ffdda1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_470.png and b/TMessagesProj/src/main/assets/emoji/0_470.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_471.png b/TMessagesProj/src/main/assets/emoji/0_471.png index 6d8c25388..41a3f1f32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_471.png and b/TMessagesProj/src/main/assets/emoji/0_471.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_472.png b/TMessagesProj/src/main/assets/emoji/0_472.png index 
2d2b344c4..b7b83c25e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_472.png and b/TMessagesProj/src/main/assets/emoji/0_472.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_473.png b/TMessagesProj/src/main/assets/emoji/0_473.png index 2b4541beb..767c90fe0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_473.png and b/TMessagesProj/src/main/assets/emoji/0_473.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_474.png b/TMessagesProj/src/main/assets/emoji/0_474.png index bc443af13..ef1fe1f5a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_474.png and b/TMessagesProj/src/main/assets/emoji/0_474.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_475.png b/TMessagesProj/src/main/assets/emoji/0_475.png index 6f11a32b3..4b272278d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_475.png and b/TMessagesProj/src/main/assets/emoji/0_475.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_476.png b/TMessagesProj/src/main/assets/emoji/0_476.png index 07ee34706..c1454dd05 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_476.png and b/TMessagesProj/src/main/assets/emoji/0_476.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_477.png b/TMessagesProj/src/main/assets/emoji/0_477.png index 1ccc94a6d..9a91e2700 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_477.png and b/TMessagesProj/src/main/assets/emoji/0_477.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_478.png b/TMessagesProj/src/main/assets/emoji/0_478.png index 125c89b6d..23a89354f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_478.png and b/TMessagesProj/src/main/assets/emoji/0_478.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_479.png b/TMessagesProj/src/main/assets/emoji/0_479.png index a00d55142..681259e7e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_479.png and b/TMessagesProj/src/main/assets/emoji/0_479.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_48.png b/TMessagesProj/src/main/assets/emoji/0_48.png index 27da5a5ee..4166c4e05 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_48.png and b/TMessagesProj/src/main/assets/emoji/0_48.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_480.png b/TMessagesProj/src/main/assets/emoji/0_480.png index 78b206bae..d42af3d7a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_480.png and b/TMessagesProj/src/main/assets/emoji/0_480.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_481.png b/TMessagesProj/src/main/assets/emoji/0_481.png index 2bc8c0580..90155bff8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_481.png and b/TMessagesProj/src/main/assets/emoji/0_481.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_482.png b/TMessagesProj/src/main/assets/emoji/0_482.png index 4c7b7f82e..60479c8c9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_482.png and b/TMessagesProj/src/main/assets/emoji/0_482.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_483.png b/TMessagesProj/src/main/assets/emoji/0_483.png index a1018f0a0..aefbbcc9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_483.png and b/TMessagesProj/src/main/assets/emoji/0_483.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_484.png b/TMessagesProj/src/main/assets/emoji/0_484.png index 8f0cd32ab..efe88c417 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_484.png and b/TMessagesProj/src/main/assets/emoji/0_484.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_485.png b/TMessagesProj/src/main/assets/emoji/0_485.png index e0be69a4f..8c2de3339 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_485.png and b/TMessagesProj/src/main/assets/emoji/0_485.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_486.png b/TMessagesProj/src/main/assets/emoji/0_486.png index 0c95a3430..f2494c5dc 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_486.png and b/TMessagesProj/src/main/assets/emoji/0_486.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_487.png b/TMessagesProj/src/main/assets/emoji/0_487.png index 09c744ac5..ef5449cca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_487.png and b/TMessagesProj/src/main/assets/emoji/0_487.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_488.png b/TMessagesProj/src/main/assets/emoji/0_488.png index c891e9730..23c873331 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_488.png and b/TMessagesProj/src/main/assets/emoji/0_488.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_489.png b/TMessagesProj/src/main/assets/emoji/0_489.png index 8bc9393b7..99cf4578d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_489.png and b/TMessagesProj/src/main/assets/emoji/0_489.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_49.png b/TMessagesProj/src/main/assets/emoji/0_49.png index ba574c63d..4bb827c3d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_49.png and b/TMessagesProj/src/main/assets/emoji/0_49.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_490.png b/TMessagesProj/src/main/assets/emoji/0_490.png index 3910a035a..055add2d5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_490.png and b/TMessagesProj/src/main/assets/emoji/0_490.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_491.png b/TMessagesProj/src/main/assets/emoji/0_491.png index a57eed20c..908bd5c7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_491.png and b/TMessagesProj/src/main/assets/emoji/0_491.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_492.png b/TMessagesProj/src/main/assets/emoji/0_492.png index 5402cd558..b2d6dd102 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_492.png and b/TMessagesProj/src/main/assets/emoji/0_492.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_493.png 
b/TMessagesProj/src/main/assets/emoji/0_493.png index d8e1d30a4..79715fa28 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_493.png and b/TMessagesProj/src/main/assets/emoji/0_493.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_494.png b/TMessagesProj/src/main/assets/emoji/0_494.png index e5db9abfc..7ef8369db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_494.png and b/TMessagesProj/src/main/assets/emoji/0_494.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_495.png b/TMessagesProj/src/main/assets/emoji/0_495.png index 1e12572f3..ca45410d5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_495.png and b/TMessagesProj/src/main/assets/emoji/0_495.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_496.png b/TMessagesProj/src/main/assets/emoji/0_496.png index 3afb52cdc..1eb13efab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_496.png and b/TMessagesProj/src/main/assets/emoji/0_496.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_497.png b/TMessagesProj/src/main/assets/emoji/0_497.png index 5caa8a5df..bf3c34bbe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_497.png and b/TMessagesProj/src/main/assets/emoji/0_497.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_498.png b/TMessagesProj/src/main/assets/emoji/0_498.png index 10b10ddda..782d00aad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_498.png and b/TMessagesProj/src/main/assets/emoji/0_498.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_499.png b/TMessagesProj/src/main/assets/emoji/0_499.png index 056a77963..aa1b6bd28 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_499.png and b/TMessagesProj/src/main/assets/emoji/0_499.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_5.png b/TMessagesProj/src/main/assets/emoji/0_5.png index 67a89c8df..6b3a5a512 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_5.png and 
b/TMessagesProj/src/main/assets/emoji/0_5.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_50.png b/TMessagesProj/src/main/assets/emoji/0_50.png index 9a5e4461d..bfa74f044 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_50.png and b/TMessagesProj/src/main/assets/emoji/0_50.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_500.png b/TMessagesProj/src/main/assets/emoji/0_500.png index 1c8bff98d..1a54351cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_500.png and b/TMessagesProj/src/main/assets/emoji/0_500.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_501.png b/TMessagesProj/src/main/assets/emoji/0_501.png index a7ff0da02..f38416c4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_501.png and b/TMessagesProj/src/main/assets/emoji/0_501.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_502.png b/TMessagesProj/src/main/assets/emoji/0_502.png index 5f0010aac..a8e378d21 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_502.png and b/TMessagesProj/src/main/assets/emoji/0_502.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_503.png b/TMessagesProj/src/main/assets/emoji/0_503.png index 92071d1a1..93a9f592c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_503.png and b/TMessagesProj/src/main/assets/emoji/0_503.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_504.png b/TMessagesProj/src/main/assets/emoji/0_504.png index 9dea50bd3..988f29092 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_504.png and b/TMessagesProj/src/main/assets/emoji/0_504.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_505.png b/TMessagesProj/src/main/assets/emoji/0_505.png index eb1700d9f..9d9bc750b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_505.png and b/TMessagesProj/src/main/assets/emoji/0_505.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_506.png b/TMessagesProj/src/main/assets/emoji/0_506.png index 
d0cd79588..79eb878cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_506.png and b/TMessagesProj/src/main/assets/emoji/0_506.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_507.png b/TMessagesProj/src/main/assets/emoji/0_507.png index 16fe89d6b..daa20de15 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_507.png and b/TMessagesProj/src/main/assets/emoji/0_507.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_508.png b/TMessagesProj/src/main/assets/emoji/0_508.png index 9be3160d5..801aec859 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_508.png and b/TMessagesProj/src/main/assets/emoji/0_508.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_509.png b/TMessagesProj/src/main/assets/emoji/0_509.png index 8387d23af..e21512ded 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_509.png and b/TMessagesProj/src/main/assets/emoji/0_509.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_51.png b/TMessagesProj/src/main/assets/emoji/0_51.png index e9398e44f..f49ee5aad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_51.png and b/TMessagesProj/src/main/assets/emoji/0_51.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_510.png b/TMessagesProj/src/main/assets/emoji/0_510.png index 20f12360b..194bb06db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_510.png and b/TMessagesProj/src/main/assets/emoji/0_510.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_511.png b/TMessagesProj/src/main/assets/emoji/0_511.png index addca5de6..c7be15aa2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_511.png and b/TMessagesProj/src/main/assets/emoji/0_511.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_512.png b/TMessagesProj/src/main/assets/emoji/0_512.png index 24fe71075..71361fc4a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_512.png and b/TMessagesProj/src/main/assets/emoji/0_512.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_513.png b/TMessagesProj/src/main/assets/emoji/0_513.png index 13d7a573c..9a6e9a5be 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_513.png and b/TMessagesProj/src/main/assets/emoji/0_513.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_514.png b/TMessagesProj/src/main/assets/emoji/0_514.png index e6631323f..1d60716b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_514.png and b/TMessagesProj/src/main/assets/emoji/0_514.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_515.png b/TMessagesProj/src/main/assets/emoji/0_515.png index 47e4ad93e..762b64cb4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_515.png and b/TMessagesProj/src/main/assets/emoji/0_515.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_516.png b/TMessagesProj/src/main/assets/emoji/0_516.png index 729624c8e..9526f7e84 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_516.png and b/TMessagesProj/src/main/assets/emoji/0_516.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_517.png b/TMessagesProj/src/main/assets/emoji/0_517.png index d07d2a120..c372355af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_517.png and b/TMessagesProj/src/main/assets/emoji/0_517.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_518.png b/TMessagesProj/src/main/assets/emoji/0_518.png index 089f59e82..e994fb1ad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_518.png and b/TMessagesProj/src/main/assets/emoji/0_518.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_519.png b/TMessagesProj/src/main/assets/emoji/0_519.png index f1f0729d5..9437062e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_519.png and b/TMessagesProj/src/main/assets/emoji/0_519.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_52.png b/TMessagesProj/src/main/assets/emoji/0_52.png index 6a0c5e803..87d3ca73f 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_52.png and b/TMessagesProj/src/main/assets/emoji/0_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_520.png b/TMessagesProj/src/main/assets/emoji/0_520.png index b93d75871..db2206852 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_520.png and b/TMessagesProj/src/main/assets/emoji/0_520.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_521.png b/TMessagesProj/src/main/assets/emoji/0_521.png index b4757d3b0..69fc3789b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_521.png and b/TMessagesProj/src/main/assets/emoji/0_521.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_522.png b/TMessagesProj/src/main/assets/emoji/0_522.png index eb77cd19c..2329977bb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_522.png and b/TMessagesProj/src/main/assets/emoji/0_522.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_523.png b/TMessagesProj/src/main/assets/emoji/0_523.png index b09169151..94b3eb722 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_523.png and b/TMessagesProj/src/main/assets/emoji/0_523.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_524.png b/TMessagesProj/src/main/assets/emoji/0_524.png index 4a2d24122..3a5b8ae88 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_524.png and b/TMessagesProj/src/main/assets/emoji/0_524.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_525.png b/TMessagesProj/src/main/assets/emoji/0_525.png index 37f6d7725..85c5a4442 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_525.png and b/TMessagesProj/src/main/assets/emoji/0_525.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_526.png b/TMessagesProj/src/main/assets/emoji/0_526.png index 44b0a3978..649d2112f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_526.png and b/TMessagesProj/src/main/assets/emoji/0_526.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_527.png 
b/TMessagesProj/src/main/assets/emoji/0_527.png index 5907d4d42..87108504e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_527.png and b/TMessagesProj/src/main/assets/emoji/0_527.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_528.png b/TMessagesProj/src/main/assets/emoji/0_528.png index 5ef14dd16..0c2d6753b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_528.png and b/TMessagesProj/src/main/assets/emoji/0_528.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_529.png b/TMessagesProj/src/main/assets/emoji/0_529.png index fa91d6ba4..f7fc0b4d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_529.png and b/TMessagesProj/src/main/assets/emoji/0_529.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_53.png b/TMessagesProj/src/main/assets/emoji/0_53.png index c2514cf03..b4ff0fa3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_53.png and b/TMessagesProj/src/main/assets/emoji/0_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_530.png b/TMessagesProj/src/main/assets/emoji/0_530.png index cd748147a..3cbb8c179 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_530.png and b/TMessagesProj/src/main/assets/emoji/0_530.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_531.png b/TMessagesProj/src/main/assets/emoji/0_531.png index 17c99349a..17bb8db9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_531.png and b/TMessagesProj/src/main/assets/emoji/0_531.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_532.png b/TMessagesProj/src/main/assets/emoji/0_532.png index fbdf07e03..b16cdde95 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_532.png and b/TMessagesProj/src/main/assets/emoji/0_532.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_533.png b/TMessagesProj/src/main/assets/emoji/0_533.png index b95c70ef6..729624c8e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_533.png and 
b/TMessagesProj/src/main/assets/emoji/0_533.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_534.png b/TMessagesProj/src/main/assets/emoji/0_534.png index 9d520cdc2..68755087d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_534.png and b/TMessagesProj/src/main/assets/emoji/0_534.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_535.png b/TMessagesProj/src/main/assets/emoji/0_535.png index df911c57b..ba80ad470 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_535.png and b/TMessagesProj/src/main/assets/emoji/0_535.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_536.png b/TMessagesProj/src/main/assets/emoji/0_536.png index 6c66b962e..f2e0f477d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_536.png and b/TMessagesProj/src/main/assets/emoji/0_536.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_537.png b/TMessagesProj/src/main/assets/emoji/0_537.png index 4be045799..f881f8a27 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_537.png and b/TMessagesProj/src/main/assets/emoji/0_537.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_538.png b/TMessagesProj/src/main/assets/emoji/0_538.png index f77904a6f..49dffb496 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_538.png and b/TMessagesProj/src/main/assets/emoji/0_538.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_539.png b/TMessagesProj/src/main/assets/emoji/0_539.png index 23b4e0322..99e9b6d2a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_539.png and b/TMessagesProj/src/main/assets/emoji/0_539.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_54.png b/TMessagesProj/src/main/assets/emoji/0_54.png index 6d29c0c76..e356bfc80 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_54.png and b/TMessagesProj/src/main/assets/emoji/0_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_540.png b/TMessagesProj/src/main/assets/emoji/0_540.png index 
c8e55149e..84a794ff7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_540.png and b/TMessagesProj/src/main/assets/emoji/0_540.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_541.png b/TMessagesProj/src/main/assets/emoji/0_541.png index 455d03a2e..56720a0cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_541.png and b/TMessagesProj/src/main/assets/emoji/0_541.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_542.png b/TMessagesProj/src/main/assets/emoji/0_542.png index 02d228d85..bd590481e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_542.png and b/TMessagesProj/src/main/assets/emoji/0_542.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_543.png b/TMessagesProj/src/main/assets/emoji/0_543.png index 8b141d136..8641a8868 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_543.png and b/TMessagesProj/src/main/assets/emoji/0_543.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_544.png b/TMessagesProj/src/main/assets/emoji/0_544.png index 884589828..244efe1ce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_544.png and b/TMessagesProj/src/main/assets/emoji/0_544.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_545.png b/TMessagesProj/src/main/assets/emoji/0_545.png index 1d40effb2..f6bf929d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_545.png and b/TMessagesProj/src/main/assets/emoji/0_545.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_546.png b/TMessagesProj/src/main/assets/emoji/0_546.png index 7effe8d43..b8aabaf07 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_546.png and b/TMessagesProj/src/main/assets/emoji/0_546.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_547.png b/TMessagesProj/src/main/assets/emoji/0_547.png index 0775a74b5..2e618e66e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_547.png and b/TMessagesProj/src/main/assets/emoji/0_547.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_548.png b/TMessagesProj/src/main/assets/emoji/0_548.png index 49e1e015d..be487547d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_548.png and b/TMessagesProj/src/main/assets/emoji/0_548.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_549.png b/TMessagesProj/src/main/assets/emoji/0_549.png index 14e61d6b3..c933b7452 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_549.png and b/TMessagesProj/src/main/assets/emoji/0_549.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_55.png b/TMessagesProj/src/main/assets/emoji/0_55.png index 3e30972b2..77d19b3f3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_55.png and b/TMessagesProj/src/main/assets/emoji/0_55.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_550.png b/TMessagesProj/src/main/assets/emoji/0_550.png index e7a8ad9a0..5089e7cd9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_550.png and b/TMessagesProj/src/main/assets/emoji/0_550.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_551.png b/TMessagesProj/src/main/assets/emoji/0_551.png index 34f9634a3..79686ff89 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_551.png and b/TMessagesProj/src/main/assets/emoji/0_551.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_552.png b/TMessagesProj/src/main/assets/emoji/0_552.png index 180b18e28..53f39c2db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_552.png and b/TMessagesProj/src/main/assets/emoji/0_552.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_553.png b/TMessagesProj/src/main/assets/emoji/0_553.png index 2218a3556..550695675 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_553.png and b/TMessagesProj/src/main/assets/emoji/0_553.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_554.png b/TMessagesProj/src/main/assets/emoji/0_554.png index f3184b28e..01e1bf01a 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_554.png and b/TMessagesProj/src/main/assets/emoji/0_554.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_555.png b/TMessagesProj/src/main/assets/emoji/0_555.png index 9fd6cfea7..dc1bd09bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_555.png and b/TMessagesProj/src/main/assets/emoji/0_555.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_556.png b/TMessagesProj/src/main/assets/emoji/0_556.png index e9674a39a..d004eab83 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_556.png and b/TMessagesProj/src/main/assets/emoji/0_556.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_557.png b/TMessagesProj/src/main/assets/emoji/0_557.png index 3c856e967..871de985c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_557.png and b/TMessagesProj/src/main/assets/emoji/0_557.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_558.png b/TMessagesProj/src/main/assets/emoji/0_558.png index 5ca705531..dc7a84504 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_558.png and b/TMessagesProj/src/main/assets/emoji/0_558.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_559.png b/TMessagesProj/src/main/assets/emoji/0_559.png index 642ed83d2..20a6502a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_559.png and b/TMessagesProj/src/main/assets/emoji/0_559.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_56.png b/TMessagesProj/src/main/assets/emoji/0_56.png index 1c833274b..644c20476 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_56.png and b/TMessagesProj/src/main/assets/emoji/0_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_560.png b/TMessagesProj/src/main/assets/emoji/0_560.png index ea125d7e5..00bde64c5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_560.png and b/TMessagesProj/src/main/assets/emoji/0_560.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_561.png 
b/TMessagesProj/src/main/assets/emoji/0_561.png index 1f223161b..69813a2e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_561.png and b/TMessagesProj/src/main/assets/emoji/0_561.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_562.png b/TMessagesProj/src/main/assets/emoji/0_562.png index f7d068014..9d15ab11b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_562.png and b/TMessagesProj/src/main/assets/emoji/0_562.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_563.png b/TMessagesProj/src/main/assets/emoji/0_563.png index 171464158..46b48572a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_563.png and b/TMessagesProj/src/main/assets/emoji/0_563.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_564.png b/TMessagesProj/src/main/assets/emoji/0_564.png index e63a7d04c..60b2b5ba6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_564.png and b/TMessagesProj/src/main/assets/emoji/0_564.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_565.png b/TMessagesProj/src/main/assets/emoji/0_565.png index ec0907670..81782b23d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_565.png and b/TMessagesProj/src/main/assets/emoji/0_565.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_566.png b/TMessagesProj/src/main/assets/emoji/0_566.png index 294034e67..14e61d6b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_566.png and b/TMessagesProj/src/main/assets/emoji/0_566.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_567.png b/TMessagesProj/src/main/assets/emoji/0_567.png index 6dbb68fcf..e7a8ad9a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_567.png and b/TMessagesProj/src/main/assets/emoji/0_567.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_568.png b/TMessagesProj/src/main/assets/emoji/0_568.png index f772b023e..34f9634a3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_568.png and 
b/TMessagesProj/src/main/assets/emoji/0_568.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_569.png b/TMessagesProj/src/main/assets/emoji/0_569.png index a1f87ec5c..180b18e28 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_569.png and b/TMessagesProj/src/main/assets/emoji/0_569.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_57.png b/TMessagesProj/src/main/assets/emoji/0_57.png index bc700edb4..89e8bf311 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_57.png and b/TMessagesProj/src/main/assets/emoji/0_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_570.png b/TMessagesProj/src/main/assets/emoji/0_570.png index f06dd9335..eee427455 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_570.png and b/TMessagesProj/src/main/assets/emoji/0_570.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_571.png b/TMessagesProj/src/main/assets/emoji/0_571.png index 4e71634a4..f3184b28e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_571.png and b/TMessagesProj/src/main/assets/emoji/0_571.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_572.png b/TMessagesProj/src/main/assets/emoji/0_572.png index c39b2f739..6519c1b47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_572.png and b/TMessagesProj/src/main/assets/emoji/0_572.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_573.png b/TMessagesProj/src/main/assets/emoji/0_573.png index 5acaf376c..cfcffa290 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_573.png and b/TMessagesProj/src/main/assets/emoji/0_573.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_574.png b/TMessagesProj/src/main/assets/emoji/0_574.png index bce31f8c5..3c856e967 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_574.png and b/TMessagesProj/src/main/assets/emoji/0_574.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_575.png b/TMessagesProj/src/main/assets/emoji/0_575.png index 
db4d5b9c1..5ca705531 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_575.png and b/TMessagesProj/src/main/assets/emoji/0_575.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_576.png b/TMessagesProj/src/main/assets/emoji/0_576.png index b61f21924..5a44c36ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_576.png and b/TMessagesProj/src/main/assets/emoji/0_576.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_577.png b/TMessagesProj/src/main/assets/emoji/0_577.png index f8e0afd97..ea125d7e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_577.png and b/TMessagesProj/src/main/assets/emoji/0_577.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_578.png b/TMessagesProj/src/main/assets/emoji/0_578.png index 94c0e461a..1f223161b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_578.png and b/TMessagesProj/src/main/assets/emoji/0_578.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_579.png b/TMessagesProj/src/main/assets/emoji/0_579.png index acf9ab7b9..f7d068014 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_579.png and b/TMessagesProj/src/main/assets/emoji/0_579.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_58.png b/TMessagesProj/src/main/assets/emoji/0_58.png index 960a9cbd4..7a9b4418a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_58.png and b/TMessagesProj/src/main/assets/emoji/0_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_580.png b/TMessagesProj/src/main/assets/emoji/0_580.png index 91b648c6b..171464158 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_580.png and b/TMessagesProj/src/main/assets/emoji/0_580.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_581.png b/TMessagesProj/src/main/assets/emoji/0_581.png index 447dffd80..e63a7d04c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_581.png and b/TMessagesProj/src/main/assets/emoji/0_581.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_582.png b/TMessagesProj/src/main/assets/emoji/0_582.png index 1096dfcaf..ec0907670 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_582.png and b/TMessagesProj/src/main/assets/emoji/0_582.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_583.png b/TMessagesProj/src/main/assets/emoji/0_583.png index 48d1e1df6..294034e67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_583.png and b/TMessagesProj/src/main/assets/emoji/0_583.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_584.png b/TMessagesProj/src/main/assets/emoji/0_584.png index 5b947e6f0..6dbb68fcf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_584.png and b/TMessagesProj/src/main/assets/emoji/0_584.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_585.png b/TMessagesProj/src/main/assets/emoji/0_585.png index ce7c7bd28..f772b023e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_585.png and b/TMessagesProj/src/main/assets/emoji/0_585.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_586.png b/TMessagesProj/src/main/assets/emoji/0_586.png index 950ad4bd8..a1f87ec5c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_586.png and b/TMessagesProj/src/main/assets/emoji/0_586.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_587.png b/TMessagesProj/src/main/assets/emoji/0_587.png index f1e57fa03..f06dd9335 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_587.png and b/TMessagesProj/src/main/assets/emoji/0_587.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_588.png b/TMessagesProj/src/main/assets/emoji/0_588.png index 61c9dc35c..4e71634a4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_588.png and b/TMessagesProj/src/main/assets/emoji/0_588.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_589.png b/TMessagesProj/src/main/assets/emoji/0_589.png index fef853f94..c39b2f739 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_589.png and b/TMessagesProj/src/main/assets/emoji/0_589.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_59.png b/TMessagesProj/src/main/assets/emoji/0_59.png index cbf97c0e4..4f7f292f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_59.png and b/TMessagesProj/src/main/assets/emoji/0_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_590.png b/TMessagesProj/src/main/assets/emoji/0_590.png index 24d96872d..5acaf376c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_590.png and b/TMessagesProj/src/main/assets/emoji/0_590.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_591.png b/TMessagesProj/src/main/assets/emoji/0_591.png index ca95f232b..bce31f8c5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_591.png and b/TMessagesProj/src/main/assets/emoji/0_591.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_592.png b/TMessagesProj/src/main/assets/emoji/0_592.png index bd5b8c883..db4d5b9c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_592.png and b/TMessagesProj/src/main/assets/emoji/0_592.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_593.png b/TMessagesProj/src/main/assets/emoji/0_593.png index 735717f45..b61f21924 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_593.png and b/TMessagesProj/src/main/assets/emoji/0_593.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_594.png b/TMessagesProj/src/main/assets/emoji/0_594.png index 60ca02c5f..f8e0afd97 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_594.png and b/TMessagesProj/src/main/assets/emoji/0_594.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_595.png b/TMessagesProj/src/main/assets/emoji/0_595.png index 861cdc53f..94c0e461a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_595.png and b/TMessagesProj/src/main/assets/emoji/0_595.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_596.png 
b/TMessagesProj/src/main/assets/emoji/0_596.png index ebdf530c8..acf9ab7b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_596.png and b/TMessagesProj/src/main/assets/emoji/0_596.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_597.png b/TMessagesProj/src/main/assets/emoji/0_597.png index c34874c71..91b648c6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_597.png and b/TMessagesProj/src/main/assets/emoji/0_597.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_598.png b/TMessagesProj/src/main/assets/emoji/0_598.png index 94031a5e2..447dffd80 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_598.png and b/TMessagesProj/src/main/assets/emoji/0_598.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_599.png b/TMessagesProj/src/main/assets/emoji/0_599.png index a67033d66..1096dfcaf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_599.png and b/TMessagesProj/src/main/assets/emoji/0_599.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_6.png b/TMessagesProj/src/main/assets/emoji/0_6.png index 4d520a2c8..9cfac3d8d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_6.png and b/TMessagesProj/src/main/assets/emoji/0_6.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_60.png b/TMessagesProj/src/main/assets/emoji/0_60.png index c951b1898..8d4967672 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_60.png and b/TMessagesProj/src/main/assets/emoji/0_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_600.png b/TMessagesProj/src/main/assets/emoji/0_600.png index 9e3caf2cf..5b2807564 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_600.png and b/TMessagesProj/src/main/assets/emoji/0_600.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_601.png b/TMessagesProj/src/main/assets/emoji/0_601.png index 1c0f5f9ee..21e686315 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_601.png and 
b/TMessagesProj/src/main/assets/emoji/0_601.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_602.png b/TMessagesProj/src/main/assets/emoji/0_602.png index 6e02862af..ce7c7bd28 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_602.png and b/TMessagesProj/src/main/assets/emoji/0_602.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_603.png b/TMessagesProj/src/main/assets/emoji/0_603.png index 505c6d502..950ad4bd8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_603.png and b/TMessagesProj/src/main/assets/emoji/0_603.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_604.png b/TMessagesProj/src/main/assets/emoji/0_604.png index 21761dd4e..f1e57fa03 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_604.png and b/TMessagesProj/src/main/assets/emoji/0_604.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_605.png b/TMessagesProj/src/main/assets/emoji/0_605.png index fd07104ff..61c9dc35c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_605.png and b/TMessagesProj/src/main/assets/emoji/0_605.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_606.png b/TMessagesProj/src/main/assets/emoji/0_606.png index 53e8a0be5..fef853f94 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_606.png and b/TMessagesProj/src/main/assets/emoji/0_606.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_607.png b/TMessagesProj/src/main/assets/emoji/0_607.png index bc6fdd7c9..5dfa6724a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_607.png and b/TMessagesProj/src/main/assets/emoji/0_607.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_608.png b/TMessagesProj/src/main/assets/emoji/0_608.png index 74241194f..ca95f232b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_608.png and b/TMessagesProj/src/main/assets/emoji/0_608.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_609.png b/TMessagesProj/src/main/assets/emoji/0_609.png index 
11d6168d8..bba7b90f8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_609.png and b/TMessagesProj/src/main/assets/emoji/0_609.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_61.png b/TMessagesProj/src/main/assets/emoji/0_61.png index d47a3db1d..f577324ee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_61.png and b/TMessagesProj/src/main/assets/emoji/0_61.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_610.png b/TMessagesProj/src/main/assets/emoji/0_610.png index bcd14163e..735717f45 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_610.png and b/TMessagesProj/src/main/assets/emoji/0_610.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_611.png b/TMessagesProj/src/main/assets/emoji/0_611.png index 179efcf77..21f27cbe8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_611.png and b/TMessagesProj/src/main/assets/emoji/0_611.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_612.png b/TMessagesProj/src/main/assets/emoji/0_612.png index 9109407d2..861cdc53f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_612.png and b/TMessagesProj/src/main/assets/emoji/0_612.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_613.png b/TMessagesProj/src/main/assets/emoji/0_613.png index d8423f461..34a0063e6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_613.png and b/TMessagesProj/src/main/assets/emoji/0_613.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_614.png b/TMessagesProj/src/main/assets/emoji/0_614.png index 8ce68de6b..c34874c71 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_614.png and b/TMessagesProj/src/main/assets/emoji/0_614.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_615.png b/TMessagesProj/src/main/assets/emoji/0_615.png index 565a8c68e..94031a5e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_615.png and b/TMessagesProj/src/main/assets/emoji/0_615.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_616.png b/TMessagesProj/src/main/assets/emoji/0_616.png index d7196ec18..a67033d66 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_616.png and b/TMessagesProj/src/main/assets/emoji/0_616.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_617.png b/TMessagesProj/src/main/assets/emoji/0_617.png index 320a1562f..9e3caf2cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_617.png and b/TMessagesProj/src/main/assets/emoji/0_617.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_618.png b/TMessagesProj/src/main/assets/emoji/0_618.png index 886e2a83d..1c0f5f9ee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_618.png and b/TMessagesProj/src/main/assets/emoji/0_618.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_619.png b/TMessagesProj/src/main/assets/emoji/0_619.png index a6001c75f..6e02862af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_619.png and b/TMessagesProj/src/main/assets/emoji/0_619.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_62.png b/TMessagesProj/src/main/assets/emoji/0_62.png index 3dc5a0a3a..54a055e43 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_62.png and b/TMessagesProj/src/main/assets/emoji/0_62.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_620.png b/TMessagesProj/src/main/assets/emoji/0_620.png index cc8bf15f5..505c6d502 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_620.png and b/TMessagesProj/src/main/assets/emoji/0_620.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_621.png b/TMessagesProj/src/main/assets/emoji/0_621.png index 3d47e97e8..21761dd4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_621.png and b/TMessagesProj/src/main/assets/emoji/0_621.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_622.png b/TMessagesProj/src/main/assets/emoji/0_622.png index 7126b24ae..fd07104ff 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_622.png and b/TMessagesProj/src/main/assets/emoji/0_622.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_623.png b/TMessagesProj/src/main/assets/emoji/0_623.png index 654bdc4d4..53e8a0be5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_623.png and b/TMessagesProj/src/main/assets/emoji/0_623.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_624.png b/TMessagesProj/src/main/assets/emoji/0_624.png index 14a1ccdab..bc6fdd7c9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_624.png and b/TMessagesProj/src/main/assets/emoji/0_624.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_625.png b/TMessagesProj/src/main/assets/emoji/0_625.png index c11f5b719..74241194f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_625.png and b/TMessagesProj/src/main/assets/emoji/0_625.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_626.png b/TMessagesProj/src/main/assets/emoji/0_626.png index ae1d9ffeb..11d6168d8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_626.png and b/TMessagesProj/src/main/assets/emoji/0_626.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_627.png b/TMessagesProj/src/main/assets/emoji/0_627.png index 5c912a821..bcd14163e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_627.png and b/TMessagesProj/src/main/assets/emoji/0_627.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_628.png b/TMessagesProj/src/main/assets/emoji/0_628.png index bd382bc74..179efcf77 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_628.png and b/TMessagesProj/src/main/assets/emoji/0_628.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_629.png b/TMessagesProj/src/main/assets/emoji/0_629.png index 8af23c87f..9109407d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_629.png and b/TMessagesProj/src/main/assets/emoji/0_629.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_63.png 
b/TMessagesProj/src/main/assets/emoji/0_63.png index dc0433df4..fe2841e71 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_63.png and b/TMessagesProj/src/main/assets/emoji/0_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_630.png b/TMessagesProj/src/main/assets/emoji/0_630.png index 5cb7b07c4..d8423f461 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_630.png and b/TMessagesProj/src/main/assets/emoji/0_630.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_631.png b/TMessagesProj/src/main/assets/emoji/0_631.png index d913d5e75..8ce68de6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_631.png and b/TMessagesProj/src/main/assets/emoji/0_631.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_632.png b/TMessagesProj/src/main/assets/emoji/0_632.png index 5f73866fe..565a8c68e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_632.png and b/TMessagesProj/src/main/assets/emoji/0_632.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_633.png b/TMessagesProj/src/main/assets/emoji/0_633.png index 6be8ce7e8..d7196ec18 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_633.png and b/TMessagesProj/src/main/assets/emoji/0_633.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_634.png b/TMessagesProj/src/main/assets/emoji/0_634.png index 59868e181..320a1562f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_634.png and b/TMessagesProj/src/main/assets/emoji/0_634.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_635.png b/TMessagesProj/src/main/assets/emoji/0_635.png index 9651d0042..91b2cb3a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_635.png and b/TMessagesProj/src/main/assets/emoji/0_635.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_636.png b/TMessagesProj/src/main/assets/emoji/0_636.png index 8f6eb5489..a6001c75f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_636.png and 
b/TMessagesProj/src/main/assets/emoji/0_636.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_637.png b/TMessagesProj/src/main/assets/emoji/0_637.png index 6520f7fef..cc8bf15f5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_637.png and b/TMessagesProj/src/main/assets/emoji/0_637.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_638.png b/TMessagesProj/src/main/assets/emoji/0_638.png index 336dfef35..3d47e97e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_638.png and b/TMessagesProj/src/main/assets/emoji/0_638.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_639.png b/TMessagesProj/src/main/assets/emoji/0_639.png index 252cb888f..7126b24ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_639.png and b/TMessagesProj/src/main/assets/emoji/0_639.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_64.png b/TMessagesProj/src/main/assets/emoji/0_64.png index 582a9e6ff..4deb4ac38 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_64.png and b/TMessagesProj/src/main/assets/emoji/0_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_640.png b/TMessagesProj/src/main/assets/emoji/0_640.png index 613f17b63..654bdc4d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_640.png and b/TMessagesProj/src/main/assets/emoji/0_640.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_641.png b/TMessagesProj/src/main/assets/emoji/0_641.png index 4fe83ca0c..ca66debb4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_641.png and b/TMessagesProj/src/main/assets/emoji/0_641.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_642.png b/TMessagesProj/src/main/assets/emoji/0_642.png index 8f6ad7a7c..c11f5b719 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_642.png and b/TMessagesProj/src/main/assets/emoji/0_642.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_643.png b/TMessagesProj/src/main/assets/emoji/0_643.png index 
3dcf5f186..ae1d9ffeb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_643.png and b/TMessagesProj/src/main/assets/emoji/0_643.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_644.png b/TMessagesProj/src/main/assets/emoji/0_644.png index 83ae937ff..5c912a821 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_644.png and b/TMessagesProj/src/main/assets/emoji/0_644.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_645.png b/TMessagesProj/src/main/assets/emoji/0_645.png index feb615e8e..bd382bc74 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_645.png and b/TMessagesProj/src/main/assets/emoji/0_645.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_646.png b/TMessagesProj/src/main/assets/emoji/0_646.png index c81b14c64..8af23c87f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_646.png and b/TMessagesProj/src/main/assets/emoji/0_646.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_647.png b/TMessagesProj/src/main/assets/emoji/0_647.png index aa3ffaf94..5cb7b07c4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_647.png and b/TMessagesProj/src/main/assets/emoji/0_647.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_648.png b/TMessagesProj/src/main/assets/emoji/0_648.png index 7914380ad..d52ae6f24 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_648.png and b/TMessagesProj/src/main/assets/emoji/0_648.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_649.png b/TMessagesProj/src/main/assets/emoji/0_649.png index 2e8f9b3e1..5f73866fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_649.png and b/TMessagesProj/src/main/assets/emoji/0_649.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_65.png b/TMessagesProj/src/main/assets/emoji/0_65.png index 4d330c443..eb54b8d05 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_65.png and b/TMessagesProj/src/main/assets/emoji/0_65.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_650.png b/TMessagesProj/src/main/assets/emoji/0_650.png index b441d8404..6be8ce7e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_650.png and b/TMessagesProj/src/main/assets/emoji/0_650.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_651.png b/TMessagesProj/src/main/assets/emoji/0_651.png index a545664fe..59868e181 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_651.png and b/TMessagesProj/src/main/assets/emoji/0_651.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_652.png b/TMessagesProj/src/main/assets/emoji/0_652.png index 159e037c6..9651d0042 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_652.png and b/TMessagesProj/src/main/assets/emoji/0_652.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_653.png b/TMessagesProj/src/main/assets/emoji/0_653.png index 182528169..8f6eb5489 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_653.png and b/TMessagesProj/src/main/assets/emoji/0_653.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_654.png b/TMessagesProj/src/main/assets/emoji/0_654.png index 87e7d116d..6520f7fef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_654.png and b/TMessagesProj/src/main/assets/emoji/0_654.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_655.png b/TMessagesProj/src/main/assets/emoji/0_655.png index 46f0ede40..336dfef35 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_655.png and b/TMessagesProj/src/main/assets/emoji/0_655.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_656.png b/TMessagesProj/src/main/assets/emoji/0_656.png index a3fdfe7b7..252cb888f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_656.png and b/TMessagesProj/src/main/assets/emoji/0_656.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_657.png b/TMessagesProj/src/main/assets/emoji/0_657.png index 8a9f5bfe7..613f17b63 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_657.png and b/TMessagesProj/src/main/assets/emoji/0_657.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_658.png b/TMessagesProj/src/main/assets/emoji/0_658.png index dc2307234..4fe83ca0c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_658.png and b/TMessagesProj/src/main/assets/emoji/0_658.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_659.png b/TMessagesProj/src/main/assets/emoji/0_659.png index 0b014232a..067a89a53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_659.png and b/TMessagesProj/src/main/assets/emoji/0_659.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_66.png b/TMessagesProj/src/main/assets/emoji/0_66.png index 3446dcc18..25cd46625 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_66.png and b/TMessagesProj/src/main/assets/emoji/0_66.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_660.png b/TMessagesProj/src/main/assets/emoji/0_660.png index be19840c8..81eb4a61c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_660.png and b/TMessagesProj/src/main/assets/emoji/0_660.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_661.png b/TMessagesProj/src/main/assets/emoji/0_661.png index efb3cab17..83ae937ff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_661.png and b/TMessagesProj/src/main/assets/emoji/0_661.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_662.png b/TMessagesProj/src/main/assets/emoji/0_662.png index efe5fa471..feb615e8e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_662.png and b/TMessagesProj/src/main/assets/emoji/0_662.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_663.png b/TMessagesProj/src/main/assets/emoji/0_663.png index 8c53f9a4b..c81b14c64 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_663.png and b/TMessagesProj/src/main/assets/emoji/0_663.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_664.png 
b/TMessagesProj/src/main/assets/emoji/0_664.png index fd8d8ff3a..aa3ffaf94 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_664.png and b/TMessagesProj/src/main/assets/emoji/0_664.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_665.png b/TMessagesProj/src/main/assets/emoji/0_665.png index fd64f735f..7914380ad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_665.png and b/TMessagesProj/src/main/assets/emoji/0_665.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_666.png b/TMessagesProj/src/main/assets/emoji/0_666.png index b3caddc09..2e8f9b3e1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_666.png and b/TMessagesProj/src/main/assets/emoji/0_666.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_667.png b/TMessagesProj/src/main/assets/emoji/0_667.png index 2de8ebe48..a53b4da17 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_667.png and b/TMessagesProj/src/main/assets/emoji/0_667.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_668.png b/TMessagesProj/src/main/assets/emoji/0_668.png index d17253fca..825bfe06a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_668.png and b/TMessagesProj/src/main/assets/emoji/0_668.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_669.png b/TMessagesProj/src/main/assets/emoji/0_669.png index f92f13042..159e037c6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_669.png and b/TMessagesProj/src/main/assets/emoji/0_669.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_67.png b/TMessagesProj/src/main/assets/emoji/0_67.png index 40db0dcb8..4368cff9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_67.png and b/TMessagesProj/src/main/assets/emoji/0_67.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_670.png b/TMessagesProj/src/main/assets/emoji/0_670.png index 22244846f..182528169 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_670.png and 
b/TMessagesProj/src/main/assets/emoji/0_670.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_671.png b/TMessagesProj/src/main/assets/emoji/0_671.png index 0bcc1a310..13f514a75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_671.png and b/TMessagesProj/src/main/assets/emoji/0_671.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_672.png b/TMessagesProj/src/main/assets/emoji/0_672.png index 07767a510..d9a7f4e71 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_672.png and b/TMessagesProj/src/main/assets/emoji/0_672.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_673.png b/TMessagesProj/src/main/assets/emoji/0_673.png index 75ea5be74..13e09c158 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_673.png and b/TMessagesProj/src/main/assets/emoji/0_673.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_674.png b/TMessagesProj/src/main/assets/emoji/0_674.png index bd3bcef0a..dd58c5728 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_674.png and b/TMessagesProj/src/main/assets/emoji/0_674.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_675.png b/TMessagesProj/src/main/assets/emoji/0_675.png index be445868d..701dedfbf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_675.png and b/TMessagesProj/src/main/assets/emoji/0_675.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_676.png b/TMessagesProj/src/main/assets/emoji/0_676.png index fe42b0948..529ce614f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_676.png and b/TMessagesProj/src/main/assets/emoji/0_676.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_677.png b/TMessagesProj/src/main/assets/emoji/0_677.png index 5a0c5885e..b9c3e8e59 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_677.png and b/TMessagesProj/src/main/assets/emoji/0_677.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_678.png b/TMessagesProj/src/main/assets/emoji/0_678.png index 
fee7e239d..84a1afc3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_678.png and b/TMessagesProj/src/main/assets/emoji/0_678.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_679.png b/TMessagesProj/src/main/assets/emoji/0_679.png index 79a64a1a0..08af63a96 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_679.png and b/TMessagesProj/src/main/assets/emoji/0_679.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_68.png b/TMessagesProj/src/main/assets/emoji/0_68.png index e62026e5e..019c0a84b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_68.png and b/TMessagesProj/src/main/assets/emoji/0_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_680.png b/TMessagesProj/src/main/assets/emoji/0_680.png index 1773db7d8..15f50f2ef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_680.png and b/TMessagesProj/src/main/assets/emoji/0_680.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_681.png b/TMessagesProj/src/main/assets/emoji/0_681.png index 781c539c3..edbc5dbbb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_681.png and b/TMessagesProj/src/main/assets/emoji/0_681.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_682.png b/TMessagesProj/src/main/assets/emoji/0_682.png index 25e9c38a9..c0d97af7c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_682.png and b/TMessagesProj/src/main/assets/emoji/0_682.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_683.png b/TMessagesProj/src/main/assets/emoji/0_683.png index bf0764490..e0fea0c76 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_683.png and b/TMessagesProj/src/main/assets/emoji/0_683.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_684.png b/TMessagesProj/src/main/assets/emoji/0_684.png index 0777ab9f3..0d877040c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_684.png and b/TMessagesProj/src/main/assets/emoji/0_684.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_685.png b/TMessagesProj/src/main/assets/emoji/0_685.png index e1d457e4d..902e7455f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_685.png and b/TMessagesProj/src/main/assets/emoji/0_685.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_686.png b/TMessagesProj/src/main/assets/emoji/0_686.png index c10ec92ad..feb89246c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_686.png and b/TMessagesProj/src/main/assets/emoji/0_686.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_687.png b/TMessagesProj/src/main/assets/emoji/0_687.png index 670a10dc6..56a6df527 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_687.png and b/TMessagesProj/src/main/assets/emoji/0_687.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_688.png b/TMessagesProj/src/main/assets/emoji/0_688.png index 90b2cc4dd..508d2af62 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_688.png and b/TMessagesProj/src/main/assets/emoji/0_688.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_689.png b/TMessagesProj/src/main/assets/emoji/0_689.png index 6f044d267..22505e9be 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_689.png and b/TMessagesProj/src/main/assets/emoji/0_689.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_69.png b/TMessagesProj/src/main/assets/emoji/0_69.png index 8a44b4af4..1afe9de97 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_69.png and b/TMessagesProj/src/main/assets/emoji/0_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_690.png b/TMessagesProj/src/main/assets/emoji/0_690.png index 9c54a8518..01987dc5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_690.png and b/TMessagesProj/src/main/assets/emoji/0_690.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_691.png b/TMessagesProj/src/main/assets/emoji/0_691.png index bf1f887da..8fa0aa490 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_691.png and b/TMessagesProj/src/main/assets/emoji/0_691.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_692.png b/TMessagesProj/src/main/assets/emoji/0_692.png index 21530ae53..b15e44889 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_692.png and b/TMessagesProj/src/main/assets/emoji/0_692.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_693.png b/TMessagesProj/src/main/assets/emoji/0_693.png index 6b35da6cb..e9ee61d9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_693.png and b/TMessagesProj/src/main/assets/emoji/0_693.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_694.png b/TMessagesProj/src/main/assets/emoji/0_694.png index 486cb00c3..ac68a1e6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_694.png and b/TMessagesProj/src/main/assets/emoji/0_694.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_695.png b/TMessagesProj/src/main/assets/emoji/0_695.png index ab0087ca0..4304c9087 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_695.png and b/TMessagesProj/src/main/assets/emoji/0_695.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_696.png b/TMessagesProj/src/main/assets/emoji/0_696.png index 2676e6561..12bee8620 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_696.png and b/TMessagesProj/src/main/assets/emoji/0_696.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_697.png b/TMessagesProj/src/main/assets/emoji/0_697.png index 2d3368121..227e9a02d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_697.png and b/TMessagesProj/src/main/assets/emoji/0_697.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_698.png b/TMessagesProj/src/main/assets/emoji/0_698.png index e9731ca04..9de46ad67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_698.png and b/TMessagesProj/src/main/assets/emoji/0_698.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_699.png 
b/TMessagesProj/src/main/assets/emoji/0_699.png index db8298d95..630e89cd1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_699.png and b/TMessagesProj/src/main/assets/emoji/0_699.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_7.png b/TMessagesProj/src/main/assets/emoji/0_7.png index ee11eaa23..d4b374a22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_7.png and b/TMessagesProj/src/main/assets/emoji/0_7.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_70.png b/TMessagesProj/src/main/assets/emoji/0_70.png index a4dcf0ca5..17a099726 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_70.png and b/TMessagesProj/src/main/assets/emoji/0_70.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_700.png b/TMessagesProj/src/main/assets/emoji/0_700.png index 3557ede74..20042d7af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_700.png and b/TMessagesProj/src/main/assets/emoji/0_700.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_701.png b/TMessagesProj/src/main/assets/emoji/0_701.png index 6a8238df4..34a4aa6af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_701.png and b/TMessagesProj/src/main/assets/emoji/0_701.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_702.png b/TMessagesProj/src/main/assets/emoji/0_702.png index 7a7c47aef..62fe01989 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_702.png and b/TMessagesProj/src/main/assets/emoji/0_702.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_703.png b/TMessagesProj/src/main/assets/emoji/0_703.png index 655939c04..d21921afb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_703.png and b/TMessagesProj/src/main/assets/emoji/0_703.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_704.png b/TMessagesProj/src/main/assets/emoji/0_704.png index 77528de04..03259769c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_704.png and 
b/TMessagesProj/src/main/assets/emoji/0_704.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_705.png b/TMessagesProj/src/main/assets/emoji/0_705.png index 92df0f8d1..050ac9acd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_705.png and b/TMessagesProj/src/main/assets/emoji/0_705.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_706.png b/TMessagesProj/src/main/assets/emoji/0_706.png index fdbdf5832..7772eda30 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_706.png and b/TMessagesProj/src/main/assets/emoji/0_706.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_707.png b/TMessagesProj/src/main/assets/emoji/0_707.png index 8bfa82af7..a14b71735 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_707.png and b/TMessagesProj/src/main/assets/emoji/0_707.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_708.png b/TMessagesProj/src/main/assets/emoji/0_708.png index acf7989b9..3eb84f1dc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_708.png and b/TMessagesProj/src/main/assets/emoji/0_708.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_709.png b/TMessagesProj/src/main/assets/emoji/0_709.png index b6092d6e1..e7df2d33a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_709.png and b/TMessagesProj/src/main/assets/emoji/0_709.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_71.png b/TMessagesProj/src/main/assets/emoji/0_71.png index e8f1a3de8..bb6c4b496 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_71.png and b/TMessagesProj/src/main/assets/emoji/0_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_710.png b/TMessagesProj/src/main/assets/emoji/0_710.png index 16307b8c0..6cfe5edb1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_710.png and b/TMessagesProj/src/main/assets/emoji/0_710.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_711.png b/TMessagesProj/src/main/assets/emoji/0_711.png index 
52e3b8d1e..3578a56cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_711.png and b/TMessagesProj/src/main/assets/emoji/0_711.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_712.png b/TMessagesProj/src/main/assets/emoji/0_712.png index 477501494..8e79a8155 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_712.png and b/TMessagesProj/src/main/assets/emoji/0_712.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_713.png b/TMessagesProj/src/main/assets/emoji/0_713.png index a4df4e46a..aef258cf9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_713.png and b/TMessagesProj/src/main/assets/emoji/0_713.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_714.png b/TMessagesProj/src/main/assets/emoji/0_714.png index 87ef64a76..c7a5efbd6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_714.png and b/TMessagesProj/src/main/assets/emoji/0_714.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_715.png b/TMessagesProj/src/main/assets/emoji/0_715.png index d3248c16d..909ed02f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_715.png and b/TMessagesProj/src/main/assets/emoji/0_715.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_716.png b/TMessagesProj/src/main/assets/emoji/0_716.png index 3c47c376b..7abe5de35 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_716.png and b/TMessagesProj/src/main/assets/emoji/0_716.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_717.png b/TMessagesProj/src/main/assets/emoji/0_717.png index d43b7bb8d..1b08ee7af 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_717.png and b/TMessagesProj/src/main/assets/emoji/0_717.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_718.png b/TMessagesProj/src/main/assets/emoji/0_718.png index defbd354e..39b09fcae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_718.png and b/TMessagesProj/src/main/assets/emoji/0_718.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_719.png b/TMessagesProj/src/main/assets/emoji/0_719.png index f4c55ac39..e77733f54 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_719.png and b/TMessagesProj/src/main/assets/emoji/0_719.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_72.png b/TMessagesProj/src/main/assets/emoji/0_72.png index 019043adf..39908ce47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_72.png and b/TMessagesProj/src/main/assets/emoji/0_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_720.png b/TMessagesProj/src/main/assets/emoji/0_720.png index 2322bdece..4521f20cc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_720.png and b/TMessagesProj/src/main/assets/emoji/0_720.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_721.png b/TMessagesProj/src/main/assets/emoji/0_721.png index 4564ab355..d8f386551 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_721.png and b/TMessagesProj/src/main/assets/emoji/0_721.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_722.png b/TMessagesProj/src/main/assets/emoji/0_722.png index 8e4b0e103..dc3accf1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_722.png and b/TMessagesProj/src/main/assets/emoji/0_722.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_723.png b/TMessagesProj/src/main/assets/emoji/0_723.png index c816e3e19..98a000164 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_723.png and b/TMessagesProj/src/main/assets/emoji/0_723.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_724.png b/TMessagesProj/src/main/assets/emoji/0_724.png index 51cd0df7f..d0f141ad8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_724.png and b/TMessagesProj/src/main/assets/emoji/0_724.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_725.png b/TMessagesProj/src/main/assets/emoji/0_725.png index dca6c4748..47955dd21 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_725.png and b/TMessagesProj/src/main/assets/emoji/0_725.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_726.png b/TMessagesProj/src/main/assets/emoji/0_726.png index b116547bf..f2aeb801c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_726.png and b/TMessagesProj/src/main/assets/emoji/0_726.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_727.png b/TMessagesProj/src/main/assets/emoji/0_727.png index eb8c03ef2..890f6dc90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_727.png and b/TMessagesProj/src/main/assets/emoji/0_727.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_728.png b/TMessagesProj/src/main/assets/emoji/0_728.png index 0646ec594..98bcb78bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_728.png and b/TMessagesProj/src/main/assets/emoji/0_728.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_729.png b/TMessagesProj/src/main/assets/emoji/0_729.png index d87baee71..4008d8d19 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_729.png and b/TMessagesProj/src/main/assets/emoji/0_729.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_73.png b/TMessagesProj/src/main/assets/emoji/0_73.png index fb6d90ae1..df3b8d271 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_73.png and b/TMessagesProj/src/main/assets/emoji/0_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_730.png b/TMessagesProj/src/main/assets/emoji/0_730.png index 2ba438087..80d6e09f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_730.png and b/TMessagesProj/src/main/assets/emoji/0_730.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_731.png b/TMessagesProj/src/main/assets/emoji/0_731.png index f95d3e7ea..bfcd18c4f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_731.png and b/TMessagesProj/src/main/assets/emoji/0_731.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_732.png 
b/TMessagesProj/src/main/assets/emoji/0_732.png index 4fbe981f2..d354ad9ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_732.png and b/TMessagesProj/src/main/assets/emoji/0_732.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_733.png b/TMessagesProj/src/main/assets/emoji/0_733.png index bced3ee13..a11a32bfd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_733.png and b/TMessagesProj/src/main/assets/emoji/0_733.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_734.png b/TMessagesProj/src/main/assets/emoji/0_734.png index 51244d6fa..c3152df24 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_734.png and b/TMessagesProj/src/main/assets/emoji/0_734.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_735.png b/TMessagesProj/src/main/assets/emoji/0_735.png index 75b68ac96..2b79d60e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_735.png and b/TMessagesProj/src/main/assets/emoji/0_735.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_736.png b/TMessagesProj/src/main/assets/emoji/0_736.png index ed35b85da..7f0e8d237 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_736.png and b/TMessagesProj/src/main/assets/emoji/0_736.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_737.png b/TMessagesProj/src/main/assets/emoji/0_737.png index 12a964eb7..7cb6bef47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_737.png and b/TMessagesProj/src/main/assets/emoji/0_737.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_738.png b/TMessagesProj/src/main/assets/emoji/0_738.png index fb0f8bfd4..5145aa5a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_738.png and b/TMessagesProj/src/main/assets/emoji/0_738.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_739.png b/TMessagesProj/src/main/assets/emoji/0_739.png index 84dd35236..107496fae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_739.png and 
b/TMessagesProj/src/main/assets/emoji/0_739.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_74.png b/TMessagesProj/src/main/assets/emoji/0_74.png index 2f013754b..a19080d06 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_74.png and b/TMessagesProj/src/main/assets/emoji/0_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_740.png b/TMessagesProj/src/main/assets/emoji/0_740.png index 8aa1ac624..a8d0c5003 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_740.png and b/TMessagesProj/src/main/assets/emoji/0_740.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_741.png b/TMessagesProj/src/main/assets/emoji/0_741.png index e99bccd06..85fa5aef4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_741.png and b/TMessagesProj/src/main/assets/emoji/0_741.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_742.png b/TMessagesProj/src/main/assets/emoji/0_742.png index 3d83ff10a..f5f9bdf8c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_742.png and b/TMessagesProj/src/main/assets/emoji/0_742.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_743.png b/TMessagesProj/src/main/assets/emoji/0_743.png index a7877b2d1..99d240073 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_743.png and b/TMessagesProj/src/main/assets/emoji/0_743.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_744.png b/TMessagesProj/src/main/assets/emoji/0_744.png index e3e8e5575..8771ba487 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_744.png and b/TMessagesProj/src/main/assets/emoji/0_744.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_745.png b/TMessagesProj/src/main/assets/emoji/0_745.png index 59cfcdd78..c83aebea2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_745.png and b/TMessagesProj/src/main/assets/emoji/0_745.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_746.png b/TMessagesProj/src/main/assets/emoji/0_746.png index 
daa925917..a8879e1d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_746.png and b/TMessagesProj/src/main/assets/emoji/0_746.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_747.png b/TMessagesProj/src/main/assets/emoji/0_747.png index b060d41b2..3bc54a623 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_747.png and b/TMessagesProj/src/main/assets/emoji/0_747.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_748.png b/TMessagesProj/src/main/assets/emoji/0_748.png index 5176e0c49..42701733e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_748.png and b/TMessagesProj/src/main/assets/emoji/0_748.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_749.png b/TMessagesProj/src/main/assets/emoji/0_749.png index c2986f637..8d8720c3e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_749.png and b/TMessagesProj/src/main/assets/emoji/0_749.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_75.png b/TMessagesProj/src/main/assets/emoji/0_75.png index 4d1935d85..cbdc4bff7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_75.png and b/TMessagesProj/src/main/assets/emoji/0_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_750.png b/TMessagesProj/src/main/assets/emoji/0_750.png index d59fda960..f47c634d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_750.png and b/TMessagesProj/src/main/assets/emoji/0_750.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_751.png b/TMessagesProj/src/main/assets/emoji/0_751.png index 7a7a4144d..3c6e271d3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_751.png and b/TMessagesProj/src/main/assets/emoji/0_751.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_752.png b/TMessagesProj/src/main/assets/emoji/0_752.png index 9716d825b..3c365719f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_752.png and b/TMessagesProj/src/main/assets/emoji/0_752.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_753.png b/TMessagesProj/src/main/assets/emoji/0_753.png index 8ddea6c7f..4f3dff3fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_753.png and b/TMessagesProj/src/main/assets/emoji/0_753.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_754.png b/TMessagesProj/src/main/assets/emoji/0_754.png index 608d49a3b..3fc714a08 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_754.png and b/TMessagesProj/src/main/assets/emoji/0_754.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_755.png b/TMessagesProj/src/main/assets/emoji/0_755.png index 19e1f47c2..ac4158382 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_755.png and b/TMessagesProj/src/main/assets/emoji/0_755.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_756.png b/TMessagesProj/src/main/assets/emoji/0_756.png index ec1930dbe..a96b14a13 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_756.png and b/TMessagesProj/src/main/assets/emoji/0_756.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_757.png b/TMessagesProj/src/main/assets/emoji/0_757.png index a5770a88d..396d53013 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_757.png and b/TMessagesProj/src/main/assets/emoji/0_757.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_758.png b/TMessagesProj/src/main/assets/emoji/0_758.png index ed9e66489..67d1cfa51 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_758.png and b/TMessagesProj/src/main/assets/emoji/0_758.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_759.png b/TMessagesProj/src/main/assets/emoji/0_759.png index 071f8e7bd..f6d7d0d2b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_759.png and b/TMessagesProj/src/main/assets/emoji/0_759.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_76.png b/TMessagesProj/src/main/assets/emoji/0_76.png index 30db05c20..f1e80c3b3 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_76.png and b/TMessagesProj/src/main/assets/emoji/0_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_760.png b/TMessagesProj/src/main/assets/emoji/0_760.png index cdc7df0b2..f9e6e17ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_760.png and b/TMessagesProj/src/main/assets/emoji/0_760.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_761.png b/TMessagesProj/src/main/assets/emoji/0_761.png index 598bb5702..5e55134d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_761.png and b/TMessagesProj/src/main/assets/emoji/0_761.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_762.png b/TMessagesProj/src/main/assets/emoji/0_762.png index 69b5f4b52..516620a9d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_762.png and b/TMessagesProj/src/main/assets/emoji/0_762.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_763.png b/TMessagesProj/src/main/assets/emoji/0_763.png index d19e2f0f5..788094984 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_763.png and b/TMessagesProj/src/main/assets/emoji/0_763.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_764.png b/TMessagesProj/src/main/assets/emoji/0_764.png index 4c67a753d..caa339803 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_764.png and b/TMessagesProj/src/main/assets/emoji/0_764.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_765.png b/TMessagesProj/src/main/assets/emoji/0_765.png index cb505f288..93e383089 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_765.png and b/TMessagesProj/src/main/assets/emoji/0_765.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_766.png b/TMessagesProj/src/main/assets/emoji/0_766.png index a46027b38..decf4ca44 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_766.png and b/TMessagesProj/src/main/assets/emoji/0_766.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_767.png 
b/TMessagesProj/src/main/assets/emoji/0_767.png index 06461b91a..56b34afc8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_767.png and b/TMessagesProj/src/main/assets/emoji/0_767.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_768.png b/TMessagesProj/src/main/assets/emoji/0_768.png index 9997a165b..33930fc7a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_768.png and b/TMessagesProj/src/main/assets/emoji/0_768.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_769.png b/TMessagesProj/src/main/assets/emoji/0_769.png index 891cd226f..eee24f3c9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_769.png and b/TMessagesProj/src/main/assets/emoji/0_769.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_77.png b/TMessagesProj/src/main/assets/emoji/0_77.png index c7e4d80a7..97232dba0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_77.png and b/TMessagesProj/src/main/assets/emoji/0_77.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_770.png b/TMessagesProj/src/main/assets/emoji/0_770.png index 04392d17e..8ddea6c7f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_770.png and b/TMessagesProj/src/main/assets/emoji/0_770.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_771.png b/TMessagesProj/src/main/assets/emoji/0_771.png index 9a9cfd7f4..f48044ba2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_771.png and b/TMessagesProj/src/main/assets/emoji/0_771.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_772.png b/TMessagesProj/src/main/assets/emoji/0_772.png index 296af268b..19e1f47c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_772.png and b/TMessagesProj/src/main/assets/emoji/0_772.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_773.png b/TMessagesProj/src/main/assets/emoji/0_773.png index e7c0db884..dadca3920 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_773.png and 
b/TMessagesProj/src/main/assets/emoji/0_773.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_774.png b/TMessagesProj/src/main/assets/emoji/0_774.png index 08c4539b5..a5770a88d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_774.png and b/TMessagesProj/src/main/assets/emoji/0_774.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_775.png b/TMessagesProj/src/main/assets/emoji/0_775.png index 2d9fc5531..ed9e66489 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_775.png and b/TMessagesProj/src/main/assets/emoji/0_775.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_776.png b/TMessagesProj/src/main/assets/emoji/0_776.png index 1289e2d15..071f8e7bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_776.png and b/TMessagesProj/src/main/assets/emoji/0_776.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_777.png b/TMessagesProj/src/main/assets/emoji/0_777.png index fb9c64b35..cdc7df0b2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_777.png and b/TMessagesProj/src/main/assets/emoji/0_777.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_778.png b/TMessagesProj/src/main/assets/emoji/0_778.png index 4bc0aa3ba..598bb5702 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_778.png and b/TMessagesProj/src/main/assets/emoji/0_778.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_779.png b/TMessagesProj/src/main/assets/emoji/0_779.png index e6905714a..69b5f4b52 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_779.png and b/TMessagesProj/src/main/assets/emoji/0_779.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_78.png b/TMessagesProj/src/main/assets/emoji/0_78.png index cbd4c042c..752b46d98 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_78.png and b/TMessagesProj/src/main/assets/emoji/0_78.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_780.png b/TMessagesProj/src/main/assets/emoji/0_780.png index 
86da784f4..52be148e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_780.png and b/TMessagesProj/src/main/assets/emoji/0_780.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_781.png b/TMessagesProj/src/main/assets/emoji/0_781.png index 3898a9782..9bd385f32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_781.png and b/TMessagesProj/src/main/assets/emoji/0_781.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_782.png b/TMessagesProj/src/main/assets/emoji/0_782.png index 0f5ff5885..2e1c2f9cc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_782.png and b/TMessagesProj/src/main/assets/emoji/0_782.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_783.png b/TMessagesProj/src/main/assets/emoji/0_783.png index 6143364ad..42f422774 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_783.png and b/TMessagesProj/src/main/assets/emoji/0_783.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_784.png b/TMessagesProj/src/main/assets/emoji/0_784.png index d14776449..06461b91a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_784.png and b/TMessagesProj/src/main/assets/emoji/0_784.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_785.png b/TMessagesProj/src/main/assets/emoji/0_785.png index 18d7aecc7..16f951e19 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_785.png and b/TMessagesProj/src/main/assets/emoji/0_785.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_786.png b/TMessagesProj/src/main/assets/emoji/0_786.png index d4f712ad3..891cd226f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_786.png and b/TMessagesProj/src/main/assets/emoji/0_786.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_787.png b/TMessagesProj/src/main/assets/emoji/0_787.png index 18cd26d23..df42055e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_787.png and b/TMessagesProj/src/main/assets/emoji/0_787.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_788.png b/TMessagesProj/src/main/assets/emoji/0_788.png index 2725a9255..233ef79f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_788.png and b/TMessagesProj/src/main/assets/emoji/0_788.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_789.png b/TMessagesProj/src/main/assets/emoji/0_789.png index 654156f54..14ba5e04b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_789.png and b/TMessagesProj/src/main/assets/emoji/0_789.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_79.png b/TMessagesProj/src/main/assets/emoji/0_79.png index dde0d63e0..3555fa469 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_79.png and b/TMessagesProj/src/main/assets/emoji/0_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_790.png b/TMessagesProj/src/main/assets/emoji/0_790.png index 5f694d390..b7d029cf3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_790.png and b/TMessagesProj/src/main/assets/emoji/0_790.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_791.png b/TMessagesProj/src/main/assets/emoji/0_791.png index 2ca4623d8..d63e1d71e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_791.png and b/TMessagesProj/src/main/assets/emoji/0_791.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_792.png b/TMessagesProj/src/main/assets/emoji/0_792.png index d6b264dbc..d499fd5a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_792.png and b/TMessagesProj/src/main/assets/emoji/0_792.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_793.png b/TMessagesProj/src/main/assets/emoji/0_793.png index e14548aa6..e819ae120 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_793.png and b/TMessagesProj/src/main/assets/emoji/0_793.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_794.png b/TMessagesProj/src/main/assets/emoji/0_794.png index 8c1bc24db..d041cee9e 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_794.png and b/TMessagesProj/src/main/assets/emoji/0_794.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_795.png b/TMessagesProj/src/main/assets/emoji/0_795.png index ecd34b196..4bc0aa3ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_795.png and b/TMessagesProj/src/main/assets/emoji/0_795.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_796.png b/TMessagesProj/src/main/assets/emoji/0_796.png index 93df01b72..195d2be00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_796.png and b/TMessagesProj/src/main/assets/emoji/0_796.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_797.png b/TMessagesProj/src/main/assets/emoji/0_797.png index 8c0af68b1..86da784f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_797.png and b/TMessagesProj/src/main/assets/emoji/0_797.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_798.png b/TMessagesProj/src/main/assets/emoji/0_798.png index 82a5dcd88..c74f1dcdf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_798.png and b/TMessagesProj/src/main/assets/emoji/0_798.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_799.png b/TMessagesProj/src/main/assets/emoji/0_799.png index e46521c40..409c8ca70 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_799.png and b/TMessagesProj/src/main/assets/emoji/0_799.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_8.png b/TMessagesProj/src/main/assets/emoji/0_8.png index 3a792409d..9485abd47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_8.png and b/TMessagesProj/src/main/assets/emoji/0_8.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_80.png b/TMessagesProj/src/main/assets/emoji/0_80.png index c7abf2fbc..4d919f542 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_80.png and b/TMessagesProj/src/main/assets/emoji/0_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_800.png 
b/TMessagesProj/src/main/assets/emoji/0_800.png index e660ac236..c8958e9c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_800.png and b/TMessagesProj/src/main/assets/emoji/0_800.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_801.png b/TMessagesProj/src/main/assets/emoji/0_801.png index b50d03df0..0386c6b0a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_801.png and b/TMessagesProj/src/main/assets/emoji/0_801.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_802.png b/TMessagesProj/src/main/assets/emoji/0_802.png index 8d3ccf211..38e0f3a38 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_802.png and b/TMessagesProj/src/main/assets/emoji/0_802.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_803.png b/TMessagesProj/src/main/assets/emoji/0_803.png index e5ddc9ce0..7f4499dfc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_803.png and b/TMessagesProj/src/main/assets/emoji/0_803.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_804.png b/TMessagesProj/src/main/assets/emoji/0_804.png index 767f2212b..46a10f865 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_804.png and b/TMessagesProj/src/main/assets/emoji/0_804.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_805.png b/TMessagesProj/src/main/assets/emoji/0_805.png index 568acf2b3..3781781ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_805.png and b/TMessagesProj/src/main/assets/emoji/0_805.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_806.png b/TMessagesProj/src/main/assets/emoji/0_806.png index 3fed689aa..60802f89f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_806.png and b/TMessagesProj/src/main/assets/emoji/0_806.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_807.png b/TMessagesProj/src/main/assets/emoji/0_807.png index a08e2dec8..2fdfb6326 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_807.png and 
b/TMessagesProj/src/main/assets/emoji/0_807.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_808.png b/TMessagesProj/src/main/assets/emoji/0_808.png index 43cf4d802..84ebbedf4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_808.png and b/TMessagesProj/src/main/assets/emoji/0_808.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_809.png b/TMessagesProj/src/main/assets/emoji/0_809.png index 7d055a765..50dc6c222 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_809.png and b/TMessagesProj/src/main/assets/emoji/0_809.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_81.png b/TMessagesProj/src/main/assets/emoji/0_81.png index fdee54838..99b5cb941 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_81.png and b/TMessagesProj/src/main/assets/emoji/0_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_810.png b/TMessagesProj/src/main/assets/emoji/0_810.png index 4e5383e44..94cd88859 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_810.png and b/TMessagesProj/src/main/assets/emoji/0_810.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_811.png b/TMessagesProj/src/main/assets/emoji/0_811.png index 6fbe5c02f..5edf2f551 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_811.png and b/TMessagesProj/src/main/assets/emoji/0_811.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_812.png b/TMessagesProj/src/main/assets/emoji/0_812.png index 6e543d4a9..eb11fcbe9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_812.png and b/TMessagesProj/src/main/assets/emoji/0_812.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_813.png b/TMessagesProj/src/main/assets/emoji/0_813.png index 974b54be9..e8da4b4f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_813.png and b/TMessagesProj/src/main/assets/emoji/0_813.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_814.png b/TMessagesProj/src/main/assets/emoji/0_814.png index 
3c0ab3267..0e91cb995 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_814.png and b/TMessagesProj/src/main/assets/emoji/0_814.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_815.png b/TMessagesProj/src/main/assets/emoji/0_815.png index afa34c611..e218d707a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_815.png and b/TMessagesProj/src/main/assets/emoji/0_815.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_816.png b/TMessagesProj/src/main/assets/emoji/0_816.png index 8212b9ad7..d0f145e15 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_816.png and b/TMessagesProj/src/main/assets/emoji/0_816.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_817.png b/TMessagesProj/src/main/assets/emoji/0_817.png index 21f72f701..e9cb6c9cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_817.png and b/TMessagesProj/src/main/assets/emoji/0_817.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_818.png b/TMessagesProj/src/main/assets/emoji/0_818.png index a93ecf07f..f0f2c022c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_818.png and b/TMessagesProj/src/main/assets/emoji/0_818.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_819.png b/TMessagesProj/src/main/assets/emoji/0_819.png index 257bfacfe..c0da2ddcf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_819.png and b/TMessagesProj/src/main/assets/emoji/0_819.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_82.png b/TMessagesProj/src/main/assets/emoji/0_82.png index 4d5a3a1b1..31066be2d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_82.png and b/TMessagesProj/src/main/assets/emoji/0_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_820.png b/TMessagesProj/src/main/assets/emoji/0_820.png index b12597945..f55e09200 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_820.png and b/TMessagesProj/src/main/assets/emoji/0_820.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_821.png b/TMessagesProj/src/main/assets/emoji/0_821.png index f6f3213b0..e646176ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_821.png and b/TMessagesProj/src/main/assets/emoji/0_821.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_822.png b/TMessagesProj/src/main/assets/emoji/0_822.png index 496100f8e..14f865ef0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_822.png and b/TMessagesProj/src/main/assets/emoji/0_822.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_823.png b/TMessagesProj/src/main/assets/emoji/0_823.png index e5eb40914..78ffe8fd7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_823.png and b/TMessagesProj/src/main/assets/emoji/0_823.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_824.png b/TMessagesProj/src/main/assets/emoji/0_824.png index 3be04bc5a..852ce044c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_824.png and b/TMessagesProj/src/main/assets/emoji/0_824.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_825.png b/TMessagesProj/src/main/assets/emoji/0_825.png index 140e33a20..4621e0127 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_825.png and b/TMessagesProj/src/main/assets/emoji/0_825.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_826.png b/TMessagesProj/src/main/assets/emoji/0_826.png index 3dbe9536b..c6a202408 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_826.png and b/TMessagesProj/src/main/assets/emoji/0_826.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_827.png b/TMessagesProj/src/main/assets/emoji/0_827.png index 5845c32ae..394d5f638 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_827.png and b/TMessagesProj/src/main/assets/emoji/0_827.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_828.png b/TMessagesProj/src/main/assets/emoji/0_828.png index fe60569ca..8340ecd5b 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_828.png and b/TMessagesProj/src/main/assets/emoji/0_828.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_829.png b/TMessagesProj/src/main/assets/emoji/0_829.png index f099d9205..153c54fc8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_829.png and b/TMessagesProj/src/main/assets/emoji/0_829.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_83.png b/TMessagesProj/src/main/assets/emoji/0_83.png index f07e1e9cf..c11b010fb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_83.png and b/TMessagesProj/src/main/assets/emoji/0_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_830.png b/TMessagesProj/src/main/assets/emoji/0_830.png index 20ec2fb11..02bf2cb6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_830.png and b/TMessagesProj/src/main/assets/emoji/0_830.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_831.png b/TMessagesProj/src/main/assets/emoji/0_831.png index 001f851b5..715623432 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_831.png and b/TMessagesProj/src/main/assets/emoji/0_831.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_832.png b/TMessagesProj/src/main/assets/emoji/0_832.png index 3d4d4f7d0..49d794f23 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_832.png and b/TMessagesProj/src/main/assets/emoji/0_832.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_833.png b/TMessagesProj/src/main/assets/emoji/0_833.png index 8adc3ab8c..e429d99d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_833.png and b/TMessagesProj/src/main/assets/emoji/0_833.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_834.png b/TMessagesProj/src/main/assets/emoji/0_834.png index 1193c6456..1592d1ba0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_834.png and b/TMessagesProj/src/main/assets/emoji/0_834.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_835.png 
b/TMessagesProj/src/main/assets/emoji/0_835.png index 6bad6fe2c..37a1b553d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_835.png and b/TMessagesProj/src/main/assets/emoji/0_835.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_836.png b/TMessagesProj/src/main/assets/emoji/0_836.png index 6a6c72aa4..9bd7c5c41 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_836.png and b/TMessagesProj/src/main/assets/emoji/0_836.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_837.png b/TMessagesProj/src/main/assets/emoji/0_837.png index dfe28e4ac..548cfdd6a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_837.png and b/TMessagesProj/src/main/assets/emoji/0_837.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_838.png b/TMessagesProj/src/main/assets/emoji/0_838.png index 3fe27a09f..6eab57769 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_838.png and b/TMessagesProj/src/main/assets/emoji/0_838.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_839.png b/TMessagesProj/src/main/assets/emoji/0_839.png index 8a64b0a52..40a2ad9ad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_839.png and b/TMessagesProj/src/main/assets/emoji/0_839.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_84.png b/TMessagesProj/src/main/assets/emoji/0_84.png index 0679b0ef6..36295a80d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_84.png and b/TMessagesProj/src/main/assets/emoji/0_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_840.png b/TMessagesProj/src/main/assets/emoji/0_840.png index 3d2819495..a6d8b489b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_840.png and b/TMessagesProj/src/main/assets/emoji/0_840.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_841.png b/TMessagesProj/src/main/assets/emoji/0_841.png index f08d7e866..27a3dc5b8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_841.png and 
b/TMessagesProj/src/main/assets/emoji/0_841.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_842.png b/TMessagesProj/src/main/assets/emoji/0_842.png index 46fcccc3b..84b2bbdfe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_842.png and b/TMessagesProj/src/main/assets/emoji/0_842.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_843.png b/TMessagesProj/src/main/assets/emoji/0_843.png index a8392e8d4..930b8bd30 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_843.png and b/TMessagesProj/src/main/assets/emoji/0_843.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_844.png b/TMessagesProj/src/main/assets/emoji/0_844.png index 3cf33ba22..75dceb4e9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_844.png and b/TMessagesProj/src/main/assets/emoji/0_844.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_845.png b/TMessagesProj/src/main/assets/emoji/0_845.png index 18e25c7e5..a50d5aeef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_845.png and b/TMessagesProj/src/main/assets/emoji/0_845.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_846.png b/TMessagesProj/src/main/assets/emoji/0_846.png index b14762351..7075cc7d5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_846.png and b/TMessagesProj/src/main/assets/emoji/0_846.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_847.png b/TMessagesProj/src/main/assets/emoji/0_847.png index 1d8facee0..38b41a331 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_847.png and b/TMessagesProj/src/main/assets/emoji/0_847.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_848.png b/TMessagesProj/src/main/assets/emoji/0_848.png index be09d14eb..dcad5b334 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_848.png and b/TMessagesProj/src/main/assets/emoji/0_848.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_849.png b/TMessagesProj/src/main/assets/emoji/0_849.png index 
ddd745e93..7f9b2bbc9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_849.png and b/TMessagesProj/src/main/assets/emoji/0_849.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_85.png b/TMessagesProj/src/main/assets/emoji/0_85.png index afa3ea7bb..b643b8dd5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_85.png and b/TMessagesProj/src/main/assets/emoji/0_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_850.png b/TMessagesProj/src/main/assets/emoji/0_850.png index 942f9a523..811ef5f1d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_850.png and b/TMessagesProj/src/main/assets/emoji/0_850.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_851.png b/TMessagesProj/src/main/assets/emoji/0_851.png index 59e9bdbbb..51e783a6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_851.png and b/TMessagesProj/src/main/assets/emoji/0_851.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_852.png b/TMessagesProj/src/main/assets/emoji/0_852.png index 1ac3d25f8..349463aaa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_852.png and b/TMessagesProj/src/main/assets/emoji/0_852.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_853.png b/TMessagesProj/src/main/assets/emoji/0_853.png index ee51a24ee..e20d9aa5f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_853.png and b/TMessagesProj/src/main/assets/emoji/0_853.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_854.png b/TMessagesProj/src/main/assets/emoji/0_854.png index 670b32526..16b16b15b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_854.png and b/TMessagesProj/src/main/assets/emoji/0_854.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_855.png b/TMessagesProj/src/main/assets/emoji/0_855.png index f86959cca..44a46c6ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_855.png and b/TMessagesProj/src/main/assets/emoji/0_855.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_856.png b/TMessagesProj/src/main/assets/emoji/0_856.png index 39aef3bc9..6115e5f4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_856.png and b/TMessagesProj/src/main/assets/emoji/0_856.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_857.png b/TMessagesProj/src/main/assets/emoji/0_857.png index 7026b913c..1dd3b5562 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_857.png and b/TMessagesProj/src/main/assets/emoji/0_857.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_858.png b/TMessagesProj/src/main/assets/emoji/0_858.png index 03607e76b..5dc9b8926 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_858.png and b/TMessagesProj/src/main/assets/emoji/0_858.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_859.png b/TMessagesProj/src/main/assets/emoji/0_859.png index ec80858f2..4f21fb399 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_859.png and b/TMessagesProj/src/main/assets/emoji/0_859.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_86.png b/TMessagesProj/src/main/assets/emoji/0_86.png index 4206bae88..3f4faa796 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_86.png and b/TMessagesProj/src/main/assets/emoji/0_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_860.png b/TMessagesProj/src/main/assets/emoji/0_860.png index fca9d35b6..d5e286b54 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_860.png and b/TMessagesProj/src/main/assets/emoji/0_860.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_861.png b/TMessagesProj/src/main/assets/emoji/0_861.png index 8515f2c12..b30a21fdd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_861.png and b/TMessagesProj/src/main/assets/emoji/0_861.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_862.png b/TMessagesProj/src/main/assets/emoji/0_862.png index 592103309..93de4b218 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_862.png and b/TMessagesProj/src/main/assets/emoji/0_862.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_863.png b/TMessagesProj/src/main/assets/emoji/0_863.png index 4515355c8..a7dc277e6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_863.png and b/TMessagesProj/src/main/assets/emoji/0_863.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_864.png b/TMessagesProj/src/main/assets/emoji/0_864.png index 808d163fa..add93aa0e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_864.png and b/TMessagesProj/src/main/assets/emoji/0_864.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_865.png b/TMessagesProj/src/main/assets/emoji/0_865.png index c1218b8c7..02efe6987 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_865.png and b/TMessagesProj/src/main/assets/emoji/0_865.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_866.png b/TMessagesProj/src/main/assets/emoji/0_866.png index cbeb32bd0..cddd82b6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_866.png and b/TMessagesProj/src/main/assets/emoji/0_866.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_867.png b/TMessagesProj/src/main/assets/emoji/0_867.png index f3a818b2c..1c3864370 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_867.png and b/TMessagesProj/src/main/assets/emoji/0_867.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_868.png b/TMessagesProj/src/main/assets/emoji/0_868.png index 17057d099..c27dd9698 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_868.png and b/TMessagesProj/src/main/assets/emoji/0_868.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_869.png b/TMessagesProj/src/main/assets/emoji/0_869.png index 616283eda..2c21a8e92 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_869.png and b/TMessagesProj/src/main/assets/emoji/0_869.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_87.png 
b/TMessagesProj/src/main/assets/emoji/0_87.png index 1304f786a..a5b6ac07e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_87.png and b/TMessagesProj/src/main/assets/emoji/0_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_870.png b/TMessagesProj/src/main/assets/emoji/0_870.png index 118e9c027..851822a50 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_870.png and b/TMessagesProj/src/main/assets/emoji/0_870.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_871.png b/TMessagesProj/src/main/assets/emoji/0_871.png index 9dedda198..d5f2b1a6d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_871.png and b/TMessagesProj/src/main/assets/emoji/0_871.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_872.png b/TMessagesProj/src/main/assets/emoji/0_872.png index 8b8c32574..4c1fcea43 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_872.png and b/TMessagesProj/src/main/assets/emoji/0_872.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_873.png b/TMessagesProj/src/main/assets/emoji/0_873.png index efe8ee27e..b31dd8315 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_873.png and b/TMessagesProj/src/main/assets/emoji/0_873.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_874.png b/TMessagesProj/src/main/assets/emoji/0_874.png index 997a99b76..17316b896 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_874.png and b/TMessagesProj/src/main/assets/emoji/0_874.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_875.png b/TMessagesProj/src/main/assets/emoji/0_875.png index b54f35ce9..103cdaf5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_875.png and b/TMessagesProj/src/main/assets/emoji/0_875.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_876.png b/TMessagesProj/src/main/assets/emoji/0_876.png index 8e4aa2603..e8c5c3fa7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_876.png and 
b/TMessagesProj/src/main/assets/emoji/0_876.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_877.png b/TMessagesProj/src/main/assets/emoji/0_877.png index baaea1f24..c669538bb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_877.png and b/TMessagesProj/src/main/assets/emoji/0_877.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_878.png b/TMessagesProj/src/main/assets/emoji/0_878.png index ed17983ff..00ed58fd7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_878.png and b/TMessagesProj/src/main/assets/emoji/0_878.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_879.png b/TMessagesProj/src/main/assets/emoji/0_879.png index 29348038b..d1c17f623 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_879.png and b/TMessagesProj/src/main/assets/emoji/0_879.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_88.png b/TMessagesProj/src/main/assets/emoji/0_88.png index 1088a93f7..a625a681d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_88.png and b/TMessagesProj/src/main/assets/emoji/0_88.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_880.png b/TMessagesProj/src/main/assets/emoji/0_880.png index f7cac5176..2f26b0a05 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_880.png and b/TMessagesProj/src/main/assets/emoji/0_880.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_881.png b/TMessagesProj/src/main/assets/emoji/0_881.png index 47efcb6d9..856ce4ff4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_881.png and b/TMessagesProj/src/main/assets/emoji/0_881.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_882.png b/TMessagesProj/src/main/assets/emoji/0_882.png index d381d9e08..411e48c7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_882.png and b/TMessagesProj/src/main/assets/emoji/0_882.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_883.png b/TMessagesProj/src/main/assets/emoji/0_883.png index 
0b255265d..2a4def9c9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_883.png and b/TMessagesProj/src/main/assets/emoji/0_883.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_884.png b/TMessagesProj/src/main/assets/emoji/0_884.png index 4c8e5ddd2..9273f0922 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_884.png and b/TMessagesProj/src/main/assets/emoji/0_884.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_885.png b/TMessagesProj/src/main/assets/emoji/0_885.png index 0912adf6d..d25f77ea1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_885.png and b/TMessagesProj/src/main/assets/emoji/0_885.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_886.png b/TMessagesProj/src/main/assets/emoji/0_886.png index 8ba473c25..65ffae58a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_886.png and b/TMessagesProj/src/main/assets/emoji/0_886.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_887.png b/TMessagesProj/src/main/assets/emoji/0_887.png index d0cff2f1a..59ec733bb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_887.png and b/TMessagesProj/src/main/assets/emoji/0_887.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_888.png b/TMessagesProj/src/main/assets/emoji/0_888.png index 7cfecdbcd..b14555219 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_888.png and b/TMessagesProj/src/main/assets/emoji/0_888.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_889.png b/TMessagesProj/src/main/assets/emoji/0_889.png index 16f7ad0b5..073788e26 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_889.png and b/TMessagesProj/src/main/assets/emoji/0_889.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_89.png b/TMessagesProj/src/main/assets/emoji/0_89.png index f56910408..ef4a65b5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_89.png and b/TMessagesProj/src/main/assets/emoji/0_89.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_890.png b/TMessagesProj/src/main/assets/emoji/0_890.png index 257b078e1..0ed052945 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_890.png and b/TMessagesProj/src/main/assets/emoji/0_890.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_891.png b/TMessagesProj/src/main/assets/emoji/0_891.png index 2a956321a..9142f6b8b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_891.png and b/TMessagesProj/src/main/assets/emoji/0_891.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_892.png b/TMessagesProj/src/main/assets/emoji/0_892.png index 600f2e00f..3a34eb503 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_892.png and b/TMessagesProj/src/main/assets/emoji/0_892.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_893.png b/TMessagesProj/src/main/assets/emoji/0_893.png index 0e0f197c5..da5bff903 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_893.png and b/TMessagesProj/src/main/assets/emoji/0_893.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_894.png b/TMessagesProj/src/main/assets/emoji/0_894.png index 3b49798df..f9cac5cbf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_894.png and b/TMessagesProj/src/main/assets/emoji/0_894.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_895.png b/TMessagesProj/src/main/assets/emoji/0_895.png index f78432c69..508cc5b96 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_895.png and b/TMessagesProj/src/main/assets/emoji/0_895.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_896.png b/TMessagesProj/src/main/assets/emoji/0_896.png index a7a6f6fe0..94122f881 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_896.png and b/TMessagesProj/src/main/assets/emoji/0_896.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_897.png b/TMessagesProj/src/main/assets/emoji/0_897.png index cf0d2c723..f03773f63 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_897.png and b/TMessagesProj/src/main/assets/emoji/0_897.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_898.png b/TMessagesProj/src/main/assets/emoji/0_898.png index 18cd91dae..54300977f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_898.png and b/TMessagesProj/src/main/assets/emoji/0_898.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_899.png b/TMessagesProj/src/main/assets/emoji/0_899.png index 7c5449e8e..ab9e9797c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_899.png and b/TMessagesProj/src/main/assets/emoji/0_899.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_9.png b/TMessagesProj/src/main/assets/emoji/0_9.png index b0d2bb30e..9155ab91d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_9.png and b/TMessagesProj/src/main/assets/emoji/0_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_90.png b/TMessagesProj/src/main/assets/emoji/0_90.png index 1c945cdb8..669959c24 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_90.png and b/TMessagesProj/src/main/assets/emoji/0_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_900.png b/TMessagesProj/src/main/assets/emoji/0_900.png index a77be40fe..1af0e0ca6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_900.png and b/TMessagesProj/src/main/assets/emoji/0_900.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_901.png b/TMessagesProj/src/main/assets/emoji/0_901.png index 65f347550..bfe3e6aa9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_901.png and b/TMessagesProj/src/main/assets/emoji/0_901.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_902.png b/TMessagesProj/src/main/assets/emoji/0_902.png index 32f022579..3ea84e1b8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_902.png and b/TMessagesProj/src/main/assets/emoji/0_902.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_903.png 
b/TMessagesProj/src/main/assets/emoji/0_903.png index bf6f736ab..6e41af5b4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_903.png and b/TMessagesProj/src/main/assets/emoji/0_903.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_904.png b/TMessagesProj/src/main/assets/emoji/0_904.png index 80da1c4c7..803168e00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_904.png and b/TMessagesProj/src/main/assets/emoji/0_904.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_905.png b/TMessagesProj/src/main/assets/emoji/0_905.png index b74e37bab..cbf278dff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_905.png and b/TMessagesProj/src/main/assets/emoji/0_905.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_906.png b/TMessagesProj/src/main/assets/emoji/0_906.png index da52131ae..461c6ddcf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_906.png and b/TMessagesProj/src/main/assets/emoji/0_906.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_907.png b/TMessagesProj/src/main/assets/emoji/0_907.png index 319645e02..1539a3e10 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_907.png and b/TMessagesProj/src/main/assets/emoji/0_907.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_908.png b/TMessagesProj/src/main/assets/emoji/0_908.png index c4c46f012..2d2ecbf6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_908.png and b/TMessagesProj/src/main/assets/emoji/0_908.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_909.png b/TMessagesProj/src/main/assets/emoji/0_909.png index 2253fd97e..a576103fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_909.png and b/TMessagesProj/src/main/assets/emoji/0_909.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_91.png b/TMessagesProj/src/main/assets/emoji/0_91.png index cc440f2de..0e671ffb1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_91.png and 
b/TMessagesProj/src/main/assets/emoji/0_91.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_910.png b/TMessagesProj/src/main/assets/emoji/0_910.png index 97c0d77c6..d107c8633 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_910.png and b/TMessagesProj/src/main/assets/emoji/0_910.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_911.png b/TMessagesProj/src/main/assets/emoji/0_911.png index e7ba112e1..8609af52e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_911.png and b/TMessagesProj/src/main/assets/emoji/0_911.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_912.png b/TMessagesProj/src/main/assets/emoji/0_912.png index e2a07251e..c8cd6ad69 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_912.png and b/TMessagesProj/src/main/assets/emoji/0_912.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_913.png b/TMessagesProj/src/main/assets/emoji/0_913.png index 7956e17ed..5bacd84e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_913.png and b/TMessagesProj/src/main/assets/emoji/0_913.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_914.png b/TMessagesProj/src/main/assets/emoji/0_914.png index eb5342a66..d81be5b65 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_914.png and b/TMessagesProj/src/main/assets/emoji/0_914.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_915.png b/TMessagesProj/src/main/assets/emoji/0_915.png index b8c3e6b64..4daa1de2e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_915.png and b/TMessagesProj/src/main/assets/emoji/0_915.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_916.png b/TMessagesProj/src/main/assets/emoji/0_916.png index 10eb891f0..7c84e4da8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_916.png and b/TMessagesProj/src/main/assets/emoji/0_916.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_917.png b/TMessagesProj/src/main/assets/emoji/0_917.png index 
74f71293a..f978e51e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_917.png and b/TMessagesProj/src/main/assets/emoji/0_917.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_918.png b/TMessagesProj/src/main/assets/emoji/0_918.png index 857f94e78..72d16335c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_918.png and b/TMessagesProj/src/main/assets/emoji/0_918.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_919.png b/TMessagesProj/src/main/assets/emoji/0_919.png index 94278cde9..4de32e309 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_919.png and b/TMessagesProj/src/main/assets/emoji/0_919.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_92.png b/TMessagesProj/src/main/assets/emoji/0_92.png index 9c2820ee5..3c85a694e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_92.png and b/TMessagesProj/src/main/assets/emoji/0_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_920.png b/TMessagesProj/src/main/assets/emoji/0_920.png index c37fe355c..bfd45d1e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_920.png and b/TMessagesProj/src/main/assets/emoji/0_920.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_921.png b/TMessagesProj/src/main/assets/emoji/0_921.png index 0c026e5b9..fd503a6ea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_921.png and b/TMessagesProj/src/main/assets/emoji/0_921.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_922.png b/TMessagesProj/src/main/assets/emoji/0_922.png index 340ed0ad0..a48f26394 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_922.png and b/TMessagesProj/src/main/assets/emoji/0_922.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_923.png b/TMessagesProj/src/main/assets/emoji/0_923.png index e26a0ca09..49b26be77 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_923.png and b/TMessagesProj/src/main/assets/emoji/0_923.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_924.png b/TMessagesProj/src/main/assets/emoji/0_924.png index 99aeb2eea..46a09a920 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_924.png and b/TMessagesProj/src/main/assets/emoji/0_924.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_925.png b/TMessagesProj/src/main/assets/emoji/0_925.png index 79ff4de63..9d7127693 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_925.png and b/TMessagesProj/src/main/assets/emoji/0_925.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_926.png b/TMessagesProj/src/main/assets/emoji/0_926.png index 0712063a1..77782cb60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_926.png and b/TMessagesProj/src/main/assets/emoji/0_926.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_927.png b/TMessagesProj/src/main/assets/emoji/0_927.png index 1e87467ab..6ba987fb2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_927.png and b/TMessagesProj/src/main/assets/emoji/0_927.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_928.png b/TMessagesProj/src/main/assets/emoji/0_928.png index 40620a810..950500cf1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_928.png and b/TMessagesProj/src/main/assets/emoji/0_928.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_929.png b/TMessagesProj/src/main/assets/emoji/0_929.png index c6518fd69..d214f7932 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_929.png and b/TMessagesProj/src/main/assets/emoji/0_929.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_93.png b/TMessagesProj/src/main/assets/emoji/0_93.png index fb19ab896..3811dd63d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_93.png and b/TMessagesProj/src/main/assets/emoji/0_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_930.png b/TMessagesProj/src/main/assets/emoji/0_930.png index f92b1b289..bd1f01a99 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_930.png and b/TMessagesProj/src/main/assets/emoji/0_930.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_931.png b/TMessagesProj/src/main/assets/emoji/0_931.png index e9bc734e2..089968b90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_931.png and b/TMessagesProj/src/main/assets/emoji/0_931.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_932.png b/TMessagesProj/src/main/assets/emoji/0_932.png index 03f050440..64ad94504 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_932.png and b/TMessagesProj/src/main/assets/emoji/0_932.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_933.png b/TMessagesProj/src/main/assets/emoji/0_933.png index 5d0947b17..a117127c4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_933.png and b/TMessagesProj/src/main/assets/emoji/0_933.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_934.png b/TMessagesProj/src/main/assets/emoji/0_934.png index b091d371c..4e3354256 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_934.png and b/TMessagesProj/src/main/assets/emoji/0_934.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_935.png b/TMessagesProj/src/main/assets/emoji/0_935.png index 4d2447a7d..c8e166353 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_935.png and b/TMessagesProj/src/main/assets/emoji/0_935.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_936.png b/TMessagesProj/src/main/assets/emoji/0_936.png index f301d0034..e22e38a82 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_936.png and b/TMessagesProj/src/main/assets/emoji/0_936.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_937.png b/TMessagesProj/src/main/assets/emoji/0_937.png index 295f8a2ad..31a4cc6d7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_937.png and b/TMessagesProj/src/main/assets/emoji/0_937.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_938.png 
b/TMessagesProj/src/main/assets/emoji/0_938.png index 00cef70fe..87edd12ef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_938.png and b/TMessagesProj/src/main/assets/emoji/0_938.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_939.png b/TMessagesProj/src/main/assets/emoji/0_939.png index 3e30c21fc..11ff3906f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_939.png and b/TMessagesProj/src/main/assets/emoji/0_939.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_94.png b/TMessagesProj/src/main/assets/emoji/0_94.png index 5531a9c9d..a9f8b0d79 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_94.png and b/TMessagesProj/src/main/assets/emoji/0_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_940.png b/TMessagesProj/src/main/assets/emoji/0_940.png index c05d26547..911f7e784 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_940.png and b/TMessagesProj/src/main/assets/emoji/0_940.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_941.png b/TMessagesProj/src/main/assets/emoji/0_941.png index 3e31ecef2..337fe1678 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_941.png and b/TMessagesProj/src/main/assets/emoji/0_941.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_942.png b/TMessagesProj/src/main/assets/emoji/0_942.png index ae31cdeab..38ba67932 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_942.png and b/TMessagesProj/src/main/assets/emoji/0_942.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_943.png b/TMessagesProj/src/main/assets/emoji/0_943.png index 0275d19d5..049db616c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_943.png and b/TMessagesProj/src/main/assets/emoji/0_943.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_944.png b/TMessagesProj/src/main/assets/emoji/0_944.png index b87b270e6..5bc0eb3f5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_944.png and 
b/TMessagesProj/src/main/assets/emoji/0_944.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_945.png b/TMessagesProj/src/main/assets/emoji/0_945.png index 66b09238c..d3a0f4fce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_945.png and b/TMessagesProj/src/main/assets/emoji/0_945.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_946.png b/TMessagesProj/src/main/assets/emoji/0_946.png index e169b40dd..bb6cc2296 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_946.png and b/TMessagesProj/src/main/assets/emoji/0_946.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_947.png b/TMessagesProj/src/main/assets/emoji/0_947.png index 9c0488451..160a7bd51 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_947.png and b/TMessagesProj/src/main/assets/emoji/0_947.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_948.png b/TMessagesProj/src/main/assets/emoji/0_948.png index 260ea94fb..77dd51849 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_948.png and b/TMessagesProj/src/main/assets/emoji/0_948.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_949.png b/TMessagesProj/src/main/assets/emoji/0_949.png index 5f9e2f26e..8c0522e72 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_949.png and b/TMessagesProj/src/main/assets/emoji/0_949.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_95.png b/TMessagesProj/src/main/assets/emoji/0_95.png index 5c1bd19d8..79ee2da06 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_95.png and b/TMessagesProj/src/main/assets/emoji/0_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_950.png b/TMessagesProj/src/main/assets/emoji/0_950.png index 8b086e775..a5f32cdd4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_950.png and b/TMessagesProj/src/main/assets/emoji/0_950.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_951.png b/TMessagesProj/src/main/assets/emoji/0_951.png index 
304fe6a21..5a2d67eaf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_951.png and b/TMessagesProj/src/main/assets/emoji/0_951.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_952.png b/TMessagesProj/src/main/assets/emoji/0_952.png index c96ab9b4c..5479ce01e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_952.png and b/TMessagesProj/src/main/assets/emoji/0_952.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_953.png b/TMessagesProj/src/main/assets/emoji/0_953.png index 91849b57a..e3fcf3235 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_953.png and b/TMessagesProj/src/main/assets/emoji/0_953.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_954.png b/TMessagesProj/src/main/assets/emoji/0_954.png index b40c5a955..68b242b24 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_954.png and b/TMessagesProj/src/main/assets/emoji/0_954.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_955.png b/TMessagesProj/src/main/assets/emoji/0_955.png index 22195d38e..95959e315 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_955.png and b/TMessagesProj/src/main/assets/emoji/0_955.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_956.png b/TMessagesProj/src/main/assets/emoji/0_956.png index 48f2910d1..3ced9b8c5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_956.png and b/TMessagesProj/src/main/assets/emoji/0_956.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_957.png b/TMessagesProj/src/main/assets/emoji/0_957.png index 88570ff35..79b8db75e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_957.png and b/TMessagesProj/src/main/assets/emoji/0_957.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_958.png b/TMessagesProj/src/main/assets/emoji/0_958.png index fcc24f502..59b7d4081 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_958.png and b/TMessagesProj/src/main/assets/emoji/0_958.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_959.png b/TMessagesProj/src/main/assets/emoji/0_959.png index ca2c1a9ef..38cbd3451 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_959.png and b/TMessagesProj/src/main/assets/emoji/0_959.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_96.png b/TMessagesProj/src/main/assets/emoji/0_96.png index ad241a3ac..fc485f684 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_96.png and b/TMessagesProj/src/main/assets/emoji/0_96.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_960.png b/TMessagesProj/src/main/assets/emoji/0_960.png index 68d32282d..090371798 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_960.png and b/TMessagesProj/src/main/assets/emoji/0_960.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_961.png b/TMessagesProj/src/main/assets/emoji/0_961.png index 1985c3d6a..95857f67d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_961.png and b/TMessagesProj/src/main/assets/emoji/0_961.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_962.png b/TMessagesProj/src/main/assets/emoji/0_962.png index e6279bf6b..5c6699849 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_962.png and b/TMessagesProj/src/main/assets/emoji/0_962.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_963.png b/TMessagesProj/src/main/assets/emoji/0_963.png index dd8f0fde3..32a2db705 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_963.png and b/TMessagesProj/src/main/assets/emoji/0_963.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_964.png b/TMessagesProj/src/main/assets/emoji/0_964.png index 8d92ee84c..67075fde7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_964.png and b/TMessagesProj/src/main/assets/emoji/0_964.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_965.png b/TMessagesProj/src/main/assets/emoji/0_965.png index 4f2473c90..857f94e78 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/0_965.png and b/TMessagesProj/src/main/assets/emoji/0_965.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_966.png b/TMessagesProj/src/main/assets/emoji/0_966.png index 9e3be0464..b78eb07d6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_966.png and b/TMessagesProj/src/main/assets/emoji/0_966.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_967.png b/TMessagesProj/src/main/assets/emoji/0_967.png index 6b6ab2769..684d30d63 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_967.png and b/TMessagesProj/src/main/assets/emoji/0_967.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_968.png b/TMessagesProj/src/main/assets/emoji/0_968.png index 20a29b692..a939f8fb0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_968.png and b/TMessagesProj/src/main/assets/emoji/0_968.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_969.png b/TMessagesProj/src/main/assets/emoji/0_969.png index 66c13e2e7..4a072a3cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_969.png and b/TMessagesProj/src/main/assets/emoji/0_969.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_97.png b/TMessagesProj/src/main/assets/emoji/0_97.png index 01b385e49..031417d9c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_97.png and b/TMessagesProj/src/main/assets/emoji/0_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_970.png b/TMessagesProj/src/main/assets/emoji/0_970.png index fb45ed830..59b09c1f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_970.png and b/TMessagesProj/src/main/assets/emoji/0_970.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_971.png b/TMessagesProj/src/main/assets/emoji/0_971.png index a3f515128..b2e5000ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_971.png and b/TMessagesProj/src/main/assets/emoji/0_971.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_972.png 
b/TMessagesProj/src/main/assets/emoji/0_972.png index f95923d52..50c0c8c43 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_972.png and b/TMessagesProj/src/main/assets/emoji/0_972.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_973.png b/TMessagesProj/src/main/assets/emoji/0_973.png index 8ccd389df..6f07033e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_973.png and b/TMessagesProj/src/main/assets/emoji/0_973.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_974.png b/TMessagesProj/src/main/assets/emoji/0_974.png index f233a9d66..1e87467ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_974.png and b/TMessagesProj/src/main/assets/emoji/0_974.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_975.png b/TMessagesProj/src/main/assets/emoji/0_975.png index d3213b7c7..ee9f160e6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_975.png and b/TMessagesProj/src/main/assets/emoji/0_975.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_976.png b/TMessagesProj/src/main/assets/emoji/0_976.png index 0cf5247c5..c6518fd69 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_976.png and b/TMessagesProj/src/main/assets/emoji/0_976.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_977.png b/TMessagesProj/src/main/assets/emoji/0_977.png index c01cca9b0..f92b1b289 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_977.png and b/TMessagesProj/src/main/assets/emoji/0_977.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_978.png b/TMessagesProj/src/main/assets/emoji/0_978.png index 529b6850c..1ea7bd850 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_978.png and b/TMessagesProj/src/main/assets/emoji/0_978.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_979.png b/TMessagesProj/src/main/assets/emoji/0_979.png index 4097988fe..5b552791e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_979.png and 
b/TMessagesProj/src/main/assets/emoji/0_979.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_98.png b/TMessagesProj/src/main/assets/emoji/0_98.png index afde02fb3..4f1507dc4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_98.png and b/TMessagesProj/src/main/assets/emoji/0_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_980.png b/TMessagesProj/src/main/assets/emoji/0_980.png index 7a9ad2588..a5ef5df00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_980.png and b/TMessagesProj/src/main/assets/emoji/0_980.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_981.png b/TMessagesProj/src/main/assets/emoji/0_981.png index 573698560..bb8960697 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_981.png and b/TMessagesProj/src/main/assets/emoji/0_981.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_982.png b/TMessagesProj/src/main/assets/emoji/0_982.png index 3f56819a9..305f490d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_982.png and b/TMessagesProj/src/main/assets/emoji/0_982.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_983.png b/TMessagesProj/src/main/assets/emoji/0_983.png index 231136e00..884be682a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_983.png and b/TMessagesProj/src/main/assets/emoji/0_983.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_984.png b/TMessagesProj/src/main/assets/emoji/0_984.png index 2d7f89050..1076a3cdb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_984.png and b/TMessagesProj/src/main/assets/emoji/0_984.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_985.png b/TMessagesProj/src/main/assets/emoji/0_985.png index b5dbc2c96..73f559764 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_985.png and b/TMessagesProj/src/main/assets/emoji/0_985.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_986.png b/TMessagesProj/src/main/assets/emoji/0_986.png index 
89bd5151c..5962e12ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_986.png and b/TMessagesProj/src/main/assets/emoji/0_986.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_987.png b/TMessagesProj/src/main/assets/emoji/0_987.png index f93f35586..c08fd2240 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_987.png and b/TMessagesProj/src/main/assets/emoji/0_987.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_988.png b/TMessagesProj/src/main/assets/emoji/0_988.png index 8f30ab28c..3427df769 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_988.png and b/TMessagesProj/src/main/assets/emoji/0_988.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_989.png b/TMessagesProj/src/main/assets/emoji/0_989.png index db434e3b3..dadcdc79e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_989.png and b/TMessagesProj/src/main/assets/emoji/0_989.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_99.png b/TMessagesProj/src/main/assets/emoji/0_99.png index a0f16d3dd..9c89c736f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_99.png and b/TMessagesProj/src/main/assets/emoji/0_99.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_990.png b/TMessagesProj/src/main/assets/emoji/0_990.png index d57c90482..03256a388 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_990.png and b/TMessagesProj/src/main/assets/emoji/0_990.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_991.png b/TMessagesProj/src/main/assets/emoji/0_991.png index 18d22b8bd..3a576cf67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_991.png and b/TMessagesProj/src/main/assets/emoji/0_991.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_992.png b/TMessagesProj/src/main/assets/emoji/0_992.png index aa644837d..e034fb82b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_992.png and b/TMessagesProj/src/main/assets/emoji/0_992.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/0_993.png b/TMessagesProj/src/main/assets/emoji/0_993.png index 3dad9f7b1..495f6707b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_993.png and b/TMessagesProj/src/main/assets/emoji/0_993.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_994.png b/TMessagesProj/src/main/assets/emoji/0_994.png index 52266b9e2..b53cef118 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_994.png and b/TMessagesProj/src/main/assets/emoji/0_994.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_995.png b/TMessagesProj/src/main/assets/emoji/0_995.png index 10ded070b..5bf76e56d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_995.png and b/TMessagesProj/src/main/assets/emoji/0_995.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_996.png b/TMessagesProj/src/main/assets/emoji/0_996.png index 1aea4fe68..8f67139d1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_996.png and b/TMessagesProj/src/main/assets/emoji/0_996.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_997.png b/TMessagesProj/src/main/assets/emoji/0_997.png index 9659ca160..b414d3da5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_997.png and b/TMessagesProj/src/main/assets/emoji/0_997.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_998.png b/TMessagesProj/src/main/assets/emoji/0_998.png index 8bc63797d..dfd8bd86a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_998.png and b/TMessagesProj/src/main/assets/emoji/0_998.png differ diff --git a/TMessagesProj/src/main/assets/emoji/0_999.png b/TMessagesProj/src/main/assets/emoji/0_999.png index 6bcff59b6..df00566c0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/0_999.png and b/TMessagesProj/src/main/assets/emoji/0_999.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_0.png b/TMessagesProj/src/main/assets/emoji/1_0.png index 5a8497f96..bfc92e1a6 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/1_0.png and b/TMessagesProj/src/main/assets/emoji/1_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_1.png b/TMessagesProj/src/main/assets/emoji/1_1.png index 38825bfb5..c0dfbaf46 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_1.png and b/TMessagesProj/src/main/assets/emoji/1_1.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_10.png b/TMessagesProj/src/main/assets/emoji/1_10.png index 9ae951386..d22558def 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_10.png and b/TMessagesProj/src/main/assets/emoji/1_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_100.png b/TMessagesProj/src/main/assets/emoji/1_100.png index d1d037ddb..c16348392 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_100.png and b/TMessagesProj/src/main/assets/emoji/1_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_101.png b/TMessagesProj/src/main/assets/emoji/1_101.png index b0d9ec123..5ebaef2c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_101.png and b/TMessagesProj/src/main/assets/emoji/1_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_102.png b/TMessagesProj/src/main/assets/emoji/1_102.png index 2d9dc2057..784738b59 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_102.png and b/TMessagesProj/src/main/assets/emoji/1_102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_103.png b/TMessagesProj/src/main/assets/emoji/1_103.png index ead3085cc..0dcc3b599 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_103.png and b/TMessagesProj/src/main/assets/emoji/1_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_104.png b/TMessagesProj/src/main/assets/emoji/1_104.png index 02af74ec1..2e12d4eba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_104.png and b/TMessagesProj/src/main/assets/emoji/1_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_105.png 
b/TMessagesProj/src/main/assets/emoji/1_105.png index a0f24e726..586aacd0b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_105.png and b/TMessagesProj/src/main/assets/emoji/1_105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_106.png b/TMessagesProj/src/main/assets/emoji/1_106.png index a9b1bb4d3..09623fafa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_106.png and b/TMessagesProj/src/main/assets/emoji/1_106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_107.png b/TMessagesProj/src/main/assets/emoji/1_107.png index 7c54a5524..5571d4428 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_107.png and b/TMessagesProj/src/main/assets/emoji/1_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_108.png b/TMessagesProj/src/main/assets/emoji/1_108.png index a5889ac03..5f5329f30 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_108.png and b/TMessagesProj/src/main/assets/emoji/1_108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_109.png b/TMessagesProj/src/main/assets/emoji/1_109.png index 0ca8af8cd..fc6da8712 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_109.png and b/TMessagesProj/src/main/assets/emoji/1_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_11.png b/TMessagesProj/src/main/assets/emoji/1_11.png index c195bb6f5..765b3c91a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_11.png and b/TMessagesProj/src/main/assets/emoji/1_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_110.png b/TMessagesProj/src/main/assets/emoji/1_110.png index 89c729619..dbe0bf5f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_110.png and b/TMessagesProj/src/main/assets/emoji/1_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_111.png b/TMessagesProj/src/main/assets/emoji/1_111.png index 3225ad33f..abb8c20d3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_111.png and 
b/TMessagesProj/src/main/assets/emoji/1_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_112.png b/TMessagesProj/src/main/assets/emoji/1_112.png index 8d210d0d0..1d0878b25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_112.png and b/TMessagesProj/src/main/assets/emoji/1_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_113.png b/TMessagesProj/src/main/assets/emoji/1_113.png index 90007c5ca..b0d9ec123 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_113.png and b/TMessagesProj/src/main/assets/emoji/1_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_114.png b/TMessagesProj/src/main/assets/emoji/1_114.png index f895d569c..12ad508f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_114.png and b/TMessagesProj/src/main/assets/emoji/1_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_115.png b/TMessagesProj/src/main/assets/emoji/1_115.png index 0168decff..3ce7c2897 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_115.png and b/TMessagesProj/src/main/assets/emoji/1_115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_116.png b/TMessagesProj/src/main/assets/emoji/1_116.png index f61bec04c..6f290fb43 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_116.png and b/TMessagesProj/src/main/assets/emoji/1_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_117.png b/TMessagesProj/src/main/assets/emoji/1_117.png index c8cf56b47..de77cb87c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_117.png and b/TMessagesProj/src/main/assets/emoji/1_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_118.png b/TMessagesProj/src/main/assets/emoji/1_118.png index b65ec7c90..68b509547 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_118.png and b/TMessagesProj/src/main/assets/emoji/1_118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_119.png b/TMessagesProj/src/main/assets/emoji/1_119.png index 
6af8eafc5..60dcbb99c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_119.png and b/TMessagesProj/src/main/assets/emoji/1_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_12.png b/TMessagesProj/src/main/assets/emoji/1_12.png index 7abb10c6f..af32e6cb9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_12.png and b/TMessagesProj/src/main/assets/emoji/1_12.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_120.png b/TMessagesProj/src/main/assets/emoji/1_120.png index d97e7ab1e..91e46b59d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_120.png and b/TMessagesProj/src/main/assets/emoji/1_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_121.png b/TMessagesProj/src/main/assets/emoji/1_121.png index 38edc9d7a..e719a0abf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_121.png and b/TMessagesProj/src/main/assets/emoji/1_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_122.png b/TMessagesProj/src/main/assets/emoji/1_122.png index c062c2d37..89c729619 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_122.png and b/TMessagesProj/src/main/assets/emoji/1_122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_123.png b/TMessagesProj/src/main/assets/emoji/1_123.png index aaf38c8d1..1220c6327 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_123.png and b/TMessagesProj/src/main/assets/emoji/1_123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_124.png b/TMessagesProj/src/main/assets/emoji/1_124.png index a7cd4d865..a5585592a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_124.png and b/TMessagesProj/src/main/assets/emoji/1_124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_125.png b/TMessagesProj/src/main/assets/emoji/1_125.png index 40cf2b28e..d3b287671 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_125.png and b/TMessagesProj/src/main/assets/emoji/1_125.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/1_126.png b/TMessagesProj/src/main/assets/emoji/1_126.png index cfebbacf0..e398641bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_126.png and b/TMessagesProj/src/main/assets/emoji/1_126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_127.png b/TMessagesProj/src/main/assets/emoji/1_127.png index eed56bc5d..110e15de9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_127.png and b/TMessagesProj/src/main/assets/emoji/1_127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_128.png b/TMessagesProj/src/main/assets/emoji/1_128.png index 91016e7bb..0168decff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_128.png and b/TMessagesProj/src/main/assets/emoji/1_128.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_129.png b/TMessagesProj/src/main/assets/emoji/1_129.png index 541755ad3..6b79d0840 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_129.png and b/TMessagesProj/src/main/assets/emoji/1_129.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_13.png b/TMessagesProj/src/main/assets/emoji/1_13.png index 72fc26019..c8efa31e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_13.png and b/TMessagesProj/src/main/assets/emoji/1_13.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_130.png b/TMessagesProj/src/main/assets/emoji/1_130.png index acc00fb76..53dac4172 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_130.png and b/TMessagesProj/src/main/assets/emoji/1_130.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_131.png b/TMessagesProj/src/main/assets/emoji/1_131.png index 1f213ab8c..10b2cdd27 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_131.png and b/TMessagesProj/src/main/assets/emoji/1_131.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_132.png b/TMessagesProj/src/main/assets/emoji/1_132.png index fd84a4970..7bc7f88c3 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/1_132.png and b/TMessagesProj/src/main/assets/emoji/1_132.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_133.png b/TMessagesProj/src/main/assets/emoji/1_133.png index 05d4ace46..350ae20ea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_133.png and b/TMessagesProj/src/main/assets/emoji/1_133.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_134.png b/TMessagesProj/src/main/assets/emoji/1_134.png index b0f947ea3..734f04b10 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_134.png and b/TMessagesProj/src/main/assets/emoji/1_134.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_135.png b/TMessagesProj/src/main/assets/emoji/1_135.png index 392c1254d..2cb22d637 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_135.png and b/TMessagesProj/src/main/assets/emoji/1_135.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_136.png b/TMessagesProj/src/main/assets/emoji/1_136.png index 26007bf12..c9d1711b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_136.png and b/TMessagesProj/src/main/assets/emoji/1_136.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_137.png b/TMessagesProj/src/main/assets/emoji/1_137.png index 4e2c4c4d8..25efa0d69 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_137.png and b/TMessagesProj/src/main/assets/emoji/1_137.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_138.png b/TMessagesProj/src/main/assets/emoji/1_138.png index d3421a4b3..085677ce0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_138.png and b/TMessagesProj/src/main/assets/emoji/1_138.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_139.png b/TMessagesProj/src/main/assets/emoji/1_139.png index 2f33e4b3f..4755e20e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_139.png and b/TMessagesProj/src/main/assets/emoji/1_139.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_14.png 
b/TMessagesProj/src/main/assets/emoji/1_14.png index c9c4738aa..114a3ffca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_14.png and b/TMessagesProj/src/main/assets/emoji/1_14.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_140.png b/TMessagesProj/src/main/assets/emoji/1_140.png index 410169ce5..80ed91e4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_140.png and b/TMessagesProj/src/main/assets/emoji/1_140.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_141.png b/TMessagesProj/src/main/assets/emoji/1_141.png index 35af56901..16f9c3f61 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_141.png and b/TMessagesProj/src/main/assets/emoji/1_141.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_142.png b/TMessagesProj/src/main/assets/emoji/1_142.png index 250e763d9..83494cfc0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_142.png and b/TMessagesProj/src/main/assets/emoji/1_142.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_143.png b/TMessagesProj/src/main/assets/emoji/1_143.png index bb2649f85..eb1cb6ad0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_143.png and b/TMessagesProj/src/main/assets/emoji/1_143.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_144.png b/TMessagesProj/src/main/assets/emoji/1_144.png index 3a1bdbcb0..2f90581ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_144.png and b/TMessagesProj/src/main/assets/emoji/1_144.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_145.png b/TMessagesProj/src/main/assets/emoji/1_145.png index f5257a1d3..bd611b18c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_145.png and b/TMessagesProj/src/main/assets/emoji/1_145.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_146.png b/TMessagesProj/src/main/assets/emoji/1_146.png index dd2e2006a..9f1c24005 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_146.png and 
b/TMessagesProj/src/main/assets/emoji/1_146.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_147.png b/TMessagesProj/src/main/assets/emoji/1_147.png index 7c105eedf..3e4c2006c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_147.png and b/TMessagesProj/src/main/assets/emoji/1_147.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_148.png b/TMessagesProj/src/main/assets/emoji/1_148.png index 5713b82f6..dda3e2bde 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_148.png and b/TMessagesProj/src/main/assets/emoji/1_148.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_149.png b/TMessagesProj/src/main/assets/emoji/1_149.png index 23775b0b1..b0f947ea3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_149.png and b/TMessagesProj/src/main/assets/emoji/1_149.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_15.png b/TMessagesProj/src/main/assets/emoji/1_15.png index d398ff41b..4b0f54f7f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_15.png and b/TMessagesProj/src/main/assets/emoji/1_15.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_150.png b/TMessagesProj/src/main/assets/emoji/1_150.png index be05b6929..872fc54ec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_150.png and b/TMessagesProj/src/main/assets/emoji/1_150.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_151.png b/TMessagesProj/src/main/assets/emoji/1_151.png index 6515e4908..fc0405cd9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_151.png and b/TMessagesProj/src/main/assets/emoji/1_151.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_152.png b/TMessagesProj/src/main/assets/emoji/1_152.png index 356c6ce7f..e74e43226 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_152.png and b/TMessagesProj/src/main/assets/emoji/1_152.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_153.png b/TMessagesProj/src/main/assets/emoji/1_153.png index 
c44703dbc..a978f2834 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_153.png and b/TMessagesProj/src/main/assets/emoji/1_153.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_154.png b/TMessagesProj/src/main/assets/emoji/1_154.png index ae5cb4404..f5efa1d2a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_154.png and b/TMessagesProj/src/main/assets/emoji/1_154.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_155.png b/TMessagesProj/src/main/assets/emoji/1_155.png index fbb19ec8e..112e9f39d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_155.png and b/TMessagesProj/src/main/assets/emoji/1_155.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_156.png b/TMessagesProj/src/main/assets/emoji/1_156.png index e9678fef2..c09494b18 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_156.png and b/TMessagesProj/src/main/assets/emoji/1_156.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_157.png b/TMessagesProj/src/main/assets/emoji/1_157.png index 0c16574c6..719177395 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_157.png and b/TMessagesProj/src/main/assets/emoji/1_157.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_158.png b/TMessagesProj/src/main/assets/emoji/1_158.png index 6af1de652..18e85bf8e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_158.png and b/TMessagesProj/src/main/assets/emoji/1_158.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_159.png b/TMessagesProj/src/main/assets/emoji/1_159.png index b1e25850f..3a1bdbcb0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_159.png and b/TMessagesProj/src/main/assets/emoji/1_159.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_16.png b/TMessagesProj/src/main/assets/emoji/1_16.png index d87759c8f..ae05495ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_16.png and b/TMessagesProj/src/main/assets/emoji/1_16.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/1_160.png b/TMessagesProj/src/main/assets/emoji/1_160.png index 0975230b4..3c788c514 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_160.png and b/TMessagesProj/src/main/assets/emoji/1_160.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_161.png b/TMessagesProj/src/main/assets/emoji/1_161.png index cd8678a7b..27ace67a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_161.png and b/TMessagesProj/src/main/assets/emoji/1_161.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_162.png b/TMessagesProj/src/main/assets/emoji/1_162.png index 6e8bda168..202c1f22b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_162.png and b/TMessagesProj/src/main/assets/emoji/1_162.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_163.png b/TMessagesProj/src/main/assets/emoji/1_163.png index bfb850de9..841641aee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_163.png and b/TMessagesProj/src/main/assets/emoji/1_163.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_164.png b/TMessagesProj/src/main/assets/emoji/1_164.png index 193dd260c..8d4e4efc5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_164.png and b/TMessagesProj/src/main/assets/emoji/1_164.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_165.png b/TMessagesProj/src/main/assets/emoji/1_165.png index abfb52dcf..c70cacb63 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_165.png and b/TMessagesProj/src/main/assets/emoji/1_165.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_166.png b/TMessagesProj/src/main/assets/emoji/1_166.png index a02084252..ccf6b9087 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_166.png and b/TMessagesProj/src/main/assets/emoji/1_166.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_167.png b/TMessagesProj/src/main/assets/emoji/1_167.png index bd29599f0..3fcaf9912 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/1_167.png and b/TMessagesProj/src/main/assets/emoji/1_167.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_168.png b/TMessagesProj/src/main/assets/emoji/1_168.png index d6fd77ee6..f3cbe4296 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_168.png and b/TMessagesProj/src/main/assets/emoji/1_168.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_169.png b/TMessagesProj/src/main/assets/emoji/1_169.png index f45b7345b..08a70ec12 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_169.png and b/TMessagesProj/src/main/assets/emoji/1_169.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_17.png b/TMessagesProj/src/main/assets/emoji/1_17.png index da4f2bc24..837c9c34b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_17.png and b/TMessagesProj/src/main/assets/emoji/1_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_170.png b/TMessagesProj/src/main/assets/emoji/1_170.png index f6a3c575e..866bbc562 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_170.png and b/TMessagesProj/src/main/assets/emoji/1_170.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_171.png b/TMessagesProj/src/main/assets/emoji/1_171.png index d05697f4c..386c0d1f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_171.png and b/TMessagesProj/src/main/assets/emoji/1_171.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_172.png b/TMessagesProj/src/main/assets/emoji/1_172.png index 79404bb49..64948e476 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_172.png and b/TMessagesProj/src/main/assets/emoji/1_172.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_173.png b/TMessagesProj/src/main/assets/emoji/1_173.png index 549dabab5..aa44491d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_173.png and b/TMessagesProj/src/main/assets/emoji/1_173.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_174.png 
b/TMessagesProj/src/main/assets/emoji/1_174.png index 37879df85..07ee0d714 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_174.png and b/TMessagesProj/src/main/assets/emoji/1_174.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_175.png b/TMessagesProj/src/main/assets/emoji/1_175.png index e41df3783..dbaa6eb5b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_175.png and b/TMessagesProj/src/main/assets/emoji/1_175.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_176.png b/TMessagesProj/src/main/assets/emoji/1_176.png index 2177b9c13..f8eeeafe3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_176.png and b/TMessagesProj/src/main/assets/emoji/1_176.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_177.png b/TMessagesProj/src/main/assets/emoji/1_177.png index 1815fab45..6e8bda168 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_177.png and b/TMessagesProj/src/main/assets/emoji/1_177.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_178.png b/TMessagesProj/src/main/assets/emoji/1_178.png index 504781b2f..01c990a3a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_178.png and b/TMessagesProj/src/main/assets/emoji/1_178.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_179.png b/TMessagesProj/src/main/assets/emoji/1_179.png index 3a74dc61e..0a5d4aa67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_179.png and b/TMessagesProj/src/main/assets/emoji/1_179.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_18.png b/TMessagesProj/src/main/assets/emoji/1_18.png index bd4b64a2a..4d18fb13d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_18.png and b/TMessagesProj/src/main/assets/emoji/1_18.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_180.png b/TMessagesProj/src/main/assets/emoji/1_180.png index 4e9df73d4..61a9b4be9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_180.png and 
b/TMessagesProj/src/main/assets/emoji/1_180.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_181.png b/TMessagesProj/src/main/assets/emoji/1_181.png index 94bed2dbe..0a444d847 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_181.png and b/TMessagesProj/src/main/assets/emoji/1_181.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_182.png b/TMessagesProj/src/main/assets/emoji/1_182.png index b28135c05..bd29599f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_182.png and b/TMessagesProj/src/main/assets/emoji/1_182.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_183.png b/TMessagesProj/src/main/assets/emoji/1_183.png index 099fae9d7..d6fd77ee6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_183.png and b/TMessagesProj/src/main/assets/emoji/1_183.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_184.png b/TMessagesProj/src/main/assets/emoji/1_184.png new file mode 100644 index 000000000..8b568aa0c Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_184.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_185.png b/TMessagesProj/src/main/assets/emoji/1_185.png new file mode 100644 index 000000000..c90f2a0f8 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_185.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_186.png b/TMessagesProj/src/main/assets/emoji/1_186.png new file mode 100644 index 000000000..d05697f4c Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_186.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_187.png b/TMessagesProj/src/main/assets/emoji/1_187.png new file mode 100644 index 000000000..788f51008 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_187.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_188.png b/TMessagesProj/src/main/assets/emoji/1_188.png new file mode 100644 index 000000000..549dabab5 Binary files /dev/null and 
b/TMessagesProj/src/main/assets/emoji/1_188.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_189.png b/TMessagesProj/src/main/assets/emoji/1_189.png new file mode 100644 index 000000000..51888e2df Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_189.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_19.png b/TMessagesProj/src/main/assets/emoji/1_19.png index 49320eba0..4c177a4ec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_19.png and b/TMessagesProj/src/main/assets/emoji/1_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_190.png b/TMessagesProj/src/main/assets/emoji/1_190.png new file mode 100644 index 000000000..9f66d763e Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_190.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_191.png b/TMessagesProj/src/main/assets/emoji/1_191.png new file mode 100644 index 000000000..b2884f793 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_191.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_192.png b/TMessagesProj/src/main/assets/emoji/1_192.png new file mode 100644 index 000000000..d9a625821 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_192.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_193.png b/TMessagesProj/src/main/assets/emoji/1_193.png new file mode 100644 index 000000000..734f4d857 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_193.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_194.png b/TMessagesProj/src/main/assets/emoji/1_194.png new file mode 100644 index 000000000..c8a261efd Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_194.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_195.png b/TMessagesProj/src/main/assets/emoji/1_195.png new file mode 100644 index 000000000..7074099a9 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_195.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/1_196.png b/TMessagesProj/src/main/assets/emoji/1_196.png new file mode 100644 index 000000000..782947a31 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_196.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_197.png b/TMessagesProj/src/main/assets/emoji/1_197.png new file mode 100644 index 000000000..3227b936f Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_197.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_198.png b/TMessagesProj/src/main/assets/emoji/1_198.png new file mode 100644 index 000000000..099fae9d7 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/1_198.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_2.png b/TMessagesProj/src/main/assets/emoji/1_2.png index d5c0d8cf0..caeb5f1ec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_2.png and b/TMessagesProj/src/main/assets/emoji/1_2.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_20.png b/TMessagesProj/src/main/assets/emoji/1_20.png index 70ebdd9f5..5b9920c1e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_20.png and b/TMessagesProj/src/main/assets/emoji/1_20.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_21.png b/TMessagesProj/src/main/assets/emoji/1_21.png index 0d2b36375..3fdcf7512 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_21.png and b/TMessagesProj/src/main/assets/emoji/1_21.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_22.png b/TMessagesProj/src/main/assets/emoji/1_22.png index 406eeeaa0..fa0fbde0f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_22.png and b/TMessagesProj/src/main/assets/emoji/1_22.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_23.png b/TMessagesProj/src/main/assets/emoji/1_23.png index 7c1620376..3eac8e3e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_23.png and b/TMessagesProj/src/main/assets/emoji/1_23.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/1_24.png b/TMessagesProj/src/main/assets/emoji/1_24.png index 1699b4296..11c772518 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_24.png and b/TMessagesProj/src/main/assets/emoji/1_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_25.png b/TMessagesProj/src/main/assets/emoji/1_25.png index 55f165fc8..8dde57027 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_25.png and b/TMessagesProj/src/main/assets/emoji/1_25.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_26.png b/TMessagesProj/src/main/assets/emoji/1_26.png index d9f130af4..e430cd636 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_26.png and b/TMessagesProj/src/main/assets/emoji/1_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_27.png b/TMessagesProj/src/main/assets/emoji/1_27.png index 4d559e6e3..06a2dca8f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_27.png and b/TMessagesProj/src/main/assets/emoji/1_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_28.png b/TMessagesProj/src/main/assets/emoji/1_28.png index d6a4ce74e..3051e8c1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_28.png and b/TMessagesProj/src/main/assets/emoji/1_28.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_29.png b/TMessagesProj/src/main/assets/emoji/1_29.png index e98ec596b..24780c2f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_29.png and b/TMessagesProj/src/main/assets/emoji/1_29.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_3.png b/TMessagesProj/src/main/assets/emoji/1_3.png index 4e3efd0b5..f7f4b0528 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_3.png and b/TMessagesProj/src/main/assets/emoji/1_3.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_30.png b/TMessagesProj/src/main/assets/emoji/1_30.png index 07488df70..3c8cda0d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_30.png and 
b/TMessagesProj/src/main/assets/emoji/1_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_31.png b/TMessagesProj/src/main/assets/emoji/1_31.png index d764a20ea..2d1ba7974 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_31.png and b/TMessagesProj/src/main/assets/emoji/1_31.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_32.png b/TMessagesProj/src/main/assets/emoji/1_32.png index b846047f4..52aa5824c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_32.png and b/TMessagesProj/src/main/assets/emoji/1_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_33.png b/TMessagesProj/src/main/assets/emoji/1_33.png index c2cc301d4..0448938ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_33.png and b/TMessagesProj/src/main/assets/emoji/1_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_34.png b/TMessagesProj/src/main/assets/emoji/1_34.png index 7314d4560..e9ca3d56e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_34.png and b/TMessagesProj/src/main/assets/emoji/1_34.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_35.png b/TMessagesProj/src/main/assets/emoji/1_35.png index ac596ee6c..9ce7289ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_35.png and b/TMessagesProj/src/main/assets/emoji/1_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_36.png b/TMessagesProj/src/main/assets/emoji/1_36.png index 3e249cf7e..1e06d57be 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_36.png and b/TMessagesProj/src/main/assets/emoji/1_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_37.png b/TMessagesProj/src/main/assets/emoji/1_37.png index d1b871d1b..b7e188a53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_37.png and b/TMessagesProj/src/main/assets/emoji/1_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_38.png b/TMessagesProj/src/main/assets/emoji/1_38.png index 62a6ae34f..d9fbccce3 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/1_38.png and b/TMessagesProj/src/main/assets/emoji/1_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_39.png b/TMessagesProj/src/main/assets/emoji/1_39.png index 39957a12e..46a10ec53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_39.png and b/TMessagesProj/src/main/assets/emoji/1_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_4.png b/TMessagesProj/src/main/assets/emoji/1_4.png index 98839fe90..d1c7a805a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_4.png and b/TMessagesProj/src/main/assets/emoji/1_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_40.png b/TMessagesProj/src/main/assets/emoji/1_40.png index 7e418cf1c..8cc35c852 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_40.png and b/TMessagesProj/src/main/assets/emoji/1_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_41.png b/TMessagesProj/src/main/assets/emoji/1_41.png index c62ab2367..d99a7fcdb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_41.png and b/TMessagesProj/src/main/assets/emoji/1_41.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_42.png b/TMessagesProj/src/main/assets/emoji/1_42.png index b4880d5f8..7b7b62a57 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_42.png and b/TMessagesProj/src/main/assets/emoji/1_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_43.png b/TMessagesProj/src/main/assets/emoji/1_43.png index 404eba1ea..e1e917642 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_43.png and b/TMessagesProj/src/main/assets/emoji/1_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_44.png b/TMessagesProj/src/main/assets/emoji/1_44.png index 7f86abc77..b76136852 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_44.png and b/TMessagesProj/src/main/assets/emoji/1_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_45.png 
b/TMessagesProj/src/main/assets/emoji/1_45.png index 923f3b3e0..7e418cf1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_45.png and b/TMessagesProj/src/main/assets/emoji/1_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_46.png b/TMessagesProj/src/main/assets/emoji/1_46.png index 761511474..e257d4bee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_46.png and b/TMessagesProj/src/main/assets/emoji/1_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_47.png b/TMessagesProj/src/main/assets/emoji/1_47.png index 52ff14fa1..b1f292605 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_47.png and b/TMessagesProj/src/main/assets/emoji/1_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_48.png b/TMessagesProj/src/main/assets/emoji/1_48.png index 21c76ebf2..0f7f51dad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_48.png and b/TMessagesProj/src/main/assets/emoji/1_48.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_49.png b/TMessagesProj/src/main/assets/emoji/1_49.png index 509402a03..846aa421b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_49.png and b/TMessagesProj/src/main/assets/emoji/1_49.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_5.png b/TMessagesProj/src/main/assets/emoji/1_5.png index 0d8851cbb..9f2e634d8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_5.png and b/TMessagesProj/src/main/assets/emoji/1_5.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_50.png b/TMessagesProj/src/main/assets/emoji/1_50.png index eaabbd357..cc3dd6a9d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_50.png and b/TMessagesProj/src/main/assets/emoji/1_50.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_51.png b/TMessagesProj/src/main/assets/emoji/1_51.png index 2c789e7f7..7b6d0585e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_51.png and b/TMessagesProj/src/main/assets/emoji/1_51.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/1_52.png b/TMessagesProj/src/main/assets/emoji/1_52.png index 0b347f916..df5c6c62b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_52.png and b/TMessagesProj/src/main/assets/emoji/1_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_53.png b/TMessagesProj/src/main/assets/emoji/1_53.png index 30f8ef01a..77f9ab164 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_53.png and b/TMessagesProj/src/main/assets/emoji/1_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_54.png b/TMessagesProj/src/main/assets/emoji/1_54.png index 3d59ffb68..5d7a83d0a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_54.png and b/TMessagesProj/src/main/assets/emoji/1_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_55.png b/TMessagesProj/src/main/assets/emoji/1_55.png index 3be48e209..1dffaee04 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_55.png and b/TMessagesProj/src/main/assets/emoji/1_55.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_56.png b/TMessagesProj/src/main/assets/emoji/1_56.png index 4272ffa0a..385954c5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_56.png and b/TMessagesProj/src/main/assets/emoji/1_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_57.png b/TMessagesProj/src/main/assets/emoji/1_57.png index 91d3b5f95..8b1b24fb2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_57.png and b/TMessagesProj/src/main/assets/emoji/1_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_58.png b/TMessagesProj/src/main/assets/emoji/1_58.png index bf5a347ed..519fc3174 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_58.png and b/TMessagesProj/src/main/assets/emoji/1_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_59.png b/TMessagesProj/src/main/assets/emoji/1_59.png index 93bc05b6d..11b24f797 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_59.png and 
b/TMessagesProj/src/main/assets/emoji/1_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_6.png b/TMessagesProj/src/main/assets/emoji/1_6.png index e6133da42..4f9bdb90e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_6.png and b/TMessagesProj/src/main/assets/emoji/1_6.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_60.png b/TMessagesProj/src/main/assets/emoji/1_60.png index 82eb4ea0b..f49620dd8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_60.png and b/TMessagesProj/src/main/assets/emoji/1_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_61.png b/TMessagesProj/src/main/assets/emoji/1_61.png index ecc3614bf..b999fa96f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_61.png and b/TMessagesProj/src/main/assets/emoji/1_61.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_62.png b/TMessagesProj/src/main/assets/emoji/1_62.png index fb15aaaf2..c40f0ed1e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_62.png and b/TMessagesProj/src/main/assets/emoji/1_62.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_63.png b/TMessagesProj/src/main/assets/emoji/1_63.png index 5855887dc..c1ae0b6f9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_63.png and b/TMessagesProj/src/main/assets/emoji/1_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_64.png b/TMessagesProj/src/main/assets/emoji/1_64.png index ff2cc478b..9d5a117c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_64.png and b/TMessagesProj/src/main/assets/emoji/1_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_65.png b/TMessagesProj/src/main/assets/emoji/1_65.png index bf9c94f80..0e0fe1b34 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_65.png and b/TMessagesProj/src/main/assets/emoji/1_65.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_66.png b/TMessagesProj/src/main/assets/emoji/1_66.png index fec0e16a8..88205b611 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/1_66.png and b/TMessagesProj/src/main/assets/emoji/1_66.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_67.png b/TMessagesProj/src/main/assets/emoji/1_67.png index 9f956cb35..4921af536 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_67.png and b/TMessagesProj/src/main/assets/emoji/1_67.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_68.png b/TMessagesProj/src/main/assets/emoji/1_68.png index cae4e65b1..32d8c4e6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_68.png and b/TMessagesProj/src/main/assets/emoji/1_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_69.png b/TMessagesProj/src/main/assets/emoji/1_69.png index 85111b919..aae81022e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_69.png and b/TMessagesProj/src/main/assets/emoji/1_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_7.png b/TMessagesProj/src/main/assets/emoji/1_7.png index bf45c8d24..d1302c3ec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_7.png and b/TMessagesProj/src/main/assets/emoji/1_7.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_70.png b/TMessagesProj/src/main/assets/emoji/1_70.png index fd343d38d..53055d814 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_70.png and b/TMessagesProj/src/main/assets/emoji/1_70.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_71.png b/TMessagesProj/src/main/assets/emoji/1_71.png index e86b48993..1edf4c861 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_71.png and b/TMessagesProj/src/main/assets/emoji/1_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_72.png b/TMessagesProj/src/main/assets/emoji/1_72.png index f6a59babf..a324b3ddc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_72.png and b/TMessagesProj/src/main/assets/emoji/1_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_73.png 
b/TMessagesProj/src/main/assets/emoji/1_73.png index ca0cc2a69..9f956cb35 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_73.png and b/TMessagesProj/src/main/assets/emoji/1_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_74.png b/TMessagesProj/src/main/assets/emoji/1_74.png index 314dbc703..ce57691da 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_74.png and b/TMessagesProj/src/main/assets/emoji/1_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_75.png b/TMessagesProj/src/main/assets/emoji/1_75.png index 9e1bd02c5..5b3ea15d7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_75.png and b/TMessagesProj/src/main/assets/emoji/1_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_76.png b/TMessagesProj/src/main/assets/emoji/1_76.png index 93d8c19a5..9f45add8f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_76.png and b/TMessagesProj/src/main/assets/emoji/1_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_77.png b/TMessagesProj/src/main/assets/emoji/1_77.png index a2d9ed385..769d6becd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_77.png and b/TMessagesProj/src/main/assets/emoji/1_77.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_78.png b/TMessagesProj/src/main/assets/emoji/1_78.png index 89371d998..39cfc4307 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_78.png and b/TMessagesProj/src/main/assets/emoji/1_78.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_79.png b/TMessagesProj/src/main/assets/emoji/1_79.png index fcc7b286d..5d5f36143 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_79.png and b/TMessagesProj/src/main/assets/emoji/1_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_8.png b/TMessagesProj/src/main/assets/emoji/1_8.png index 30230079a..2f13e6788 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_8.png and b/TMessagesProj/src/main/assets/emoji/1_8.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/1_80.png b/TMessagesProj/src/main/assets/emoji/1_80.png index 4ed70185a..04ce1932b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_80.png and b/TMessagesProj/src/main/assets/emoji/1_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_81.png b/TMessagesProj/src/main/assets/emoji/1_81.png index 06135175e..dc86a4d51 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_81.png and b/TMessagesProj/src/main/assets/emoji/1_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_82.png b/TMessagesProj/src/main/assets/emoji/1_82.png index 8e05de5ff..20a4e5df3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_82.png and b/TMessagesProj/src/main/assets/emoji/1_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_83.png b/TMessagesProj/src/main/assets/emoji/1_83.png index 4723ddee7..f84e8412e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_83.png and b/TMessagesProj/src/main/assets/emoji/1_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_84.png b/TMessagesProj/src/main/assets/emoji/1_84.png index d24bc10ab..3d904617d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_84.png and b/TMessagesProj/src/main/assets/emoji/1_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_85.png b/TMessagesProj/src/main/assets/emoji/1_85.png index 601f49886..1c763e21f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_85.png and b/TMessagesProj/src/main/assets/emoji/1_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_86.png b/TMessagesProj/src/main/assets/emoji/1_86.png index 1d42b3fd2..f0b7691ea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_86.png and b/TMessagesProj/src/main/assets/emoji/1_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_87.png b/TMessagesProj/src/main/assets/emoji/1_87.png index 297d92237..ed257a27a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_87.png and 
b/TMessagesProj/src/main/assets/emoji/1_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_88.png b/TMessagesProj/src/main/assets/emoji/1_88.png index ed68f866e..4ed70185a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_88.png and b/TMessagesProj/src/main/assets/emoji/1_88.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_89.png b/TMessagesProj/src/main/assets/emoji/1_89.png index 40193f41b..06135175e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_89.png and b/TMessagesProj/src/main/assets/emoji/1_89.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_9.png b/TMessagesProj/src/main/assets/emoji/1_9.png index e98492b66..30230079a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_9.png and b/TMessagesProj/src/main/assets/emoji/1_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_90.png b/TMessagesProj/src/main/assets/emoji/1_90.png index c16348392..4cb0d769e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_90.png and b/TMessagesProj/src/main/assets/emoji/1_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_91.png b/TMessagesProj/src/main/assets/emoji/1_91.png index 21c5b0361..f9c77e3c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_91.png and b/TMessagesProj/src/main/assets/emoji/1_91.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_92.png b/TMessagesProj/src/main/assets/emoji/1_92.png index bcea6b521..a311cb2f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_92.png and b/TMessagesProj/src/main/assets/emoji/1_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_93.png b/TMessagesProj/src/main/assets/emoji/1_93.png index efcc07bc2..8e4975724 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_93.png and b/TMessagesProj/src/main/assets/emoji/1_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_94.png b/TMessagesProj/src/main/assets/emoji/1_94.png index 98201bc9d..88af58aac 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/1_94.png and b/TMessagesProj/src/main/assets/emoji/1_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_95.png b/TMessagesProj/src/main/assets/emoji/1_95.png index 33003909d..c0498a676 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_95.png and b/TMessagesProj/src/main/assets/emoji/1_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_96.png b/TMessagesProj/src/main/assets/emoji/1_96.png index 0ce40e2e4..c6a28def5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_96.png and b/TMessagesProj/src/main/assets/emoji/1_96.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_97.png b/TMessagesProj/src/main/assets/emoji/1_97.png index d90e79293..7333af1bb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_97.png and b/TMessagesProj/src/main/assets/emoji/1_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_98.png b/TMessagesProj/src/main/assets/emoji/1_98.png index c4e52dcdf..6b8685282 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_98.png and b/TMessagesProj/src/main/assets/emoji/1_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/1_99.png b/TMessagesProj/src/main/assets/emoji/1_99.png index e0e5adc1e..80fc4132a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/1_99.png and b/TMessagesProj/src/main/assets/emoji/1_99.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_0.png b/TMessagesProj/src/main/assets/emoji/2_0.png index 4d474da0f..f20854199 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_0.png and b/TMessagesProj/src/main/assets/emoji/2_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_1.png b/TMessagesProj/src/main/assets/emoji/2_1.png index 7a1a084c2..0944d05a9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_1.png and b/TMessagesProj/src/main/assets/emoji/2_1.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_10.png 
b/TMessagesProj/src/main/assets/emoji/2_10.png index acb92d591..9cbe2afc0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_10.png and b/TMessagesProj/src/main/assets/emoji/2_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_100.png b/TMessagesProj/src/main/assets/emoji/2_100.png index e9753d0aa..20ed6ebb0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_100.png and b/TMessagesProj/src/main/assets/emoji/2_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_101.png b/TMessagesProj/src/main/assets/emoji/2_101.png index 735596a03..054a5dc72 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_101.png and b/TMessagesProj/src/main/assets/emoji/2_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_102.png b/TMessagesProj/src/main/assets/emoji/2_102.png index b158f8698..653dff527 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_102.png and b/TMessagesProj/src/main/assets/emoji/2_102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_103.png b/TMessagesProj/src/main/assets/emoji/2_103.png index 332614498..a44e011a6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_103.png and b/TMessagesProj/src/main/assets/emoji/2_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_104.png b/TMessagesProj/src/main/assets/emoji/2_104.png index 0d5610804..634e9c2ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_104.png and b/TMessagesProj/src/main/assets/emoji/2_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_105.png b/TMessagesProj/src/main/assets/emoji/2_105.png index 031d71f7c..b8bb7a6e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_105.png and b/TMessagesProj/src/main/assets/emoji/2_105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_106.png b/TMessagesProj/src/main/assets/emoji/2_106.png index ea6e0781c..34199f7bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_106.png and 
b/TMessagesProj/src/main/assets/emoji/2_106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_107.png b/TMessagesProj/src/main/assets/emoji/2_107.png index 091300583..7b4e913d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_107.png and b/TMessagesProj/src/main/assets/emoji/2_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_108.png b/TMessagesProj/src/main/assets/emoji/2_108.png index f1f8b581b..b2630b516 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_108.png and b/TMessagesProj/src/main/assets/emoji/2_108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_109.png b/TMessagesProj/src/main/assets/emoji/2_109.png index 1d7a28550..4bad64d2c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_109.png and b/TMessagesProj/src/main/assets/emoji/2_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_11.png b/TMessagesProj/src/main/assets/emoji/2_11.png index 87ac24af4..d35435431 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_11.png and b/TMessagesProj/src/main/assets/emoji/2_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_110.png b/TMessagesProj/src/main/assets/emoji/2_110.png index fcbe95681..8e8b476b4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_110.png and b/TMessagesProj/src/main/assets/emoji/2_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_111.png b/TMessagesProj/src/main/assets/emoji/2_111.png index ab4a00687..299058f1d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_111.png and b/TMessagesProj/src/main/assets/emoji/2_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_112.png b/TMessagesProj/src/main/assets/emoji/2_112.png index 9f13241ba..b467c0136 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_112.png and b/TMessagesProj/src/main/assets/emoji/2_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_113.png b/TMessagesProj/src/main/assets/emoji/2_113.png index 
889306196..031d71f7c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_113.png and b/TMessagesProj/src/main/assets/emoji/2_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_114.png b/TMessagesProj/src/main/assets/emoji/2_114.png index 63c2a8431..5c5bf0e25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_114.png and b/TMessagesProj/src/main/assets/emoji/2_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_115.png b/TMessagesProj/src/main/assets/emoji/2_115.png new file mode 100644 index 000000000..091300583 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_116.png b/TMessagesProj/src/main/assets/emoji/2_116.png new file mode 100644 index 000000000..df41710a9 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_117.png b/TMessagesProj/src/main/assets/emoji/2_117.png new file mode 100644 index 000000000..4668d0a0a Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_118.png b/TMessagesProj/src/main/assets/emoji/2_118.png new file mode 100644 index 000000000..282c75373 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_119.png b/TMessagesProj/src/main/assets/emoji/2_119.png new file mode 100644 index 000000000..b557130cf Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_12.png b/TMessagesProj/src/main/assets/emoji/2_12.png index 514fae9a7..04ca33492 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_12.png and b/TMessagesProj/src/main/assets/emoji/2_12.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_120.png b/TMessagesProj/src/main/assets/emoji/2_120.png new file mode 100644 index 
000000000..e9f65fa06 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_121.png b/TMessagesProj/src/main/assets/emoji/2_121.png new file mode 100644 index 000000000..f502ecd02 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_122.png b/TMessagesProj/src/main/assets/emoji/2_122.png new file mode 100644 index 000000000..6fe9d421e Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/2_122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_13.png b/TMessagesProj/src/main/assets/emoji/2_13.png index ae4fda850..072301064 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_13.png and b/TMessagesProj/src/main/assets/emoji/2_13.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_14.png b/TMessagesProj/src/main/assets/emoji/2_14.png index 33f40be7c..d11692e99 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_14.png and b/TMessagesProj/src/main/assets/emoji/2_14.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_15.png b/TMessagesProj/src/main/assets/emoji/2_15.png index df005f4d6..cc6e9af38 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_15.png and b/TMessagesProj/src/main/assets/emoji/2_15.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_16.png b/TMessagesProj/src/main/assets/emoji/2_16.png index f3e15a22c..03656dd15 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_16.png and b/TMessagesProj/src/main/assets/emoji/2_16.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_17.png b/TMessagesProj/src/main/assets/emoji/2_17.png index a8cde1a3f..257170b0c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_17.png and b/TMessagesProj/src/main/assets/emoji/2_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_18.png b/TMessagesProj/src/main/assets/emoji/2_18.png index fa086c6e2..b85b4721b 
100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_18.png and b/TMessagesProj/src/main/assets/emoji/2_18.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_19.png b/TMessagesProj/src/main/assets/emoji/2_19.png index 89d386ad1..6e169528b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_19.png and b/TMessagesProj/src/main/assets/emoji/2_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_2.png b/TMessagesProj/src/main/assets/emoji/2_2.png index e2337587d..6766961ad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_2.png and b/TMessagesProj/src/main/assets/emoji/2_2.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_20.png b/TMessagesProj/src/main/assets/emoji/2_20.png index dd96b9b32..5dbde7f83 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_20.png and b/TMessagesProj/src/main/assets/emoji/2_20.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_21.png b/TMessagesProj/src/main/assets/emoji/2_21.png index c31f4ec10..b85836265 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_21.png and b/TMessagesProj/src/main/assets/emoji/2_21.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_22.png b/TMessagesProj/src/main/assets/emoji/2_22.png index 11f839fec..57d3b355f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_22.png and b/TMessagesProj/src/main/assets/emoji/2_22.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_23.png b/TMessagesProj/src/main/assets/emoji/2_23.png index 1fa8df385..9bca5729b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_23.png and b/TMessagesProj/src/main/assets/emoji/2_23.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_24.png b/TMessagesProj/src/main/assets/emoji/2_24.png index c7ecddef6..351a17ee1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_24.png and b/TMessagesProj/src/main/assets/emoji/2_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_25.png 
b/TMessagesProj/src/main/assets/emoji/2_25.png index 44879b122..9dbd52ea8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_25.png and b/TMessagesProj/src/main/assets/emoji/2_25.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_26.png b/TMessagesProj/src/main/assets/emoji/2_26.png index c1e0db8ca..c7ecddef6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_26.png and b/TMessagesProj/src/main/assets/emoji/2_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_27.png b/TMessagesProj/src/main/assets/emoji/2_27.png index ae41a23fb..e53aeaf75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_27.png and b/TMessagesProj/src/main/assets/emoji/2_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_28.png b/TMessagesProj/src/main/assets/emoji/2_28.png index 287b62208..79e8bfcd1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_28.png and b/TMessagesProj/src/main/assets/emoji/2_28.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_29.png b/TMessagesProj/src/main/assets/emoji/2_29.png index 3571417ab..ad16b5449 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_29.png and b/TMessagesProj/src/main/assets/emoji/2_29.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_3.png b/TMessagesProj/src/main/assets/emoji/2_3.png index fcbc86b23..029800898 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_3.png and b/TMessagesProj/src/main/assets/emoji/2_3.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_30.png b/TMessagesProj/src/main/assets/emoji/2_30.png index 5143f1126..8d450d93b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_30.png and b/TMessagesProj/src/main/assets/emoji/2_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_31.png b/TMessagesProj/src/main/assets/emoji/2_31.png index ccdafa07c..4a97d19db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_31.png and b/TMessagesProj/src/main/assets/emoji/2_31.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/2_32.png b/TMessagesProj/src/main/assets/emoji/2_32.png index 79c81ec76..3778a007f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_32.png and b/TMessagesProj/src/main/assets/emoji/2_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_33.png b/TMessagesProj/src/main/assets/emoji/2_33.png index ece0f92bf..37e985c3a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_33.png and b/TMessagesProj/src/main/assets/emoji/2_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_34.png b/TMessagesProj/src/main/assets/emoji/2_34.png index d8ce058b6..d0390ddbd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_34.png and b/TMessagesProj/src/main/assets/emoji/2_34.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_35.png b/TMessagesProj/src/main/assets/emoji/2_35.png index 8af36520e..c82bbf22a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_35.png and b/TMessagesProj/src/main/assets/emoji/2_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_36.png b/TMessagesProj/src/main/assets/emoji/2_36.png index 97136894d..9fd7f16a3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_36.png and b/TMessagesProj/src/main/assets/emoji/2_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_37.png b/TMessagesProj/src/main/assets/emoji/2_37.png index 9223b036f..35613b781 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_37.png and b/TMessagesProj/src/main/assets/emoji/2_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_38.png b/TMessagesProj/src/main/assets/emoji/2_38.png index c65e6f55a..3da5bac9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_38.png and b/TMessagesProj/src/main/assets/emoji/2_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_39.png b/TMessagesProj/src/main/assets/emoji/2_39.png index 2b8ecae17..6d27333a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_39.png and 
b/TMessagesProj/src/main/assets/emoji/2_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_4.png b/TMessagesProj/src/main/assets/emoji/2_4.png index 163a3ee5c..07e676cd1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_4.png and b/TMessagesProj/src/main/assets/emoji/2_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_40.png b/TMessagesProj/src/main/assets/emoji/2_40.png index 33f012514..bba7bc3ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_40.png and b/TMessagesProj/src/main/assets/emoji/2_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_41.png b/TMessagesProj/src/main/assets/emoji/2_41.png index e703ff583..14a8b7220 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_41.png and b/TMessagesProj/src/main/assets/emoji/2_41.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_42.png b/TMessagesProj/src/main/assets/emoji/2_42.png index 343b4a3bf..9e0bea794 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_42.png and b/TMessagesProj/src/main/assets/emoji/2_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_43.png b/TMessagesProj/src/main/assets/emoji/2_43.png index 1f5a67e6f..0a0e13e5b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_43.png and b/TMessagesProj/src/main/assets/emoji/2_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_44.png b/TMessagesProj/src/main/assets/emoji/2_44.png index 2d6e43e58..e9078e5fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_44.png and b/TMessagesProj/src/main/assets/emoji/2_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_45.png b/TMessagesProj/src/main/assets/emoji/2_45.png index abb852e8b..3343c3e7d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_45.png and b/TMessagesProj/src/main/assets/emoji/2_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_46.png b/TMessagesProj/src/main/assets/emoji/2_46.png index d60974721..6fd9c6c04 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/2_46.png and b/TMessagesProj/src/main/assets/emoji/2_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_47.png b/TMessagesProj/src/main/assets/emoji/2_47.png index 3ab8325c5..c94a2073d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_47.png and b/TMessagesProj/src/main/assets/emoji/2_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_48.png b/TMessagesProj/src/main/assets/emoji/2_48.png index 57b81772c..f6678559d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_48.png and b/TMessagesProj/src/main/assets/emoji/2_48.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_49.png b/TMessagesProj/src/main/assets/emoji/2_49.png index c5db2d46b..326b2148b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_49.png and b/TMessagesProj/src/main/assets/emoji/2_49.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_5.png b/TMessagesProj/src/main/assets/emoji/2_5.png index 1ee3676ea..fb2bdeef6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_5.png and b/TMessagesProj/src/main/assets/emoji/2_5.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_50.png b/TMessagesProj/src/main/assets/emoji/2_50.png index ba5768299..4f0e7f35f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_50.png and b/TMessagesProj/src/main/assets/emoji/2_50.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_51.png b/TMessagesProj/src/main/assets/emoji/2_51.png index ecf22149d..fbca13015 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_51.png and b/TMessagesProj/src/main/assets/emoji/2_51.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_52.png b/TMessagesProj/src/main/assets/emoji/2_52.png index 874dcab5c..a968009d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_52.png and b/TMessagesProj/src/main/assets/emoji/2_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_53.png 
b/TMessagesProj/src/main/assets/emoji/2_53.png index fa63e192e..126f42726 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_53.png and b/TMessagesProj/src/main/assets/emoji/2_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_54.png b/TMessagesProj/src/main/assets/emoji/2_54.png index 460ed5fb7..85d2606f6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_54.png and b/TMessagesProj/src/main/assets/emoji/2_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_55.png b/TMessagesProj/src/main/assets/emoji/2_55.png index 5875bafe3..4a067d1f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_55.png and b/TMessagesProj/src/main/assets/emoji/2_55.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_56.png b/TMessagesProj/src/main/assets/emoji/2_56.png index fb82e0b94..8285ce15e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_56.png and b/TMessagesProj/src/main/assets/emoji/2_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_57.png b/TMessagesProj/src/main/assets/emoji/2_57.png index 7fe1d833b..056ab4718 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_57.png and b/TMessagesProj/src/main/assets/emoji/2_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_58.png b/TMessagesProj/src/main/assets/emoji/2_58.png index c81d616d9..55a6686e9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_58.png and b/TMessagesProj/src/main/assets/emoji/2_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_59.png b/TMessagesProj/src/main/assets/emoji/2_59.png index c9226869b..fd26f3143 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_59.png and b/TMessagesProj/src/main/assets/emoji/2_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_6.png b/TMessagesProj/src/main/assets/emoji/2_6.png index dedce1870..4d5cc35e1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_6.png and b/TMessagesProj/src/main/assets/emoji/2_6.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/2_60.png b/TMessagesProj/src/main/assets/emoji/2_60.png index 47482d628..c72f1e278 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_60.png and b/TMessagesProj/src/main/assets/emoji/2_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_61.png b/TMessagesProj/src/main/assets/emoji/2_61.png index ccbd00ade..78c9113e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_61.png and b/TMessagesProj/src/main/assets/emoji/2_61.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_62.png b/TMessagesProj/src/main/assets/emoji/2_62.png index 7613fac3c..571fd886f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_62.png and b/TMessagesProj/src/main/assets/emoji/2_62.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_63.png b/TMessagesProj/src/main/assets/emoji/2_63.png index 013e4d5c0..e3ec40c18 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_63.png and b/TMessagesProj/src/main/assets/emoji/2_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_64.png b/TMessagesProj/src/main/assets/emoji/2_64.png index b70823245..6cf57e395 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_64.png and b/TMessagesProj/src/main/assets/emoji/2_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_65.png b/TMessagesProj/src/main/assets/emoji/2_65.png index 548a77963..c9226869b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_65.png and b/TMessagesProj/src/main/assets/emoji/2_65.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_66.png b/TMessagesProj/src/main/assets/emoji/2_66.png index dd3709998..8633b1b33 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_66.png and b/TMessagesProj/src/main/assets/emoji/2_66.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_67.png b/TMessagesProj/src/main/assets/emoji/2_67.png index a611b1968..f3bf9248a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_67.png and 
b/TMessagesProj/src/main/assets/emoji/2_67.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_68.png b/TMessagesProj/src/main/assets/emoji/2_68.png index 93563e32c..3f010faca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_68.png and b/TMessagesProj/src/main/assets/emoji/2_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_69.png b/TMessagesProj/src/main/assets/emoji/2_69.png index 7be0bc06b..0e749e027 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_69.png and b/TMessagesProj/src/main/assets/emoji/2_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_7.png b/TMessagesProj/src/main/assets/emoji/2_7.png index e45639aee..51018b552 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_7.png and b/TMessagesProj/src/main/assets/emoji/2_7.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_70.png b/TMessagesProj/src/main/assets/emoji/2_70.png index 65e1fe497..95efa249b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_70.png and b/TMessagesProj/src/main/assets/emoji/2_70.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_71.png b/TMessagesProj/src/main/assets/emoji/2_71.png index 7afb09a1c..9e510d736 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_71.png and b/TMessagesProj/src/main/assets/emoji/2_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_72.png b/TMessagesProj/src/main/assets/emoji/2_72.png index 9533d2fd7..f2e210930 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_72.png and b/TMessagesProj/src/main/assets/emoji/2_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_73.png b/TMessagesProj/src/main/assets/emoji/2_73.png index 96a794676..a611b1968 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_73.png and b/TMessagesProj/src/main/assets/emoji/2_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_74.png b/TMessagesProj/src/main/assets/emoji/2_74.png index 6653c743e..de1849a6b 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/2_74.png and b/TMessagesProj/src/main/assets/emoji/2_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_75.png b/TMessagesProj/src/main/assets/emoji/2_75.png index b3ffc502d..a7ce224b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_75.png and b/TMessagesProj/src/main/assets/emoji/2_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_76.png b/TMessagesProj/src/main/assets/emoji/2_76.png index 4f84e5902..d39b41e7c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_76.png and b/TMessagesProj/src/main/assets/emoji/2_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_77.png b/TMessagesProj/src/main/assets/emoji/2_77.png index 47c448274..7afb09a1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_77.png and b/TMessagesProj/src/main/assets/emoji/2_77.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_78.png b/TMessagesProj/src/main/assets/emoji/2_78.png index 6bf08c5e7..776c28d54 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_78.png and b/TMessagesProj/src/main/assets/emoji/2_78.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_79.png b/TMessagesProj/src/main/assets/emoji/2_79.png index d7d02bbc3..3ccb06560 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_79.png and b/TMessagesProj/src/main/assets/emoji/2_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_8.png b/TMessagesProj/src/main/assets/emoji/2_8.png index ad016962d..35dc01e5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_8.png and b/TMessagesProj/src/main/assets/emoji/2_8.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_80.png b/TMessagesProj/src/main/assets/emoji/2_80.png index ed8aedbc9..6153aa1c0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_80.png and b/TMessagesProj/src/main/assets/emoji/2_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_81.png 
b/TMessagesProj/src/main/assets/emoji/2_81.png index 5b8cd89e8..5e608f56d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_81.png and b/TMessagesProj/src/main/assets/emoji/2_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_82.png b/TMessagesProj/src/main/assets/emoji/2_82.png index 4bcef7665..4f84e5902 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_82.png and b/TMessagesProj/src/main/assets/emoji/2_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_83.png b/TMessagesProj/src/main/assets/emoji/2_83.png index 3459a4796..614406249 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_83.png and b/TMessagesProj/src/main/assets/emoji/2_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_84.png b/TMessagesProj/src/main/assets/emoji/2_84.png index b3a06f120..d94dceb79 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_84.png and b/TMessagesProj/src/main/assets/emoji/2_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_85.png b/TMessagesProj/src/main/assets/emoji/2_85.png index 771e765ed..6c0da84d8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_85.png and b/TMessagesProj/src/main/assets/emoji/2_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_86.png b/TMessagesProj/src/main/assets/emoji/2_86.png index 2825ec6ad..ed8aedbc9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_86.png and b/TMessagesProj/src/main/assets/emoji/2_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_87.png b/TMessagesProj/src/main/assets/emoji/2_87.png index 144767084..2843d3a46 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_87.png and b/TMessagesProj/src/main/assets/emoji/2_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_88.png b/TMessagesProj/src/main/assets/emoji/2_88.png index 4a15e063b..27e75adbf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_88.png and b/TMessagesProj/src/main/assets/emoji/2_88.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/2_89.png b/TMessagesProj/src/main/assets/emoji/2_89.png index 8eff824e6..00a3e9991 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_89.png and b/TMessagesProj/src/main/assets/emoji/2_89.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_9.png b/TMessagesProj/src/main/assets/emoji/2_9.png index 9cbe2afc0..f160a66b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_9.png and b/TMessagesProj/src/main/assets/emoji/2_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_90.png b/TMessagesProj/src/main/assets/emoji/2_90.png index 99d5e1381..3b14fc60c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_90.png and b/TMessagesProj/src/main/assets/emoji/2_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_91.png b/TMessagesProj/src/main/assets/emoji/2_91.png index aab87284d..771e765ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_91.png and b/TMessagesProj/src/main/assets/emoji/2_91.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_92.png b/TMessagesProj/src/main/assets/emoji/2_92.png index 71a0d8b9a..de46e98d5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_92.png and b/TMessagesProj/src/main/assets/emoji/2_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_93.png b/TMessagesProj/src/main/assets/emoji/2_93.png index 7243246d0..4756c7f15 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_93.png and b/TMessagesProj/src/main/assets/emoji/2_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_94.png b/TMessagesProj/src/main/assets/emoji/2_94.png index 00d176648..1c0732a53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_94.png and b/TMessagesProj/src/main/assets/emoji/2_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_95.png b/TMessagesProj/src/main/assets/emoji/2_95.png index d7a2e6c13..f9f38eea0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_95.png and 
b/TMessagesProj/src/main/assets/emoji/2_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_96.png b/TMessagesProj/src/main/assets/emoji/2_96.png index 50ca0ea87..12888ae8b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_96.png and b/TMessagesProj/src/main/assets/emoji/2_96.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_97.png b/TMessagesProj/src/main/assets/emoji/2_97.png index 04f825ade..bf8b9794e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_97.png and b/TMessagesProj/src/main/assets/emoji/2_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_98.png b/TMessagesProj/src/main/assets/emoji/2_98.png index 34199f7bd..42e930804 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_98.png and b/TMessagesProj/src/main/assets/emoji/2_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/2_99.png b/TMessagesProj/src/main/assets/emoji/2_99.png index 10f640dca..6f4e82d60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/2_99.png and b/TMessagesProj/src/main/assets/emoji/2_99.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_0.png b/TMessagesProj/src/main/assets/emoji/3_0.png index f42fae5bf..c2a72d752 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_0.png and b/TMessagesProj/src/main/assets/emoji/3_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_1.png b/TMessagesProj/src/main/assets/emoji/3_1.png index 53d408913..92b41df7f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_1.png and b/TMessagesProj/src/main/assets/emoji/3_1.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_10.png b/TMessagesProj/src/main/assets/emoji/3_10.png index bcb41bbab..099d1e425 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_10.png and b/TMessagesProj/src/main/assets/emoji/3_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_100.png b/TMessagesProj/src/main/assets/emoji/3_100.png index e0188f461..20dafee6e 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/3_100.png and b/TMessagesProj/src/main/assets/emoji/3_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_101.png b/TMessagesProj/src/main/assets/emoji/3_101.png index 26baabff5..bc7374d70 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_101.png and b/TMessagesProj/src/main/assets/emoji/3_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_102.png b/TMessagesProj/src/main/assets/emoji/3_102.png index 58651bd9e..ff24e8f89 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_102.png and b/TMessagesProj/src/main/assets/emoji/3_102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_103.png b/TMessagesProj/src/main/assets/emoji/3_103.png index 181991999..0b606e74f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_103.png and b/TMessagesProj/src/main/assets/emoji/3_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_104.png b/TMessagesProj/src/main/assets/emoji/3_104.png index 841c2ff20..7cc739d57 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_104.png and b/TMessagesProj/src/main/assets/emoji/3_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_105.png b/TMessagesProj/src/main/assets/emoji/3_105.png index e6b9bd200..d424749f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_105.png and b/TMessagesProj/src/main/assets/emoji/3_105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_106.png b/TMessagesProj/src/main/assets/emoji/3_106.png index 1168e95aa..e9ef3f8e4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_106.png and b/TMessagesProj/src/main/assets/emoji/3_106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_107.png b/TMessagesProj/src/main/assets/emoji/3_107.png index fa8a32bc1..f71288084 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_107.png and b/TMessagesProj/src/main/assets/emoji/3_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_108.png 
b/TMessagesProj/src/main/assets/emoji/3_108.png index 3f6658749..784246485 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_108.png and b/TMessagesProj/src/main/assets/emoji/3_108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_109.png b/TMessagesProj/src/main/assets/emoji/3_109.png index 24cdde5af..82c2e15e7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_109.png and b/TMessagesProj/src/main/assets/emoji/3_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_11.png b/TMessagesProj/src/main/assets/emoji/3_11.png index eb576d3bd..407515819 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_11.png and b/TMessagesProj/src/main/assets/emoji/3_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_110.png b/TMessagesProj/src/main/assets/emoji/3_110.png index 9d0cf57e8..a3b0ada05 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_110.png and b/TMessagesProj/src/main/assets/emoji/3_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_111.png b/TMessagesProj/src/main/assets/emoji/3_111.png index e23a8dbd6..e2a29fcfd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_111.png and b/TMessagesProj/src/main/assets/emoji/3_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_112.png b/TMessagesProj/src/main/assets/emoji/3_112.png index 8040bdd1b..fdbf68335 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_112.png and b/TMessagesProj/src/main/assets/emoji/3_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_113.png b/TMessagesProj/src/main/assets/emoji/3_113.png index 459640d24..c4a690109 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_113.png and b/TMessagesProj/src/main/assets/emoji/3_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_114.png b/TMessagesProj/src/main/assets/emoji/3_114.png index 244bb54cf..8b9fab72f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_114.png and 
b/TMessagesProj/src/main/assets/emoji/3_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_115.png b/TMessagesProj/src/main/assets/emoji/3_115.png index dc1eb3cc7..a1b6a8854 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_115.png and b/TMessagesProj/src/main/assets/emoji/3_115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_116.png b/TMessagesProj/src/main/assets/emoji/3_116.png index 2c610df25..21343974b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_116.png and b/TMessagesProj/src/main/assets/emoji/3_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_117.png b/TMessagesProj/src/main/assets/emoji/3_117.png index 1efd313d7..d9fb7b134 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_117.png and b/TMessagesProj/src/main/assets/emoji/3_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_118.png b/TMessagesProj/src/main/assets/emoji/3_118.png index 4800c94f5..73b71053e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_118.png and b/TMessagesProj/src/main/assets/emoji/3_118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_119.png b/TMessagesProj/src/main/assets/emoji/3_119.png index 70e1866c9..24b793811 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_119.png and b/TMessagesProj/src/main/assets/emoji/3_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_12.png b/TMessagesProj/src/main/assets/emoji/3_12.png index 37bb3923b..23dc73e37 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_12.png and b/TMessagesProj/src/main/assets/emoji/3_12.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_120.png b/TMessagesProj/src/main/assets/emoji/3_120.png index 0708fa747..bae46ad27 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_120.png and b/TMessagesProj/src/main/assets/emoji/3_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_121.png b/TMessagesProj/src/main/assets/emoji/3_121.png index 
04ba34ebb..78e366833 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_121.png and b/TMessagesProj/src/main/assets/emoji/3_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_122.png b/TMessagesProj/src/main/assets/emoji/3_122.png index e364bf64d..0d8c22875 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_122.png and b/TMessagesProj/src/main/assets/emoji/3_122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_123.png b/TMessagesProj/src/main/assets/emoji/3_123.png index 230097aec..81e1fe325 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_123.png and b/TMessagesProj/src/main/assets/emoji/3_123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_124.png b/TMessagesProj/src/main/assets/emoji/3_124.png index ac2992254..80296d477 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_124.png and b/TMessagesProj/src/main/assets/emoji/3_124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_125.png b/TMessagesProj/src/main/assets/emoji/3_125.png index de0b4c2f3..b3ebf7ddc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_125.png and b/TMessagesProj/src/main/assets/emoji/3_125.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_126.png b/TMessagesProj/src/main/assets/emoji/3_126.png index f91260d10..df22d8da7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_126.png and b/TMessagesProj/src/main/assets/emoji/3_126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_127.png b/TMessagesProj/src/main/assets/emoji/3_127.png index 062201e3a..96842b4da 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_127.png and b/TMessagesProj/src/main/assets/emoji/3_127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_128.png b/TMessagesProj/src/main/assets/emoji/3_128.png index 431925efa..45aa8f91e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_128.png and b/TMessagesProj/src/main/assets/emoji/3_128.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/3_129.png b/TMessagesProj/src/main/assets/emoji/3_129.png index d3208a239..aad1b6e8d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_129.png and b/TMessagesProj/src/main/assets/emoji/3_129.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_13.png b/TMessagesProj/src/main/assets/emoji/3_13.png index bb775dbb6..5a2e28831 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_13.png and b/TMessagesProj/src/main/assets/emoji/3_13.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_130.png b/TMessagesProj/src/main/assets/emoji/3_130.png index e5cbf7544..354f201c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_130.png and b/TMessagesProj/src/main/assets/emoji/3_130.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_131.png b/TMessagesProj/src/main/assets/emoji/3_131.png index 5309b4443..c775ed7fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_131.png and b/TMessagesProj/src/main/assets/emoji/3_131.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_132.png b/TMessagesProj/src/main/assets/emoji/3_132.png index b239a030e..39ecac176 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_132.png and b/TMessagesProj/src/main/assets/emoji/3_132.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_133.png b/TMessagesProj/src/main/assets/emoji/3_133.png index 04f44689c..491b4605a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_133.png and b/TMessagesProj/src/main/assets/emoji/3_133.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_134.png b/TMessagesProj/src/main/assets/emoji/3_134.png index aed1bc6b5..889bb1e53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_134.png and b/TMessagesProj/src/main/assets/emoji/3_134.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_135.png b/TMessagesProj/src/main/assets/emoji/3_135.png index c021f7503..45fb2f2c9 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/3_135.png and b/TMessagesProj/src/main/assets/emoji/3_135.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_136.png b/TMessagesProj/src/main/assets/emoji/3_136.png index 10a8112e2..0edba009f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_136.png and b/TMessagesProj/src/main/assets/emoji/3_136.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_137.png b/TMessagesProj/src/main/assets/emoji/3_137.png index 063391ad6..afab661db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_137.png and b/TMessagesProj/src/main/assets/emoji/3_137.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_138.png b/TMessagesProj/src/main/assets/emoji/3_138.png index 1e2a52597..b8c0996a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_138.png and b/TMessagesProj/src/main/assets/emoji/3_138.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_139.png b/TMessagesProj/src/main/assets/emoji/3_139.png index 360f528ef..326343332 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_139.png and b/TMessagesProj/src/main/assets/emoji/3_139.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_14.png b/TMessagesProj/src/main/assets/emoji/3_14.png index ca3fc5e94..0bac41ecc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_14.png and b/TMessagesProj/src/main/assets/emoji/3_14.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_140.png b/TMessagesProj/src/main/assets/emoji/3_140.png index 20315697f..1df820389 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_140.png and b/TMessagesProj/src/main/assets/emoji/3_140.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_141.png b/TMessagesProj/src/main/assets/emoji/3_141.png index e89eb6fbf..6bbb2e0a9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_141.png and b/TMessagesProj/src/main/assets/emoji/3_141.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_142.png 
b/TMessagesProj/src/main/assets/emoji/3_142.png index eb3e17ff8..9e68f3bca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_142.png and b/TMessagesProj/src/main/assets/emoji/3_142.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_143.png b/TMessagesProj/src/main/assets/emoji/3_143.png index c0de54708..f99c552b7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_143.png and b/TMessagesProj/src/main/assets/emoji/3_143.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_144.png b/TMessagesProj/src/main/assets/emoji/3_144.png index efb6af6e8..9121a0f0e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_144.png and b/TMessagesProj/src/main/assets/emoji/3_144.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_145.png b/TMessagesProj/src/main/assets/emoji/3_145.png index 896e9ff01..3dff6f569 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_145.png and b/TMessagesProj/src/main/assets/emoji/3_145.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_146.png b/TMessagesProj/src/main/assets/emoji/3_146.png index 79dc0d29e..954539193 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_146.png and b/TMessagesProj/src/main/assets/emoji/3_146.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_147.png b/TMessagesProj/src/main/assets/emoji/3_147.png index 7261aa5ef..a3c12dfee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_147.png and b/TMessagesProj/src/main/assets/emoji/3_147.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_148.png b/TMessagesProj/src/main/assets/emoji/3_148.png index c5afc7dba..ec534a090 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_148.png and b/TMessagesProj/src/main/assets/emoji/3_148.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_149.png b/TMessagesProj/src/main/assets/emoji/3_149.png index eca2bd6c1..4298d596d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_149.png and 
b/TMessagesProj/src/main/assets/emoji/3_149.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_15.png b/TMessagesProj/src/main/assets/emoji/3_15.png index 71f244040..2a0b7ffdb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_15.png and b/TMessagesProj/src/main/assets/emoji/3_15.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_150.png b/TMessagesProj/src/main/assets/emoji/3_150.png index 334c272a9..90836d18c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_150.png and b/TMessagesProj/src/main/assets/emoji/3_150.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_151.png b/TMessagesProj/src/main/assets/emoji/3_151.png index e6a1904d0..823531438 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_151.png and b/TMessagesProj/src/main/assets/emoji/3_151.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_152.png b/TMessagesProj/src/main/assets/emoji/3_152.png index ab479da7d..04ce07192 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_152.png and b/TMessagesProj/src/main/assets/emoji/3_152.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_153.png b/TMessagesProj/src/main/assets/emoji/3_153.png index e75232378..2819c5d56 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_153.png and b/TMessagesProj/src/main/assets/emoji/3_153.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_154.png b/TMessagesProj/src/main/assets/emoji/3_154.png index 4fa91369b..d0d3cd4d6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_154.png and b/TMessagesProj/src/main/assets/emoji/3_154.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_155.png b/TMessagesProj/src/main/assets/emoji/3_155.png index 2c4ed9ca2..0eb3467db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_155.png and b/TMessagesProj/src/main/assets/emoji/3_155.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_156.png b/TMessagesProj/src/main/assets/emoji/3_156.png index 
9913c7d35..21c6510b4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_156.png and b/TMessagesProj/src/main/assets/emoji/3_156.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_157.png b/TMessagesProj/src/main/assets/emoji/3_157.png index 188a3730f..1c9f116c6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_157.png and b/TMessagesProj/src/main/assets/emoji/3_157.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_158.png b/TMessagesProj/src/main/assets/emoji/3_158.png index d26bde5c8..3a80b4262 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_158.png and b/TMessagesProj/src/main/assets/emoji/3_158.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_159.png b/TMessagesProj/src/main/assets/emoji/3_159.png index 9ef261ad8..9b2e29982 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_159.png and b/TMessagesProj/src/main/assets/emoji/3_159.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_16.png b/TMessagesProj/src/main/assets/emoji/3_16.png index a4e11b8b7..4b2cb45f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_16.png and b/TMessagesProj/src/main/assets/emoji/3_16.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_160.png b/TMessagesProj/src/main/assets/emoji/3_160.png index 5e3bcf4a5..a1a6e3525 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_160.png and b/TMessagesProj/src/main/assets/emoji/3_160.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_161.png b/TMessagesProj/src/main/assets/emoji/3_161.png index d217ced3e..9cc928256 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_161.png and b/TMessagesProj/src/main/assets/emoji/3_161.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_162.png b/TMessagesProj/src/main/assets/emoji/3_162.png index 6d0dff514..dc0981fac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_162.png and b/TMessagesProj/src/main/assets/emoji/3_162.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/3_163.png b/TMessagesProj/src/main/assets/emoji/3_163.png index ddcf127c0..e0ab051cc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_163.png and b/TMessagesProj/src/main/assets/emoji/3_163.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_164.png b/TMessagesProj/src/main/assets/emoji/3_164.png index 0e6ec6c09..3e43612b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_164.png and b/TMessagesProj/src/main/assets/emoji/3_164.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_165.png b/TMessagesProj/src/main/assets/emoji/3_165.png index c03cac452..7bab4bc07 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_165.png and b/TMessagesProj/src/main/assets/emoji/3_165.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_166.png b/TMessagesProj/src/main/assets/emoji/3_166.png index db9cb92be..acda4c6b7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_166.png and b/TMessagesProj/src/main/assets/emoji/3_166.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_167.png b/TMessagesProj/src/main/assets/emoji/3_167.png index 67075773a..483495dbe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_167.png and b/TMessagesProj/src/main/assets/emoji/3_167.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_168.png b/TMessagesProj/src/main/assets/emoji/3_168.png index c4eaa3939..463e89c36 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_168.png and b/TMessagesProj/src/main/assets/emoji/3_168.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_169.png b/TMessagesProj/src/main/assets/emoji/3_169.png index c3845fb95..ce75609d3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_169.png and b/TMessagesProj/src/main/assets/emoji/3_169.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_17.png b/TMessagesProj/src/main/assets/emoji/3_17.png index b9e0d7469..401fee0d4 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/3_17.png and b/TMessagesProj/src/main/assets/emoji/3_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_170.png b/TMessagesProj/src/main/assets/emoji/3_170.png index 6c308a261..912127892 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_170.png and b/TMessagesProj/src/main/assets/emoji/3_170.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_171.png b/TMessagesProj/src/main/assets/emoji/3_171.png index ac28dc421..0a70a5c97 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_171.png and b/TMessagesProj/src/main/assets/emoji/3_171.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_172.png b/TMessagesProj/src/main/assets/emoji/3_172.png index 3aaeb6fc5..6c308a261 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_172.png and b/TMessagesProj/src/main/assets/emoji/3_172.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_173.png b/TMessagesProj/src/main/assets/emoji/3_173.png index 208b4ac4c..3c7f10304 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_173.png and b/TMessagesProj/src/main/assets/emoji/3_173.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_174.png b/TMessagesProj/src/main/assets/emoji/3_174.png index 41f924349..3aaeb6fc5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_174.png and b/TMessagesProj/src/main/assets/emoji/3_174.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_175.png b/TMessagesProj/src/main/assets/emoji/3_175.png index f2a97a71b..7082fd88c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_175.png and b/TMessagesProj/src/main/assets/emoji/3_175.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_176.png b/TMessagesProj/src/main/assets/emoji/3_176.png index 1e0871a19..41f924349 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_176.png and b/TMessagesProj/src/main/assets/emoji/3_176.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_177.png 
b/TMessagesProj/src/main/assets/emoji/3_177.png index 39ed75236..0ffb574e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_177.png and b/TMessagesProj/src/main/assets/emoji/3_177.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_178.png b/TMessagesProj/src/main/assets/emoji/3_178.png index 6c0afba2d..b613180a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_178.png and b/TMessagesProj/src/main/assets/emoji/3_178.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_179.png b/TMessagesProj/src/main/assets/emoji/3_179.png index 9f741e3a3..46922f339 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_179.png and b/TMessagesProj/src/main/assets/emoji/3_179.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_18.png b/TMessagesProj/src/main/assets/emoji/3_18.png index a4b89b86b..b9e0d7469 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_18.png and b/TMessagesProj/src/main/assets/emoji/3_18.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_180.png b/TMessagesProj/src/main/assets/emoji/3_180.png index 87d0b0111..11cbc3fdf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_180.png and b/TMessagesProj/src/main/assets/emoji/3_180.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_181.png b/TMessagesProj/src/main/assets/emoji/3_181.png index 3e68534b3..dd71830e4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_181.png and b/TMessagesProj/src/main/assets/emoji/3_181.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_182.png b/TMessagesProj/src/main/assets/emoji/3_182.png index 26c37475d..87d0b0111 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_182.png and b/TMessagesProj/src/main/assets/emoji/3_182.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_183.png b/TMessagesProj/src/main/assets/emoji/3_183.png index 41b99edff..d7435849a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_183.png and 
b/TMessagesProj/src/main/assets/emoji/3_183.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_184.png b/TMessagesProj/src/main/assets/emoji/3_184.png index 503d2c8eb..26c37475d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_184.png and b/TMessagesProj/src/main/assets/emoji/3_184.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_185.png b/TMessagesProj/src/main/assets/emoji/3_185.png index 7708a7668..474ae6747 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_185.png and b/TMessagesProj/src/main/assets/emoji/3_185.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_186.png b/TMessagesProj/src/main/assets/emoji/3_186.png index aa4ae5098..5a74c0a1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_186.png and b/TMessagesProj/src/main/assets/emoji/3_186.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_187.png b/TMessagesProj/src/main/assets/emoji/3_187.png index d76aef35c..7708a7668 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_187.png and b/TMessagesProj/src/main/assets/emoji/3_187.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_188.png b/TMessagesProj/src/main/assets/emoji/3_188.png index c02e235ab..aa4ae5098 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_188.png and b/TMessagesProj/src/main/assets/emoji/3_188.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_189.png b/TMessagesProj/src/main/assets/emoji/3_189.png index f9db5b7a3..8516f81a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_189.png and b/TMessagesProj/src/main/assets/emoji/3_189.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_19.png b/TMessagesProj/src/main/assets/emoji/3_19.png index 4cebeb21b..4e4b21515 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_19.png and b/TMessagesProj/src/main/assets/emoji/3_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_190.png b/TMessagesProj/src/main/assets/emoji/3_190.png index 
e8a38ba5d..773e9752a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_190.png and b/TMessagesProj/src/main/assets/emoji/3_190.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_191.png b/TMessagesProj/src/main/assets/emoji/3_191.png index 35b66ae3f..f9db5b7a3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_191.png and b/TMessagesProj/src/main/assets/emoji/3_191.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_192.png b/TMessagesProj/src/main/assets/emoji/3_192.png index f61286b58..e8a38ba5d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_192.png and b/TMessagesProj/src/main/assets/emoji/3_192.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_193.png b/TMessagesProj/src/main/assets/emoji/3_193.png index 082a9529b..35b66ae3f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_193.png and b/TMessagesProj/src/main/assets/emoji/3_193.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_194.png b/TMessagesProj/src/main/assets/emoji/3_194.png index 770387386..f61286b58 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_194.png and b/TMessagesProj/src/main/assets/emoji/3_194.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_195.png b/TMessagesProj/src/main/assets/emoji/3_195.png index 70fcb952e..082a9529b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_195.png and b/TMessagesProj/src/main/assets/emoji/3_195.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_196.png b/TMessagesProj/src/main/assets/emoji/3_196.png index aa2938a5e..770387386 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_196.png and b/TMessagesProj/src/main/assets/emoji/3_196.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_197.png b/TMessagesProj/src/main/assets/emoji/3_197.png index 3d8208a68..70fcb952e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_197.png and b/TMessagesProj/src/main/assets/emoji/3_197.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/3_198.png b/TMessagesProj/src/main/assets/emoji/3_198.png index a73a57943..aa2938a5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_198.png and b/TMessagesProj/src/main/assets/emoji/3_198.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_199.png b/TMessagesProj/src/main/assets/emoji/3_199.png index 2e4cb9d6a..3d8208a68 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_199.png and b/TMessagesProj/src/main/assets/emoji/3_199.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_2.png b/TMessagesProj/src/main/assets/emoji/3_2.png index d57d75181..6e9aa4359 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_2.png and b/TMessagesProj/src/main/assets/emoji/3_2.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_20.png b/TMessagesProj/src/main/assets/emoji/3_20.png index 26d57e925..df9617b2f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_20.png and b/TMessagesProj/src/main/assets/emoji/3_20.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_200.png b/TMessagesProj/src/main/assets/emoji/3_200.png index e50b1d0a6..a73a57943 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_200.png and b/TMessagesProj/src/main/assets/emoji/3_200.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_201.png b/TMessagesProj/src/main/assets/emoji/3_201.png index e52fe724e..2e4cb9d6a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_201.png and b/TMessagesProj/src/main/assets/emoji/3_201.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_202.png b/TMessagesProj/src/main/assets/emoji/3_202.png index 44833b5a5..e50b1d0a6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_202.png and b/TMessagesProj/src/main/assets/emoji/3_202.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_203.png b/TMessagesProj/src/main/assets/emoji/3_203.png index 7e69d9055..e52fe724e 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/3_203.png and b/TMessagesProj/src/main/assets/emoji/3_203.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_204.png b/TMessagesProj/src/main/assets/emoji/3_204.png index d85693509..44833b5a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_204.png and b/TMessagesProj/src/main/assets/emoji/3_204.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_205.png b/TMessagesProj/src/main/assets/emoji/3_205.png index e5dd9ba02..7e69d9055 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_205.png and b/TMessagesProj/src/main/assets/emoji/3_205.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_206.png b/TMessagesProj/src/main/assets/emoji/3_206.png index 5833c952e..d85693509 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_206.png and b/TMessagesProj/src/main/assets/emoji/3_206.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_207.png b/TMessagesProj/src/main/assets/emoji/3_207.png index 832059a73..e5dd9ba02 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_207.png and b/TMessagesProj/src/main/assets/emoji/3_207.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_208.png b/TMessagesProj/src/main/assets/emoji/3_208.png index d4003b917..39b8094d9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_208.png and b/TMessagesProj/src/main/assets/emoji/3_208.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_209.png b/TMessagesProj/src/main/assets/emoji/3_209.png index 76a548565..7b93a216c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_209.png and b/TMessagesProj/src/main/assets/emoji/3_209.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_21.png b/TMessagesProj/src/main/assets/emoji/3_21.png index 64df22901..3c72d7164 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_21.png and b/TMessagesProj/src/main/assets/emoji/3_21.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_210.png 
b/TMessagesProj/src/main/assets/emoji/3_210.png index 32b279d90..6bdf1aaa6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_210.png and b/TMessagesProj/src/main/assets/emoji/3_210.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_211.png b/TMessagesProj/src/main/assets/emoji/3_211.png index 7dce676ea..50144b953 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_211.png and b/TMessagesProj/src/main/assets/emoji/3_211.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_212.png b/TMessagesProj/src/main/assets/emoji/3_212.png index 9bed5e296..8d8f4654e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_212.png and b/TMessagesProj/src/main/assets/emoji/3_212.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_213.png b/TMessagesProj/src/main/assets/emoji/3_213.png index 142345f48..5d314704f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_213.png and b/TMessagesProj/src/main/assets/emoji/3_213.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_214.png b/TMessagesProj/src/main/assets/emoji/3_214.png index 36d8c48e9..570ea5763 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_214.png and b/TMessagesProj/src/main/assets/emoji/3_214.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_215.png b/TMessagesProj/src/main/assets/emoji/3_215.png index 786a1fce5..8b1734021 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_215.png and b/TMessagesProj/src/main/assets/emoji/3_215.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_216.png b/TMessagesProj/src/main/assets/emoji/3_216.png index 4bf27d6dc..aaada03fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_216.png and b/TMessagesProj/src/main/assets/emoji/3_216.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_217.png b/TMessagesProj/src/main/assets/emoji/3_217.png index b87580c19..70711de71 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_217.png and 
b/TMessagesProj/src/main/assets/emoji/3_217.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_218.png b/TMessagesProj/src/main/assets/emoji/3_218.png index e75c5c22b..1f47f2d72 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_218.png and b/TMessagesProj/src/main/assets/emoji/3_218.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_219.png b/TMessagesProj/src/main/assets/emoji/3_219.png index f1839e411..2ce741344 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_219.png and b/TMessagesProj/src/main/assets/emoji/3_219.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_22.png b/TMessagesProj/src/main/assets/emoji/3_22.png index ac579dd57..13c4aac59 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_22.png and b/TMessagesProj/src/main/assets/emoji/3_22.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_220.png b/TMessagesProj/src/main/assets/emoji/3_220.png index f7f8a7658..f19a47789 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_220.png and b/TMessagesProj/src/main/assets/emoji/3_220.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_221.png b/TMessagesProj/src/main/assets/emoji/3_221.png index d63de0494..796c5c2ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_221.png and b/TMessagesProj/src/main/assets/emoji/3_221.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_222.png b/TMessagesProj/src/main/assets/emoji/3_222.png index 8f608364b..f87472d6c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_222.png and b/TMessagesProj/src/main/assets/emoji/3_222.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_223.png b/TMessagesProj/src/main/assets/emoji/3_223.png index 19c284362..522b4e366 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_223.png and b/TMessagesProj/src/main/assets/emoji/3_223.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_224.png b/TMessagesProj/src/main/assets/emoji/3_224.png index 
49d071374..fab01fdfb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_224.png and b/TMessagesProj/src/main/assets/emoji/3_224.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_225.png b/TMessagesProj/src/main/assets/emoji/3_225.png index 80e3352e7..62b742079 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_225.png and b/TMessagesProj/src/main/assets/emoji/3_225.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_226.png b/TMessagesProj/src/main/assets/emoji/3_226.png index 57146730f..6f0b3045c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_226.png and b/TMessagesProj/src/main/assets/emoji/3_226.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_227.png b/TMessagesProj/src/main/assets/emoji/3_227.png index fc4e3b1c3..72c7c5393 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_227.png and b/TMessagesProj/src/main/assets/emoji/3_227.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_228.png b/TMessagesProj/src/main/assets/emoji/3_228.png index 8694b6a3a..d9a4fe7fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_228.png and b/TMessagesProj/src/main/assets/emoji/3_228.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_229.png b/TMessagesProj/src/main/assets/emoji/3_229.png index 4b76ca37b..f1099ef59 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_229.png and b/TMessagesProj/src/main/assets/emoji/3_229.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_23.png b/TMessagesProj/src/main/assets/emoji/3_23.png index e8cf4bc79..be7c48374 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_23.png and b/TMessagesProj/src/main/assets/emoji/3_23.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_230.png b/TMessagesProj/src/main/assets/emoji/3_230.png index 217a1752c..d6dfebfe9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_230.png and b/TMessagesProj/src/main/assets/emoji/3_230.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/3_231.png b/TMessagesProj/src/main/assets/emoji/3_231.png index dfce67694..ae995873e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_231.png and b/TMessagesProj/src/main/assets/emoji/3_231.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_232.png b/TMessagesProj/src/main/assets/emoji/3_232.png index 95ba9c264..02687cfa1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_232.png and b/TMessagesProj/src/main/assets/emoji/3_232.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_233.png b/TMessagesProj/src/main/assets/emoji/3_233.png index c58c7b083..7c10bb7ee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_233.png and b/TMessagesProj/src/main/assets/emoji/3_233.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_234.png b/TMessagesProj/src/main/assets/emoji/3_234.png index f3d531981..5ead944bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_234.png and b/TMessagesProj/src/main/assets/emoji/3_234.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_235.png b/TMessagesProj/src/main/assets/emoji/3_235.png index 0e17d823e..2fbc53ba5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_235.png and b/TMessagesProj/src/main/assets/emoji/3_235.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_236.png b/TMessagesProj/src/main/assets/emoji/3_236.png index 4a74cd183..5db368dc2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_236.png and b/TMessagesProj/src/main/assets/emoji/3_236.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_237.png b/TMessagesProj/src/main/assets/emoji/3_237.png index d5e66c53f..706ba0365 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_237.png and b/TMessagesProj/src/main/assets/emoji/3_237.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_238.png b/TMessagesProj/src/main/assets/emoji/3_238.png index 0a5ff70f0..f5b89ff7d 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/3_238.png and b/TMessagesProj/src/main/assets/emoji/3_238.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_239.png b/TMessagesProj/src/main/assets/emoji/3_239.png index ca6f2c4a1..480576d40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_239.png and b/TMessagesProj/src/main/assets/emoji/3_239.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_24.png b/TMessagesProj/src/main/assets/emoji/3_24.png index 98070d24e..51757cd3e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_24.png and b/TMessagesProj/src/main/assets/emoji/3_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_240.png b/TMessagesProj/src/main/assets/emoji/3_240.png index 524f8e89d..12cfeeefc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_240.png and b/TMessagesProj/src/main/assets/emoji/3_240.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_241.png b/TMessagesProj/src/main/assets/emoji/3_241.png index 803a779ec..e24445bb0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_241.png and b/TMessagesProj/src/main/assets/emoji/3_241.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_242.png b/TMessagesProj/src/main/assets/emoji/3_242.png index 465ca6aa9..2ed789699 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_242.png and b/TMessagesProj/src/main/assets/emoji/3_242.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_243.png b/TMessagesProj/src/main/assets/emoji/3_243.png index c885b9547..98f15b141 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_243.png and b/TMessagesProj/src/main/assets/emoji/3_243.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_244.png b/TMessagesProj/src/main/assets/emoji/3_244.png index 6f734b76c..6e9eacbe8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_244.png and b/TMessagesProj/src/main/assets/emoji/3_244.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_245.png 
b/TMessagesProj/src/main/assets/emoji/3_245.png index 933e399d6..f9d1585c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_245.png and b/TMessagesProj/src/main/assets/emoji/3_245.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_246.png b/TMessagesProj/src/main/assets/emoji/3_246.png index d74173547..6e8c1348f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_246.png and b/TMessagesProj/src/main/assets/emoji/3_246.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_247.png b/TMessagesProj/src/main/assets/emoji/3_247.png index 13540f97c..71a45907e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_247.png and b/TMessagesProj/src/main/assets/emoji/3_247.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_248.png b/TMessagesProj/src/main/assets/emoji/3_248.png index 59de2d2ad..be91da609 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_248.png and b/TMessagesProj/src/main/assets/emoji/3_248.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_249.png b/TMessagesProj/src/main/assets/emoji/3_249.png index 91a33edca..fb7d80535 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_249.png and b/TMessagesProj/src/main/assets/emoji/3_249.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_25.png b/TMessagesProj/src/main/assets/emoji/3_25.png index 293415d71..7ba0a7747 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_25.png and b/TMessagesProj/src/main/assets/emoji/3_25.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_250.png b/TMessagesProj/src/main/assets/emoji/3_250.png index b03b937c8..59bcaa6cb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_250.png and b/TMessagesProj/src/main/assets/emoji/3_250.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_251.png b/TMessagesProj/src/main/assets/emoji/3_251.png index 3f0b96917..8933213c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_251.png and 
b/TMessagesProj/src/main/assets/emoji/3_251.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_252.png b/TMessagesProj/src/main/assets/emoji/3_252.png index 8e6fa35be..c2261bd8a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_252.png and b/TMessagesProj/src/main/assets/emoji/3_252.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_253.png b/TMessagesProj/src/main/assets/emoji/3_253.png index 4a116afb8..f03da47b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_253.png and b/TMessagesProj/src/main/assets/emoji/3_253.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_254.png b/TMessagesProj/src/main/assets/emoji/3_254.png index 2c0ea2e91..91ac574a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_254.png and b/TMessagesProj/src/main/assets/emoji/3_254.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_255.png b/TMessagesProj/src/main/assets/emoji/3_255.png index 699985ce5..319e7c63d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_255.png and b/TMessagesProj/src/main/assets/emoji/3_255.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_256.png b/TMessagesProj/src/main/assets/emoji/3_256.png index 63aac9d85..c8aa1ec9a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_256.png and b/TMessagesProj/src/main/assets/emoji/3_256.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_257.png b/TMessagesProj/src/main/assets/emoji/3_257.png index 9c37ebe6a..6be250218 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_257.png and b/TMessagesProj/src/main/assets/emoji/3_257.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_258.png b/TMessagesProj/src/main/assets/emoji/3_258.png index a91059818..d08c4c9fb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_258.png and b/TMessagesProj/src/main/assets/emoji/3_258.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_259.png b/TMessagesProj/src/main/assets/emoji/3_259.png index 
1c31a3e07..223ac3c91 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_259.png and b/TMessagesProj/src/main/assets/emoji/3_259.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_26.png b/TMessagesProj/src/main/assets/emoji/3_26.png index b8a2bbfab..beca071e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_26.png and b/TMessagesProj/src/main/assets/emoji/3_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_260.png b/TMessagesProj/src/main/assets/emoji/3_260.png index 363407d0e..36fad3e7c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_260.png and b/TMessagesProj/src/main/assets/emoji/3_260.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_261.png b/TMessagesProj/src/main/assets/emoji/3_261.png index 55c4365a6..46b4f4247 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_261.png and b/TMessagesProj/src/main/assets/emoji/3_261.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_262.png b/TMessagesProj/src/main/assets/emoji/3_262.png index 808c37ba4..e4ae74bfd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_262.png and b/TMessagesProj/src/main/assets/emoji/3_262.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_263.png b/TMessagesProj/src/main/assets/emoji/3_263.png index b2e35db91..35bc0426b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_263.png and b/TMessagesProj/src/main/assets/emoji/3_263.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_264.png b/TMessagesProj/src/main/assets/emoji/3_264.png index 49ce4cfbc..0dd1bd0e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_264.png and b/TMessagesProj/src/main/assets/emoji/3_264.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_265.png b/TMessagesProj/src/main/assets/emoji/3_265.png index 19ff9e2ea..c28ebe110 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_265.png and b/TMessagesProj/src/main/assets/emoji/3_265.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/3_266.png b/TMessagesProj/src/main/assets/emoji/3_266.png index 8d87b77d6..0941197b8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_266.png and b/TMessagesProj/src/main/assets/emoji/3_266.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_267.png b/TMessagesProj/src/main/assets/emoji/3_267.png index 3f294e3da..41ed305bf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_267.png and b/TMessagesProj/src/main/assets/emoji/3_267.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_268.png b/TMessagesProj/src/main/assets/emoji/3_268.png index 08b216ad9..ade5fdfb9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_268.png and b/TMessagesProj/src/main/assets/emoji/3_268.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_269.png b/TMessagesProj/src/main/assets/emoji/3_269.png index ebc023545..e5d2f5928 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_269.png and b/TMessagesProj/src/main/assets/emoji/3_269.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_27.png b/TMessagesProj/src/main/assets/emoji/3_27.png index a4314983a..3d6d34d9c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_27.png and b/TMessagesProj/src/main/assets/emoji/3_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_270.png b/TMessagesProj/src/main/assets/emoji/3_270.png index eb109064d..6597088ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_270.png and b/TMessagesProj/src/main/assets/emoji/3_270.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_271.png b/TMessagesProj/src/main/assets/emoji/3_271.png index 82b177cda..725370bec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_271.png and b/TMessagesProj/src/main/assets/emoji/3_271.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_272.png b/TMessagesProj/src/main/assets/emoji/3_272.png index 24fb6919f..812e7a61c 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/3_272.png and b/TMessagesProj/src/main/assets/emoji/3_272.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_273.png b/TMessagesProj/src/main/assets/emoji/3_273.png index 37aaa1a25..d90629aa7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_273.png and b/TMessagesProj/src/main/assets/emoji/3_273.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_274.png b/TMessagesProj/src/main/assets/emoji/3_274.png index fa8b35d26..4ab457e94 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_274.png and b/TMessagesProj/src/main/assets/emoji/3_274.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_275.png b/TMessagesProj/src/main/assets/emoji/3_275.png index 3339702bd..8fdb32fbe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_275.png and b/TMessagesProj/src/main/assets/emoji/3_275.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_276.png b/TMessagesProj/src/main/assets/emoji/3_276.png index 718df8d1f..32136784b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_276.png and b/TMessagesProj/src/main/assets/emoji/3_276.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_277.png b/TMessagesProj/src/main/assets/emoji/3_277.png index 4a8e10293..947aba763 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_277.png and b/TMessagesProj/src/main/assets/emoji/3_277.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_278.png b/TMessagesProj/src/main/assets/emoji/3_278.png index fe263e302..5a4d346fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_278.png and b/TMessagesProj/src/main/assets/emoji/3_278.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_279.png b/TMessagesProj/src/main/assets/emoji/3_279.png index b27e1c788..2264edb05 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_279.png and b/TMessagesProj/src/main/assets/emoji/3_279.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_28.png 
b/TMessagesProj/src/main/assets/emoji/3_28.png index c69d0bba6..dfd6081f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_28.png and b/TMessagesProj/src/main/assets/emoji/3_28.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_280.png b/TMessagesProj/src/main/assets/emoji/3_280.png index d77f3c237..fc3797841 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_280.png and b/TMessagesProj/src/main/assets/emoji/3_280.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_281.png b/TMessagesProj/src/main/assets/emoji/3_281.png index f9fc8e0fe..c0dd76841 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_281.png and b/TMessagesProj/src/main/assets/emoji/3_281.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_282.png b/TMessagesProj/src/main/assets/emoji/3_282.png index 65a34762d..8f79bd982 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_282.png and b/TMessagesProj/src/main/assets/emoji/3_282.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_283.png b/TMessagesProj/src/main/assets/emoji/3_283.png index cb61d1560..a24568d68 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_283.png and b/TMessagesProj/src/main/assets/emoji/3_283.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_284.png b/TMessagesProj/src/main/assets/emoji/3_284.png index 7461b2081..ecedf6167 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_284.png and b/TMessagesProj/src/main/assets/emoji/3_284.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_285.png b/TMessagesProj/src/main/assets/emoji/3_285.png index f0998a632..29610a364 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_285.png and b/TMessagesProj/src/main/assets/emoji/3_285.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_286.png b/TMessagesProj/src/main/assets/emoji/3_286.png index dd4461205..3ea923c0d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_286.png and 
b/TMessagesProj/src/main/assets/emoji/3_286.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_287.png b/TMessagesProj/src/main/assets/emoji/3_287.png index 401b66fdb..67dcf9d93 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_287.png and b/TMessagesProj/src/main/assets/emoji/3_287.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_288.png b/TMessagesProj/src/main/assets/emoji/3_288.png index aa5706c43..bc00c93cb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_288.png and b/TMessagesProj/src/main/assets/emoji/3_288.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_289.png b/TMessagesProj/src/main/assets/emoji/3_289.png index e905551a9..cf6c48d93 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_289.png and b/TMessagesProj/src/main/assets/emoji/3_289.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_29.png b/TMessagesProj/src/main/assets/emoji/3_29.png index f8e321421..477797db6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_29.png and b/TMessagesProj/src/main/assets/emoji/3_29.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_290.png b/TMessagesProj/src/main/assets/emoji/3_290.png index dc2917bdf..4f13473ce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_290.png and b/TMessagesProj/src/main/assets/emoji/3_290.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_291.png b/TMessagesProj/src/main/assets/emoji/3_291.png index 3ad92cfcd..39df2e825 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_291.png and b/TMessagesProj/src/main/assets/emoji/3_291.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_292.png b/TMessagesProj/src/main/assets/emoji/3_292.png index 43025cd81..a5843536a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_292.png and b/TMessagesProj/src/main/assets/emoji/3_292.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_293.png b/TMessagesProj/src/main/assets/emoji/3_293.png index 
b59d4e350..c3814e3d3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_293.png and b/TMessagesProj/src/main/assets/emoji/3_293.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_294.png b/TMessagesProj/src/main/assets/emoji/3_294.png index 94b7438fd..07b969692 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_294.png and b/TMessagesProj/src/main/assets/emoji/3_294.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_295.png b/TMessagesProj/src/main/assets/emoji/3_295.png index e2144b6b3..92246376c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_295.png and b/TMessagesProj/src/main/assets/emoji/3_295.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_296.png b/TMessagesProj/src/main/assets/emoji/3_296.png index 532e7f5a0..45c6effbc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_296.png and b/TMessagesProj/src/main/assets/emoji/3_296.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_297.png b/TMessagesProj/src/main/assets/emoji/3_297.png index 9440a4be1..e56e0b3f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_297.png and b/TMessagesProj/src/main/assets/emoji/3_297.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_298.png b/TMessagesProj/src/main/assets/emoji/3_298.png index aa5a26535..7c414fc2f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_298.png and b/TMessagesProj/src/main/assets/emoji/3_298.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_299.png b/TMessagesProj/src/main/assets/emoji/3_299.png index 76977d306..686a8303e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_299.png and b/TMessagesProj/src/main/assets/emoji/3_299.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_3.png b/TMessagesProj/src/main/assets/emoji/3_3.png index 59d84b9f8..0e71acad2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_3.png and b/TMessagesProj/src/main/assets/emoji/3_3.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/3_30.png b/TMessagesProj/src/main/assets/emoji/3_30.png index 86fcece32..7152f6dc0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_30.png and b/TMessagesProj/src/main/assets/emoji/3_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_300.png b/TMessagesProj/src/main/assets/emoji/3_300.png index 8dc3b65fe..6054a95ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_300.png and b/TMessagesProj/src/main/assets/emoji/3_300.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_301.png b/TMessagesProj/src/main/assets/emoji/3_301.png index ff604eee4..f27dabb90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_301.png and b/TMessagesProj/src/main/assets/emoji/3_301.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_302.png b/TMessagesProj/src/main/assets/emoji/3_302.png index baf47db8d..21b652972 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_302.png and b/TMessagesProj/src/main/assets/emoji/3_302.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_303.png b/TMessagesProj/src/main/assets/emoji/3_303.png index 2534194e9..1443cf837 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_303.png and b/TMessagesProj/src/main/assets/emoji/3_303.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_304.png b/TMessagesProj/src/main/assets/emoji/3_304.png index 1908f9930..adb080d8a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_304.png and b/TMessagesProj/src/main/assets/emoji/3_304.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_305.png b/TMessagesProj/src/main/assets/emoji/3_305.png index 2bb681756..9ff20dd17 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_305.png and b/TMessagesProj/src/main/assets/emoji/3_305.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_306.png b/TMessagesProj/src/main/assets/emoji/3_306.png index 9638201a8..ef8ee36e7 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/3_306.png and b/TMessagesProj/src/main/assets/emoji/3_306.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_307.png b/TMessagesProj/src/main/assets/emoji/3_307.png index 8cc75704b..db110ab48 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_307.png and b/TMessagesProj/src/main/assets/emoji/3_307.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_308.png b/TMessagesProj/src/main/assets/emoji/3_308.png index aa339c25a..32e76777d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_308.png and b/TMessagesProj/src/main/assets/emoji/3_308.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_309.png b/TMessagesProj/src/main/assets/emoji/3_309.png index 6ce80d48a..d66a6f92e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_309.png and b/TMessagesProj/src/main/assets/emoji/3_309.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_31.png b/TMessagesProj/src/main/assets/emoji/3_31.png index 6950c2bd7..f4d3bd6f8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_31.png and b/TMessagesProj/src/main/assets/emoji/3_31.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_310.png b/TMessagesProj/src/main/assets/emoji/3_310.png index d5a621354..3e3160373 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_310.png and b/TMessagesProj/src/main/assets/emoji/3_310.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_311.png b/TMessagesProj/src/main/assets/emoji/3_311.png index 751bf58b9..85162cdb1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_311.png and b/TMessagesProj/src/main/assets/emoji/3_311.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_312.png b/TMessagesProj/src/main/assets/emoji/3_312.png index cb5404350..4f66cd74a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_312.png and b/TMessagesProj/src/main/assets/emoji/3_312.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_313.png 
b/TMessagesProj/src/main/assets/emoji/3_313.png index 9cd483223..1b95b0720 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_313.png and b/TMessagesProj/src/main/assets/emoji/3_313.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_314.png b/TMessagesProj/src/main/assets/emoji/3_314.png index b7254916c..cb3744c45 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_314.png and b/TMessagesProj/src/main/assets/emoji/3_314.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_315.png b/TMessagesProj/src/main/assets/emoji/3_315.png index 42dad6315..09c505141 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_315.png and b/TMessagesProj/src/main/assets/emoji/3_315.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_316.png b/TMessagesProj/src/main/assets/emoji/3_316.png index aa94fa002..b7254916c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_316.png and b/TMessagesProj/src/main/assets/emoji/3_316.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_317.png b/TMessagesProj/src/main/assets/emoji/3_317.png index b4eee55fd..17e3ffa7d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_317.png and b/TMessagesProj/src/main/assets/emoji/3_317.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_318.png b/TMessagesProj/src/main/assets/emoji/3_318.png index b9f97b6a7..a284c61d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_318.png and b/TMessagesProj/src/main/assets/emoji/3_318.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_319.png b/TMessagesProj/src/main/assets/emoji/3_319.png index bb2c29ea5..c966c2c24 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_319.png and b/TMessagesProj/src/main/assets/emoji/3_319.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_32.png b/TMessagesProj/src/main/assets/emoji/3_32.png index 32d9a1058..4ef5dd2aa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_32.png and 
b/TMessagesProj/src/main/assets/emoji/3_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_320.png b/TMessagesProj/src/main/assets/emoji/3_320.png index 6412c6e09..d18d85d9c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_320.png and b/TMessagesProj/src/main/assets/emoji/3_320.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_321.png b/TMessagesProj/src/main/assets/emoji/3_321.png index 54040626c..924f9eb11 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_321.png and b/TMessagesProj/src/main/assets/emoji/3_321.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_322.png b/TMessagesProj/src/main/assets/emoji/3_322.png index 06b287951..d169e7f40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_322.png and b/TMessagesProj/src/main/assets/emoji/3_322.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_323.png b/TMessagesProj/src/main/assets/emoji/3_323.png index b62320fde..a795fb9b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_323.png and b/TMessagesProj/src/main/assets/emoji/3_323.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_324.png b/TMessagesProj/src/main/assets/emoji/3_324.png index 032431b2a..11433453e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_324.png and b/TMessagesProj/src/main/assets/emoji/3_324.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_325.png b/TMessagesProj/src/main/assets/emoji/3_325.png index 5e4c65817..d4cdb4691 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_325.png and b/TMessagesProj/src/main/assets/emoji/3_325.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_326.png b/TMessagesProj/src/main/assets/emoji/3_326.png index 48f934813..32051b0f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_326.png and b/TMessagesProj/src/main/assets/emoji/3_326.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_327.png b/TMessagesProj/src/main/assets/emoji/3_327.png index 
bf8ae1c20..46b3fce88 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_327.png and b/TMessagesProj/src/main/assets/emoji/3_327.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_328.png b/TMessagesProj/src/main/assets/emoji/3_328.png new file mode 100644 index 000000000..032431b2a Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/3_328.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_329.png b/TMessagesProj/src/main/assets/emoji/3_329.png new file mode 100644 index 000000000..45ead3d01 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/3_329.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_33.png b/TMessagesProj/src/main/assets/emoji/3_33.png index 64d5cfe5d..027c9a0f6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_33.png and b/TMessagesProj/src/main/assets/emoji/3_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_330.png b/TMessagesProj/src/main/assets/emoji/3_330.png new file mode 100644 index 000000000..8d1e99818 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/3_330.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_331.png b/TMessagesProj/src/main/assets/emoji/3_331.png new file mode 100644 index 000000000..46ebd748b Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/3_331.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_34.png b/TMessagesProj/src/main/assets/emoji/3_34.png index e033f0100..63155d0ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_34.png and b/TMessagesProj/src/main/assets/emoji/3_34.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_35.png b/TMessagesProj/src/main/assets/emoji/3_35.png index b0fcd706d..517b8253d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_35.png and b/TMessagesProj/src/main/assets/emoji/3_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_36.png b/TMessagesProj/src/main/assets/emoji/3_36.png index ade81bf09..78fd8f50a 
100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_36.png and b/TMessagesProj/src/main/assets/emoji/3_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_37.png b/TMessagesProj/src/main/assets/emoji/3_37.png index 422957da0..28f3dccc3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_37.png and b/TMessagesProj/src/main/assets/emoji/3_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_38.png b/TMessagesProj/src/main/assets/emoji/3_38.png index 43216d93b..47a550b3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_38.png and b/TMessagesProj/src/main/assets/emoji/3_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_39.png b/TMessagesProj/src/main/assets/emoji/3_39.png index b829c9ea5..d4451e7b5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_39.png and b/TMessagesProj/src/main/assets/emoji/3_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_4.png b/TMessagesProj/src/main/assets/emoji/3_4.png index db1a3c8f2..81e72846c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_4.png and b/TMessagesProj/src/main/assets/emoji/3_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_40.png b/TMessagesProj/src/main/assets/emoji/3_40.png index f0eb2e474..af1d4e3f6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_40.png and b/TMessagesProj/src/main/assets/emoji/3_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_41.png b/TMessagesProj/src/main/assets/emoji/3_41.png index 1cb5e2c9c..34d3b3509 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_41.png and b/TMessagesProj/src/main/assets/emoji/3_41.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_42.png b/TMessagesProj/src/main/assets/emoji/3_42.png index 8a709a786..6d3f00f8a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_42.png and b/TMessagesProj/src/main/assets/emoji/3_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_43.png 
b/TMessagesProj/src/main/assets/emoji/3_43.png index 9767d8693..b670fb50d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_43.png and b/TMessagesProj/src/main/assets/emoji/3_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_44.png b/TMessagesProj/src/main/assets/emoji/3_44.png index e92293194..65a863e63 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_44.png and b/TMessagesProj/src/main/assets/emoji/3_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_45.png b/TMessagesProj/src/main/assets/emoji/3_45.png index b2c8df9e1..3acac8deb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_45.png and b/TMessagesProj/src/main/assets/emoji/3_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_46.png b/TMessagesProj/src/main/assets/emoji/3_46.png index 4ad3a3638..a90d965e8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_46.png and b/TMessagesProj/src/main/assets/emoji/3_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_47.png b/TMessagesProj/src/main/assets/emoji/3_47.png index 0b7dc584d..60963afc3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_47.png and b/TMessagesProj/src/main/assets/emoji/3_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_48.png b/TMessagesProj/src/main/assets/emoji/3_48.png index 49e6c926f..0df49c728 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_48.png and b/TMessagesProj/src/main/assets/emoji/3_48.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_49.png b/TMessagesProj/src/main/assets/emoji/3_49.png index cedfe3e0d..4ef5af153 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_49.png and b/TMessagesProj/src/main/assets/emoji/3_49.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_5.png b/TMessagesProj/src/main/assets/emoji/3_5.png index 7df71154b..b4583786b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_5.png and b/TMessagesProj/src/main/assets/emoji/3_5.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/3_50.png b/TMessagesProj/src/main/assets/emoji/3_50.png index 3fca6daf6..86f668a8d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_50.png and b/TMessagesProj/src/main/assets/emoji/3_50.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_51.png b/TMessagesProj/src/main/assets/emoji/3_51.png index 8bf324e7f..0dfd90a43 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_51.png and b/TMessagesProj/src/main/assets/emoji/3_51.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_52.png b/TMessagesProj/src/main/assets/emoji/3_52.png index 7c40cd33d..9c693ba25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_52.png and b/TMessagesProj/src/main/assets/emoji/3_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_53.png b/TMessagesProj/src/main/assets/emoji/3_53.png index b23113af7..5958b968c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_53.png and b/TMessagesProj/src/main/assets/emoji/3_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_54.png b/TMessagesProj/src/main/assets/emoji/3_54.png index 303a2ea30..253557aa1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_54.png and b/TMessagesProj/src/main/assets/emoji/3_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_55.png b/TMessagesProj/src/main/assets/emoji/3_55.png index 6350e82bb..ea1f974e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_55.png and b/TMessagesProj/src/main/assets/emoji/3_55.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_56.png b/TMessagesProj/src/main/assets/emoji/3_56.png index e67ee5018..18ca2abf4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_56.png and b/TMessagesProj/src/main/assets/emoji/3_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_57.png b/TMessagesProj/src/main/assets/emoji/3_57.png index 048ec0d92..2b4eb7e22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_57.png and 
b/TMessagesProj/src/main/assets/emoji/3_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_58.png b/TMessagesProj/src/main/assets/emoji/3_58.png index c0c7cfabc..ef4d73a0d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_58.png and b/TMessagesProj/src/main/assets/emoji/3_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_59.png b/TMessagesProj/src/main/assets/emoji/3_59.png index c03006919..a624a787b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_59.png and b/TMessagesProj/src/main/assets/emoji/3_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_6.png b/TMessagesProj/src/main/assets/emoji/3_6.png index afaaf0331..c85cb19ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_6.png and b/TMessagesProj/src/main/assets/emoji/3_6.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_60.png b/TMessagesProj/src/main/assets/emoji/3_60.png index 7012fc5b7..bb14c8285 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_60.png and b/TMessagesProj/src/main/assets/emoji/3_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_61.png b/TMessagesProj/src/main/assets/emoji/3_61.png index 548c8fca7..c4f9350be 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_61.png and b/TMessagesProj/src/main/assets/emoji/3_61.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_62.png b/TMessagesProj/src/main/assets/emoji/3_62.png index c98c10dea..256917728 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_62.png and b/TMessagesProj/src/main/assets/emoji/3_62.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_63.png b/TMessagesProj/src/main/assets/emoji/3_63.png index b2e965fbc..81d587047 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_63.png and b/TMessagesProj/src/main/assets/emoji/3_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_64.png b/TMessagesProj/src/main/assets/emoji/3_64.png index 37fb320f2..20efae5c7 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/3_64.png and b/TMessagesProj/src/main/assets/emoji/3_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_65.png b/TMessagesProj/src/main/assets/emoji/3_65.png index 847e9eba0..fd24330ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_65.png and b/TMessagesProj/src/main/assets/emoji/3_65.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_66.png b/TMessagesProj/src/main/assets/emoji/3_66.png index 18098a7a2..63884a02a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_66.png and b/TMessagesProj/src/main/assets/emoji/3_66.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_67.png b/TMessagesProj/src/main/assets/emoji/3_67.png index 623bb0805..847e9eba0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_67.png and b/TMessagesProj/src/main/assets/emoji/3_67.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_68.png b/TMessagesProj/src/main/assets/emoji/3_68.png index e63861b56..65804e56c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_68.png and b/TMessagesProj/src/main/assets/emoji/3_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_69.png b/TMessagesProj/src/main/assets/emoji/3_69.png index a1c5f443f..14aa65193 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_69.png and b/TMessagesProj/src/main/assets/emoji/3_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_7.png b/TMessagesProj/src/main/assets/emoji/3_7.png index 00ef1d42e..1c2764970 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_7.png and b/TMessagesProj/src/main/assets/emoji/3_7.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_70.png b/TMessagesProj/src/main/assets/emoji/3_70.png index f5ed0ca31..d2158da66 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_70.png and b/TMessagesProj/src/main/assets/emoji/3_70.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_71.png 
b/TMessagesProj/src/main/assets/emoji/3_71.png index dc77e71e3..608d098c3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_71.png and b/TMessagesProj/src/main/assets/emoji/3_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_72.png b/TMessagesProj/src/main/assets/emoji/3_72.png index 185a15988..ef887f95d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_72.png and b/TMessagesProj/src/main/assets/emoji/3_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_73.png b/TMessagesProj/src/main/assets/emoji/3_73.png index 5396ed829..719fd6c2c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_73.png and b/TMessagesProj/src/main/assets/emoji/3_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_74.png b/TMessagesProj/src/main/assets/emoji/3_74.png index 0e60fafc7..b6c438ea9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_74.png and b/TMessagesProj/src/main/assets/emoji/3_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_75.png b/TMessagesProj/src/main/assets/emoji/3_75.png index 81fb6e118..54fe7deca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_75.png and b/TMessagesProj/src/main/assets/emoji/3_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_76.png b/TMessagesProj/src/main/assets/emoji/3_76.png index 8cfa2d13d..14a44457a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_76.png and b/TMessagesProj/src/main/assets/emoji/3_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_77.png b/TMessagesProj/src/main/assets/emoji/3_77.png index f11d2c6f1..cdff0e70c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_77.png and b/TMessagesProj/src/main/assets/emoji/3_77.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_78.png b/TMessagesProj/src/main/assets/emoji/3_78.png index 91d641800..8a8999f25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_78.png and b/TMessagesProj/src/main/assets/emoji/3_78.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/3_79.png b/TMessagesProj/src/main/assets/emoji/3_79.png index 913bc2cd5..8293346eb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_79.png and b/TMessagesProj/src/main/assets/emoji/3_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_8.png b/TMessagesProj/src/main/assets/emoji/3_8.png index fc6542b3c..e125a9027 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_8.png and b/TMessagesProj/src/main/assets/emoji/3_8.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_80.png b/TMessagesProj/src/main/assets/emoji/3_80.png index 2d2804ffe..a850b4f57 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_80.png and b/TMessagesProj/src/main/assets/emoji/3_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_81.png b/TMessagesProj/src/main/assets/emoji/3_81.png index fdd5db528..07559cbff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_81.png and b/TMessagesProj/src/main/assets/emoji/3_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_82.png b/TMessagesProj/src/main/assets/emoji/3_82.png index c40ba8ef3..f318567ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_82.png and b/TMessagesProj/src/main/assets/emoji/3_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_83.png b/TMessagesProj/src/main/assets/emoji/3_83.png index 9754fc880..9662a5fd9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_83.png and b/TMessagesProj/src/main/assets/emoji/3_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_84.png b/TMessagesProj/src/main/assets/emoji/3_84.png index c4cebcaf3..e5ba8f93c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_84.png and b/TMessagesProj/src/main/assets/emoji/3_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_85.png b/TMessagesProj/src/main/assets/emoji/3_85.png index f05ef824e..0408b2381 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_85.png and 
b/TMessagesProj/src/main/assets/emoji/3_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_86.png b/TMessagesProj/src/main/assets/emoji/3_86.png index 8927c2191..4edf621fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_86.png and b/TMessagesProj/src/main/assets/emoji/3_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_87.png b/TMessagesProj/src/main/assets/emoji/3_87.png index 624fa99e8..9be1ed167 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_87.png and b/TMessagesProj/src/main/assets/emoji/3_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_88.png b/TMessagesProj/src/main/assets/emoji/3_88.png index 0dde2d310..0ebb8c2a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_88.png and b/TMessagesProj/src/main/assets/emoji/3_88.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_89.png b/TMessagesProj/src/main/assets/emoji/3_89.png index d306420c4..4c16a7c32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_89.png and b/TMessagesProj/src/main/assets/emoji/3_89.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_9.png b/TMessagesProj/src/main/assets/emoji/3_9.png index 2e5446208..8f3b01df8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_9.png and b/TMessagesProj/src/main/assets/emoji/3_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_90.png b/TMessagesProj/src/main/assets/emoji/3_90.png index aabc122e0..886cd3635 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_90.png and b/TMessagesProj/src/main/assets/emoji/3_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_91.png b/TMessagesProj/src/main/assets/emoji/3_91.png index b86ba4b9d..2628c7d4c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_91.png and b/TMessagesProj/src/main/assets/emoji/3_91.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_92.png b/TMessagesProj/src/main/assets/emoji/3_92.png index 1b03865f5..f2ac60f99 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/3_92.png and b/TMessagesProj/src/main/assets/emoji/3_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_93.png b/TMessagesProj/src/main/assets/emoji/3_93.png index 11214b292..6dcb51d32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_93.png and b/TMessagesProj/src/main/assets/emoji/3_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_94.png b/TMessagesProj/src/main/assets/emoji/3_94.png index 6145e73d1..039597255 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_94.png and b/TMessagesProj/src/main/assets/emoji/3_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_95.png b/TMessagesProj/src/main/assets/emoji/3_95.png index c0bd92057..de30e8a94 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_95.png and b/TMessagesProj/src/main/assets/emoji/3_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_96.png b/TMessagesProj/src/main/assets/emoji/3_96.png index d15642fa1..90884dd3d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_96.png and b/TMessagesProj/src/main/assets/emoji/3_96.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_97.png b/TMessagesProj/src/main/assets/emoji/3_97.png index 1785a6fb9..8b3a7a77f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_97.png and b/TMessagesProj/src/main/assets/emoji/3_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_98.png b/TMessagesProj/src/main/assets/emoji/3_98.png index cf49b5bd2..c52179147 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_98.png and b/TMessagesProj/src/main/assets/emoji/3_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/3_99.png b/TMessagesProj/src/main/assets/emoji/3_99.png index 14ba97c35..58d4e1515 100644 Binary files a/TMessagesProj/src/main/assets/emoji/3_99.png and b/TMessagesProj/src/main/assets/emoji/3_99.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_0.png 
b/TMessagesProj/src/main/assets/emoji/4_0.png index 7e1134c8b..b322909f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_0.png and b/TMessagesProj/src/main/assets/emoji/4_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_1.png b/TMessagesProj/src/main/assets/emoji/4_1.png index 3d1f91660..7d8d4e788 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_1.png and b/TMessagesProj/src/main/assets/emoji/4_1.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_10.png b/TMessagesProj/src/main/assets/emoji/4_10.png index a4547a15a..09e1411d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_10.png and b/TMessagesProj/src/main/assets/emoji/4_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_100.png b/TMessagesProj/src/main/assets/emoji/4_100.png index a67aab413..552fd4905 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_100.png and b/TMessagesProj/src/main/assets/emoji/4_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_101.png b/TMessagesProj/src/main/assets/emoji/4_101.png index 5f6ec428c..7fa43077a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_101.png and b/TMessagesProj/src/main/assets/emoji/4_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_102.png b/TMessagesProj/src/main/assets/emoji/4_102.png index eff2aa592..82e0838f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_102.png and b/TMessagesProj/src/main/assets/emoji/4_102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_103.png b/TMessagesProj/src/main/assets/emoji/4_103.png index 0ddb5518d..5f601d51f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_103.png and b/TMessagesProj/src/main/assets/emoji/4_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_104.png b/TMessagesProj/src/main/assets/emoji/4_104.png index 8b9a856cb..58d6d7b8a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_104.png and 
b/TMessagesProj/src/main/assets/emoji/4_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_105.png b/TMessagesProj/src/main/assets/emoji/4_105.png index 63170c186..736ae77a8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_105.png and b/TMessagesProj/src/main/assets/emoji/4_105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_106.png b/TMessagesProj/src/main/assets/emoji/4_106.png index 770aee791..83d5edc47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_106.png and b/TMessagesProj/src/main/assets/emoji/4_106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_107.png b/TMessagesProj/src/main/assets/emoji/4_107.png index d1a81e20b..5b4a6b81a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_107.png and b/TMessagesProj/src/main/assets/emoji/4_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_108.png b/TMessagesProj/src/main/assets/emoji/4_108.png index 4be9d49ca..2a41d0874 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_108.png and b/TMessagesProj/src/main/assets/emoji/4_108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_109.png b/TMessagesProj/src/main/assets/emoji/4_109.png index afe338bcf..15449ca93 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_109.png and b/TMessagesProj/src/main/assets/emoji/4_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_11.png b/TMessagesProj/src/main/assets/emoji/4_11.png index dae331b06..9a32cff55 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_11.png and b/TMessagesProj/src/main/assets/emoji/4_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_110.png b/TMessagesProj/src/main/assets/emoji/4_110.png index f38d5f5bc..bd6d68b04 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_110.png and b/TMessagesProj/src/main/assets/emoji/4_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_111.png b/TMessagesProj/src/main/assets/emoji/4_111.png index 
6be5ff54f..b369e6b69 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_111.png and b/TMessagesProj/src/main/assets/emoji/4_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_112.png b/TMessagesProj/src/main/assets/emoji/4_112.png index f426462a8..3f2bf670e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_112.png and b/TMessagesProj/src/main/assets/emoji/4_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_113.png b/TMessagesProj/src/main/assets/emoji/4_113.png index e0e3cf1c2..8aa8ac99a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_113.png and b/TMessagesProj/src/main/assets/emoji/4_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_114.png b/TMessagesProj/src/main/assets/emoji/4_114.png index 7031b6acb..fea30de3a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_114.png and b/TMessagesProj/src/main/assets/emoji/4_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_115.png b/TMessagesProj/src/main/assets/emoji/4_115.png index 5d6d1afcb..2d71799a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_115.png and b/TMessagesProj/src/main/assets/emoji/4_115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_116.png b/TMessagesProj/src/main/assets/emoji/4_116.png index af7e49006..d5da8d565 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_116.png and b/TMessagesProj/src/main/assets/emoji/4_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_117.png b/TMessagesProj/src/main/assets/emoji/4_117.png index e3d908b37..4cba7412b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_117.png and b/TMessagesProj/src/main/assets/emoji/4_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_118.png b/TMessagesProj/src/main/assets/emoji/4_118.png index 862885f0f..8cdc514f6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_118.png and b/TMessagesProj/src/main/assets/emoji/4_118.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/4_119.png b/TMessagesProj/src/main/assets/emoji/4_119.png index 372cf7707..af7e49006 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_119.png and b/TMessagesProj/src/main/assets/emoji/4_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_12.png b/TMessagesProj/src/main/assets/emoji/4_12.png index afe9f8f5f..56805763a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_12.png and b/TMessagesProj/src/main/assets/emoji/4_12.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_120.png b/TMessagesProj/src/main/assets/emoji/4_120.png index 970f604c0..5ebd1a79e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_120.png and b/TMessagesProj/src/main/assets/emoji/4_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_121.png b/TMessagesProj/src/main/assets/emoji/4_121.png index c963b218b..0252d9314 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_121.png and b/TMessagesProj/src/main/assets/emoji/4_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_122.png b/TMessagesProj/src/main/assets/emoji/4_122.png index fc492d310..a0884a15e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_122.png and b/TMessagesProj/src/main/assets/emoji/4_122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_123.png b/TMessagesProj/src/main/assets/emoji/4_123.png index 2eeed00bd..c061b7e77 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_123.png and b/TMessagesProj/src/main/assets/emoji/4_123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_124.png b/TMessagesProj/src/main/assets/emoji/4_124.png index 48e7e0489..cf6c0d6d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_124.png and b/TMessagesProj/src/main/assets/emoji/4_124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_125.png b/TMessagesProj/src/main/assets/emoji/4_125.png new file mode 100644 index 000000000..1bf421542 Binary files /dev/null and 
b/TMessagesProj/src/main/assets/emoji/4_125.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_126.png b/TMessagesProj/src/main/assets/emoji/4_126.png new file mode 100644 index 000000000..620536314 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/4_126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_127.png b/TMessagesProj/src/main/assets/emoji/4_127.png new file mode 100644 index 000000000..55964a25a Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/4_127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_13.png b/TMessagesProj/src/main/assets/emoji/4_13.png index 3aa383c15..5e3bf4e71 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_13.png and b/TMessagesProj/src/main/assets/emoji/4_13.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_14.png b/TMessagesProj/src/main/assets/emoji/4_14.png index fe45dbc2c..f8b9e89cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_14.png and b/TMessagesProj/src/main/assets/emoji/4_14.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_15.png b/TMessagesProj/src/main/assets/emoji/4_15.png index 6bbbee574..26a4c7a26 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_15.png and b/TMessagesProj/src/main/assets/emoji/4_15.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_16.png b/TMessagesProj/src/main/assets/emoji/4_16.png index d35055b95..3c58f9f8b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_16.png and b/TMessagesProj/src/main/assets/emoji/4_16.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_17.png b/TMessagesProj/src/main/assets/emoji/4_17.png index eb197abef..3d70f0a00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_17.png and b/TMessagesProj/src/main/assets/emoji/4_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_18.png b/TMessagesProj/src/main/assets/emoji/4_18.png index 791a10990..22b70d2e0 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/4_18.png and b/TMessagesProj/src/main/assets/emoji/4_18.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_19.png b/TMessagesProj/src/main/assets/emoji/4_19.png index 49e726e12..238781d7e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_19.png and b/TMessagesProj/src/main/assets/emoji/4_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_2.png b/TMessagesProj/src/main/assets/emoji/4_2.png index d56d68d5a..d7a0c2b0e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_2.png and b/TMessagesProj/src/main/assets/emoji/4_2.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_20.png b/TMessagesProj/src/main/assets/emoji/4_20.png index 874c08a44..02ff4c982 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_20.png and b/TMessagesProj/src/main/assets/emoji/4_20.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_21.png b/TMessagesProj/src/main/assets/emoji/4_21.png index 60045fda1..874c08a44 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_21.png and b/TMessagesProj/src/main/assets/emoji/4_21.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_22.png b/TMessagesProj/src/main/assets/emoji/4_22.png index 36702a9c5..e0e02ae26 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_22.png and b/TMessagesProj/src/main/assets/emoji/4_22.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_23.png b/TMessagesProj/src/main/assets/emoji/4_23.png index 4a1774abb..0614e61b2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_23.png and b/TMessagesProj/src/main/assets/emoji/4_23.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_24.png b/TMessagesProj/src/main/assets/emoji/4_24.png index 1737ffc01..5a2c00ffb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_24.png and b/TMessagesProj/src/main/assets/emoji/4_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_25.png 
b/TMessagesProj/src/main/assets/emoji/4_25.png index 6026a06e6..e4c31de1b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_25.png and b/TMessagesProj/src/main/assets/emoji/4_25.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_26.png b/TMessagesProj/src/main/assets/emoji/4_26.png index 665dbd681..69bcee9e9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_26.png and b/TMessagesProj/src/main/assets/emoji/4_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_27.png b/TMessagesProj/src/main/assets/emoji/4_27.png index d232ddeff..bb2975841 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_27.png and b/TMessagesProj/src/main/assets/emoji/4_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_28.png b/TMessagesProj/src/main/assets/emoji/4_28.png index e9e5a60f5..6a4847aed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_28.png and b/TMessagesProj/src/main/assets/emoji/4_28.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_29.png b/TMessagesProj/src/main/assets/emoji/4_29.png index f0fb288f6..60bc12d78 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_29.png and b/TMessagesProj/src/main/assets/emoji/4_29.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_3.png b/TMessagesProj/src/main/assets/emoji/4_3.png index 121b65810..12622a88b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_3.png and b/TMessagesProj/src/main/assets/emoji/4_3.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_30.png b/TMessagesProj/src/main/assets/emoji/4_30.png index 3f22fc688..e8757fe8a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_30.png and b/TMessagesProj/src/main/assets/emoji/4_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_31.png b/TMessagesProj/src/main/assets/emoji/4_31.png index b0c11f308..3b3dbc902 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_31.png and b/TMessagesProj/src/main/assets/emoji/4_31.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/4_32.png b/TMessagesProj/src/main/assets/emoji/4_32.png index c551cc2ce..5b5a1fbaa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_32.png and b/TMessagesProj/src/main/assets/emoji/4_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_33.png b/TMessagesProj/src/main/assets/emoji/4_33.png index 28172e2a2..7827c4699 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_33.png and b/TMessagesProj/src/main/assets/emoji/4_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_34.png b/TMessagesProj/src/main/assets/emoji/4_34.png index d54c40680..efd49282c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_34.png and b/TMessagesProj/src/main/assets/emoji/4_34.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_35.png b/TMessagesProj/src/main/assets/emoji/4_35.png index 7e6cbb61d..3a5b16e55 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_35.png and b/TMessagesProj/src/main/assets/emoji/4_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_36.png b/TMessagesProj/src/main/assets/emoji/4_36.png index ce4dcf11b..bff9c0a46 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_36.png and b/TMessagesProj/src/main/assets/emoji/4_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_37.png b/TMessagesProj/src/main/assets/emoji/4_37.png index 676ee7dab..cfa8055c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_37.png and b/TMessagesProj/src/main/assets/emoji/4_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_38.png b/TMessagesProj/src/main/assets/emoji/4_38.png index 5820f80bc..89f130781 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_38.png and b/TMessagesProj/src/main/assets/emoji/4_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_39.png b/TMessagesProj/src/main/assets/emoji/4_39.png index e10153d34..9f9f0fb50 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_39.png and 
b/TMessagesProj/src/main/assets/emoji/4_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_4.png b/TMessagesProj/src/main/assets/emoji/4_4.png index be4441b96..0cb7b6917 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_4.png and b/TMessagesProj/src/main/assets/emoji/4_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_40.png b/TMessagesProj/src/main/assets/emoji/4_40.png index e063faa43..849574225 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_40.png and b/TMessagesProj/src/main/assets/emoji/4_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_41.png b/TMessagesProj/src/main/assets/emoji/4_41.png index a08af7626..3f619d9b2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_41.png and b/TMessagesProj/src/main/assets/emoji/4_41.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_42.png b/TMessagesProj/src/main/assets/emoji/4_42.png index 5af492df7..052d8ba7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_42.png and b/TMessagesProj/src/main/assets/emoji/4_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_43.png b/TMessagesProj/src/main/assets/emoji/4_43.png index a8550e40a..2b7c2365e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_43.png and b/TMessagesProj/src/main/assets/emoji/4_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_44.png b/TMessagesProj/src/main/assets/emoji/4_44.png index b78646e20..6762203ef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_44.png and b/TMessagesProj/src/main/assets/emoji/4_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_45.png b/TMessagesProj/src/main/assets/emoji/4_45.png index a4ac006c1..ad0bf5471 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_45.png and b/TMessagesProj/src/main/assets/emoji/4_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_46.png b/TMessagesProj/src/main/assets/emoji/4_46.png index 602aed8fe..44fe9232b 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/4_46.png and b/TMessagesProj/src/main/assets/emoji/4_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_47.png b/TMessagesProj/src/main/assets/emoji/4_47.png index 551361287..d2c401cd6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_47.png and b/TMessagesProj/src/main/assets/emoji/4_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_48.png b/TMessagesProj/src/main/assets/emoji/4_48.png index d54e89ab6..e9da2d1b0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_48.png and b/TMessagesProj/src/main/assets/emoji/4_48.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_49.png b/TMessagesProj/src/main/assets/emoji/4_49.png index 97c16058a..2d2cd89aa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_49.png and b/TMessagesProj/src/main/assets/emoji/4_49.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_5.png b/TMessagesProj/src/main/assets/emoji/4_5.png index 6fef615a9..501941547 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_5.png and b/TMessagesProj/src/main/assets/emoji/4_5.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_50.png b/TMessagesProj/src/main/assets/emoji/4_50.png index edc8ee44a..3fdae3f12 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_50.png and b/TMessagesProj/src/main/assets/emoji/4_50.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_51.png b/TMessagesProj/src/main/assets/emoji/4_51.png index 48c1f5b83..ea360691b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_51.png and b/TMessagesProj/src/main/assets/emoji/4_51.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_52.png b/TMessagesProj/src/main/assets/emoji/4_52.png index 75b970e42..0f3c676f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_52.png and b/TMessagesProj/src/main/assets/emoji/4_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_53.png 
b/TMessagesProj/src/main/assets/emoji/4_53.png index 38a19ba10..75b970e42 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_53.png and b/TMessagesProj/src/main/assets/emoji/4_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_54.png b/TMessagesProj/src/main/assets/emoji/4_54.png index 269ff39f2..84006fbb3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_54.png and b/TMessagesProj/src/main/assets/emoji/4_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_55.png b/TMessagesProj/src/main/assets/emoji/4_55.png index 3dc6c4852..fcb740f34 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_55.png and b/TMessagesProj/src/main/assets/emoji/4_55.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_56.png b/TMessagesProj/src/main/assets/emoji/4_56.png index 2d455b0d2..994bb360f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_56.png and b/TMessagesProj/src/main/assets/emoji/4_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_57.png b/TMessagesProj/src/main/assets/emoji/4_57.png index e6512da83..13a0bc7b8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_57.png and b/TMessagesProj/src/main/assets/emoji/4_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_58.png b/TMessagesProj/src/main/assets/emoji/4_58.png index bf3008f1a..b83347b81 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_58.png and b/TMessagesProj/src/main/assets/emoji/4_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_59.png b/TMessagesProj/src/main/assets/emoji/4_59.png index d81625ba1..c3f35eb12 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_59.png and b/TMessagesProj/src/main/assets/emoji/4_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_6.png b/TMessagesProj/src/main/assets/emoji/4_6.png index 4fcb0c5fa..d6caaaf78 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_6.png and b/TMessagesProj/src/main/assets/emoji/4_6.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/4_60.png b/TMessagesProj/src/main/assets/emoji/4_60.png index 51e18386e..c416c9b40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_60.png and b/TMessagesProj/src/main/assets/emoji/4_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_61.png b/TMessagesProj/src/main/assets/emoji/4_61.png index 0bd1045f7..a91f9b882 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_61.png and b/TMessagesProj/src/main/assets/emoji/4_61.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_62.png b/TMessagesProj/src/main/assets/emoji/4_62.png index 85f303c4b..21c41b4b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_62.png and b/TMessagesProj/src/main/assets/emoji/4_62.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_63.png b/TMessagesProj/src/main/assets/emoji/4_63.png index 56ebbe674..a1d6e6c51 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_63.png and b/TMessagesProj/src/main/assets/emoji/4_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_64.png b/TMessagesProj/src/main/assets/emoji/4_64.png index c0fafa09b..69bcce20e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_64.png and b/TMessagesProj/src/main/assets/emoji/4_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_65.png b/TMessagesProj/src/main/assets/emoji/4_65.png index d11f0779d..56ebbe674 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_65.png and b/TMessagesProj/src/main/assets/emoji/4_65.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_66.png b/TMessagesProj/src/main/assets/emoji/4_66.png index 5eba5f87d..692adc5ec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_66.png and b/TMessagesProj/src/main/assets/emoji/4_66.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_67.png b/TMessagesProj/src/main/assets/emoji/4_67.png index 303877019..2d6ce421d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_67.png and 
b/TMessagesProj/src/main/assets/emoji/4_67.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_68.png b/TMessagesProj/src/main/assets/emoji/4_68.png index 7004284a4..5ebb2ffaf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_68.png and b/TMessagesProj/src/main/assets/emoji/4_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_69.png b/TMessagesProj/src/main/assets/emoji/4_69.png index efdf606ae..835cbed64 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_69.png and b/TMessagesProj/src/main/assets/emoji/4_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_7.png b/TMessagesProj/src/main/assets/emoji/4_7.png index 13e72bc5f..8d4d3e139 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_7.png and b/TMessagesProj/src/main/assets/emoji/4_7.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_70.png b/TMessagesProj/src/main/assets/emoji/4_70.png index 32cad8f8e..545a6fa75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_70.png and b/TMessagesProj/src/main/assets/emoji/4_70.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_71.png b/TMessagesProj/src/main/assets/emoji/4_71.png index 2085388af..331c825a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_71.png and b/TMessagesProj/src/main/assets/emoji/4_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_72.png b/TMessagesProj/src/main/assets/emoji/4_72.png index 0fa7a8139..7c0495ccb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_72.png and b/TMessagesProj/src/main/assets/emoji/4_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_73.png b/TMessagesProj/src/main/assets/emoji/4_73.png index 8d00f1ed6..8c5b0bed0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_73.png and b/TMessagesProj/src/main/assets/emoji/4_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_74.png b/TMessagesProj/src/main/assets/emoji/4_74.png index 8815a1247..dc96a091e 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/4_74.png and b/TMessagesProj/src/main/assets/emoji/4_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_75.png b/TMessagesProj/src/main/assets/emoji/4_75.png index d5e477d21..11718d4ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_75.png and b/TMessagesProj/src/main/assets/emoji/4_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_76.png b/TMessagesProj/src/main/assets/emoji/4_76.png index 0cac8cf05..555291141 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_76.png and b/TMessagesProj/src/main/assets/emoji/4_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_77.png b/TMessagesProj/src/main/assets/emoji/4_77.png index 3e79b68bd..80010f68c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_77.png and b/TMessagesProj/src/main/assets/emoji/4_77.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_78.png b/TMessagesProj/src/main/assets/emoji/4_78.png index b013ae176..91a8194d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_78.png and b/TMessagesProj/src/main/assets/emoji/4_78.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_79.png b/TMessagesProj/src/main/assets/emoji/4_79.png index 400d78c1c..2516593f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_79.png and b/TMessagesProj/src/main/assets/emoji/4_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_8.png b/TMessagesProj/src/main/assets/emoji/4_8.png index defd5f5ce..b38d7817c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_8.png and b/TMessagesProj/src/main/assets/emoji/4_8.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_80.png b/TMessagesProj/src/main/assets/emoji/4_80.png index dd88b3a06..327f2845d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_80.png and b/TMessagesProj/src/main/assets/emoji/4_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_81.png 
b/TMessagesProj/src/main/assets/emoji/4_81.png index 84d15c6c9..92bf7cd84 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_81.png and b/TMessagesProj/src/main/assets/emoji/4_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_82.png b/TMessagesProj/src/main/assets/emoji/4_82.png index 29a4b937a..8bc7746b2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_82.png and b/TMessagesProj/src/main/assets/emoji/4_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_83.png b/TMessagesProj/src/main/assets/emoji/4_83.png index 6ca45ddf4..23131512a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_83.png and b/TMessagesProj/src/main/assets/emoji/4_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_84.png b/TMessagesProj/src/main/assets/emoji/4_84.png index e7be306dd..3f2344db9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_84.png and b/TMessagesProj/src/main/assets/emoji/4_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_85.png b/TMessagesProj/src/main/assets/emoji/4_85.png index 9f9ea5c9b..55f4cf355 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_85.png and b/TMessagesProj/src/main/assets/emoji/4_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_86.png b/TMessagesProj/src/main/assets/emoji/4_86.png index 563555760..d52c55b0f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_86.png and b/TMessagesProj/src/main/assets/emoji/4_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_87.png b/TMessagesProj/src/main/assets/emoji/4_87.png index 5382e32b5..e60a7ac6f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_87.png and b/TMessagesProj/src/main/assets/emoji/4_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_88.png b/TMessagesProj/src/main/assets/emoji/4_88.png index 28cb778cf..9a5b65f0b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_88.png and b/TMessagesProj/src/main/assets/emoji/4_88.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/4_89.png b/TMessagesProj/src/main/assets/emoji/4_89.png index 267b867a5..af9cb1974 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_89.png and b/TMessagesProj/src/main/assets/emoji/4_89.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_9.png b/TMessagesProj/src/main/assets/emoji/4_9.png index 1e3cfedf4..48c9c6226 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_9.png and b/TMessagesProj/src/main/assets/emoji/4_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_90.png b/TMessagesProj/src/main/assets/emoji/4_90.png index def31b819..da4afc434 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_90.png and b/TMessagesProj/src/main/assets/emoji/4_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_91.png b/TMessagesProj/src/main/assets/emoji/4_91.png index 06e01a39e..2adc49010 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_91.png and b/TMessagesProj/src/main/assets/emoji/4_91.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_92.png b/TMessagesProj/src/main/assets/emoji/4_92.png index 4ed594662..b48822c7f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_92.png and b/TMessagesProj/src/main/assets/emoji/4_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_93.png b/TMessagesProj/src/main/assets/emoji/4_93.png index b0116c735..e1a8335df 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_93.png and b/TMessagesProj/src/main/assets/emoji/4_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_94.png b/TMessagesProj/src/main/assets/emoji/4_94.png index 08a67e854..54b9b2759 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_94.png and b/TMessagesProj/src/main/assets/emoji/4_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_95.png b/TMessagesProj/src/main/assets/emoji/4_95.png index 80e43fe12..f993d8daf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_95.png and 
b/TMessagesProj/src/main/assets/emoji/4_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_96.png b/TMessagesProj/src/main/assets/emoji/4_96.png index 01f1de8b7..62bfe5af2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_96.png and b/TMessagesProj/src/main/assets/emoji/4_96.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_97.png b/TMessagesProj/src/main/assets/emoji/4_97.png index c479035c9..414f49afa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_97.png and b/TMessagesProj/src/main/assets/emoji/4_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_98.png b/TMessagesProj/src/main/assets/emoji/4_98.png index abf7dbcbd..239c6c7a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_98.png and b/TMessagesProj/src/main/assets/emoji/4_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/4_99.png b/TMessagesProj/src/main/assets/emoji/4_99.png index 590ca701d..bbe56ad85 100644 Binary files a/TMessagesProj/src/main/assets/emoji/4_99.png and b/TMessagesProj/src/main/assets/emoji/4_99.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_0.png b/TMessagesProj/src/main/assets/emoji/5_0.png index 5117aaf8d..866725d6a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_0.png and b/TMessagesProj/src/main/assets/emoji/5_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_1.png b/TMessagesProj/src/main/assets/emoji/5_1.png index 4cef481f1..6624522f3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_1.png and b/TMessagesProj/src/main/assets/emoji/5_1.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_10.png b/TMessagesProj/src/main/assets/emoji/5_10.png index ef88f0df7..093f28718 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_10.png and b/TMessagesProj/src/main/assets/emoji/5_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_100.png b/TMessagesProj/src/main/assets/emoji/5_100.png index db4b31a04..982920376 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/5_100.png and b/TMessagesProj/src/main/assets/emoji/5_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_101.png b/TMessagesProj/src/main/assets/emoji/5_101.png index 272987a66..d799fa2ad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_101.png and b/TMessagesProj/src/main/assets/emoji/5_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_102.png b/TMessagesProj/src/main/assets/emoji/5_102.png index 36171b004..42510a240 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_102.png and b/TMessagesProj/src/main/assets/emoji/5_102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_103.png b/TMessagesProj/src/main/assets/emoji/5_103.png index 0894290c5..1c22c74a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_103.png and b/TMessagesProj/src/main/assets/emoji/5_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_104.png b/TMessagesProj/src/main/assets/emoji/5_104.png index 0fecd47b6..1386ba7b0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_104.png and b/TMessagesProj/src/main/assets/emoji/5_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_105.png b/TMessagesProj/src/main/assets/emoji/5_105.png index 9d75fa0d7..a54f664aa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_105.png and b/TMessagesProj/src/main/assets/emoji/5_105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_106.png b/TMessagesProj/src/main/assets/emoji/5_106.png index ad977c695..3dbcfd090 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_106.png and b/TMessagesProj/src/main/assets/emoji/5_106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_107.png b/TMessagesProj/src/main/assets/emoji/5_107.png index ec636e11b..260dc2b92 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_107.png and b/TMessagesProj/src/main/assets/emoji/5_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_108.png 
b/TMessagesProj/src/main/assets/emoji/5_108.png index 93de83754..272987a66 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_108.png and b/TMessagesProj/src/main/assets/emoji/5_108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_109.png b/TMessagesProj/src/main/assets/emoji/5_109.png index 670cf0e93..63f85141e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_109.png and b/TMessagesProj/src/main/assets/emoji/5_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_11.png b/TMessagesProj/src/main/assets/emoji/5_11.png index c7c102bc2..edd374f24 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_11.png and b/TMessagesProj/src/main/assets/emoji/5_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_110.png b/TMessagesProj/src/main/assets/emoji/5_110.png index ae094bd60..e147a4812 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_110.png and b/TMessagesProj/src/main/assets/emoji/5_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_111.png b/TMessagesProj/src/main/assets/emoji/5_111.png index e44e9b6b9..7abb5b3e7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_111.png and b/TMessagesProj/src/main/assets/emoji/5_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_112.png b/TMessagesProj/src/main/assets/emoji/5_112.png index f6ab47f5e..67c8f8969 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_112.png and b/TMessagesProj/src/main/assets/emoji/5_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_113.png b/TMessagesProj/src/main/assets/emoji/5_113.png index 1aac3a666..184ef8d83 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_113.png and b/TMessagesProj/src/main/assets/emoji/5_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_114.png b/TMessagesProj/src/main/assets/emoji/5_114.png index 024a48549..1a7cc9a88 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_114.png and 
b/TMessagesProj/src/main/assets/emoji/5_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_115.png b/TMessagesProj/src/main/assets/emoji/5_115.png index 7c0c12921..0ff4930c6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_115.png and b/TMessagesProj/src/main/assets/emoji/5_115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_116.png b/TMessagesProj/src/main/assets/emoji/5_116.png index 76ee7cb18..dd0403052 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_116.png and b/TMessagesProj/src/main/assets/emoji/5_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_117.png b/TMessagesProj/src/main/assets/emoji/5_117.png index 0ecd7e34e..e381c4598 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_117.png and b/TMessagesProj/src/main/assets/emoji/5_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_118.png b/TMessagesProj/src/main/assets/emoji/5_118.png index e7d5651c8..08ffd96a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_118.png and b/TMessagesProj/src/main/assets/emoji/5_118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_119.png b/TMessagesProj/src/main/assets/emoji/5_119.png index dd58c1271..f6bdf1e87 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_119.png and b/TMessagesProj/src/main/assets/emoji/5_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_120.png b/TMessagesProj/src/main/assets/emoji/5_120.png index 69e546fea..f6ab47f5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_120.png and b/TMessagesProj/src/main/assets/emoji/5_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_121.png b/TMessagesProj/src/main/assets/emoji/5_121.png index d1332a6f7..3ce1c562d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_121.png and b/TMessagesProj/src/main/assets/emoji/5_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_122.png b/TMessagesProj/src/main/assets/emoji/5_122.png index 
8c9441266..1aac3a666 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_122.png and b/TMessagesProj/src/main/assets/emoji/5_122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_123.png b/TMessagesProj/src/main/assets/emoji/5_123.png index 710dadd78..a881b2676 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_123.png and b/TMessagesProj/src/main/assets/emoji/5_123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_124.png b/TMessagesProj/src/main/assets/emoji/5_124.png index 0c00d9fc6..be6a9811c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_124.png and b/TMessagesProj/src/main/assets/emoji/5_124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_125.png b/TMessagesProj/src/main/assets/emoji/5_125.png index 9f9b609b2..eebb8a64f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_125.png and b/TMessagesProj/src/main/assets/emoji/5_125.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_126.png b/TMessagesProj/src/main/assets/emoji/5_126.png index ad845e513..0ecd7e34e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_126.png and b/TMessagesProj/src/main/assets/emoji/5_126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_127.png b/TMessagesProj/src/main/assets/emoji/5_127.png index 87377011e..a9f014a25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_127.png and b/TMessagesProj/src/main/assets/emoji/5_127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_128.png b/TMessagesProj/src/main/assets/emoji/5_128.png index 8ae6de538..dd58c1271 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_128.png and b/TMessagesProj/src/main/assets/emoji/5_128.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_129.png b/TMessagesProj/src/main/assets/emoji/5_129.png index 549ff858e..fcfdfc993 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_129.png and b/TMessagesProj/src/main/assets/emoji/5_129.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/5_13.png b/TMessagesProj/src/main/assets/emoji/5_13.png index 72f726f8a..a05af4eeb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_13.png and b/TMessagesProj/src/main/assets/emoji/5_13.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_130.png b/TMessagesProj/src/main/assets/emoji/5_130.png index af47836c2..4a0dc9889 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_130.png and b/TMessagesProj/src/main/assets/emoji/5_130.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_131.png b/TMessagesProj/src/main/assets/emoji/5_131.png index bfc738ff8..128d933f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_131.png and b/TMessagesProj/src/main/assets/emoji/5_131.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_132.png b/TMessagesProj/src/main/assets/emoji/5_132.png index aa4532ba6..a908d4365 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_132.png and b/TMessagesProj/src/main/assets/emoji/5_132.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_133.png b/TMessagesProj/src/main/assets/emoji/5_133.png index 1818c8f99..69bfe48df 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_133.png and b/TMessagesProj/src/main/assets/emoji/5_133.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_134.png b/TMessagesProj/src/main/assets/emoji/5_134.png index 131dd7d86..39379bdff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_134.png and b/TMessagesProj/src/main/assets/emoji/5_134.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_135.png b/TMessagesProj/src/main/assets/emoji/5_135.png index 6991d0e32..81c70bbbd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_135.png and b/TMessagesProj/src/main/assets/emoji/5_135.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_136.png b/TMessagesProj/src/main/assets/emoji/5_136.png index 901a1eb7c..96f8618e9 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/5_136.png and b/TMessagesProj/src/main/assets/emoji/5_136.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_137.png b/TMessagesProj/src/main/assets/emoji/5_137.png index 79daa0ddc..6df113dee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_137.png and b/TMessagesProj/src/main/assets/emoji/5_137.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_138.png b/TMessagesProj/src/main/assets/emoji/5_138.png index c6166ecf6..52ab6e883 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_138.png and b/TMessagesProj/src/main/assets/emoji/5_138.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_139.png b/TMessagesProj/src/main/assets/emoji/5_139.png index b00ebe45a..039b2725a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_139.png and b/TMessagesProj/src/main/assets/emoji/5_139.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_14.png b/TMessagesProj/src/main/assets/emoji/5_14.png index d608f5d3d..b138a44a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_14.png and b/TMessagesProj/src/main/assets/emoji/5_14.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_140.png b/TMessagesProj/src/main/assets/emoji/5_140.png index 147f190b7..0c8ad72db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_140.png and b/TMessagesProj/src/main/assets/emoji/5_140.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_141.png b/TMessagesProj/src/main/assets/emoji/5_141.png index 960e2e817..6180a98f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_141.png and b/TMessagesProj/src/main/assets/emoji/5_141.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_142.png b/TMessagesProj/src/main/assets/emoji/5_142.png index 050476e52..dcf6f37da 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_142.png and b/TMessagesProj/src/main/assets/emoji/5_142.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_143.png 
b/TMessagesProj/src/main/assets/emoji/5_143.png index 5b90630bc..4a070f0f9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_143.png and b/TMessagesProj/src/main/assets/emoji/5_143.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_144.png b/TMessagesProj/src/main/assets/emoji/5_144.png index eaa56dd4c..eb55d2e8c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_144.png and b/TMessagesProj/src/main/assets/emoji/5_144.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_145.png b/TMessagesProj/src/main/assets/emoji/5_145.png index d4175b32d..a249ac8d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_145.png and b/TMessagesProj/src/main/assets/emoji/5_145.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_146.png b/TMessagesProj/src/main/assets/emoji/5_146.png index 5f7ada2e4..f4329e64e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_146.png and b/TMessagesProj/src/main/assets/emoji/5_146.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_147.png b/TMessagesProj/src/main/assets/emoji/5_147.png index 2d3792e9b..10c7952f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_147.png and b/TMessagesProj/src/main/assets/emoji/5_147.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_148.png b/TMessagesProj/src/main/assets/emoji/5_148.png index e773f96f9..131dd7d86 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_148.png and b/TMessagesProj/src/main/assets/emoji/5_148.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_149.png b/TMessagesProj/src/main/assets/emoji/5_149.png index f68d226d5..50f735de8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_149.png and b/TMessagesProj/src/main/assets/emoji/5_149.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_15.png b/TMessagesProj/src/main/assets/emoji/5_15.png index 6a8e5c709..e01a04fe7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_15.png and 
b/TMessagesProj/src/main/assets/emoji/5_15.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_150.png b/TMessagesProj/src/main/assets/emoji/5_150.png index fcc2a2b02..334e7fd98 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_150.png and b/TMessagesProj/src/main/assets/emoji/5_150.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_151.png b/TMessagesProj/src/main/assets/emoji/5_151.png index 5ed7eddd4..eb1daa9b6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_151.png and b/TMessagesProj/src/main/assets/emoji/5_151.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_152.png b/TMessagesProj/src/main/assets/emoji/5_152.png index 91f876d0e..a7a6e5e80 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_152.png and b/TMessagesProj/src/main/assets/emoji/5_152.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_153.png b/TMessagesProj/src/main/assets/emoji/5_153.png index 4dceb0c82..7bda40470 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_153.png and b/TMessagesProj/src/main/assets/emoji/5_153.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_154.png b/TMessagesProj/src/main/assets/emoji/5_154.png index a8574f2fd..870d99199 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_154.png and b/TMessagesProj/src/main/assets/emoji/5_154.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_155.png b/TMessagesProj/src/main/assets/emoji/5_155.png index 2a06d433a..4e59322a5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_155.png and b/TMessagesProj/src/main/assets/emoji/5_155.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_156.png b/TMessagesProj/src/main/assets/emoji/5_156.png index 41261fa97..050476e52 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_156.png and b/TMessagesProj/src/main/assets/emoji/5_156.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_157.png b/TMessagesProj/src/main/assets/emoji/5_157.png index 
f8c32a405..62d069ca3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_157.png and b/TMessagesProj/src/main/assets/emoji/5_157.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_158.png b/TMessagesProj/src/main/assets/emoji/5_158.png index fc399b2f9..eaa56dd4c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_158.png and b/TMessagesProj/src/main/assets/emoji/5_158.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_159.png b/TMessagesProj/src/main/assets/emoji/5_159.png index 476490513..6ec35dfc8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_159.png and b/TMessagesProj/src/main/assets/emoji/5_159.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_16.png b/TMessagesProj/src/main/assets/emoji/5_16.png index e8ec3b38d..083f427da 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_16.png and b/TMessagesProj/src/main/assets/emoji/5_16.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_160.png b/TMessagesProj/src/main/assets/emoji/5_160.png index 6743a837b..38c832603 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_160.png and b/TMessagesProj/src/main/assets/emoji/5_160.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_161.png b/TMessagesProj/src/main/assets/emoji/5_161.png index 1c118e484..9299fa726 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_161.png and b/TMessagesProj/src/main/assets/emoji/5_161.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_162.png b/TMessagesProj/src/main/assets/emoji/5_162.png index 671007109..cfc660171 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_162.png and b/TMessagesProj/src/main/assets/emoji/5_162.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_163.png b/TMessagesProj/src/main/assets/emoji/5_163.png index fcf96d016..1ac2c4bcf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_163.png and b/TMessagesProj/src/main/assets/emoji/5_163.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/5_164.png b/TMessagesProj/src/main/assets/emoji/5_164.png index 9caa973b8..e8eed9665 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_164.png and b/TMessagesProj/src/main/assets/emoji/5_164.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_165.png b/TMessagesProj/src/main/assets/emoji/5_165.png index c3c32866b..1660d9bc2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_165.png and b/TMessagesProj/src/main/assets/emoji/5_165.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_166.png b/TMessagesProj/src/main/assets/emoji/5_166.png index 18fc3d1df..f7910b5c9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_166.png and b/TMessagesProj/src/main/assets/emoji/5_166.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_167.png b/TMessagesProj/src/main/assets/emoji/5_167.png index c5766dd1a..91f876d0e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_167.png and b/TMessagesProj/src/main/assets/emoji/5_167.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_168.png b/TMessagesProj/src/main/assets/emoji/5_168.png index f31ed0a2c..4dceb0c82 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_168.png and b/TMessagesProj/src/main/assets/emoji/5_168.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_169.png b/TMessagesProj/src/main/assets/emoji/5_169.png index d44b50e5a..f35af9185 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_169.png and b/TMessagesProj/src/main/assets/emoji/5_169.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_17.png b/TMessagesProj/src/main/assets/emoji/5_17.png index 14fa50d6a..875efc3c2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_17.png and b/TMessagesProj/src/main/assets/emoji/5_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_170.png b/TMessagesProj/src/main/assets/emoji/5_170.png index 1c55c48e2..0bccc626f 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/5_170.png and b/TMessagesProj/src/main/assets/emoji/5_170.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_171.png b/TMessagesProj/src/main/assets/emoji/5_171.png index 3f3f298cc..58363eddd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_171.png and b/TMessagesProj/src/main/assets/emoji/5_171.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_172.png b/TMessagesProj/src/main/assets/emoji/5_172.png index 4ff41075c..589a7f5e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_172.png and b/TMessagesProj/src/main/assets/emoji/5_172.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_173.png b/TMessagesProj/src/main/assets/emoji/5_173.png index 5fec5ba78..9ffe1b7ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_173.png and b/TMessagesProj/src/main/assets/emoji/5_173.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_174.png b/TMessagesProj/src/main/assets/emoji/5_174.png index b93fae250..476490513 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_174.png and b/TMessagesProj/src/main/assets/emoji/5_174.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_175.png b/TMessagesProj/src/main/assets/emoji/5_175.png index 16729985b..367e12310 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_175.png and b/TMessagesProj/src/main/assets/emoji/5_175.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_176.png b/TMessagesProj/src/main/assets/emoji/5_176.png index 7d04f6cc0..67662fa4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_176.png and b/TMessagesProj/src/main/assets/emoji/5_176.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_177.png b/TMessagesProj/src/main/assets/emoji/5_177.png index 8c34db5ec..d7a71ddbb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_177.png and b/TMessagesProj/src/main/assets/emoji/5_177.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_178.png 
b/TMessagesProj/src/main/assets/emoji/5_178.png index ddb9dd3a5..fcf96d016 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_178.png and b/TMessagesProj/src/main/assets/emoji/5_178.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_179.png b/TMessagesProj/src/main/assets/emoji/5_179.png index 2f1c6127c..5d64e1e99 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_179.png and b/TMessagesProj/src/main/assets/emoji/5_179.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_18.png b/TMessagesProj/src/main/assets/emoji/5_18.png index 9cf00fde6..ea32e568f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_18.png and b/TMessagesProj/src/main/assets/emoji/5_18.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_180.png b/TMessagesProj/src/main/assets/emoji/5_180.png index f5dcf5832..64dbe2f75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_180.png and b/TMessagesProj/src/main/assets/emoji/5_180.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_181.png b/TMessagesProj/src/main/assets/emoji/5_181.png index 3309e2df1..f3396b0d5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_181.png and b/TMessagesProj/src/main/assets/emoji/5_181.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_182.png b/TMessagesProj/src/main/assets/emoji/5_182.png index 6fd93f396..96d955e09 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_182.png and b/TMessagesProj/src/main/assets/emoji/5_182.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_183.png b/TMessagesProj/src/main/assets/emoji/5_183.png index 5d32a6221..051c2ee76 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_183.png and b/TMessagesProj/src/main/assets/emoji/5_183.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_184.png b/TMessagesProj/src/main/assets/emoji/5_184.png index ed50f3cf2..e495ef681 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_184.png and 
b/TMessagesProj/src/main/assets/emoji/5_184.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_185.png b/TMessagesProj/src/main/assets/emoji/5_185.png index d807e2498..4fb92ad44 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_185.png and b/TMessagesProj/src/main/assets/emoji/5_185.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_186.png b/TMessagesProj/src/main/assets/emoji/5_186.png index 549eb4854..77d3a7474 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_186.png and b/TMessagesProj/src/main/assets/emoji/5_186.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_187.png b/TMessagesProj/src/main/assets/emoji/5_187.png index 40e4432fa..4c2925d00 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_187.png and b/TMessagesProj/src/main/assets/emoji/5_187.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_188.png b/TMessagesProj/src/main/assets/emoji/5_188.png index ad0e9e83c..06acf091a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_188.png and b/TMessagesProj/src/main/assets/emoji/5_188.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_189.png b/TMessagesProj/src/main/assets/emoji/5_189.png index fd13e02b8..d8849f8ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_189.png and b/TMessagesProj/src/main/assets/emoji/5_189.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_19.png b/TMessagesProj/src/main/assets/emoji/5_19.png index d997228cc..da1d9bb5a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_19.png and b/TMessagesProj/src/main/assets/emoji/5_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_190.png b/TMessagesProj/src/main/assets/emoji/5_190.png index 30102ffcb..a8dd2feea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_190.png and b/TMessagesProj/src/main/assets/emoji/5_190.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_191.png b/TMessagesProj/src/main/assets/emoji/5_191.png index 
797106c38..f4856617f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_191.png and b/TMessagesProj/src/main/assets/emoji/5_191.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_192.png b/TMessagesProj/src/main/assets/emoji/5_192.png index 9eefc5a65..44132520d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_192.png and b/TMessagesProj/src/main/assets/emoji/5_192.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_193.png b/TMessagesProj/src/main/assets/emoji/5_193.png index 0731e224a..983a3be41 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_193.png and b/TMessagesProj/src/main/assets/emoji/5_193.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_194.png b/TMessagesProj/src/main/assets/emoji/5_194.png index 2ea4e75b9..e5cc875fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_194.png and b/TMessagesProj/src/main/assets/emoji/5_194.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_195.png b/TMessagesProj/src/main/assets/emoji/5_195.png index c0a3351f7..b880bb12f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_195.png and b/TMessagesProj/src/main/assets/emoji/5_195.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_196.png b/TMessagesProj/src/main/assets/emoji/5_196.png index a7e715e20..858bfe0ff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_196.png and b/TMessagesProj/src/main/assets/emoji/5_196.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_197.png b/TMessagesProj/src/main/assets/emoji/5_197.png index a5dbd0ea1..f994fb16f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_197.png and b/TMessagesProj/src/main/assets/emoji/5_197.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_198.png b/TMessagesProj/src/main/assets/emoji/5_198.png index 428c6c55c..1f36d0c2d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_198.png and b/TMessagesProj/src/main/assets/emoji/5_198.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/5_199.png b/TMessagesProj/src/main/assets/emoji/5_199.png index a2b83c49f..c2e56e88e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_199.png and b/TMessagesProj/src/main/assets/emoji/5_199.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_2.png b/TMessagesProj/src/main/assets/emoji/5_2.png index ba852edd8..4f30e0cff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_2.png and b/TMessagesProj/src/main/assets/emoji/5_2.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_20.png b/TMessagesProj/src/main/assets/emoji/5_20.png index 1d3708b29..c225b0904 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_20.png and b/TMessagesProj/src/main/assets/emoji/5_20.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_200.png b/TMessagesProj/src/main/assets/emoji/5_200.png index 136777b8d..22286fbb5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_200.png and b/TMessagesProj/src/main/assets/emoji/5_200.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_201.png b/TMessagesProj/src/main/assets/emoji/5_201.png index 623c35c11..d83521925 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_201.png and b/TMessagesProj/src/main/assets/emoji/5_201.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_202.png b/TMessagesProj/src/main/assets/emoji/5_202.png index e758ffa17..1f16bd675 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_202.png and b/TMessagesProj/src/main/assets/emoji/5_202.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_203.png b/TMessagesProj/src/main/assets/emoji/5_203.png index 929a394f3..ad0e9e83c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_203.png and b/TMessagesProj/src/main/assets/emoji/5_203.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_204.png b/TMessagesProj/src/main/assets/emoji/5_204.png index e34fe3343..00c545103 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/5_204.png and b/TMessagesProj/src/main/assets/emoji/5_204.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_205.png b/TMessagesProj/src/main/assets/emoji/5_205.png index 5a4dd43ca..75fb54497 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_205.png and b/TMessagesProj/src/main/assets/emoji/5_205.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_206.png b/TMessagesProj/src/main/assets/emoji/5_206.png index cb09209ed..a4a0bf19b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_206.png and b/TMessagesProj/src/main/assets/emoji/5_206.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_207.png b/TMessagesProj/src/main/assets/emoji/5_207.png new file mode 100644 index 000000000..3d3b8621d Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_207.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_208.png b/TMessagesProj/src/main/assets/emoji/5_208.png new file mode 100644 index 000000000..0731e224a Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_208.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_209.png b/TMessagesProj/src/main/assets/emoji/5_209.png new file mode 100644 index 000000000..19f3e0d06 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_209.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_210.png b/TMessagesProj/src/main/assets/emoji/5_210.png new file mode 100644 index 000000000..327fde078 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_210.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_211.png b/TMessagesProj/src/main/assets/emoji/5_211.png new file mode 100644 index 000000000..e516a57f1 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_211.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_212.png b/TMessagesProj/src/main/assets/emoji/5_212.png new file mode 100644 index 000000000..e4a93f1d4 Binary files /dev/null and 
b/TMessagesProj/src/main/assets/emoji/5_212.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_213.png b/TMessagesProj/src/main/assets/emoji/5_213.png new file mode 100644 index 000000000..6833a538e Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_213.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_214.png b/TMessagesProj/src/main/assets/emoji/5_214.png new file mode 100644 index 000000000..462110644 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_214.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_215.png b/TMessagesProj/src/main/assets/emoji/5_215.png new file mode 100644 index 000000000..8277f9fee Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_215.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_216.png b/TMessagesProj/src/main/assets/emoji/5_216.png new file mode 100644 index 000000000..623c35c11 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_216.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_217.png b/TMessagesProj/src/main/assets/emoji/5_217.png new file mode 100644 index 000000000..e758ffa17 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_217.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_218.png b/TMessagesProj/src/main/assets/emoji/5_218.png new file mode 100644 index 000000000..4c14b8165 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_218.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_219.png b/TMessagesProj/src/main/assets/emoji/5_219.png new file mode 100644 index 000000000..04461962b Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_219.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_22.png b/TMessagesProj/src/main/assets/emoji/5_22.png index 56b0b5fb5..af9159578 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_22.png and b/TMessagesProj/src/main/assets/emoji/5_22.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/5_220.png b/TMessagesProj/src/main/assets/emoji/5_220.png new file mode 100644 index 000000000..2a3ea0457 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_220.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_221.png b/TMessagesProj/src/main/assets/emoji/5_221.png new file mode 100644 index 000000000..7e844aead Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/5_221.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_23.png b/TMessagesProj/src/main/assets/emoji/5_23.png index 57c915dbb..24fa4fe83 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_23.png and b/TMessagesProj/src/main/assets/emoji/5_23.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_24.png b/TMessagesProj/src/main/assets/emoji/5_24.png index 338c64cbb..4801a6937 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_24.png and b/TMessagesProj/src/main/assets/emoji/5_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_25.png b/TMessagesProj/src/main/assets/emoji/5_25.png index 51d3977d4..bcbae5a1b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_25.png and b/TMessagesProj/src/main/assets/emoji/5_25.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_26.png b/TMessagesProj/src/main/assets/emoji/5_26.png index 8004597b6..1db9e2c01 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_26.png and b/TMessagesProj/src/main/assets/emoji/5_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_27.png b/TMessagesProj/src/main/assets/emoji/5_27.png index 2d1da8631..fdeb4948a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_27.png and b/TMessagesProj/src/main/assets/emoji/5_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_28.png b/TMessagesProj/src/main/assets/emoji/5_28.png index 7ab22661c..dfcdb765b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_28.png and b/TMessagesProj/src/main/assets/emoji/5_28.png 
differ diff --git a/TMessagesProj/src/main/assets/emoji/5_29.png b/TMessagesProj/src/main/assets/emoji/5_29.png index 6f05928af..50cd33f2a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_29.png and b/TMessagesProj/src/main/assets/emoji/5_29.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_3.png b/TMessagesProj/src/main/assets/emoji/5_3.png index b1c691e76..8a139d6eb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_3.png and b/TMessagesProj/src/main/assets/emoji/5_3.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_30.png b/TMessagesProj/src/main/assets/emoji/5_30.png index 098e42abc..c9f3cb95d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_30.png and b/TMessagesProj/src/main/assets/emoji/5_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_31.png b/TMessagesProj/src/main/assets/emoji/5_31.png index 175a9d577..6da4c05df 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_31.png and b/TMessagesProj/src/main/assets/emoji/5_31.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_32.png b/TMessagesProj/src/main/assets/emoji/5_32.png index d873f30a6..2c4572791 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_32.png and b/TMessagesProj/src/main/assets/emoji/5_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_33.png b/TMessagesProj/src/main/assets/emoji/5_33.png index 4b5f5db51..b40a5b7bb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_33.png and b/TMessagesProj/src/main/assets/emoji/5_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_34.png b/TMessagesProj/src/main/assets/emoji/5_34.png index 2726cec84..c1e001d54 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_34.png and b/TMessagesProj/src/main/assets/emoji/5_34.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_35.png b/TMessagesProj/src/main/assets/emoji/5_35.png index 48610bfdf..824e98f12 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/5_35.png and b/TMessagesProj/src/main/assets/emoji/5_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_36.png b/TMessagesProj/src/main/assets/emoji/5_36.png index 65e2b4253..a7254b762 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_36.png and b/TMessagesProj/src/main/assets/emoji/5_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_37.png b/TMessagesProj/src/main/assets/emoji/5_37.png index 1496d9bd0..c3db1af0d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_37.png and b/TMessagesProj/src/main/assets/emoji/5_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_38.png b/TMessagesProj/src/main/assets/emoji/5_38.png index e87d177c8..6c91b86cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_38.png and b/TMessagesProj/src/main/assets/emoji/5_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_39.png b/TMessagesProj/src/main/assets/emoji/5_39.png index d7dbd8f87..8e11ea49d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_39.png and b/TMessagesProj/src/main/assets/emoji/5_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_4.png b/TMessagesProj/src/main/assets/emoji/5_4.png index c094f11be..dde2d9a69 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_4.png and b/TMessagesProj/src/main/assets/emoji/5_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_40.png b/TMessagesProj/src/main/assets/emoji/5_40.png index 80603f326..7199ecec7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_40.png and b/TMessagesProj/src/main/assets/emoji/5_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_41.png b/TMessagesProj/src/main/assets/emoji/5_41.png index 818a63ddb..081d6bfe6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_41.png and b/TMessagesProj/src/main/assets/emoji/5_41.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_42.png 
b/TMessagesProj/src/main/assets/emoji/5_42.png index 6161df9ff..8dc49a3eb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_42.png and b/TMessagesProj/src/main/assets/emoji/5_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_43.png b/TMessagesProj/src/main/assets/emoji/5_43.png index cbbc05785..afdae9a90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_43.png and b/TMessagesProj/src/main/assets/emoji/5_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_44.png b/TMessagesProj/src/main/assets/emoji/5_44.png index 64db03b4c..0ed2437a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_44.png and b/TMessagesProj/src/main/assets/emoji/5_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_45.png b/TMessagesProj/src/main/assets/emoji/5_45.png index 499fe3e00..287895906 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_45.png and b/TMessagesProj/src/main/assets/emoji/5_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_46.png b/TMessagesProj/src/main/assets/emoji/5_46.png index 2a4bafdbf..10efb75de 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_46.png and b/TMessagesProj/src/main/assets/emoji/5_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_47.png b/TMessagesProj/src/main/assets/emoji/5_47.png index c647563cc..a278fe2dd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_47.png and b/TMessagesProj/src/main/assets/emoji/5_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_48.png b/TMessagesProj/src/main/assets/emoji/5_48.png index b107dce34..906965d49 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_48.png and b/TMessagesProj/src/main/assets/emoji/5_48.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_49.png b/TMessagesProj/src/main/assets/emoji/5_49.png index 8420e3228..c1e8b8827 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_49.png and b/TMessagesProj/src/main/assets/emoji/5_49.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/5_5.png b/TMessagesProj/src/main/assets/emoji/5_5.png index a57352d0b..178928ea5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_5.png and b/TMessagesProj/src/main/assets/emoji/5_5.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_50.png b/TMessagesProj/src/main/assets/emoji/5_50.png index 5681fa636..ca6484492 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_50.png and b/TMessagesProj/src/main/assets/emoji/5_50.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_51.png b/TMessagesProj/src/main/assets/emoji/5_51.png index 75a1eec84..a04d49f8d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_51.png and b/TMessagesProj/src/main/assets/emoji/5_51.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_52.png b/TMessagesProj/src/main/assets/emoji/5_52.png index 83a139ab0..399c5bc72 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_52.png and b/TMessagesProj/src/main/assets/emoji/5_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_53.png b/TMessagesProj/src/main/assets/emoji/5_53.png index ea0973364..dbfe03180 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_53.png and b/TMessagesProj/src/main/assets/emoji/5_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_54.png b/TMessagesProj/src/main/assets/emoji/5_54.png index 994e5d0f0..824a3fa6a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_54.png and b/TMessagesProj/src/main/assets/emoji/5_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_55.png b/TMessagesProj/src/main/assets/emoji/5_55.png index 9e4e12361..0705c14b5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_55.png and b/TMessagesProj/src/main/assets/emoji/5_55.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_56.png b/TMessagesProj/src/main/assets/emoji/5_56.png index a65cb96f1..9e4e12361 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_56.png and 
b/TMessagesProj/src/main/assets/emoji/5_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_57.png b/TMessagesProj/src/main/assets/emoji/5_57.png index ebca4384d..3f4cc53f9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_57.png and b/TMessagesProj/src/main/assets/emoji/5_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_58.png b/TMessagesProj/src/main/assets/emoji/5_58.png index ecfab680b..314f3da17 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_58.png and b/TMessagesProj/src/main/assets/emoji/5_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_59.png b/TMessagesProj/src/main/assets/emoji/5_59.png index 6152ae8f5..24c211129 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_59.png and b/TMessagesProj/src/main/assets/emoji/5_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_60.png b/TMessagesProj/src/main/assets/emoji/5_60.png index d8475519d..368715c6a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_60.png and b/TMessagesProj/src/main/assets/emoji/5_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_61.png b/TMessagesProj/src/main/assets/emoji/5_61.png index 8a188d257..eee2fcce1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_61.png and b/TMessagesProj/src/main/assets/emoji/5_61.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_62.png b/TMessagesProj/src/main/assets/emoji/5_62.png index 6701cdcd6..e9e18a830 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_62.png and b/TMessagesProj/src/main/assets/emoji/5_62.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_63.png b/TMessagesProj/src/main/assets/emoji/5_63.png index 9f2fa7b34..f0eed666e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_63.png and b/TMessagesProj/src/main/assets/emoji/5_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_64.png b/TMessagesProj/src/main/assets/emoji/5_64.png index 15fb94634..d8f3da8ce 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/5_64.png and b/TMessagesProj/src/main/assets/emoji/5_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_65.png b/TMessagesProj/src/main/assets/emoji/5_65.png index 74629c5b8..303c51170 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_65.png and b/TMessagesProj/src/main/assets/emoji/5_65.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_66.png b/TMessagesProj/src/main/assets/emoji/5_66.png index 195535332..f5920cf84 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_66.png and b/TMessagesProj/src/main/assets/emoji/5_66.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_67.png b/TMessagesProj/src/main/assets/emoji/5_67.png index f271b91c7..0a823528c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_67.png and b/TMessagesProj/src/main/assets/emoji/5_67.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_68.png b/TMessagesProj/src/main/assets/emoji/5_68.png index 0fb14d09e..f59426ac5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_68.png and b/TMessagesProj/src/main/assets/emoji/5_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_69.png b/TMessagesProj/src/main/assets/emoji/5_69.png index 03cb02002..668cf2e4d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_69.png and b/TMessagesProj/src/main/assets/emoji/5_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_7.png b/TMessagesProj/src/main/assets/emoji/5_7.png index a1ac929b3..82f173d1f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_7.png and b/TMessagesProj/src/main/assets/emoji/5_7.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_70.png b/TMessagesProj/src/main/assets/emoji/5_70.png index a24c551fb..2be67feb5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_70.png and b/TMessagesProj/src/main/assets/emoji/5_70.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_71.png 
b/TMessagesProj/src/main/assets/emoji/5_71.png index 4c2dd7043..c197c8f73 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_71.png and b/TMessagesProj/src/main/assets/emoji/5_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_72.png b/TMessagesProj/src/main/assets/emoji/5_72.png index e1de91c3c..f271b91c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_72.png and b/TMessagesProj/src/main/assets/emoji/5_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_73.png b/TMessagesProj/src/main/assets/emoji/5_73.png index 7fc29ff63..0bb11a7d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_73.png and b/TMessagesProj/src/main/assets/emoji/5_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_74.png b/TMessagesProj/src/main/assets/emoji/5_74.png index 7ea02e82c..ef91e5929 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_74.png and b/TMessagesProj/src/main/assets/emoji/5_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_75.png b/TMessagesProj/src/main/assets/emoji/5_75.png index e2eb0f1e5..af05b76ec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_75.png and b/TMessagesProj/src/main/assets/emoji/5_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_76.png b/TMessagesProj/src/main/assets/emoji/5_76.png index 7493b46cb..8855576a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_76.png and b/TMessagesProj/src/main/assets/emoji/5_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_77.png b/TMessagesProj/src/main/assets/emoji/5_77.png index a08c0c4bd..72b78ab42 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_77.png and b/TMessagesProj/src/main/assets/emoji/5_77.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_78.png b/TMessagesProj/src/main/assets/emoji/5_78.png index 2609904fe..faaffd70b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_78.png and b/TMessagesProj/src/main/assets/emoji/5_78.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/5_79.png b/TMessagesProj/src/main/assets/emoji/5_79.png index ab7a9c493..6a0bdeb95 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_79.png and b/TMessagesProj/src/main/assets/emoji/5_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_8.png b/TMessagesProj/src/main/assets/emoji/5_8.png index 90ad4712e..65f1bfdc6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_8.png and b/TMessagesProj/src/main/assets/emoji/5_8.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_80.png b/TMessagesProj/src/main/assets/emoji/5_80.png index fdce19584..e2eb0f1e5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_80.png and b/TMessagesProj/src/main/assets/emoji/5_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_81.png b/TMessagesProj/src/main/assets/emoji/5_81.png index c534c2d2a..ea77f6f73 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_81.png and b/TMessagesProj/src/main/assets/emoji/5_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_82.png b/TMessagesProj/src/main/assets/emoji/5_82.png index efa45b774..246e1dfd5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_82.png and b/TMessagesProj/src/main/assets/emoji/5_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_83.png b/TMessagesProj/src/main/assets/emoji/5_83.png index c5f48de1f..ce517680b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_83.png and b/TMessagesProj/src/main/assets/emoji/5_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_84.png b/TMessagesProj/src/main/assets/emoji/5_84.png index 118dc1296..f96ebf003 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_84.png and b/TMessagesProj/src/main/assets/emoji/5_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_85.png b/TMessagesProj/src/main/assets/emoji/5_85.png index 139b44199..d72535b60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_85.png and 
b/TMessagesProj/src/main/assets/emoji/5_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_86.png b/TMessagesProj/src/main/assets/emoji/5_86.png index 6b6b2576c..a009842bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_86.png and b/TMessagesProj/src/main/assets/emoji/5_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_87.png b/TMessagesProj/src/main/assets/emoji/5_87.png index ceba94121..23b3ff94b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_87.png and b/TMessagesProj/src/main/assets/emoji/5_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_88.png b/TMessagesProj/src/main/assets/emoji/5_88.png index 8ab1770f5..6d9685eec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_88.png and b/TMessagesProj/src/main/assets/emoji/5_88.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_89.png b/TMessagesProj/src/main/assets/emoji/5_89.png index 830037340..e6272d9cf 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_89.png and b/TMessagesProj/src/main/assets/emoji/5_89.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_9.png b/TMessagesProj/src/main/assets/emoji/5_9.png index 1122ee855..f72ddd0b6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_9.png and b/TMessagesProj/src/main/assets/emoji/5_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_90.png b/TMessagesProj/src/main/assets/emoji/5_90.png index cc49fe2be..692eb9618 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_90.png and b/TMessagesProj/src/main/assets/emoji/5_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_91.png b/TMessagesProj/src/main/assets/emoji/5_91.png index 94d6030d6..ec2d15c40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_91.png and b/TMessagesProj/src/main/assets/emoji/5_91.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_92.png b/TMessagesProj/src/main/assets/emoji/5_92.png index 71df573ca..db4a77e67 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/5_92.png and b/TMessagesProj/src/main/assets/emoji/5_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_93.png b/TMessagesProj/src/main/assets/emoji/5_93.png index f94bf04c4..05a7d0c89 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_93.png and b/TMessagesProj/src/main/assets/emoji/5_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_94.png b/TMessagesProj/src/main/assets/emoji/5_94.png index 541c9e82a..a847ab798 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_94.png and b/TMessagesProj/src/main/assets/emoji/5_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_95.png b/TMessagesProj/src/main/assets/emoji/5_95.png index 174d7cdc4..a552ea43a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_95.png and b/TMessagesProj/src/main/assets/emoji/5_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_96.png b/TMessagesProj/src/main/assets/emoji/5_96.png index 1347aec8a..8cb07f4e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_96.png and b/TMessagesProj/src/main/assets/emoji/5_96.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_97.png b/TMessagesProj/src/main/assets/emoji/5_97.png index 7db025583..bb6177bfc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_97.png and b/TMessagesProj/src/main/assets/emoji/5_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_98.png b/TMessagesProj/src/main/assets/emoji/5_98.png index 029e0f9ca..71df573ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_98.png and b/TMessagesProj/src/main/assets/emoji/5_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/5_99.png b/TMessagesProj/src/main/assets/emoji/5_99.png index 8b9d1f229..dd3bbed7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/5_99.png and b/TMessagesProj/src/main/assets/emoji/5_99.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_0.png 
b/TMessagesProj/src/main/assets/emoji/6_0.png index ec96bb823..faf459c23 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_0.png and b/TMessagesProj/src/main/assets/emoji/6_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_10.png b/TMessagesProj/src/main/assets/emoji/6_10.png index 082e02413..cceba00bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_10.png and b/TMessagesProj/src/main/assets/emoji/6_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_100.png b/TMessagesProj/src/main/assets/emoji/6_100.png index ac2726547..d7cdf42d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_100.png and b/TMessagesProj/src/main/assets/emoji/6_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_101.png b/TMessagesProj/src/main/assets/emoji/6_101.png index 2dc346ff9..1715d1afe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_101.png and b/TMessagesProj/src/main/assets/emoji/6_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_103.png b/TMessagesProj/src/main/assets/emoji/6_103.png index 90f9dd563..39d274fa5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_103.png and b/TMessagesProj/src/main/assets/emoji/6_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_104.png b/TMessagesProj/src/main/assets/emoji/6_104.png index 68cb0907a..3b7ed8cbd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_104.png and b/TMessagesProj/src/main/assets/emoji/6_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_107.png b/TMessagesProj/src/main/assets/emoji/6_107.png index bc24494b8..bd3986c50 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_107.png and b/TMessagesProj/src/main/assets/emoji/6_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_109.png b/TMessagesProj/src/main/assets/emoji/6_109.png index fe54a480a..06f4d9fc3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_109.png and 
b/TMessagesProj/src/main/assets/emoji/6_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_11.png b/TMessagesProj/src/main/assets/emoji/6_11.png index db789b2d3..9db7abfdc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_11.png and b/TMessagesProj/src/main/assets/emoji/6_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_110.png b/TMessagesProj/src/main/assets/emoji/6_110.png index fddcf1f37..4303086ad 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_110.png and b/TMessagesProj/src/main/assets/emoji/6_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_111.png b/TMessagesProj/src/main/assets/emoji/6_111.png index 593ab7958..73974455c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_111.png and b/TMessagesProj/src/main/assets/emoji/6_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_112.png b/TMessagesProj/src/main/assets/emoji/6_112.png index cadd2a458..2e6ee48f7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_112.png and b/TMessagesProj/src/main/assets/emoji/6_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_113.png b/TMessagesProj/src/main/assets/emoji/6_113.png index 3f02e2ff3..f6e9a739d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_113.png and b/TMessagesProj/src/main/assets/emoji/6_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_114.png b/TMessagesProj/src/main/assets/emoji/6_114.png index 1713cd2b2..866d646d6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_114.png and b/TMessagesProj/src/main/assets/emoji/6_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_115.png b/TMessagesProj/src/main/assets/emoji/6_115.png index d79cc50ad..b74ce2d16 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_115.png and b/TMessagesProj/src/main/assets/emoji/6_115.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_116.png b/TMessagesProj/src/main/assets/emoji/6_116.png index 
ba6aa81a8..9cecc9af9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_116.png and b/TMessagesProj/src/main/assets/emoji/6_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_117.png b/TMessagesProj/src/main/assets/emoji/6_117.png index bb878beb6..51a96d405 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_117.png and b/TMessagesProj/src/main/assets/emoji/6_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_118.png b/TMessagesProj/src/main/assets/emoji/6_118.png index f9916ef65..b7c6f5690 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_118.png and b/TMessagesProj/src/main/assets/emoji/6_118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_119.png b/TMessagesProj/src/main/assets/emoji/6_119.png index eb9a12bc9..99ce239e6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_119.png and b/TMessagesProj/src/main/assets/emoji/6_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_12.png b/TMessagesProj/src/main/assets/emoji/6_12.png index 92f5caa14..2965fb53d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_12.png and b/TMessagesProj/src/main/assets/emoji/6_12.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_120.png b/TMessagesProj/src/main/assets/emoji/6_120.png index a16288d89..6447765ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_120.png and b/TMessagesProj/src/main/assets/emoji/6_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_121.png b/TMessagesProj/src/main/assets/emoji/6_121.png index c776cbacf..9148016dd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_121.png and b/TMessagesProj/src/main/assets/emoji/6_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_122.png b/TMessagesProj/src/main/assets/emoji/6_122.png index 5c29c687c..1e272ec2a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_122.png and b/TMessagesProj/src/main/assets/emoji/6_122.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/6_123.png b/TMessagesProj/src/main/assets/emoji/6_123.png index b4abb794d..769141567 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_123.png and b/TMessagesProj/src/main/assets/emoji/6_123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_124.png b/TMessagesProj/src/main/assets/emoji/6_124.png index 11e92de3e..1c4ab846c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_124.png and b/TMessagesProj/src/main/assets/emoji/6_124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_125.png b/TMessagesProj/src/main/assets/emoji/6_125.png index fdafae60d..cc08f32a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_125.png and b/TMessagesProj/src/main/assets/emoji/6_125.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_126.png b/TMessagesProj/src/main/assets/emoji/6_126.png index 408649135..fdafae60d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_126.png and b/TMessagesProj/src/main/assets/emoji/6_126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_127.png b/TMessagesProj/src/main/assets/emoji/6_127.png index e817bda56..ded5a25ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_127.png and b/TMessagesProj/src/main/assets/emoji/6_127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_128.png b/TMessagesProj/src/main/assets/emoji/6_128.png index 59375537e..0950b27d6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_128.png and b/TMessagesProj/src/main/assets/emoji/6_128.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_129.png b/TMessagesProj/src/main/assets/emoji/6_129.png index a5322db04..5b39c2001 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_129.png and b/TMessagesProj/src/main/assets/emoji/6_129.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_130.png b/TMessagesProj/src/main/assets/emoji/6_130.png index 47df52507..24e47e548 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/6_130.png and b/TMessagesProj/src/main/assets/emoji/6_130.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_131.png b/TMessagesProj/src/main/assets/emoji/6_131.png index 9a1089ec3..d241f8676 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_131.png and b/TMessagesProj/src/main/assets/emoji/6_131.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_132.png b/TMessagesProj/src/main/assets/emoji/6_132.png index 0a4fb3682..d3629af6e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_132.png and b/TMessagesProj/src/main/assets/emoji/6_132.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_133.png b/TMessagesProj/src/main/assets/emoji/6_133.png index 67b207dc4..08a4be51d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_133.png and b/TMessagesProj/src/main/assets/emoji/6_133.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_134.png b/TMessagesProj/src/main/assets/emoji/6_134.png index 4a383bace..0a4fb3682 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_134.png and b/TMessagesProj/src/main/assets/emoji/6_134.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_135.png b/TMessagesProj/src/main/assets/emoji/6_135.png index 5ecfcde3e..a2b7feb3c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_135.png and b/TMessagesProj/src/main/assets/emoji/6_135.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_136.png b/TMessagesProj/src/main/assets/emoji/6_136.png index aba47b1ba..ca28680a6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_136.png and b/TMessagesProj/src/main/assets/emoji/6_136.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_137.png b/TMessagesProj/src/main/assets/emoji/6_137.png index f1b8c5509..7e430ceee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_137.png and b/TMessagesProj/src/main/assets/emoji/6_137.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_138.png 
b/TMessagesProj/src/main/assets/emoji/6_138.png index 812c2cf9f..176ea27f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_138.png and b/TMessagesProj/src/main/assets/emoji/6_138.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_139.png b/TMessagesProj/src/main/assets/emoji/6_139.png index 6f8ea12df..57696db33 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_139.png and b/TMessagesProj/src/main/assets/emoji/6_139.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_140.png b/TMessagesProj/src/main/assets/emoji/6_140.png index f5768a3bd..77da3b5ef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_140.png and b/TMessagesProj/src/main/assets/emoji/6_140.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_141.png b/TMessagesProj/src/main/assets/emoji/6_141.png index d2a3571aa..120d12aeb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_141.png and b/TMessagesProj/src/main/assets/emoji/6_141.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_142.png b/TMessagesProj/src/main/assets/emoji/6_142.png index 31a0065d7..f5768a3bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_142.png and b/TMessagesProj/src/main/assets/emoji/6_142.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_143.png b/TMessagesProj/src/main/assets/emoji/6_143.png index 3ffb3ce65..da06ce6f9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_143.png and b/TMessagesProj/src/main/assets/emoji/6_143.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_144.png b/TMessagesProj/src/main/assets/emoji/6_144.png index fa30d5a44..31a0065d7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_144.png and b/TMessagesProj/src/main/assets/emoji/6_144.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_145.png b/TMessagesProj/src/main/assets/emoji/6_145.png index 53d87bf87..3ffb3ce65 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_145.png and 
b/TMessagesProj/src/main/assets/emoji/6_145.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_146.png b/TMessagesProj/src/main/assets/emoji/6_146.png index 7afee6683..e48a70805 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_146.png and b/TMessagesProj/src/main/assets/emoji/6_146.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_147.png b/TMessagesProj/src/main/assets/emoji/6_147.png index 944405560..53d87bf87 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_147.png and b/TMessagesProj/src/main/assets/emoji/6_147.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_148.png b/TMessagesProj/src/main/assets/emoji/6_148.png index 3929561ab..7afee6683 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_148.png and b/TMessagesProj/src/main/assets/emoji/6_148.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_149.png b/TMessagesProj/src/main/assets/emoji/6_149.png index 08df584df..944405560 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_149.png and b/TMessagesProj/src/main/assets/emoji/6_149.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_15.png b/TMessagesProj/src/main/assets/emoji/6_15.png index 9b80e32ff..775a08a4e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_15.png and b/TMessagesProj/src/main/assets/emoji/6_15.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_150.png b/TMessagesProj/src/main/assets/emoji/6_150.png index 0f51db297..3929561ab 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_150.png and b/TMessagesProj/src/main/assets/emoji/6_150.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_151.png b/TMessagesProj/src/main/assets/emoji/6_151.png index e8808125c..08df584df 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_151.png and b/TMessagesProj/src/main/assets/emoji/6_151.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_152.png b/TMessagesProj/src/main/assets/emoji/6_152.png index 
c2105111f..a7a40f602 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_152.png and b/TMessagesProj/src/main/assets/emoji/6_152.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_153.png b/TMessagesProj/src/main/assets/emoji/6_153.png index 3f59bbb29..e8808125c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_153.png and b/TMessagesProj/src/main/assets/emoji/6_153.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_154.png b/TMessagesProj/src/main/assets/emoji/6_154.png index d21ac35e7..e03cbca60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_154.png and b/TMessagesProj/src/main/assets/emoji/6_154.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_155.png b/TMessagesProj/src/main/assets/emoji/6_155.png index 745cef3a0..284f9e0c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_155.png and b/TMessagesProj/src/main/assets/emoji/6_155.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_156.png b/TMessagesProj/src/main/assets/emoji/6_156.png index c0141bbc2..d21ac35e7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_156.png and b/TMessagesProj/src/main/assets/emoji/6_156.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_157.png b/TMessagesProj/src/main/assets/emoji/6_157.png index 8a7f00d2d..344bc0546 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_157.png and b/TMessagesProj/src/main/assets/emoji/6_157.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_158.png b/TMessagesProj/src/main/assets/emoji/6_158.png index 6ad82a6f1..2a96f4350 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_158.png and b/TMessagesProj/src/main/assets/emoji/6_158.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_159.png b/TMessagesProj/src/main/assets/emoji/6_159.png index 4c5fca5b1..8a7f00d2d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_159.png and b/TMessagesProj/src/main/assets/emoji/6_159.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/6_16.png b/TMessagesProj/src/main/assets/emoji/6_16.png index 39e3970a1..a43b6cf75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_16.png and b/TMessagesProj/src/main/assets/emoji/6_16.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_160.png b/TMessagesProj/src/main/assets/emoji/6_160.png index d7cc889ad..5b5067b81 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_160.png and b/TMessagesProj/src/main/assets/emoji/6_160.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_161.png b/TMessagesProj/src/main/assets/emoji/6_161.png index ce726b86f..5c310106d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_161.png and b/TMessagesProj/src/main/assets/emoji/6_161.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_162.png b/TMessagesProj/src/main/assets/emoji/6_162.png index 5c671cbc6..59fba5067 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_162.png and b/TMessagesProj/src/main/assets/emoji/6_162.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_163.png b/TMessagesProj/src/main/assets/emoji/6_163.png index 1c194166d..ce726b86f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_163.png and b/TMessagesProj/src/main/assets/emoji/6_163.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_164.png b/TMessagesProj/src/main/assets/emoji/6_164.png index 6ac2fda1c..5c671cbc6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_164.png and b/TMessagesProj/src/main/assets/emoji/6_164.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_165.png b/TMessagesProj/src/main/assets/emoji/6_165.png index 041a5d81d..1c194166d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_165.png and b/TMessagesProj/src/main/assets/emoji/6_165.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_166.png b/TMessagesProj/src/main/assets/emoji/6_166.png index 89b73543d..6ac2fda1c 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/6_166.png and b/TMessagesProj/src/main/assets/emoji/6_166.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_167.png b/TMessagesProj/src/main/assets/emoji/6_167.png index 417cfde35..041a5d81d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_167.png and b/TMessagesProj/src/main/assets/emoji/6_167.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_168.png b/TMessagesProj/src/main/assets/emoji/6_168.png index ff7ba8ad2..e5956b412 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_168.png and b/TMessagesProj/src/main/assets/emoji/6_168.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_169.png b/TMessagesProj/src/main/assets/emoji/6_169.png index ea1705a2d..417cfde35 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_169.png and b/TMessagesProj/src/main/assets/emoji/6_169.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_17.png b/TMessagesProj/src/main/assets/emoji/6_17.png index 5ba4fa405..dfa56a2b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_17.png and b/TMessagesProj/src/main/assets/emoji/6_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_170.png b/TMessagesProj/src/main/assets/emoji/6_170.png index c611d1f25..ff7ba8ad2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_170.png and b/TMessagesProj/src/main/assets/emoji/6_170.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_171.png b/TMessagesProj/src/main/assets/emoji/6_171.png index 80aab6481..b32a41a89 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_171.png and b/TMessagesProj/src/main/assets/emoji/6_171.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_172.png b/TMessagesProj/src/main/assets/emoji/6_172.png index 7f33184d6..c611d1f25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_172.png and b/TMessagesProj/src/main/assets/emoji/6_172.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_173.png 
b/TMessagesProj/src/main/assets/emoji/6_173.png index 7459a363b..80aab6481 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_173.png and b/TMessagesProj/src/main/assets/emoji/6_173.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_174.png b/TMessagesProj/src/main/assets/emoji/6_174.png index dc4451f75..7f33184d6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_174.png and b/TMessagesProj/src/main/assets/emoji/6_174.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_175.png b/TMessagesProj/src/main/assets/emoji/6_175.png index 34edcf126..5e5b9dac4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_175.png and b/TMessagesProj/src/main/assets/emoji/6_175.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_176.png b/TMessagesProj/src/main/assets/emoji/6_176.png index 4c00fcbef..dc4451f75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_176.png and b/TMessagesProj/src/main/assets/emoji/6_176.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_177.png b/TMessagesProj/src/main/assets/emoji/6_177.png index 956849844..669d74677 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_177.png and b/TMessagesProj/src/main/assets/emoji/6_177.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_178.png b/TMessagesProj/src/main/assets/emoji/6_178.png index dac31ee06..4c00fcbef 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_178.png and b/TMessagesProj/src/main/assets/emoji/6_178.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_179.png b/TMessagesProj/src/main/assets/emoji/6_179.png index bd967ee50..956849844 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_179.png and b/TMessagesProj/src/main/assets/emoji/6_179.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_180.png b/TMessagesProj/src/main/assets/emoji/6_180.png index da557fb49..b000ce649 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_180.png and 
b/TMessagesProj/src/main/assets/emoji/6_180.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_181.png b/TMessagesProj/src/main/assets/emoji/6_181.png index dd6023d28..cc6608f7e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_181.png and b/TMessagesProj/src/main/assets/emoji/6_181.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_182.png b/TMessagesProj/src/main/assets/emoji/6_182.png index 3a6fd3623..da557fb49 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_182.png and b/TMessagesProj/src/main/assets/emoji/6_182.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_183.png b/TMessagesProj/src/main/assets/emoji/6_183.png index b2db59db8..0e4b31f49 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_183.png and b/TMessagesProj/src/main/assets/emoji/6_183.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_184.png b/TMessagesProj/src/main/assets/emoji/6_184.png index 4363cec1b..3a6fd3623 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_184.png and b/TMessagesProj/src/main/assets/emoji/6_184.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_185.png b/TMessagesProj/src/main/assets/emoji/6_185.png index 7ae9ef3ba..b2db59db8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_185.png and b/TMessagesProj/src/main/assets/emoji/6_185.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_186.png b/TMessagesProj/src/main/assets/emoji/6_186.png index 16a4f8d5e..4363cec1b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_186.png and b/TMessagesProj/src/main/assets/emoji/6_186.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_187.png b/TMessagesProj/src/main/assets/emoji/6_187.png index 1ba1631d3..69827bff7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_187.png and b/TMessagesProj/src/main/assets/emoji/6_187.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_188.png b/TMessagesProj/src/main/assets/emoji/6_188.png index 
cdd2b7b72..16a4f8d5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_188.png and b/TMessagesProj/src/main/assets/emoji/6_188.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_189.png b/TMessagesProj/src/main/assets/emoji/6_189.png index 75d5d2216..ef902c239 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_189.png and b/TMessagesProj/src/main/assets/emoji/6_189.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_19.png b/TMessagesProj/src/main/assets/emoji/6_19.png index 4e8655345..2bb55b271 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_19.png and b/TMessagesProj/src/main/assets/emoji/6_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_190.png b/TMessagesProj/src/main/assets/emoji/6_190.png index 35ad95b77..f37b421ba 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_190.png and b/TMessagesProj/src/main/assets/emoji/6_190.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_191.png b/TMessagesProj/src/main/assets/emoji/6_191.png index 31f47e95d..75d5d2216 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_191.png and b/TMessagesProj/src/main/assets/emoji/6_191.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_192.png b/TMessagesProj/src/main/assets/emoji/6_192.png index b71ed7c75..518cc5978 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_192.png and b/TMessagesProj/src/main/assets/emoji/6_192.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_193.png b/TMessagesProj/src/main/assets/emoji/6_193.png index 9ec58588e..d30d16968 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_193.png and b/TMessagesProj/src/main/assets/emoji/6_193.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_194.png b/TMessagesProj/src/main/assets/emoji/6_194.png index fb1aaebf5..b71ed7c75 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_194.png and b/TMessagesProj/src/main/assets/emoji/6_194.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/6_195.png b/TMessagesProj/src/main/assets/emoji/6_195.png index 97c3ff5c4..9ec58588e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_195.png and b/TMessagesProj/src/main/assets/emoji/6_195.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_196.png b/TMessagesProj/src/main/assets/emoji/6_196.png index 19b4c8071..fb1aaebf5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_196.png and b/TMessagesProj/src/main/assets/emoji/6_196.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_197.png b/TMessagesProj/src/main/assets/emoji/6_197.png index 6913c1f00..1d8ee3647 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_197.png and b/TMessagesProj/src/main/assets/emoji/6_197.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_198.png b/TMessagesProj/src/main/assets/emoji/6_198.png index 48ea251ce..27a612fd6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_198.png and b/TMessagesProj/src/main/assets/emoji/6_198.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_199.png b/TMessagesProj/src/main/assets/emoji/6_199.png index cee00e89c..2bd531489 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_199.png and b/TMessagesProj/src/main/assets/emoji/6_199.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_200.png b/TMessagesProj/src/main/assets/emoji/6_200.png index fcac58757..ec10141c0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_200.png and b/TMessagesProj/src/main/assets/emoji/6_200.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_201.png b/TMessagesProj/src/main/assets/emoji/6_201.png index 057a08ece..dc1bc1f02 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_201.png and b/TMessagesProj/src/main/assets/emoji/6_201.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_202.png b/TMessagesProj/src/main/assets/emoji/6_202.png index 0888d1c02..6fbbbfe2e 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/6_202.png and b/TMessagesProj/src/main/assets/emoji/6_202.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_203.png b/TMessagesProj/src/main/assets/emoji/6_203.png index f7b0307e9..03b3f980b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_203.png and b/TMessagesProj/src/main/assets/emoji/6_203.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_204.png b/TMessagesProj/src/main/assets/emoji/6_204.png index 1109fe3f0..caff35f2b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_204.png and b/TMessagesProj/src/main/assets/emoji/6_204.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_205.png b/TMessagesProj/src/main/assets/emoji/6_205.png index ae8106503..cd645d940 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_205.png and b/TMessagesProj/src/main/assets/emoji/6_205.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_206.png b/TMessagesProj/src/main/assets/emoji/6_206.png index 7d1830ac2..eec457023 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_206.png and b/TMessagesProj/src/main/assets/emoji/6_206.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_207.png b/TMessagesProj/src/main/assets/emoji/6_207.png index 311d9951b..91ca06388 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_207.png and b/TMessagesProj/src/main/assets/emoji/6_207.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_208.png b/TMessagesProj/src/main/assets/emoji/6_208.png index 91026b50c..9a28a6443 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_208.png and b/TMessagesProj/src/main/assets/emoji/6_208.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_209.png b/TMessagesProj/src/main/assets/emoji/6_209.png index 55b6494ab..d0d4aa5be 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_209.png and b/TMessagesProj/src/main/assets/emoji/6_209.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_21.png 
b/TMessagesProj/src/main/assets/emoji/6_21.png index b92de4949..c34187cf0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_21.png and b/TMessagesProj/src/main/assets/emoji/6_21.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_210.png b/TMessagesProj/src/main/assets/emoji/6_210.png index 2b70d723c..6750aeb23 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_210.png and b/TMessagesProj/src/main/assets/emoji/6_210.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_211.png b/TMessagesProj/src/main/assets/emoji/6_211.png index b79ca0acb..749a9d5b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_211.png and b/TMessagesProj/src/main/assets/emoji/6_211.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_212.png b/TMessagesProj/src/main/assets/emoji/6_212.png index 450e1d446..622efe14c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_212.png and b/TMessagesProj/src/main/assets/emoji/6_212.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_213.png b/TMessagesProj/src/main/assets/emoji/6_213.png index 70b79591c..0a4bb59a7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_213.png and b/TMessagesProj/src/main/assets/emoji/6_213.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_214.png b/TMessagesProj/src/main/assets/emoji/6_214.png index 388a8bc96..ac3bf17f6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_214.png and b/TMessagesProj/src/main/assets/emoji/6_214.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_215.png b/TMessagesProj/src/main/assets/emoji/6_215.png index 9d3f2c4b9..3fc89bc67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_215.png and b/TMessagesProj/src/main/assets/emoji/6_215.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_216.png b/TMessagesProj/src/main/assets/emoji/6_216.png index 22f8310cc..388a8bc96 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_216.png and 
b/TMessagesProj/src/main/assets/emoji/6_216.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_217.png b/TMessagesProj/src/main/assets/emoji/6_217.png index 0f432e98d..96f065f30 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_217.png and b/TMessagesProj/src/main/assets/emoji/6_217.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_218.png b/TMessagesProj/src/main/assets/emoji/6_218.png index 6ccf10ba3..22f8310cc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_218.png and b/TMessagesProj/src/main/assets/emoji/6_218.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_219.png b/TMessagesProj/src/main/assets/emoji/6_219.png index b9727cd27..a148aa7d6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_219.png and b/TMessagesProj/src/main/assets/emoji/6_219.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_220.png b/TMessagesProj/src/main/assets/emoji/6_220.png index 9741e93f1..6ccf10ba3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_220.png and b/TMessagesProj/src/main/assets/emoji/6_220.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_221.png b/TMessagesProj/src/main/assets/emoji/6_221.png index a113346d4..500c66092 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_221.png and b/TMessagesProj/src/main/assets/emoji/6_221.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_222.png b/TMessagesProj/src/main/assets/emoji/6_222.png index 75073c39d..9741e93f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_222.png and b/TMessagesProj/src/main/assets/emoji/6_222.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_223.png b/TMessagesProj/src/main/assets/emoji/6_223.png index d29824b3a..a113346d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_223.png and b/TMessagesProj/src/main/assets/emoji/6_223.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_224.png b/TMessagesProj/src/main/assets/emoji/6_224.png index 
34ba94c38..75073c39d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_224.png and b/TMessagesProj/src/main/assets/emoji/6_224.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_225.png b/TMessagesProj/src/main/assets/emoji/6_225.png index 024ff28b3..d29824b3a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_225.png and b/TMessagesProj/src/main/assets/emoji/6_225.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_226.png b/TMessagesProj/src/main/assets/emoji/6_226.png index 06c8cd2a1..792b05248 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_226.png and b/TMessagesProj/src/main/assets/emoji/6_226.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_227.png b/TMessagesProj/src/main/assets/emoji/6_227.png index 6eddf4a54..693bd546d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_227.png and b/TMessagesProj/src/main/assets/emoji/6_227.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_228.png b/TMessagesProj/src/main/assets/emoji/6_228.png index d2344bdf7..56a0de349 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_228.png and b/TMessagesProj/src/main/assets/emoji/6_228.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_229.png b/TMessagesProj/src/main/assets/emoji/6_229.png index 8d069a451..6eddf4a54 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_229.png and b/TMessagesProj/src/main/assets/emoji/6_229.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_23.png b/TMessagesProj/src/main/assets/emoji/6_23.png index 7f3803731..bc804d52d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_23.png and b/TMessagesProj/src/main/assets/emoji/6_23.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_230.png b/TMessagesProj/src/main/assets/emoji/6_230.png index d6a84c711..c3e3ddcc3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_230.png and b/TMessagesProj/src/main/assets/emoji/6_230.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/6_231.png b/TMessagesProj/src/main/assets/emoji/6_231.png index 201d664aa..8d069a451 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_231.png and b/TMessagesProj/src/main/assets/emoji/6_231.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_232.png b/TMessagesProj/src/main/assets/emoji/6_232.png index cad720e40..d6a84c711 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_232.png and b/TMessagesProj/src/main/assets/emoji/6_232.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_233.png b/TMessagesProj/src/main/assets/emoji/6_233.png index b40fa7f86..201d664aa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_233.png and b/TMessagesProj/src/main/assets/emoji/6_233.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_234.png b/TMessagesProj/src/main/assets/emoji/6_234.png index 527f17eb0..cad720e40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_234.png and b/TMessagesProj/src/main/assets/emoji/6_234.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_235.png b/TMessagesProj/src/main/assets/emoji/6_235.png index 0a8820f2d..b40fa7f86 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_235.png and b/TMessagesProj/src/main/assets/emoji/6_235.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_236.png b/TMessagesProj/src/main/assets/emoji/6_236.png index 96d4a0dce..527f17eb0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_236.png and b/TMessagesProj/src/main/assets/emoji/6_236.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_237.png b/TMessagesProj/src/main/assets/emoji/6_237.png index f836e792b..0a8820f2d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_237.png and b/TMessagesProj/src/main/assets/emoji/6_237.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_238.png b/TMessagesProj/src/main/assets/emoji/6_238.png index 1167b60b6..96d4a0dce 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/6_238.png and b/TMessagesProj/src/main/assets/emoji/6_238.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_239.png b/TMessagesProj/src/main/assets/emoji/6_239.png index f16afc07d..f836e792b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_239.png and b/TMessagesProj/src/main/assets/emoji/6_239.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_24.png b/TMessagesProj/src/main/assets/emoji/6_24.png index d173d9344..10a7fad6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_24.png and b/TMessagesProj/src/main/assets/emoji/6_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_240.png b/TMessagesProj/src/main/assets/emoji/6_240.png index 1e958109e..1167b60b6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_240.png and b/TMessagesProj/src/main/assets/emoji/6_240.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_241.png b/TMessagesProj/src/main/assets/emoji/6_241.png index 2c09c66b6..f16afc07d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_241.png and b/TMessagesProj/src/main/assets/emoji/6_241.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_242.png b/TMessagesProj/src/main/assets/emoji/6_242.png index bd4fcbb93..1e958109e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_242.png and b/TMessagesProj/src/main/assets/emoji/6_242.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_243.png b/TMessagesProj/src/main/assets/emoji/6_243.png index a234ad1c9..2c09c66b6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_243.png and b/TMessagesProj/src/main/assets/emoji/6_243.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_244.png b/TMessagesProj/src/main/assets/emoji/6_244.png index 9831c8334..bd4fcbb93 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_244.png and b/TMessagesProj/src/main/assets/emoji/6_244.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_245.png 
b/TMessagesProj/src/main/assets/emoji/6_245.png index d90334be1..a234ad1c9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_245.png and b/TMessagesProj/src/main/assets/emoji/6_245.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_246.png b/TMessagesProj/src/main/assets/emoji/6_246.png index bd32ed967..9831c8334 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_246.png and b/TMessagesProj/src/main/assets/emoji/6_246.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_247.png b/TMessagesProj/src/main/assets/emoji/6_247.png index f41dee7e4..d90334be1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_247.png and b/TMessagesProj/src/main/assets/emoji/6_247.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_248.png b/TMessagesProj/src/main/assets/emoji/6_248.png index dbd55974b..647085489 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_248.png and b/TMessagesProj/src/main/assets/emoji/6_248.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_249.png b/TMessagesProj/src/main/assets/emoji/6_249.png index 4b6a35b46..84b9d4963 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_249.png and b/TMessagesProj/src/main/assets/emoji/6_249.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_250.png b/TMessagesProj/src/main/assets/emoji/6_250.png index c70d1f3e1..1db510013 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_250.png and b/TMessagesProj/src/main/assets/emoji/6_250.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_251.png b/TMessagesProj/src/main/assets/emoji/6_251.png index 00d3fafd4..b85cd406a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_251.png and b/TMessagesProj/src/main/assets/emoji/6_251.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_252.png b/TMessagesProj/src/main/assets/emoji/6_252.png index 3fbc987c0..6d05c2e4c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_252.png and 
b/TMessagesProj/src/main/assets/emoji/6_252.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_253.png b/TMessagesProj/src/main/assets/emoji/6_253.png index 6099df6c7..92d1f2d08 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_253.png and b/TMessagesProj/src/main/assets/emoji/6_253.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_254.png b/TMessagesProj/src/main/assets/emoji/6_254.png index 8ac95ec7c..5185c273e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_254.png and b/TMessagesProj/src/main/assets/emoji/6_254.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_255.png b/TMessagesProj/src/main/assets/emoji/6_255.png index 7234b67be..e5c36d73b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_255.png and b/TMessagesProj/src/main/assets/emoji/6_255.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_256.png b/TMessagesProj/src/main/assets/emoji/6_256.png index c8a1698e4..7584d0697 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_256.png and b/TMessagesProj/src/main/assets/emoji/6_256.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_257.png b/TMessagesProj/src/main/assets/emoji/6_257.png index fa1d1c9df..161ae17fe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_257.png and b/TMessagesProj/src/main/assets/emoji/6_257.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_258.png b/TMessagesProj/src/main/assets/emoji/6_258.png index 849e0b6be..d30d0457b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_258.png and b/TMessagesProj/src/main/assets/emoji/6_258.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_259.png b/TMessagesProj/src/main/assets/emoji/6_259.png index acc510bb4..fa1d1c9df 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_259.png and b/TMessagesProj/src/main/assets/emoji/6_259.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_26.png b/TMessagesProj/src/main/assets/emoji/6_26.png index 
4e6e6a9cb..60bc2a78b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_26.png and b/TMessagesProj/src/main/assets/emoji/6_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_260.png b/TMessagesProj/src/main/assets/emoji/6_260.png index 7d2895823..849e0b6be 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_260.png and b/TMessagesProj/src/main/assets/emoji/6_260.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_261.png b/TMessagesProj/src/main/assets/emoji/6_261.png index 7e9765e5d..964392700 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_261.png and b/TMessagesProj/src/main/assets/emoji/6_261.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_262.png b/TMessagesProj/src/main/assets/emoji/6_262.png index b2abf3dd6..a2e3729bc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_262.png and b/TMessagesProj/src/main/assets/emoji/6_262.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_263.png b/TMessagesProj/src/main/assets/emoji/6_263.png index 70613e894..ef42f7ab0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_263.png and b/TMessagesProj/src/main/assets/emoji/6_263.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_264.png b/TMessagesProj/src/main/assets/emoji/6_264.png index 3aa88129e..f8256f8fa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_264.png and b/TMessagesProj/src/main/assets/emoji/6_264.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_265.png b/TMessagesProj/src/main/assets/emoji/6_265.png index 299fda828..8b1fa4ad1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_265.png and b/TMessagesProj/src/main/assets/emoji/6_265.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_266.png b/TMessagesProj/src/main/assets/emoji/6_266.png index e5c45dc8a..1d72d04b5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_266.png and b/TMessagesProj/src/main/assets/emoji/6_266.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/6_267.png b/TMessagesProj/src/main/assets/emoji/6_267.png index 76715a762..bc1313668 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_267.png and b/TMessagesProj/src/main/assets/emoji/6_267.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_268.png b/TMessagesProj/src/main/assets/emoji/6_268.png index 9ded3ff2c..3d58bf640 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_268.png and b/TMessagesProj/src/main/assets/emoji/6_268.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_269.png b/TMessagesProj/src/main/assets/emoji/6_269.png index eaeb461ea..58d91b1ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_269.png and b/TMessagesProj/src/main/assets/emoji/6_269.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_27.png b/TMessagesProj/src/main/assets/emoji/6_27.png index ff8a6f133..1a0d04570 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_27.png and b/TMessagesProj/src/main/assets/emoji/6_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_270.png b/TMessagesProj/src/main/assets/emoji/6_270.png index 3bc996a3c..548298581 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_270.png and b/TMessagesProj/src/main/assets/emoji/6_270.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_271.png b/TMessagesProj/src/main/assets/emoji/6_271.png index 47d9554a9..69770eee1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_271.png and b/TMessagesProj/src/main/assets/emoji/6_271.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_272.png b/TMessagesProj/src/main/assets/emoji/6_272.png index daea51b83..4b174e862 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_272.png and b/TMessagesProj/src/main/assets/emoji/6_272.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_273.png b/TMessagesProj/src/main/assets/emoji/6_273.png index 6e9b6e2ba..8c3a017c9 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/6_273.png and b/TMessagesProj/src/main/assets/emoji/6_273.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_274.png b/TMessagesProj/src/main/assets/emoji/6_274.png index b801602fe..6ecdfeade 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_274.png and b/TMessagesProj/src/main/assets/emoji/6_274.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_275.png b/TMessagesProj/src/main/assets/emoji/6_275.png index 6e05c7138..eeeb39a0a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_275.png and b/TMessagesProj/src/main/assets/emoji/6_275.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_276.png b/TMessagesProj/src/main/assets/emoji/6_276.png index 5d90a6acb..f4fc3d0ea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_276.png and b/TMessagesProj/src/main/assets/emoji/6_276.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_277.png b/TMessagesProj/src/main/assets/emoji/6_277.png index 770be80aa..afe727966 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_277.png and b/TMessagesProj/src/main/assets/emoji/6_277.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_278.png b/TMessagesProj/src/main/assets/emoji/6_278.png index a53d22697..b48a57597 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_278.png and b/TMessagesProj/src/main/assets/emoji/6_278.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_279.png b/TMessagesProj/src/main/assets/emoji/6_279.png index a1f6dbe52..88ab5b344 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_279.png and b/TMessagesProj/src/main/assets/emoji/6_279.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_280.png b/TMessagesProj/src/main/assets/emoji/6_280.png index 21bcb5668..bbc6eb6b8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_280.png and b/TMessagesProj/src/main/assets/emoji/6_280.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_281.png 
b/TMessagesProj/src/main/assets/emoji/6_281.png index 8d3aa9bbe..c90ac9b25 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_281.png and b/TMessagesProj/src/main/assets/emoji/6_281.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_282.png b/TMessagesProj/src/main/assets/emoji/6_282.png index a2f9e2760..17f9cb152 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_282.png and b/TMessagesProj/src/main/assets/emoji/6_282.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_283.png b/TMessagesProj/src/main/assets/emoji/6_283.png index ab4278808..2e015c68b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_283.png and b/TMessagesProj/src/main/assets/emoji/6_283.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_284.png b/TMessagesProj/src/main/assets/emoji/6_284.png index e2a5cbc11..9726620cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_284.png and b/TMessagesProj/src/main/assets/emoji/6_284.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_285.png b/TMessagesProj/src/main/assets/emoji/6_285.png index acb93bd3e..233f562ac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_285.png and b/TMessagesProj/src/main/assets/emoji/6_285.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_286.png b/TMessagesProj/src/main/assets/emoji/6_286.png index f54f1dbed..35e79f7b7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_286.png and b/TMessagesProj/src/main/assets/emoji/6_286.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_287.png b/TMessagesProj/src/main/assets/emoji/6_287.png index 165b1a04f..ce9a35e1a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_287.png and b/TMessagesProj/src/main/assets/emoji/6_287.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_288.png b/TMessagesProj/src/main/assets/emoji/6_288.png new file mode 100644 index 000000000..d932ab7a9 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/6_288.png 
differ diff --git a/TMessagesProj/src/main/assets/emoji/6_289.png b/TMessagesProj/src/main/assets/emoji/6_289.png new file mode 100644 index 000000000..e3d4b88de Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/6_289.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_3.png b/TMessagesProj/src/main/assets/emoji/6_3.png index 8843597c0..45224d7d7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_3.png and b/TMessagesProj/src/main/assets/emoji/6_3.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_30.png b/TMessagesProj/src/main/assets/emoji/6_30.png index 804ffcd41..0a24ec1f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_30.png and b/TMessagesProj/src/main/assets/emoji/6_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_31.png b/TMessagesProj/src/main/assets/emoji/6_31.png index b41a80cb0..3d914c27f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_31.png and b/TMessagesProj/src/main/assets/emoji/6_31.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_32.png b/TMessagesProj/src/main/assets/emoji/6_32.png index abd0ce184..1d42f907f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_32.png and b/TMessagesProj/src/main/assets/emoji/6_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_33.png b/TMessagesProj/src/main/assets/emoji/6_33.png index 874f97755..1caf02e1a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_33.png and b/TMessagesProj/src/main/assets/emoji/6_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_35.png b/TMessagesProj/src/main/assets/emoji/6_35.png index ac0163f8a..1ce68f9a9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_35.png and b/TMessagesProj/src/main/assets/emoji/6_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_36.png b/TMessagesProj/src/main/assets/emoji/6_36.png index e9f2ef38d..79db1c2e0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_36.png and 
b/TMessagesProj/src/main/assets/emoji/6_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_37.png b/TMessagesProj/src/main/assets/emoji/6_37.png index 7b84d1b3e..d902080d3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_37.png and b/TMessagesProj/src/main/assets/emoji/6_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_38.png b/TMessagesProj/src/main/assets/emoji/6_38.png index 595ba7f92..6c03ed22b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_38.png and b/TMessagesProj/src/main/assets/emoji/6_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_39.png b/TMessagesProj/src/main/assets/emoji/6_39.png index cc86d862c..e436d030e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_39.png and b/TMessagesProj/src/main/assets/emoji/6_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_4.png b/TMessagesProj/src/main/assets/emoji/6_4.png index e470d56a9..0d2b19551 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_4.png and b/TMessagesProj/src/main/assets/emoji/6_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_40.png b/TMessagesProj/src/main/assets/emoji/6_40.png index ba5feb4e8..dfd6839d2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_40.png and b/TMessagesProj/src/main/assets/emoji/6_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_42.png b/TMessagesProj/src/main/assets/emoji/6_42.png index f32ff1984..6004b72cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_42.png and b/TMessagesProj/src/main/assets/emoji/6_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_43.png b/TMessagesProj/src/main/assets/emoji/6_43.png index 5deddccde..7dac812cb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_43.png and b/TMessagesProj/src/main/assets/emoji/6_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_44.png b/TMessagesProj/src/main/assets/emoji/6_44.png index 5a36e0b35..0bcead462 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/6_44.png and b/TMessagesProj/src/main/assets/emoji/6_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_45.png b/TMessagesProj/src/main/assets/emoji/6_45.png index 260b8a37a..7a691f53a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_45.png and b/TMessagesProj/src/main/assets/emoji/6_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_46.png b/TMessagesProj/src/main/assets/emoji/6_46.png index 8b8d77377..2f6a4fc15 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_46.png and b/TMessagesProj/src/main/assets/emoji/6_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_47.png b/TMessagesProj/src/main/assets/emoji/6_47.png index 6c0dfa144..4c9098d19 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_47.png and b/TMessagesProj/src/main/assets/emoji/6_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_48.png b/TMessagesProj/src/main/assets/emoji/6_48.png index 116c91a80..6304c8395 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_48.png and b/TMessagesProj/src/main/assets/emoji/6_48.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_49.png b/TMessagesProj/src/main/assets/emoji/6_49.png index 9af6888b9..d3f5bba9a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_49.png and b/TMessagesProj/src/main/assets/emoji/6_49.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_51.png b/TMessagesProj/src/main/assets/emoji/6_51.png index 29f97c6ba..0447c37b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_51.png and b/TMessagesProj/src/main/assets/emoji/6_51.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_52.png b/TMessagesProj/src/main/assets/emoji/6_52.png index d41053abf..8660f6443 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_52.png and b/TMessagesProj/src/main/assets/emoji/6_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_53.png 
b/TMessagesProj/src/main/assets/emoji/6_53.png index 1f5c2ecc4..d4c3b7b6e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_53.png and b/TMessagesProj/src/main/assets/emoji/6_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_54.png b/TMessagesProj/src/main/assets/emoji/6_54.png index 92694405b..e77a3771f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_54.png and b/TMessagesProj/src/main/assets/emoji/6_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_56.png b/TMessagesProj/src/main/assets/emoji/6_56.png index 12f1e36d9..f9fa00c9d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_56.png and b/TMessagesProj/src/main/assets/emoji/6_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_57.png b/TMessagesProj/src/main/assets/emoji/6_57.png index 546e3d0d0..17320cc21 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_57.png and b/TMessagesProj/src/main/assets/emoji/6_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_58.png b/TMessagesProj/src/main/assets/emoji/6_58.png index b63d2b3b1..f6d64ef5d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_58.png and b/TMessagesProj/src/main/assets/emoji/6_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_59.png b/TMessagesProj/src/main/assets/emoji/6_59.png index b0ff21d05..0c7a63a0b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_59.png and b/TMessagesProj/src/main/assets/emoji/6_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_60.png b/TMessagesProj/src/main/assets/emoji/6_60.png index 8e5a3da2a..b79cc78a6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_60.png and b/TMessagesProj/src/main/assets/emoji/6_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_62.png b/TMessagesProj/src/main/assets/emoji/6_62.png index 5c7670a4b..7acf7c8f8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_62.png and b/TMessagesProj/src/main/assets/emoji/6_62.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/6_63.png b/TMessagesProj/src/main/assets/emoji/6_63.png index 248a278a3..ebb42947b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_63.png and b/TMessagesProj/src/main/assets/emoji/6_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_64.png b/TMessagesProj/src/main/assets/emoji/6_64.png index 55093bcdc..6a37e2a8d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_64.png and b/TMessagesProj/src/main/assets/emoji/6_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_68.png b/TMessagesProj/src/main/assets/emoji/6_68.png index 05f819d03..c57c92f92 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_68.png and b/TMessagesProj/src/main/assets/emoji/6_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_69.png b/TMessagesProj/src/main/assets/emoji/6_69.png index eda281138..07cedda4d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_69.png and b/TMessagesProj/src/main/assets/emoji/6_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_71.png b/TMessagesProj/src/main/assets/emoji/6_71.png index 428ac1fdf..3c0fc308d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_71.png and b/TMessagesProj/src/main/assets/emoji/6_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_72.png b/TMessagesProj/src/main/assets/emoji/6_72.png index e1e181df1..f65501a09 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_72.png and b/TMessagesProj/src/main/assets/emoji/6_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_73.png b/TMessagesProj/src/main/assets/emoji/6_73.png index 91746f28c..75064c6a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_73.png and b/TMessagesProj/src/main/assets/emoji/6_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_74.png b/TMessagesProj/src/main/assets/emoji/6_74.png index 3e28abc11..8507e9cf1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_74.png 
and b/TMessagesProj/src/main/assets/emoji/6_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_75.png b/TMessagesProj/src/main/assets/emoji/6_75.png index 5de77aeb0..89ca650b4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_75.png and b/TMessagesProj/src/main/assets/emoji/6_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_76.png b/TMessagesProj/src/main/assets/emoji/6_76.png index f4944fbd2..00c8c40a4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_76.png and b/TMessagesProj/src/main/assets/emoji/6_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_77.png b/TMessagesProj/src/main/assets/emoji/6_77.png index d69ec022d..4194b93a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_77.png and b/TMessagesProj/src/main/assets/emoji/6_77.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_78.png b/TMessagesProj/src/main/assets/emoji/6_78.png index e0f8e0e08..28f452576 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_78.png and b/TMessagesProj/src/main/assets/emoji/6_78.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_79.png b/TMessagesProj/src/main/assets/emoji/6_79.png index 62132c923..f22512be4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_79.png and b/TMessagesProj/src/main/assets/emoji/6_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_8.png b/TMessagesProj/src/main/assets/emoji/6_8.png index e03c5c2da..6fc2f2f5d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_8.png and b/TMessagesProj/src/main/assets/emoji/6_8.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_80.png b/TMessagesProj/src/main/assets/emoji/6_80.png index 3dc522e33..d2a1c87f5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_80.png and b/TMessagesProj/src/main/assets/emoji/6_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_81.png b/TMessagesProj/src/main/assets/emoji/6_81.png index 5867e5951..5fa94c607 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/6_81.png and b/TMessagesProj/src/main/assets/emoji/6_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_82.png b/TMessagesProj/src/main/assets/emoji/6_82.png index fd7664be2..1dc18d3a0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_82.png and b/TMessagesProj/src/main/assets/emoji/6_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_83.png b/TMessagesProj/src/main/assets/emoji/6_83.png index c28d0a538..5471e9aa2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_83.png and b/TMessagesProj/src/main/assets/emoji/6_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_84.png b/TMessagesProj/src/main/assets/emoji/6_84.png index abbce769a..9a013cb74 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_84.png and b/TMessagesProj/src/main/assets/emoji/6_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_85.png b/TMessagesProj/src/main/assets/emoji/6_85.png index ade04de60..10e47f25e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_85.png and b/TMessagesProj/src/main/assets/emoji/6_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_86.png b/TMessagesProj/src/main/assets/emoji/6_86.png index db765fb36..4ed174bfc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_86.png and b/TMessagesProj/src/main/assets/emoji/6_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_87.png b/TMessagesProj/src/main/assets/emoji/6_87.png index 4ce5cd0d8..a3afb1cf6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_87.png and b/TMessagesProj/src/main/assets/emoji/6_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_88.png b/TMessagesProj/src/main/assets/emoji/6_88.png index 145d842ba..972ce8fb0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_88.png and b/TMessagesProj/src/main/assets/emoji/6_88.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_89.png 
b/TMessagesProj/src/main/assets/emoji/6_89.png index 2af0f6f90..da8d80e1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_89.png and b/TMessagesProj/src/main/assets/emoji/6_89.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_9.png b/TMessagesProj/src/main/assets/emoji/6_9.png index 41cdebae0..fc54f2d69 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_9.png and b/TMessagesProj/src/main/assets/emoji/6_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_90.png b/TMessagesProj/src/main/assets/emoji/6_90.png index 370e54b8e..46152c91c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_90.png and b/TMessagesProj/src/main/assets/emoji/6_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_92.png b/TMessagesProj/src/main/assets/emoji/6_92.png index f0ee85537..2c10bcad0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_92.png and b/TMessagesProj/src/main/assets/emoji/6_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_93.png b/TMessagesProj/src/main/assets/emoji/6_93.png index 680c39d06..ff9d23331 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_93.png and b/TMessagesProj/src/main/assets/emoji/6_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_94.png b/TMessagesProj/src/main/assets/emoji/6_94.png index d9f34c4eb..01b1ca9ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_94.png and b/TMessagesProj/src/main/assets/emoji/6_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_95.png b/TMessagesProj/src/main/assets/emoji/6_95.png index 019b7bb27..375b72bc8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_95.png and b/TMessagesProj/src/main/assets/emoji/6_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_96.png b/TMessagesProj/src/main/assets/emoji/6_96.png index f3ffedb15..840cab0db 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_96.png and b/TMessagesProj/src/main/assets/emoji/6_96.png differ diff 
--git a/TMessagesProj/src/main/assets/emoji/6_97.png b/TMessagesProj/src/main/assets/emoji/6_97.png index da8307c70..2e97467c0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_97.png and b/TMessagesProj/src/main/assets/emoji/6_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_98.png b/TMessagesProj/src/main/assets/emoji/6_98.png index 336849796..1a6f63454 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_98.png and b/TMessagesProj/src/main/assets/emoji/6_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/6_99.png b/TMessagesProj/src/main/assets/emoji/6_99.png index 84395b412..371f418e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/6_99.png and b/TMessagesProj/src/main/assets/emoji/6_99.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_0.png b/TMessagesProj/src/main/assets/emoji/7_0.png index ccafb7466..b84425855 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_0.png and b/TMessagesProj/src/main/assets/emoji/7_0.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_1.png b/TMessagesProj/src/main/assets/emoji/7_1.png index fba3a86ca..1327aaa9c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_1.png and b/TMessagesProj/src/main/assets/emoji/7_1.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_10.png b/TMessagesProj/src/main/assets/emoji/7_10.png index d621b5445..8819b524f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_10.png and b/TMessagesProj/src/main/assets/emoji/7_10.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_100.png b/TMessagesProj/src/main/assets/emoji/7_100.png index c8569ac20..31877dc5e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_100.png and b/TMessagesProj/src/main/assets/emoji/7_100.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_101.png b/TMessagesProj/src/main/assets/emoji/7_101.png index 9ffeaac5c..6a430023f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_101.png and 
b/TMessagesProj/src/main/assets/emoji/7_101.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_102.png b/TMessagesProj/src/main/assets/emoji/7_102.png index 5e5d54596..e368c653b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_102.png and b/TMessagesProj/src/main/assets/emoji/7_102.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_103.png b/TMessagesProj/src/main/assets/emoji/7_103.png index a51941a4e..f79b31081 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_103.png and b/TMessagesProj/src/main/assets/emoji/7_103.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_104.png b/TMessagesProj/src/main/assets/emoji/7_104.png index d9c62a410..8cf20e73e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_104.png and b/TMessagesProj/src/main/assets/emoji/7_104.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_105.png b/TMessagesProj/src/main/assets/emoji/7_105.png index c4d2c5cd2..db677ffb6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_105.png and b/TMessagesProj/src/main/assets/emoji/7_105.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_106.png b/TMessagesProj/src/main/assets/emoji/7_106.png index 669b7cccc..4c484c289 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_106.png and b/TMessagesProj/src/main/assets/emoji/7_106.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_107.png b/TMessagesProj/src/main/assets/emoji/7_107.png index 64fe956e3..8b66dee8c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_107.png and b/TMessagesProj/src/main/assets/emoji/7_107.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_108.png b/TMessagesProj/src/main/assets/emoji/7_108.png index c5826aae7..0a9b109b7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_108.png and b/TMessagesProj/src/main/assets/emoji/7_108.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_109.png b/TMessagesProj/src/main/assets/emoji/7_109.png index 
ea47f26fe..004de902a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_109.png and b/TMessagesProj/src/main/assets/emoji/7_109.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_11.png b/TMessagesProj/src/main/assets/emoji/7_11.png index d022c5cda..d54ff5db8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_11.png and b/TMessagesProj/src/main/assets/emoji/7_11.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_110.png b/TMessagesProj/src/main/assets/emoji/7_110.png index ff110fe8a..01c1e9df9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_110.png and b/TMessagesProj/src/main/assets/emoji/7_110.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_111.png b/TMessagesProj/src/main/assets/emoji/7_111.png index dd4180a25..a96e418cb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_111.png and b/TMessagesProj/src/main/assets/emoji/7_111.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_112.png b/TMessagesProj/src/main/assets/emoji/7_112.png index 837e57ac8..4830da952 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_112.png and b/TMessagesProj/src/main/assets/emoji/7_112.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_113.png b/TMessagesProj/src/main/assets/emoji/7_113.png index 2d852724d..837e57ac8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_113.png and b/TMessagesProj/src/main/assets/emoji/7_113.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_114.png b/TMessagesProj/src/main/assets/emoji/7_114.png index 5b45958d3..efc503847 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_114.png and b/TMessagesProj/src/main/assets/emoji/7_114.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_115.png b/TMessagesProj/src/main/assets/emoji/7_115.png index 30afcae16..aa54a1402 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_115.png and b/TMessagesProj/src/main/assets/emoji/7_115.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/7_116.png b/TMessagesProj/src/main/assets/emoji/7_116.png index fd942d0f2..cdc4e6731 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_116.png and b/TMessagesProj/src/main/assets/emoji/7_116.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_117.png b/TMessagesProj/src/main/assets/emoji/7_117.png index 6e73aadd4..53836eeb6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_117.png and b/TMessagesProj/src/main/assets/emoji/7_117.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_118.png b/TMessagesProj/src/main/assets/emoji/7_118.png index 8bf2e4b17..38b051c12 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_118.png and b/TMessagesProj/src/main/assets/emoji/7_118.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_119.png b/TMessagesProj/src/main/assets/emoji/7_119.png index 0e37393db..37e446b6b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_119.png and b/TMessagesProj/src/main/assets/emoji/7_119.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_12.png b/TMessagesProj/src/main/assets/emoji/7_12.png index b6e1bc73a..707893e03 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_12.png and b/TMessagesProj/src/main/assets/emoji/7_12.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_120.png b/TMessagesProj/src/main/assets/emoji/7_120.png index a40c0988c..13d15b178 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_120.png and b/TMessagesProj/src/main/assets/emoji/7_120.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_121.png b/TMessagesProj/src/main/assets/emoji/7_121.png index e2ca5f19e..246796177 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_121.png and b/TMessagesProj/src/main/assets/emoji/7_121.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_122.png b/TMessagesProj/src/main/assets/emoji/7_122.png index 41525be63..f4c71f602 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/7_122.png and b/TMessagesProj/src/main/assets/emoji/7_122.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_123.png b/TMessagesProj/src/main/assets/emoji/7_123.png index 1ef877d76..1f6bc92dc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_123.png and b/TMessagesProj/src/main/assets/emoji/7_123.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_124.png b/TMessagesProj/src/main/assets/emoji/7_124.png index 3b51aa86e..83c5dfb84 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_124.png and b/TMessagesProj/src/main/assets/emoji/7_124.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_125.png b/TMessagesProj/src/main/assets/emoji/7_125.png index 34276216d..2d106999b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_125.png and b/TMessagesProj/src/main/assets/emoji/7_125.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_126.png b/TMessagesProj/src/main/assets/emoji/7_126.png index 1619d2ef2..4d5e1bc48 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_126.png and b/TMessagesProj/src/main/assets/emoji/7_126.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_127.png b/TMessagesProj/src/main/assets/emoji/7_127.png index 3d7379819..25496c4f0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_127.png and b/TMessagesProj/src/main/assets/emoji/7_127.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_128.png b/TMessagesProj/src/main/assets/emoji/7_128.png index cc965f290..1787f69f3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_128.png and b/TMessagesProj/src/main/assets/emoji/7_128.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_129.png b/TMessagesProj/src/main/assets/emoji/7_129.png index 0c5607c6c..84fbc8c80 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_129.png and b/TMessagesProj/src/main/assets/emoji/7_129.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_13.png 
b/TMessagesProj/src/main/assets/emoji/7_13.png index 4327baa17..8df59f6e1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_13.png and b/TMessagesProj/src/main/assets/emoji/7_13.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_130.png b/TMessagesProj/src/main/assets/emoji/7_130.png index d98569ead..5ba44a295 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_130.png and b/TMessagesProj/src/main/assets/emoji/7_130.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_131.png b/TMessagesProj/src/main/assets/emoji/7_131.png index 0a411428f..901496552 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_131.png and b/TMessagesProj/src/main/assets/emoji/7_131.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_132.png b/TMessagesProj/src/main/assets/emoji/7_132.png index db8162e98..8578ca828 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_132.png and b/TMessagesProj/src/main/assets/emoji/7_132.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_133.png b/TMessagesProj/src/main/assets/emoji/7_133.png index 5ce9a4c4f..371297f20 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_133.png and b/TMessagesProj/src/main/assets/emoji/7_133.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_134.png b/TMessagesProj/src/main/assets/emoji/7_134.png index 1af9bfc41..7e0eb900f 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_134.png and b/TMessagesProj/src/main/assets/emoji/7_134.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_135.png b/TMessagesProj/src/main/assets/emoji/7_135.png index 4171e6b65..b4c621abe 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_135.png and b/TMessagesProj/src/main/assets/emoji/7_135.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_136.png b/TMessagesProj/src/main/assets/emoji/7_136.png index 0797b4aea..ce2ee9a36 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_136.png and 
b/TMessagesProj/src/main/assets/emoji/7_136.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_137.png b/TMessagesProj/src/main/assets/emoji/7_137.png index e57ab1cce..5ef238b5d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_137.png and b/TMessagesProj/src/main/assets/emoji/7_137.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_138.png b/TMessagesProj/src/main/assets/emoji/7_138.png index ac6b7d64f..05f5e9702 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_138.png and b/TMessagesProj/src/main/assets/emoji/7_138.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_139.png b/TMessagesProj/src/main/assets/emoji/7_139.png index 8a03044de..000c13c1c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_139.png and b/TMessagesProj/src/main/assets/emoji/7_139.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_14.png b/TMessagesProj/src/main/assets/emoji/7_14.png index e490fbca8..cc512ee60 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_14.png and b/TMessagesProj/src/main/assets/emoji/7_14.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_140.png b/TMessagesProj/src/main/assets/emoji/7_140.png index 01153f1a5..c26155d07 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_140.png and b/TMessagesProj/src/main/assets/emoji/7_140.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_141.png b/TMessagesProj/src/main/assets/emoji/7_141.png index 229d1322f..ff750ce6d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_141.png and b/TMessagesProj/src/main/assets/emoji/7_141.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_142.png b/TMessagesProj/src/main/assets/emoji/7_142.png index 12aac8b62..da9f6df33 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_142.png and b/TMessagesProj/src/main/assets/emoji/7_142.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_143.png b/TMessagesProj/src/main/assets/emoji/7_143.png index 
03209b9d4..c18bab9fc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_143.png and b/TMessagesProj/src/main/assets/emoji/7_143.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_144.png b/TMessagesProj/src/main/assets/emoji/7_144.png index dfb482a08..cc2d230d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_144.png and b/TMessagesProj/src/main/assets/emoji/7_144.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_145.png b/TMessagesProj/src/main/assets/emoji/7_145.png index 82247b818..1b12c14b4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_145.png and b/TMessagesProj/src/main/assets/emoji/7_145.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_146.png b/TMessagesProj/src/main/assets/emoji/7_146.png index 794a99d8c..bf8ac28de 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_146.png and b/TMessagesProj/src/main/assets/emoji/7_146.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_147.png b/TMessagesProj/src/main/assets/emoji/7_147.png index 469e93c5a..7ac401d07 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_147.png and b/TMessagesProj/src/main/assets/emoji/7_147.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_148.png b/TMessagesProj/src/main/assets/emoji/7_148.png index 04b5f68ae..af5d1f90b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_148.png and b/TMessagesProj/src/main/assets/emoji/7_148.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_149.png b/TMessagesProj/src/main/assets/emoji/7_149.png index 2f9239842..3317fcdee 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_149.png and b/TMessagesProj/src/main/assets/emoji/7_149.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_15.png b/TMessagesProj/src/main/assets/emoji/7_15.png index 5137f7270..8c3038a1b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_15.png and b/TMessagesProj/src/main/assets/emoji/7_15.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/7_150.png b/TMessagesProj/src/main/assets/emoji/7_150.png index b73838ff3..562e68098 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_150.png and b/TMessagesProj/src/main/assets/emoji/7_150.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_151.png b/TMessagesProj/src/main/assets/emoji/7_151.png index 3100bd1c2..8e17916b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_151.png and b/TMessagesProj/src/main/assets/emoji/7_151.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_152.png b/TMessagesProj/src/main/assets/emoji/7_152.png index 5886c77e9..ba1784e16 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_152.png and b/TMessagesProj/src/main/assets/emoji/7_152.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_153.png b/TMessagesProj/src/main/assets/emoji/7_153.png index 7f89c3adc..6f8c4e4e4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_153.png and b/TMessagesProj/src/main/assets/emoji/7_153.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_154.png b/TMessagesProj/src/main/assets/emoji/7_154.png index ab7a4a59b..3962a8169 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_154.png and b/TMessagesProj/src/main/assets/emoji/7_154.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_155.png b/TMessagesProj/src/main/assets/emoji/7_155.png index 9eeb8f53d..022de9d98 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_155.png and b/TMessagesProj/src/main/assets/emoji/7_155.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_156.png b/TMessagesProj/src/main/assets/emoji/7_156.png index 24c634db6..f6ba8a4a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_156.png and b/TMessagesProj/src/main/assets/emoji/7_156.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_157.png b/TMessagesProj/src/main/assets/emoji/7_157.png index 3f262e4fa..8060504ac 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/7_157.png and b/TMessagesProj/src/main/assets/emoji/7_157.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_158.png b/TMessagesProj/src/main/assets/emoji/7_158.png index 525bcd7c2..750ef92fd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_158.png and b/TMessagesProj/src/main/assets/emoji/7_158.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_159.png b/TMessagesProj/src/main/assets/emoji/7_159.png index bcc7f8d52..6452c6add 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_159.png and b/TMessagesProj/src/main/assets/emoji/7_159.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_16.png b/TMessagesProj/src/main/assets/emoji/7_16.png index fbae6e980..3560792a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_16.png and b/TMessagesProj/src/main/assets/emoji/7_16.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_160.png b/TMessagesProj/src/main/assets/emoji/7_160.png index 113ef1640..7579d208a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_160.png and b/TMessagesProj/src/main/assets/emoji/7_160.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_161.png b/TMessagesProj/src/main/assets/emoji/7_161.png index 1decb2d89..161f2a3ce 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_161.png and b/TMessagesProj/src/main/assets/emoji/7_161.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_162.png b/TMessagesProj/src/main/assets/emoji/7_162.png index 5b923734f..c2222808a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_162.png and b/TMessagesProj/src/main/assets/emoji/7_162.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_163.png b/TMessagesProj/src/main/assets/emoji/7_163.png index 6ea162b90..f78f09bdd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_163.png and b/TMessagesProj/src/main/assets/emoji/7_163.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_164.png 
b/TMessagesProj/src/main/assets/emoji/7_164.png index b51496ccc..ffca48a94 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_164.png and b/TMessagesProj/src/main/assets/emoji/7_164.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_165.png b/TMessagesProj/src/main/assets/emoji/7_165.png index 2514173a8..debdf7feb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_165.png and b/TMessagesProj/src/main/assets/emoji/7_165.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_166.png b/TMessagesProj/src/main/assets/emoji/7_166.png index 5ef732270..6d4734d19 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_166.png and b/TMessagesProj/src/main/assets/emoji/7_166.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_167.png b/TMessagesProj/src/main/assets/emoji/7_167.png index a1f9cbbc7..0ade156cc 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_167.png and b/TMessagesProj/src/main/assets/emoji/7_167.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_168.png b/TMessagesProj/src/main/assets/emoji/7_168.png index 50e897914..6a2cde860 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_168.png and b/TMessagesProj/src/main/assets/emoji/7_168.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_169.png b/TMessagesProj/src/main/assets/emoji/7_169.png index c17867ff1..ff635e7d0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_169.png and b/TMessagesProj/src/main/assets/emoji/7_169.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_17.png b/TMessagesProj/src/main/assets/emoji/7_17.png index 6c5a8ab37..a1bf55b01 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_17.png and b/TMessagesProj/src/main/assets/emoji/7_17.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_170.png b/TMessagesProj/src/main/assets/emoji/7_170.png index 7173ffa92..7d8ad4e38 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_170.png and 
b/TMessagesProj/src/main/assets/emoji/7_170.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_171.png b/TMessagesProj/src/main/assets/emoji/7_171.png index 61352460f..b32e0f18c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_171.png and b/TMessagesProj/src/main/assets/emoji/7_171.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_172.png b/TMessagesProj/src/main/assets/emoji/7_172.png index 143781ec3..35439fb19 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_172.png and b/TMessagesProj/src/main/assets/emoji/7_172.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_173.png b/TMessagesProj/src/main/assets/emoji/7_173.png index 4088c92d3..4f693f027 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_173.png and b/TMessagesProj/src/main/assets/emoji/7_173.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_174.png b/TMessagesProj/src/main/assets/emoji/7_174.png index f57582583..9e10702bd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_174.png and b/TMessagesProj/src/main/assets/emoji/7_174.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_175.png b/TMessagesProj/src/main/assets/emoji/7_175.png index e83a174d7..b2e531dac 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_175.png and b/TMessagesProj/src/main/assets/emoji/7_175.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_176.png b/TMessagesProj/src/main/assets/emoji/7_176.png index 3a31c3509..b57df6f53 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_176.png and b/TMessagesProj/src/main/assets/emoji/7_176.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_177.png b/TMessagesProj/src/main/assets/emoji/7_177.png index cfd337c34..bebb9ab8e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_177.png and b/TMessagesProj/src/main/assets/emoji/7_177.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_178.png b/TMessagesProj/src/main/assets/emoji/7_178.png index 
f8f50b18a..bcf2d2f09 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_178.png and b/TMessagesProj/src/main/assets/emoji/7_178.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_179.png b/TMessagesProj/src/main/assets/emoji/7_179.png index 64b0ec4ea..b48188189 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_179.png and b/TMessagesProj/src/main/assets/emoji/7_179.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_18.png b/TMessagesProj/src/main/assets/emoji/7_18.png index b9dd505aa..255c3eef3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_18.png and b/TMessagesProj/src/main/assets/emoji/7_18.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_180.png b/TMessagesProj/src/main/assets/emoji/7_180.png index 8b6a3ae46..634b29e90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_180.png and b/TMessagesProj/src/main/assets/emoji/7_180.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_181.png b/TMessagesProj/src/main/assets/emoji/7_181.png index 5ed1f64e1..67783416b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_181.png and b/TMessagesProj/src/main/assets/emoji/7_181.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_182.png b/TMessagesProj/src/main/assets/emoji/7_182.png index 211831b05..ec2a5ccd1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_182.png and b/TMessagesProj/src/main/assets/emoji/7_182.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_183.png b/TMessagesProj/src/main/assets/emoji/7_183.png index 09c5ec37c..8a7e7f067 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_183.png and b/TMessagesProj/src/main/assets/emoji/7_183.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_184.png b/TMessagesProj/src/main/assets/emoji/7_184.png index 2197fa335..9985ab725 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_184.png and b/TMessagesProj/src/main/assets/emoji/7_184.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/7_185.png b/TMessagesProj/src/main/assets/emoji/7_185.png index 438bfb1cb..da3573718 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_185.png and b/TMessagesProj/src/main/assets/emoji/7_185.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_186.png b/TMessagesProj/src/main/assets/emoji/7_186.png index a36664e6c..b779be4c1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_186.png and b/TMessagesProj/src/main/assets/emoji/7_186.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_187.png b/TMessagesProj/src/main/assets/emoji/7_187.png index 6fc41c204..358e3f312 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_187.png and b/TMessagesProj/src/main/assets/emoji/7_187.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_188.png b/TMessagesProj/src/main/assets/emoji/7_188.png index 2e72665c3..e510d8c91 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_188.png and b/TMessagesProj/src/main/assets/emoji/7_188.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_189.png b/TMessagesProj/src/main/assets/emoji/7_189.png index 7135a1d3e..b88c891e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_189.png and b/TMessagesProj/src/main/assets/emoji/7_189.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_19.png b/TMessagesProj/src/main/assets/emoji/7_19.png index 71f22bad2..b27fe6eda 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_19.png and b/TMessagesProj/src/main/assets/emoji/7_19.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_190.png b/TMessagesProj/src/main/assets/emoji/7_190.png index cbafd8a42..1390ef902 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_190.png and b/TMessagesProj/src/main/assets/emoji/7_190.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_191.png b/TMessagesProj/src/main/assets/emoji/7_191.png index 3a77de433..8aecf3dec 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/7_191.png and b/TMessagesProj/src/main/assets/emoji/7_191.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_192.png b/TMessagesProj/src/main/assets/emoji/7_192.png index 44729c34c..b6d3bdf90 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_192.png and b/TMessagesProj/src/main/assets/emoji/7_192.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_193.png b/TMessagesProj/src/main/assets/emoji/7_193.png index 174c27b04..a0c36a114 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_193.png and b/TMessagesProj/src/main/assets/emoji/7_193.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_194.png b/TMessagesProj/src/main/assets/emoji/7_194.png index dc5316508..cc295b03b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_194.png and b/TMessagesProj/src/main/assets/emoji/7_194.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_195.png b/TMessagesProj/src/main/assets/emoji/7_195.png index 76a257a9c..fbb197047 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_195.png and b/TMessagesProj/src/main/assets/emoji/7_195.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_196.png b/TMessagesProj/src/main/assets/emoji/7_196.png index 3e439f6a2..f9131d893 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_196.png and b/TMessagesProj/src/main/assets/emoji/7_196.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_197.png b/TMessagesProj/src/main/assets/emoji/7_197.png index c62fa1d59..b359e82a1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_197.png and b/TMessagesProj/src/main/assets/emoji/7_197.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_198.png b/TMessagesProj/src/main/assets/emoji/7_198.png index 8613e3a69..9a1802e03 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_198.png and b/TMessagesProj/src/main/assets/emoji/7_198.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_199.png 
b/TMessagesProj/src/main/assets/emoji/7_199.png index b46c20669..2e48d1871 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_199.png and b/TMessagesProj/src/main/assets/emoji/7_199.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_2.png b/TMessagesProj/src/main/assets/emoji/7_2.png index 1ca93854b..2fb4b1cec 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_2.png and b/TMessagesProj/src/main/assets/emoji/7_2.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_20.png b/TMessagesProj/src/main/assets/emoji/7_20.png index 45b7efc65..fc7227f32 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_20.png and b/TMessagesProj/src/main/assets/emoji/7_20.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_200.png b/TMessagesProj/src/main/assets/emoji/7_200.png index 6f4cb379a..b44f88192 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_200.png and b/TMessagesProj/src/main/assets/emoji/7_200.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_201.png b/TMessagesProj/src/main/assets/emoji/7_201.png index f88ab4353..32c532eff 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_201.png and b/TMessagesProj/src/main/assets/emoji/7_201.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_202.png b/TMessagesProj/src/main/assets/emoji/7_202.png index b3ac7bae3..5e9563958 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_202.png and b/TMessagesProj/src/main/assets/emoji/7_202.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_203.png b/TMessagesProj/src/main/assets/emoji/7_203.png index 0c7518867..8a826e125 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_203.png and b/TMessagesProj/src/main/assets/emoji/7_203.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_204.png b/TMessagesProj/src/main/assets/emoji/7_204.png index 80bcbd117..448604d4d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_204.png and 
b/TMessagesProj/src/main/assets/emoji/7_204.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_205.png b/TMessagesProj/src/main/assets/emoji/7_205.png index c44497f84..4c9af98a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_205.png and b/TMessagesProj/src/main/assets/emoji/7_205.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_206.png b/TMessagesProj/src/main/assets/emoji/7_206.png index 37680b1c0..5fe7f5630 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_206.png and b/TMessagesProj/src/main/assets/emoji/7_206.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_207.png b/TMessagesProj/src/main/assets/emoji/7_207.png index 613ff2bb0..f61e9b6e2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_207.png and b/TMessagesProj/src/main/assets/emoji/7_207.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_208.png b/TMessagesProj/src/main/assets/emoji/7_208.png index b93b96206..cdbcbb3f2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_208.png and b/TMessagesProj/src/main/assets/emoji/7_208.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_209.png b/TMessagesProj/src/main/assets/emoji/7_209.png index e1de93340..c60b913a6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_209.png and b/TMessagesProj/src/main/assets/emoji/7_209.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_21.png b/TMessagesProj/src/main/assets/emoji/7_21.png index 2f7fa0b6a..f76c48d47 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_21.png and b/TMessagesProj/src/main/assets/emoji/7_21.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_210.png b/TMessagesProj/src/main/assets/emoji/7_210.png index 14385c961..f6b4ef474 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_210.png and b/TMessagesProj/src/main/assets/emoji/7_210.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_211.png b/TMessagesProj/src/main/assets/emoji/7_211.png index 
402931f47..8a6d2408b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_211.png and b/TMessagesProj/src/main/assets/emoji/7_211.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_212.png b/TMessagesProj/src/main/assets/emoji/7_212.png index a67f4fef7..d7b7d079b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_212.png and b/TMessagesProj/src/main/assets/emoji/7_212.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_213.png b/TMessagesProj/src/main/assets/emoji/7_213.png index 91ee5e856..8823fda08 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_213.png and b/TMessagesProj/src/main/assets/emoji/7_213.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_214.png b/TMessagesProj/src/main/assets/emoji/7_214.png index 70120fdbe..0b933781a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_214.png and b/TMessagesProj/src/main/assets/emoji/7_214.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_215.png b/TMessagesProj/src/main/assets/emoji/7_215.png index 02a2bdee1..2e26e76aa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_215.png and b/TMessagesProj/src/main/assets/emoji/7_215.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_216.png b/TMessagesProj/src/main/assets/emoji/7_216.png index 7c264dd91..ed6e1e4b9 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_216.png and b/TMessagesProj/src/main/assets/emoji/7_216.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_217.png b/TMessagesProj/src/main/assets/emoji/7_217.png index 2db1783f2..56c792857 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_217.png and b/TMessagesProj/src/main/assets/emoji/7_217.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_218.png b/TMessagesProj/src/main/assets/emoji/7_218.png index 3b2b74ca9..bf91dd322 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_218.png and b/TMessagesProj/src/main/assets/emoji/7_218.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/7_219.png b/TMessagesProj/src/main/assets/emoji/7_219.png index 6237def1c..5c9044fcd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_219.png and b/TMessagesProj/src/main/assets/emoji/7_219.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_22.png b/TMessagesProj/src/main/assets/emoji/7_22.png index a071c650a..39d592642 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_22.png and b/TMessagesProj/src/main/assets/emoji/7_22.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_220.png b/TMessagesProj/src/main/assets/emoji/7_220.png index e3557d618..0b9f17007 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_220.png and b/TMessagesProj/src/main/assets/emoji/7_220.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_221.png b/TMessagesProj/src/main/assets/emoji/7_221.png index a9d71b66d..93a75d745 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_221.png and b/TMessagesProj/src/main/assets/emoji/7_221.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_222.png b/TMessagesProj/src/main/assets/emoji/7_222.png index 123be052b..f994c4379 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_222.png and b/TMessagesProj/src/main/assets/emoji/7_222.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_223.png b/TMessagesProj/src/main/assets/emoji/7_223.png index f269f0cf0..097067095 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_223.png and b/TMessagesProj/src/main/assets/emoji/7_223.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_224.png b/TMessagesProj/src/main/assets/emoji/7_224.png index c4c5593f3..ae54e15e3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_224.png and b/TMessagesProj/src/main/assets/emoji/7_224.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_225.png b/TMessagesProj/src/main/assets/emoji/7_225.png index 1091689b5..f12fbe644 100644 Binary files 
a/TMessagesProj/src/main/assets/emoji/7_225.png and b/TMessagesProj/src/main/assets/emoji/7_225.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_226.png b/TMessagesProj/src/main/assets/emoji/7_226.png index 72c9602bd..ee359a4f4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_226.png and b/TMessagesProj/src/main/assets/emoji/7_226.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_227.png b/TMessagesProj/src/main/assets/emoji/7_227.png index 1d3c0dd5c..c72f89f4a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_227.png and b/TMessagesProj/src/main/assets/emoji/7_227.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_228.png b/TMessagesProj/src/main/assets/emoji/7_228.png index 8fc713e01..f31219c3b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_228.png and b/TMessagesProj/src/main/assets/emoji/7_228.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_229.png b/TMessagesProj/src/main/assets/emoji/7_229.png index 2d5ede6ba..0cffb794e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_229.png and b/TMessagesProj/src/main/assets/emoji/7_229.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_23.png b/TMessagesProj/src/main/assets/emoji/7_23.png index 9d46a43a5..975c2568d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_23.png and b/TMessagesProj/src/main/assets/emoji/7_23.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_230.png b/TMessagesProj/src/main/assets/emoji/7_230.png index 792db6ff1..59956e3ae 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_230.png and b/TMessagesProj/src/main/assets/emoji/7_230.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_231.png b/TMessagesProj/src/main/assets/emoji/7_231.png index abb02fbd6..7887fb580 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_231.png and b/TMessagesProj/src/main/assets/emoji/7_231.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_232.png 
b/TMessagesProj/src/main/assets/emoji/7_232.png index e4ab58e90..9ec222723 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_232.png and b/TMessagesProj/src/main/assets/emoji/7_232.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_233.png b/TMessagesProj/src/main/assets/emoji/7_233.png index 87bb62811..1345fbab4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_233.png and b/TMessagesProj/src/main/assets/emoji/7_233.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_234.png b/TMessagesProj/src/main/assets/emoji/7_234.png index 7ad4238eb..f020ea1ca 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_234.png and b/TMessagesProj/src/main/assets/emoji/7_234.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_235.png b/TMessagesProj/src/main/assets/emoji/7_235.png index 404b374ab..b8db390f8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_235.png and b/TMessagesProj/src/main/assets/emoji/7_235.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_236.png b/TMessagesProj/src/main/assets/emoji/7_236.png index 2fee6762d..a35d6d742 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_236.png and b/TMessagesProj/src/main/assets/emoji/7_236.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_237.png b/TMessagesProj/src/main/assets/emoji/7_237.png index 9521d1e2d..2c8387e34 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_237.png and b/TMessagesProj/src/main/assets/emoji/7_237.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_238.png b/TMessagesProj/src/main/assets/emoji/7_238.png index a1b8db314..966dfec8b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_238.png and b/TMessagesProj/src/main/assets/emoji/7_238.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_239.png b/TMessagesProj/src/main/assets/emoji/7_239.png index fbe6ef88a..b5b1a295a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_239.png and 
b/TMessagesProj/src/main/assets/emoji/7_239.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_24.png b/TMessagesProj/src/main/assets/emoji/7_24.png index ebed66052..eb0563cb1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_24.png and b/TMessagesProj/src/main/assets/emoji/7_24.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_240.png b/TMessagesProj/src/main/assets/emoji/7_240.png index 668719896..2e5611376 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_240.png and b/TMessagesProj/src/main/assets/emoji/7_240.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_241.png b/TMessagesProj/src/main/assets/emoji/7_241.png index ff38ecc9b..711984b5a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_241.png and b/TMessagesProj/src/main/assets/emoji/7_241.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_242.png b/TMessagesProj/src/main/assets/emoji/7_242.png index 7f282634b..f7c885082 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_242.png and b/TMessagesProj/src/main/assets/emoji/7_242.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_243.png b/TMessagesProj/src/main/assets/emoji/7_243.png index d0cdd38dc..e3edc6f7d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_243.png and b/TMessagesProj/src/main/assets/emoji/7_243.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_244.png b/TMessagesProj/src/main/assets/emoji/7_244.png index 65cf5ce0d..4a85ccd06 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_244.png and b/TMessagesProj/src/main/assets/emoji/7_244.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_245.png b/TMessagesProj/src/main/assets/emoji/7_245.png index ffe6c92c5..40555f9d8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_245.png and b/TMessagesProj/src/main/assets/emoji/7_245.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_246.png b/TMessagesProj/src/main/assets/emoji/7_246.png index 
9ee158602..d821408b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_246.png and b/TMessagesProj/src/main/assets/emoji/7_246.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_247.png b/TMessagesProj/src/main/assets/emoji/7_247.png index dc88f4ff2..6318419c7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_247.png and b/TMessagesProj/src/main/assets/emoji/7_247.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_248.png b/TMessagesProj/src/main/assets/emoji/7_248.png index 67b83d593..7d4bb6a7c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_248.png and b/TMessagesProj/src/main/assets/emoji/7_248.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_249.png b/TMessagesProj/src/main/assets/emoji/7_249.png index 28a011e46..e6ebd2ad5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_249.png and b/TMessagesProj/src/main/assets/emoji/7_249.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_25.png b/TMessagesProj/src/main/assets/emoji/7_25.png index 828eddd55..d6f0bbdc4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_25.png and b/TMessagesProj/src/main/assets/emoji/7_25.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_250.png b/TMessagesProj/src/main/assets/emoji/7_250.png index fb085fe92..92a32597d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_250.png and b/TMessagesProj/src/main/assets/emoji/7_250.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_251.png b/TMessagesProj/src/main/assets/emoji/7_251.png index d4f2e4e2f..c3033b021 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_251.png and b/TMessagesProj/src/main/assets/emoji/7_251.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_252.png b/TMessagesProj/src/main/assets/emoji/7_252.png index 697071a21..844d2a6ed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_252.png and b/TMessagesProj/src/main/assets/emoji/7_252.png differ diff --git 
a/TMessagesProj/src/main/assets/emoji/7_253.png b/TMessagesProj/src/main/assets/emoji/7_253.png index 6472cd4d8..873d6476a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_253.png and b/TMessagesProj/src/main/assets/emoji/7_253.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_254.png b/TMessagesProj/src/main/assets/emoji/7_254.png index 8aba080c3..9ba532408 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_254.png and b/TMessagesProj/src/main/assets/emoji/7_254.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_255.png b/TMessagesProj/src/main/assets/emoji/7_255.png index 1358f49a5..a3eaf2295 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_255.png and b/TMessagesProj/src/main/assets/emoji/7_255.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_256.png b/TMessagesProj/src/main/assets/emoji/7_256.png index c22c5db9c..47f5198b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_256.png and b/TMessagesProj/src/main/assets/emoji/7_256.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_257.png b/TMessagesProj/src/main/assets/emoji/7_257.png index 4e386bcec..b46eaeeed 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_257.png and b/TMessagesProj/src/main/assets/emoji/7_257.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_258.png b/TMessagesProj/src/main/assets/emoji/7_258.png new file mode 100644 index 000000000..bec587256 Binary files /dev/null and b/TMessagesProj/src/main/assets/emoji/7_258.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_26.png b/TMessagesProj/src/main/assets/emoji/7_26.png index 3ed343858..9f14a4b22 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_26.png and b/TMessagesProj/src/main/assets/emoji/7_26.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_27.png b/TMessagesProj/src/main/assets/emoji/7_27.png index 1f7c4c98a..7e19aebe0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_27.png and 
b/TMessagesProj/src/main/assets/emoji/7_27.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_28.png b/TMessagesProj/src/main/assets/emoji/7_28.png index 6c0fe17ab..67d4b7684 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_28.png and b/TMessagesProj/src/main/assets/emoji/7_28.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_29.png b/TMessagesProj/src/main/assets/emoji/7_29.png index 5faa9fd24..00ce1dad8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_29.png and b/TMessagesProj/src/main/assets/emoji/7_29.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_3.png b/TMessagesProj/src/main/assets/emoji/7_3.png index 731847d0d..b3f3d138c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_3.png and b/TMessagesProj/src/main/assets/emoji/7_3.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_30.png b/TMessagesProj/src/main/assets/emoji/7_30.png index 107f4a7ee..d435c5dbd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_30.png and b/TMessagesProj/src/main/assets/emoji/7_30.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_31.png b/TMessagesProj/src/main/assets/emoji/7_31.png index a52087a9c..118f8aa28 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_31.png and b/TMessagesProj/src/main/assets/emoji/7_31.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_32.png b/TMessagesProj/src/main/assets/emoji/7_32.png index 2911d40ef..c16626e2a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_32.png and b/TMessagesProj/src/main/assets/emoji/7_32.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_33.png b/TMessagesProj/src/main/assets/emoji/7_33.png index 213396f5d..2bc060739 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_33.png and b/TMessagesProj/src/main/assets/emoji/7_33.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_34.png b/TMessagesProj/src/main/assets/emoji/7_34.png index ee61eb771..1dd04d871 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/7_34.png and b/TMessagesProj/src/main/assets/emoji/7_34.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_35.png b/TMessagesProj/src/main/assets/emoji/7_35.png index d3a220ab3..5b5cbd858 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_35.png and b/TMessagesProj/src/main/assets/emoji/7_35.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_36.png b/TMessagesProj/src/main/assets/emoji/7_36.png index d7cc1abeb..b0944a30a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_36.png and b/TMessagesProj/src/main/assets/emoji/7_36.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_37.png b/TMessagesProj/src/main/assets/emoji/7_37.png index c938b88e7..8defd30a2 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_37.png and b/TMessagesProj/src/main/assets/emoji/7_37.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_38.png b/TMessagesProj/src/main/assets/emoji/7_38.png index 025845e39..0fb7bee7a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_38.png and b/TMessagesProj/src/main/assets/emoji/7_38.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_39.png b/TMessagesProj/src/main/assets/emoji/7_39.png index b00885048..e2fd82760 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_39.png and b/TMessagesProj/src/main/assets/emoji/7_39.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_4.png b/TMessagesProj/src/main/assets/emoji/7_4.png index c0378073c..c23957d46 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_4.png and b/TMessagesProj/src/main/assets/emoji/7_4.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_40.png b/TMessagesProj/src/main/assets/emoji/7_40.png index 7b95c8c9c..dd8483198 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_40.png and b/TMessagesProj/src/main/assets/emoji/7_40.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_41.png 
b/TMessagesProj/src/main/assets/emoji/7_41.png index 9b7be9855..e12d02425 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_41.png and b/TMessagesProj/src/main/assets/emoji/7_41.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_42.png b/TMessagesProj/src/main/assets/emoji/7_42.png index fccbfd40f..8ff887e87 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_42.png and b/TMessagesProj/src/main/assets/emoji/7_42.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_43.png b/TMessagesProj/src/main/assets/emoji/7_43.png index 951ff90f1..c25a27b67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_43.png and b/TMessagesProj/src/main/assets/emoji/7_43.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_44.png b/TMessagesProj/src/main/assets/emoji/7_44.png index 9a0aacb4c..bd2a0bdea 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_44.png and b/TMessagesProj/src/main/assets/emoji/7_44.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_45.png b/TMessagesProj/src/main/assets/emoji/7_45.png index 0efe7138f..d4fab752a 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_45.png and b/TMessagesProj/src/main/assets/emoji/7_45.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_46.png b/TMessagesProj/src/main/assets/emoji/7_46.png index fcdc745ff..f0fb28b67 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_46.png and b/TMessagesProj/src/main/assets/emoji/7_46.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_47.png b/TMessagesProj/src/main/assets/emoji/7_47.png index 02602b5ad..384cb4dd7 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_47.png and b/TMessagesProj/src/main/assets/emoji/7_47.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_48.png b/TMessagesProj/src/main/assets/emoji/7_48.png index df4b84f2d..1625cb62c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_48.png and b/TMessagesProj/src/main/assets/emoji/7_48.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/7_49.png b/TMessagesProj/src/main/assets/emoji/7_49.png index bd67495b6..fd6637f85 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_49.png and b/TMessagesProj/src/main/assets/emoji/7_49.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_50.png b/TMessagesProj/src/main/assets/emoji/7_50.png index 811384738..c24f82aa1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_50.png and b/TMessagesProj/src/main/assets/emoji/7_50.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_51.png b/TMessagesProj/src/main/assets/emoji/7_51.png index d7aa69270..0c86d4f97 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_51.png and b/TMessagesProj/src/main/assets/emoji/7_51.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_52.png b/TMessagesProj/src/main/assets/emoji/7_52.png index 453c8d79c..ba725e8cd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_52.png and b/TMessagesProj/src/main/assets/emoji/7_52.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_53.png b/TMessagesProj/src/main/assets/emoji/7_53.png index 15d6abb73..9e7d89e29 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_53.png and b/TMessagesProj/src/main/assets/emoji/7_53.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_54.png b/TMessagesProj/src/main/assets/emoji/7_54.png index be7298706..261525ded 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_54.png and b/TMessagesProj/src/main/assets/emoji/7_54.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_55.png b/TMessagesProj/src/main/assets/emoji/7_55.png index 3b11ca952..78ce3ffd8 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_55.png and b/TMessagesProj/src/main/assets/emoji/7_55.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_56.png b/TMessagesProj/src/main/assets/emoji/7_56.png index c96f78270..8e7ab5a92 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_56.png 
and b/TMessagesProj/src/main/assets/emoji/7_56.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_57.png b/TMessagesProj/src/main/assets/emoji/7_57.png index 7d69e0abd..b78ebc13e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_57.png and b/TMessagesProj/src/main/assets/emoji/7_57.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_58.png b/TMessagesProj/src/main/assets/emoji/7_58.png index d6a1b7802..a927264b1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_58.png and b/TMessagesProj/src/main/assets/emoji/7_58.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_59.png b/TMessagesProj/src/main/assets/emoji/7_59.png index cc2145af0..300e6c3f5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_59.png and b/TMessagesProj/src/main/assets/emoji/7_59.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_6.png b/TMessagesProj/src/main/assets/emoji/7_6.png index 5d676d830..7f47a7e1b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_6.png and b/TMessagesProj/src/main/assets/emoji/7_6.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_60.png b/TMessagesProj/src/main/assets/emoji/7_60.png index a13902465..839241dd1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_60.png and b/TMessagesProj/src/main/assets/emoji/7_60.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_61.png b/TMessagesProj/src/main/assets/emoji/7_61.png index 338c5bd0e..fb52ed311 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_61.png and b/TMessagesProj/src/main/assets/emoji/7_61.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_62.png b/TMessagesProj/src/main/assets/emoji/7_62.png index 8871ce48b..861392f0e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_62.png and b/TMessagesProj/src/main/assets/emoji/7_62.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_63.png b/TMessagesProj/src/main/assets/emoji/7_63.png index a8a5d92a0..78f36433a 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/7_63.png and b/TMessagesProj/src/main/assets/emoji/7_63.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_64.png b/TMessagesProj/src/main/assets/emoji/7_64.png index 82bc13a47..59ac7adcd 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_64.png and b/TMessagesProj/src/main/assets/emoji/7_64.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_65.png b/TMessagesProj/src/main/assets/emoji/7_65.png index 0e63cfbc6..4737fe7fb 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_65.png and b/TMessagesProj/src/main/assets/emoji/7_65.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_66.png b/TMessagesProj/src/main/assets/emoji/7_66.png index 3ee0656ad..45bd2ddaa 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_66.png and b/TMessagesProj/src/main/assets/emoji/7_66.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_67.png b/TMessagesProj/src/main/assets/emoji/7_67.png index 657547df6..5324b3733 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_67.png and b/TMessagesProj/src/main/assets/emoji/7_67.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_68.png b/TMessagesProj/src/main/assets/emoji/7_68.png index aa2fdf700..516a34430 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_68.png and b/TMessagesProj/src/main/assets/emoji/7_68.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_69.png b/TMessagesProj/src/main/assets/emoji/7_69.png index 20d5de892..b9190b335 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_69.png and b/TMessagesProj/src/main/assets/emoji/7_69.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_7.png b/TMessagesProj/src/main/assets/emoji/7_7.png index 82ce40c5e..b3e160ece 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_7.png and b/TMessagesProj/src/main/assets/emoji/7_7.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_70.png 
b/TMessagesProj/src/main/assets/emoji/7_70.png index b96094b2b..4695ab741 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_70.png and b/TMessagesProj/src/main/assets/emoji/7_70.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_71.png b/TMessagesProj/src/main/assets/emoji/7_71.png index abf13658c..030e18743 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_71.png and b/TMessagesProj/src/main/assets/emoji/7_71.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_72.png b/TMessagesProj/src/main/assets/emoji/7_72.png index c1e21d241..f3fbe09f1 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_72.png and b/TMessagesProj/src/main/assets/emoji/7_72.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_73.png b/TMessagesProj/src/main/assets/emoji/7_73.png index 42cd160b8..68eb3de9b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_73.png and b/TMessagesProj/src/main/assets/emoji/7_73.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_74.png b/TMessagesProj/src/main/assets/emoji/7_74.png index 3f0c55b61..5bc91eb2e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_74.png and b/TMessagesProj/src/main/assets/emoji/7_74.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_75.png b/TMessagesProj/src/main/assets/emoji/7_75.png index 18fb7cbd9..dea2720d4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_75.png and b/TMessagesProj/src/main/assets/emoji/7_75.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_76.png b/TMessagesProj/src/main/assets/emoji/7_76.png index e05ca002c..a4115ac87 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_76.png and b/TMessagesProj/src/main/assets/emoji/7_76.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_77.png b/TMessagesProj/src/main/assets/emoji/7_77.png index 0706096e3..80788695e 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_77.png and b/TMessagesProj/src/main/assets/emoji/7_77.png differ 
diff --git a/TMessagesProj/src/main/assets/emoji/7_78.png b/TMessagesProj/src/main/assets/emoji/7_78.png index c2ac324e2..5ce718eb4 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_78.png and b/TMessagesProj/src/main/assets/emoji/7_78.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_79.png b/TMessagesProj/src/main/assets/emoji/7_79.png index 1db8daf09..37da0c539 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_79.png and b/TMessagesProj/src/main/assets/emoji/7_79.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_8.png b/TMessagesProj/src/main/assets/emoji/7_8.png index 72b6ed3a4..8610f9de6 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_8.png and b/TMessagesProj/src/main/assets/emoji/7_8.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_80.png b/TMessagesProj/src/main/assets/emoji/7_80.png index 683564ccb..60d0b1c8b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_80.png and b/TMessagesProj/src/main/assets/emoji/7_80.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_81.png b/TMessagesProj/src/main/assets/emoji/7_81.png index a8bcf2304..081479dd5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_81.png and b/TMessagesProj/src/main/assets/emoji/7_81.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_82.png b/TMessagesProj/src/main/assets/emoji/7_82.png index bd102a187..bfe95fa7c 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_82.png and b/TMessagesProj/src/main/assets/emoji/7_82.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_83.png b/TMessagesProj/src/main/assets/emoji/7_83.png index 69610dbad..51ea7b017 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_83.png and b/TMessagesProj/src/main/assets/emoji/7_83.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_84.png b/TMessagesProj/src/main/assets/emoji/7_84.png index c9db2063f..d5a30335d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_84.png and 
b/TMessagesProj/src/main/assets/emoji/7_84.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_85.png b/TMessagesProj/src/main/assets/emoji/7_85.png index 8683bf1e0..b21e7d966 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_85.png and b/TMessagesProj/src/main/assets/emoji/7_85.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_86.png b/TMessagesProj/src/main/assets/emoji/7_86.png index 58dd2045e..e73c93f7b 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_86.png and b/TMessagesProj/src/main/assets/emoji/7_86.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_87.png b/TMessagesProj/src/main/assets/emoji/7_87.png index a7ecb8ffc..d53cacc41 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_87.png and b/TMessagesProj/src/main/assets/emoji/7_87.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_88.png b/TMessagesProj/src/main/assets/emoji/7_88.png index 85520a675..fbae4c949 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_88.png and b/TMessagesProj/src/main/assets/emoji/7_88.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_89.png b/TMessagesProj/src/main/assets/emoji/7_89.png index b51548a7c..66d1cff40 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_89.png and b/TMessagesProj/src/main/assets/emoji/7_89.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_9.png b/TMessagesProj/src/main/assets/emoji/7_9.png index 807b76b4f..3f7679ca0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_9.png and b/TMessagesProj/src/main/assets/emoji/7_9.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_90.png b/TMessagesProj/src/main/assets/emoji/7_90.png index 89425b94a..8d959137d 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_90.png and b/TMessagesProj/src/main/assets/emoji/7_90.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_91.png b/TMessagesProj/src/main/assets/emoji/7_91.png index bbe37c1e2..c98fd8721 100644 Binary 
files a/TMessagesProj/src/main/assets/emoji/7_91.png and b/TMessagesProj/src/main/assets/emoji/7_91.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_92.png b/TMessagesProj/src/main/assets/emoji/7_92.png index 47ba1d003..c83ad26b3 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_92.png and b/TMessagesProj/src/main/assets/emoji/7_92.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_93.png b/TMessagesProj/src/main/assets/emoji/7_93.png index 256e0ae90..bde62d801 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_93.png and b/TMessagesProj/src/main/assets/emoji/7_93.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_94.png b/TMessagesProj/src/main/assets/emoji/7_94.png index 94be1fe8d..269c878c5 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_94.png and b/TMessagesProj/src/main/assets/emoji/7_94.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_95.png b/TMessagesProj/src/main/assets/emoji/7_95.png index 19b8b68b7..cb4621831 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_95.png and b/TMessagesProj/src/main/assets/emoji/7_95.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_96.png b/TMessagesProj/src/main/assets/emoji/7_96.png index f298fef8e..4dc145473 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_96.png and b/TMessagesProj/src/main/assets/emoji/7_96.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_97.png b/TMessagesProj/src/main/assets/emoji/7_97.png index 01c938e52..831289518 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_97.png and b/TMessagesProj/src/main/assets/emoji/7_97.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_98.png b/TMessagesProj/src/main/assets/emoji/7_98.png index d8519a81c..845eb9d28 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_98.png and b/TMessagesProj/src/main/assets/emoji/7_98.png differ diff --git a/TMessagesProj/src/main/assets/emoji/7_99.png 
b/TMessagesProj/src/main/assets/emoji/7_99.png index 36a397334..28df0ecb0 100644 Binary files a/TMessagesProj/src/main/assets/emoji/7_99.png and b/TMessagesProj/src/main/assets/emoji/7_99.png differ diff --git a/TMessagesProj/src/main/assets/night.attheme b/TMessagesProj/src/main/assets/night.attheme index 59fbe938a..209c4a232 100644 --- a/TMessagesProj/src/main/assets/night.attheme +++ b/TMessagesProj/src/main/assets/night.attheme @@ -488,3 +488,4 @@ chat_outSentClock=-8213557 dialogBackgroundGray=-14013910 chat_searchPanelText=-10767620 chat_inContactIcon=-1 +voipgroup_topPanelGray=-10521727 diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java index a13365135..a317fdb15 100644 --- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java +++ b/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java @@ -32,10 +32,6 @@ import java.util.List; public class ChatListItemAnimator extends DefaultItemAnimator { - public static final int ANIMATION_TYPE_OUT = 1; - public static final int ANIMATION_TYPE_IN = 2; - public static final int ANIMATION_TYPE_MOVE = 3; - private final ChatActivity activity; private final RecyclerListView recyclerListView; @@ -598,7 +594,7 @@ public class ChatListItemAnimator extends DefaultItemAnimator { } @Override - void animateMoveImpl(RecyclerView.ViewHolder holder, MoveInfo moveInfo) { + protected void animateMoveImpl(RecyclerView.ViewHolder holder, MoveInfo moveInfo) { if (BuildVars.LOGS_ENABLED) { FileLog.d("animate move impl"); } diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java index 474112786..e94957035 100644 --- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java +++ 
b/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java @@ -65,7 +65,7 @@ public class DefaultItemAnimator extends SimpleItemAnimator { public RecyclerView.ViewHolder holder; public int fromX, fromY, toX, toY; - MoveInfo(RecyclerView.ViewHolder holder, int fromX, int fromY, int toX, int toY) { + public MoveInfo(RecyclerView.ViewHolder holder, int fromX, int fromY, int toX, int toY) { this.holder = holder; this.fromX = fromX; this.fromY = fromY; @@ -298,7 +298,7 @@ public class DefaultItemAnimator extends SimpleItemAnimator { } - void animateMoveImpl(final RecyclerView.ViewHolder holder, MoveInfo moveInfo) { + protected void animateMoveImpl(final RecyclerView.ViewHolder holder, MoveInfo moveInfo) { int fromX = moveInfo.fromX; int fromY = moveInfo.fromY; int toX = moveInfo.toX; diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java index 98635a92e..ef1fe2d6c 100644 --- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java +++ b/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java @@ -2419,6 +2419,14 @@ public class RecyclerView extends ViewGroup implements ScrollingView, return false; } + public float getCurrentVelocity() { + if (mVelocityTracker != null) { + mVelocityTracker.computeCurrentVelocity(1000, mMaxFlingVelocity); + return mVelocityTracker.getYVelocity(); + } + return 0; + } + /** * Stop any current scroll in progress, such as one started by * {@link #smoothScrollBy(int, int)}, {@link #fling(int, int)} or a touch-initiated fling. 
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AccountInstance.java b/TMessagesProj/src/main/java/org/telegram/messenger/AccountInstance.java index 096b15530..e12c03f80 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/AccountInstance.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AccountInstance.java @@ -88,4 +88,8 @@ public class AccountInstance { public SharedPreferences getNotificationsSettings() { return MessagesController.getNotificationsSettings(currentAccount); } + + public int getCurrentAccount() { + return currentAccount; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java index a644bb5eb..71d8c6935 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java @@ -68,6 +68,7 @@ import android.text.util.Linkify; import android.util.DisplayMetrics; import android.util.StateSet; import android.util.TypedValue; +import android.view.ContextThemeWrapper; import android.view.Display; import android.view.MotionEvent; import android.view.Gravity; @@ -75,6 +76,7 @@ import android.view.Surface; import android.view.View; import android.view.ViewGroup; import android.view.Window; +import android.view.WindowInsets; import android.view.WindowManager; import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityManager; @@ -255,7 +257,7 @@ public class AndroidUtilities { lastIndex = str.length(); } StaticLayout staticLayout = new StaticLayout(str, textPaint, Integer.MAX_VALUE, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); - float endOfTextX = staticLayout.getPrimaryHorizontal(lastIndex); + float endOfTextX = staticLayout.getLineWidth(0); if (endOfTextX + textPaint.measureText("...") < availableWidth) { return str; } @@ -267,6 +269,9 @@ public class AndroidUtilities { 
int endHighlightedIndex = i; float endOfHighlight = staticLayout.getPrimaryHorizontal(endHighlightedIndex); + if (staticLayout.isRtlCharAt(endHighlightedIndex)) { + endOfHighlight = endOfTextX - endOfHighlight; + } if (endOfHighlight < availableWidth) { return str; } @@ -344,6 +349,16 @@ public class AndroidUtilities { return spannableStringBuilder; } + public static Activity findActivity(Context context) { + if (context instanceof Activity) { + return (Activity) context; + } + if (context instanceof ContextThemeWrapper) { + return findActivity(((ContextThemeWrapper) context).getBaseContext()); + } + return null; + } + private static class LinkSpec { String url; int start; @@ -1436,6 +1451,62 @@ public class AndroidUtilities { } } + public static ArrayList getDataDirs() { + ArrayList result = null; + if (Build.VERSION.SDK_INT >= 19) { + File[] dirs = ApplicationLoader.applicationContext.getExternalFilesDirs(null); + if (dirs != null) { + for (int a = 0; a < dirs.length; a++) { + if (dirs[a] == null) { + continue; + } + String path = dirs[a].getAbsolutePath(); + + if (result == null) { + result = new ArrayList<>(); + } + result.add(dirs[a]); + } + } + } + if (result == null) { + result = new ArrayList<>(); + } + if (result.isEmpty()) { + result.add(Environment.getExternalStorageDirectory()); + } + return result; + } + + public static ArrayList getRootDirs() { + ArrayList result = null; + if (Build.VERSION.SDK_INT >= 19) { + File[] dirs = ApplicationLoader.applicationContext.getExternalFilesDirs(null); + if (dirs != null) { + for (int a = 0; a < dirs.length; a++) { + if (dirs[a] == null) { + continue; + } + String path = dirs[a].getAbsolutePath(); + int idx = path.indexOf("/Android"); + if (idx >= 0) { + if (result == null) { + result = new ArrayList<>(); + } + result.add(new File(path.substring(0, idx))); + } + } + } + } + if (result == null) { + result = new ArrayList<>(); + } + if (result.isEmpty()) { + result.add(Environment.getExternalStorageDirectory()); + } + 
return result; + } + public static File getCacheDir() { String state = null; try { @@ -1445,7 +1516,21 @@ public class AndroidUtilities { } if (state == null || state.startsWith(Environment.MEDIA_MOUNTED)) { try { - File file = ApplicationLoader.applicationContext.getExternalCacheDir(); + File file; + if (Build.VERSION.SDK_INT >= 19) { + File[] dirs = ApplicationLoader.applicationContext.getExternalCacheDirs(); + file = dirs[0]; + if (!TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + for (int a = 0; a < dirs.length; a++) { + if (dirs[a] != null && dirs[a].getAbsolutePath().startsWith(SharedConfig.storageCacheDir)) { + file = dirs[a]; + break; + } + } + } + } else { + file = ApplicationLoader.applicationContext.getExternalCacheDir(); + } if (file != null) { return file; } @@ -1762,18 +1847,23 @@ public class AndroidUtilities { return 0; } try { - if (mAttachInfoField == null) { - mAttachInfoField = View.class.getDeclaredField("mAttachInfo"); - mAttachInfoField.setAccessible(true); - } - Object mAttachInfo = mAttachInfoField.get(view); - if (mAttachInfo != null) { - if (mStableInsetsField == null) { - mStableInsetsField = mAttachInfo.getClass().getDeclaredField("mStableInsets"); - mStableInsetsField.setAccessible(true); + if (Build.VERSION.SDK_INT >= 23) { + WindowInsets insets = view.getRootWindowInsets(); + return insets != null ? 
insets.getStableInsetBottom() : 0; + } else { + if (mAttachInfoField == null) { + mAttachInfoField = View.class.getDeclaredField("mAttachInfo"); + mAttachInfoField.setAccessible(true); + } + Object mAttachInfo = mAttachInfoField.get(view); + if (mAttachInfo != null) { + if (mStableInsetsField == null) { + mStableInsetsField = mAttachInfo.getClass().getDeclaredField("mStableInsets"); + mStableInsetsField.setAccessible(true); + } + Rect insets = (Rect) mStableInsetsField.get(mAttachInfo); + return insets.bottom; } - Rect insets = (Rect) mStableInsetsField.get(mAttachInfo); - return insets.bottom; } } catch (Exception e) { FileLog.e(e); @@ -1852,7 +1942,9 @@ public class AndroidUtilities { } public static void setScrollViewEdgeEffectColor(HorizontalScrollView scrollView, int color) { - if (Build.VERSION.SDK_INT >= 21) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + scrollView.setEdgeEffectColor(color); + } else if (Build.VERSION.SDK_INT >= 21) { try { Field field = HorizontalScrollView.class.getDeclaredField("mEdgeGlowLeft"); field.setAccessible(true); @@ -1874,7 +1966,10 @@ public class AndroidUtilities { } public static void setScrollViewEdgeEffectColor(ScrollView scrollView, int color) { - if (Build.VERSION.SDK_INT >= 21) { + if (Build.VERSION.SDK_INT >= 29) { + scrollView.setTopEdgeEffectColor(color); + scrollView.setBottomEdgeEffectColor(color); + } else if (Build.VERSION.SDK_INT >= 21) { try { Field field = ScrollView.class.getDeclaredField("mEdgeGlowTop"); field.setAccessible(true); @@ -1889,8 +1984,8 @@ public class AndroidUtilities { if (mEdgeGlowBottom != null) { mEdgeGlowBottom.setColor(color); } - } catch (Exception e) { - FileLog.e(e); + } catch (Exception ignore) { + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java index e781aafb5..866e6df2f 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java @@ -24,6 +24,7 @@ import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.Handler; import android.os.PowerManager; +import android.os.SystemClock; import android.telephony.TelephonyManager; import android.text.TextUtils; @@ -49,10 +50,14 @@ public class ApplicationLoader extends Application { private static ConnectivityManager connectivityManager; private static volatile boolean applicationInited = false; + public static long startTime; + public static volatile boolean isScreenOn = false; public static volatile boolean mainInterfacePaused = true; + public static volatile boolean mainInterfaceStopped = true; public static volatile boolean externalInterfacePaused = true; public static volatile boolean mainInterfacePausedStageQueue = true; + public static boolean canDrawOverlays; public static volatile long mainInterfacePausedStageQueueTime; public static boolean hasPlayServices; @@ -85,11 +90,10 @@ public class ApplicationLoader extends Application { if (applicationInited) { return; } - applicationInited = true; try { - LocaleController.getInstance(); + LocaleController.getInstance(); //TODO improve } catch (Exception e) { e.printStackTrace(); } @@ -138,7 +142,7 @@ public class ApplicationLoader extends Application { } SharedConfig.loadConfig(); - for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { + for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { //TODO improve account UserConfig.getInstance(a).loadConfig(); MessagesController.getInstance(a); if (a == 0) { @@ -160,7 +164,7 @@ public class ApplicationLoader extends Application { } MediaController.getInstance(); - for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { + for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { //TODO improve account ContactsController.getInstance(a).checkAppAccount(); 
DownloadController.getInstance(a); } @@ -182,6 +186,9 @@ public class ApplicationLoader extends Application { super.onCreate(); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("app start time = " + (startTime = SystemClock.elapsedRealtime())); + } if (applicationContext == null) { applicationContext = getApplicationContext(); } @@ -198,6 +205,9 @@ public class ApplicationLoader extends Application { } } }; + if (BuildVars.LOGS_ENABLED) { + FileLog.d("load libs time = " + (SystemClock.elapsedRealtime() - startTime)); + } applicationHandler = new Handler(applicationContext.getMainLooper()); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java index b00071996..71b5e4de5 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java @@ -18,8 +18,8 @@ public class BuildVars { public static boolean LOGS_ENABLED = false; public static boolean USE_CLOUD_STRINGS = true; public static boolean CHECK_UPDATES = true; - public static int BUILD_VERSION = 2139; - public static String BUILD_VERSION_STRING = "7.2.0"; + public static int BUILD_VERSION = 2195; + public static String BUILD_VERSION_STRING = "7.3.0"; public static int APP_ID = 4; public static String APP_HASH = "014b35b6184100b085b0d0572f9b5103"; public static String APPCENTER_HASH = "a5b5c4f5-51da-dedc-9918-d9766a22ca7c"; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java index 67d6b5ef8..9a7dfbc53 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java @@ -8,8 +8,18 @@ package org.telegram.messenger; +import android.os.SystemClock; +import android.text.TextUtils; +import android.util.SparseArray; + +import org.telegram.messenger.voip.VoIPService; 
+import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; + public class ChatObject { public static final int CHAT_TYPE_CHAT = 0; @@ -31,6 +41,360 @@ public class ChatObject { public static final int ACTION_VIEW = 11; public static final int ACTION_EDIT_MESSAGES = 12; public static final int ACTION_DELETE_MESSAGES = 13; + public static final int ACTION_MANAGE_CALLS = 14; + + public static class Call { + public TLRPC.GroupCall call; + public int chatId; + public SparseArray participants = new SparseArray<>(); + public ArrayList sortedParticipants = new ArrayList<>(); + public ArrayList invitedUsers = new ArrayList<>(); + public HashSet invitedUsersMap = new HashSet<>(); + public SparseArray participantsBySources = new SparseArray<>(); + private String nextLoadOffset; + public boolean membersLoadEndReached; + public boolean loadingMembers; + public int currentAccount; + public int speakingMembersCount; + private Runnable typingUpdateRunnable = () -> { + typingUpdateRunnableScheduled = false; + checkOnlineParticipants(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallTypingsUpdated); + }; + private boolean typingUpdateRunnableScheduled; + private int lastLoadGuid; + private HashSet loadingGuids = new HashSet<>(); + + public void setCall(int account, int chatId, TLRPC.TL_phone_groupCall groupCall) { + this.chatId = chatId; + currentAccount = account; + call = groupCall.call; + int date = Integer.MAX_VALUE; + for (int a = 0, N = groupCall.participants.size(); a < N; a++) { + TLRPC.TL_groupCallParticipant participant = groupCall.participants.get(a); + participants.put(participant.user_id, participant); + sortedParticipants.add(participant); + participantsBySources.put(participant.source, participant); + date = Math.min(date, participant.date); + } + sortParticipants(); + nextLoadOffset = 
groupCall.participants_next_offset; + loadMembers(true); + } + + public void migrateToChat(TLRPC.Chat chat) { + chatId = chat.id; + VoIPService voIPService = VoIPService.getSharedInstance(); + if (voIPService != null && voIPService.getAccount() == currentAccount && voIPService.getChat() != null && voIPService.getChat().id == -chatId) { + voIPService.migrateToChat(chat); + } + } + + public void loadMembers(boolean fromBegin) { + if (fromBegin) { + membersLoadEndReached = false; + nextLoadOffset = null; + } + if (membersLoadEndReached) { + return; + } + loadingMembers = true; + TLRPC.TL_phone_getGroupParticipants req = new TLRPC.TL_phone_getGroupParticipants(); + req.call = new TLRPC.TL_inputGroupCall(); + req.call.id = call.id; + req.call.access_hash = call.access_hash; + req.offset = nextLoadOffset != null ? nextLoadOffset : ""; + req.limit = 20; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (response != null) { + loadingMembers = false; + TLRPC.TL_phone_groupParticipants groupParticipants = (TLRPC.TL_phone_groupParticipants) response; + MessagesController.getInstance(currentAccount).putUsers(groupParticipants.users, false); + SparseArray old = null; + if (TextUtils.isEmpty(req.offset)) { + if (participants.size() != 0) { + old = participants; + participants = new SparseArray<>(); + } else { + participants.clear(); + } + sortedParticipants.clear(); + participantsBySources.clear(); + loadingGuids.clear(); + } + nextLoadOffset = groupParticipants.next_offset; + if (groupParticipants.participants.isEmpty() || TextUtils.isEmpty(nextLoadOffset)) { + membersLoadEndReached = true; + } + if (TextUtils.isEmpty(req.offset)) { + call.version = groupParticipants.version; + call.participants_count = groupParticipants.count; + } + for (int a = 0, N = groupParticipants.participants.size(); a < N; a++) { + TLRPC.TL_groupCallParticipant participant = groupParticipants.participants.get(a); + 
TLRPC.TL_groupCallParticipant oldParticipant = participants.get(participant.user_id); + if (oldParticipant != null) { + sortedParticipants.remove(oldParticipant); + participantsBySources.remove(oldParticipant.source); + participant.active_date = Math.max(participant.active_date, oldParticipant.active_date); + } else if (old != null) { + oldParticipant = old.get(participant.user_id); + if (oldParticipant != null) { + participant.active_date = Math.max(participant.active_date, oldParticipant.active_date); + } + } + participants.put(participant.user_id, participant); + sortedParticipants.add(participant); + participantsBySources.put(participant.source, participant); + } + if (call.participants_count < participants.size()) { + call.participants_count = participants.size(); + } + sortParticipants(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false); + } + })); + } + + public void addInvitedUser(int uid) { + if (participants.get(uid) != null || invitedUsersMap.contains(uid)) { + return; + } + invitedUsersMap.add(uid); + invitedUsers.add(uid); + } + + public void processTypingsUpdate(AccountInstance accountInstance, ArrayList uids, int date) { + boolean updated = false; + ArrayList participantsToLoad = null; + for (int a = 0, N = uids.size(); a < N; a++) { + Integer id = uids.get(a); + TLRPC.TL_groupCallParticipant participant = participants.get(id); + if (participant != null) { + participant.active_date = date; + updated = true; + } else { + if (participantsToLoad == null) { + participantsToLoad = new ArrayList<>(); + } + participantsToLoad.add(id); + } + } + if (participantsToLoad != null) { + int guid = ++lastLoadGuid; + loadingGuids.add(guid); + TLRPC.TL_phone_getGroupParticipants req = new TLRPC.TL_phone_getGroupParticipants(); + req.call = new TLRPC.TL_inputGroupCall(); + req.call.id = call.id; + req.call.access_hash = call.access_hash; + req.ids = participantsToLoad; + req.offset = ""; 
+ req.limit = 100; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (!loadingGuids.remove(guid)) { + return; + } + if (response != null) { + TLRPC.TL_phone_groupParticipants groupParticipants = (TLRPC.TL_phone_groupParticipants) response; + MessagesController.getInstance(currentAccount).putUsers(groupParticipants.users, false); + for (int a = 0, N = groupParticipants.participants.size(); a < N; a++) { + TLRPC.TL_groupCallParticipant participant = groupParticipants.participants.get(a); + TLRPC.TL_groupCallParticipant oldParticipant = participants.get(participant.user_id); + if (oldParticipant != null) { + sortedParticipants.remove(oldParticipant); + participantsBySources.remove(oldParticipant.source); + } + participants.put(participant.user_id, participant); + sortedParticipants.add(participant); + participantsBySources.put(participant.source, participant); + if (invitedUsersMap.contains(participant.user_id)) { + Integer id = participant.user_id; + invitedUsersMap.remove(id); + invitedUsers.remove(id); + } + } + if (call.participants_count < participants.size()) { + call.participants_count = participants.size(); + } + sortParticipants(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false); + } + })); + } + if (updated) { + sortParticipants(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false); + } + } + + public void processVoiceLevelsUpdate(int[] ssrc, float[] levels, boolean[] voice) { + boolean updated = false; + int currentTime = ConnectionsManager.getInstance(currentAccount).getCurrentTime(); + for (int a = 0; a < ssrc.length; a++) { + TLRPC.TL_groupCallParticipant participant; + if (ssrc[a] == 0) { + participant = participants.get(UserConfig.getInstance(currentAccount).getClientUserId()); + } else { + participant = 
participantsBySources.get(ssrc[a]); + } + if (participant != null) { + participant.hasVoice = voice[a]; + if (levels[a] > 0.1f) { + if (voice[a] && participant.active_date + 1 < currentTime) { + participant.active_date = currentTime; + updated = true; + } + participant.lastSpeakTime = SystemClock.uptimeMillis(); + participant.amplitude = levels[a]; + } else { + participant.amplitude = 0; + } + } + } + if (updated) { + sortParticipants(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false); + } + } + + public void processParticipantsUpdate(AccountInstance accountInstance, TLRPC.TL_updateGroupCallParticipants update) { + boolean versioned = false; + for (int a = 0, N = update.participants.size(); a < N; a++) { + TLRPC.TL_groupCallParticipant participant = update.participants.get(a); + if (participant.versioned) { + versioned = true; + break; + } + } + if (versioned && call.version + 1 < update.version) { + nextLoadOffset = null; + loadMembers(true); + return; + } + if (update.version < call.version) { + return; + } + boolean updated = false; + boolean selfUpdated = false; + int selfId = accountInstance.getUserConfig().getClientUserId(); + for (int a = 0, N = update.participants.size(); a < N; a++) { + TLRPC.TL_groupCallParticipant participant = update.participants.get(a); + TLRPC.TL_groupCallParticipant oldParticipant = participants.get(participant.user_id); + if (participant.left) { + if (update.version == call.version) { + continue; + } + if (oldParticipant != null) { + participants.remove(participant.user_id); + participantsBySources.remove(participant.source); + sortedParticipants.remove(oldParticipant); + } + call.participants_count--; + if (call.participants_count < 0) { + call.participants_count = 0; + } + updated = true; + } else { + if (invitedUsersMap.contains(participant.user_id)) { + Integer id = participant.user_id; + invitedUsersMap.remove(id); + invitedUsers.remove(id); + } 
+ if (oldParticipant != null) { + oldParticipant.flags = participant.flags; + oldParticipant.muted = participant.muted; + oldParticipant.can_self_unmute = participant.can_self_unmute; + oldParticipant.date = participant.date; + oldParticipant.active_date = Math.max(oldParticipant.active_date, participant.active_date); + if (oldParticipant.source != participant.source) { + participantsBySources.remove(oldParticipant.source); + oldParticipant.source = participant.source; + participantsBySources.put(oldParticipant.source, oldParticipant); + } + } else { + if (participant.just_joined && update.version != call.version) { + call.participants_count++; + } + sortedParticipants.add(participant); + participants.put(participant.user_id, participant); + participantsBySources.put(participant.source, participant); + } + if (participant.user_id == selfId && participant.active_date == 0) { + participant.active_date = accountInstance.getConnectionsManager().getCurrentTime(); + } + updated = true; + } + if (participant.user_id == selfId) { + selfUpdated = true; + } + } + if (update.version > call.version) { + call.version = update.version; + } + if (call.participants_count < participants.size()) { + call.participants_count = participants.size(); + } + if (updated) { + sortParticipants(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, selfUpdated); + } + } + + public void processGroupCallUpdate(AccountInstance accountInstance, TLRPC.TL_updateGroupCall update) { + if (call.version < update.call.version) { + nextLoadOffset = null; + loadMembers(true); + } + call = update.call; + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false); + } + + public TLRPC.TL_inputGroupCall getInputGroupCall() { + TLRPC.TL_inputGroupCall inputGroupCall = new TLRPC.TL_inputGroupCall(); + inputGroupCall.id = call.id; + inputGroupCall.access_hash = 
call.access_hash; + return inputGroupCall; + } + + private void sortParticipants() { + Collections.sort(sortedParticipants, (o1, o2) -> { + if (o1.active_date != 0 && o2.active_date != 0) { + return Integer.compare(o2.active_date, o1.active_date); + } else if (o1.active_date != 0 && o2.active_date == 0) { + return -1; + } else if (o1.active_date == 0 && o2.active_date != 0) { + return 1; + } + return Integer.compare(o2.date, o1.date); + }); + checkOnlineParticipants(); + } + + private void checkOnlineParticipants() { + if (typingUpdateRunnableScheduled) { + AndroidUtilities.cancelRunOnUIThread(typingUpdateRunnable); + typingUpdateRunnableScheduled = false; + } + speakingMembersCount = 0; + int currentTime = ConnectionsManager.getInstance(currentAccount).getCurrentTime(); + int minDiff = Integer.MAX_VALUE; + for (int a = 0, N = sortedParticipants.size(); a < N; a++) { + TLRPC.TL_groupCallParticipant participant = sortedParticipants.get(a); + int diff = currentTime - participant.active_date; + if (diff < 5) { + speakingMembersCount++; + minDiff = Math.min(diff, minDiff); + } + if (Math.max(participant.date, participant.active_date) <= currentTime - 5) { + break; + } + } + if (minDiff != Integer.MAX_VALUE) { + AndroidUtilities.runOnUIThread(typingUpdateRunnable, minDiff * 1000); + typingUpdateRunnableScheduled = true; + } + } + } private static boolean isBannableAction(int action) { switch (action) { @@ -136,6 +500,9 @@ public class ChatObject { case ACTION_BLOCK_USERS: value = chat.admin_rights.ban_users; break; + case ACTION_MANAGE_CALLS: + value = chat.admin_rights.manage_call; + break; default: value = false; break; @@ -201,6 +568,11 @@ public class ChatObject { return (chat instanceof TLRPC.TL_channel || chat instanceof TLRPC.TL_channelForbidden) && chat.megagroup; } + public static boolean isMegagroup(int currentAccount, int chatId) { + TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(chatId); + return ChatObject.isChannel(chat) && 
chat.megagroup; + } + public static boolean hasAdminRights(TLRPC.Chat chat) { return chat != null && (chat.creator || chat.admin_rights != null && chat.admin_rights.flags != 0); } @@ -217,6 +589,10 @@ public class ChatObject { return canUserDoAction(chat, ACTION_BLOCK_USERS); } + public static boolean canManageCalls(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_MANAGE_CALLS); + } + public static boolean canSendStickers(TLRPC.Chat chat) { return canUserDoAction(chat, ACTION_SEND_STICKERS); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java index f14cc0852..a33b85062 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java @@ -2497,8 +2497,9 @@ public class ContactsController extends BaseController { } public void createOrUpdateConnectionServiceContact(int id, String firstName, String lastName) { - if (!hasContactsPermission()) + if (!hasContactsPermission()) { return; + } try { ContentResolver resolver = ApplicationLoader.applicationContext.getContentResolver(); ArrayList ops = new ArrayList<>(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java index 0bd8d661a..130d10cda 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java @@ -3,6 +3,8 @@ package org.telegram.messenger; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; +import java.util.ArrayList; + public class DocumentObject { public static class ThemeDocument extends TLRPC.TL_document { @@ -39,4 +41,57 @@ public class DocumentObject { } } } + + public static SvgHelper.SvgDrawable getSvgThumb(ArrayList sizes, String colorKey, float alpha) { + 
int w = 0; + int h = 0; + TLRPC.TL_photoPathSize photoPathSize = null; + for (int a = 0, N = sizes.size(); a < N; a++) { + TLRPC.PhotoSize photoSize = sizes.get(a); + if (photoSize instanceof TLRPC.TL_photoPathSize) { + photoPathSize = (TLRPC.TL_photoPathSize) photoSize; + } else { + w = photoSize.w; + h = photoSize.h; + } + if (photoPathSize != null && w != 0 && h != 0) { + SvgHelper.SvgDrawable pathThumb = SvgHelper.getDrawableByPath(SvgHelper.decompress(photoPathSize.bytes), w, h); + if (pathThumb != null) { + pathThumb.setupGradient(colorKey, alpha); + } + return pathThumb; + } + } + return null; + } + + public static SvgHelper.SvgDrawable getSvgThumb(TLRPC.Document document, String colorKey, float alpha) { + return getSvgThumb(document, colorKey, alpha, 1.0f); + } + + public static SvgHelper.SvgDrawable getSvgThumb(TLRPC.Document document, String colorKey, float alpha, float zoom) { + SvgHelper.SvgDrawable pathThumb = null; + for (int b = 0, N2 = document.thumbs.size(); b < N2; b++) { + TLRPC.PhotoSize size = document.thumbs.get(b); + if (size instanceof TLRPC.TL_photoPathSize) { + int w = 512, h = 512; + for (int a = 0, N = document.attributes.size(); a < N; a++) { + TLRPC.DocumentAttribute attribute = document.attributes.get(a); + if (attribute instanceof TLRPC.TL_documentAttributeImageSize) { + w = attribute.w; + h = attribute.h; + break; + } + } + if (w != 0 && h != 0) { + pathThumb = SvgHelper.getDrawableByPath(SvgHelper.decompress(size.bytes), (int) (w * zoom), (int) (h * zoom)); + if (pathThumb != null) { + pathThumb.setupGradient(colorKey, alpha); + } + } + break; + } + } + return pathThumb; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java b/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java index 5d93ec660..56dc589c4 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java 
@@ -629,7 +629,8 @@ public class DownloadController extends BaseController implements NotificationCe index = 2; } } else { - if (MessageObject.isMegagroup(message)) { + TLRPC.Chat chat = message.peer_id != null && message.peer_id.channel_id != 0 ? getMessagesController().getChat(message.peer_id.channel_id) : null; + if (ChatObject.isChannel(chat) && chat.megagroup) { if (message.from_id instanceof TLRPC.TL_peerUser && getContactsController().contactsDict.containsKey(message.from_id.user_id)) { index = 0; } else { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java b/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java index 1a94fa463..61e760a2a 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java @@ -40,7 +40,7 @@ public class Emoji { private static int bigImgSize; private static boolean inited = false; private static Paint placeholderPaint; - private static int[] emojiCounts = new int[]{1620, 184, 115, 328, 125, 207, 288, 258}; + private static int[] emojiCounts = new int[]{1695, 199, 123, 332, 128, 222, 290, 259}; private static Bitmap[][] emojiBmp = new Bitmap[8][]; private static boolean[][] loadingEmoji = new boolean[8][]; @@ -279,9 +279,9 @@ public class Emoji { b = getBounds(); } - //if (!canvas.quickReject(b.left, b.top, b.right, b.bottom, Canvas.EdgeType.AA)) { - canvas.drawBitmap(emojiBmp[info.page][info.page2], null, b, paint); - //} + if (!canvas.quickReject(b.left, b.top, b.right, b.bottom, Canvas.EdgeType.AA)) { + canvas.drawBitmap(emojiBmp[info.page][info.page2], null, b, paint); + } } @Override @@ -360,11 +360,13 @@ public class Emoji { boolean doneEmoji = false; int nextValidLength; boolean nextValid; + boolean notOnlyEmoji; //s.setSpansCount(emojiCount); try { for (int i = 0; i < length; i++) { c = cs.charAt(i); + notOnlyEmoji = false; if (c >= 0xD83C && c <= 0xD83E || (buf != 0 && (buf & 0xFFFFFFFF00000000L) == 0 && (buf & 
0xFFFF) == 0xD83C && (c >= 0xDDE6 && c <= 0xDDFF))) { if (startIndex == -1) { startIndex = i; @@ -407,10 +409,7 @@ public class Emoji { startLength = 0; doneEmoji = false; } else if (c != 0xfe0f) { - if (emojiOnly != null) { - emojiOnly[0] = 0; - emojiOnly = null; - } + notOnlyEmoji = true; } if (doneEmoji && i + 2 < length) { char next = cs.charAt(i + 1); @@ -442,19 +441,27 @@ public class Emoji { c = cs.charAt(i + 1); if (a == 1) { if (c == 0x200D && emojiCode.length() > 0) { + notOnlyEmoji = false; emojiCode.append(c); i++; startLength++; doneEmoji = false; } - } else if (startIndex != -1 || prevCh == '*' || prevCh >= '1' && prevCh <= '9') { + } else if (startIndex != -1 || prevCh == '*' || prevCh == '#' || prevCh >= '0' && prevCh <= '9') { if (c >= 0xFE00 && c <= 0xFE0F) { i++; startLength++; + if (!doneEmoji) { + doneEmoji = i + 1 >= length; + } } } } } + if (notOnlyEmoji && emojiOnly != null) { + emojiOnly[0] = 0; + emojiOnly = null; + } if (doneEmoji && i + 2 < length && cs.charAt(i + 1) == 0xD83C) { char next = cs.charAt(i + 2); if (next >= 0xDFFB && next <= 0xDFFF) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java b/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java index bc440d0d8..464c05a01 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java @@ -24,7 +24,7 @@ public class EmojiData { 0x2197, 0x2198, 0x2199, 0x2196, 0x2195, 0x2194, 0x21AA, 0x21A9, 0x2934, 0x2935, 0x2139, 0x2714, 0x2716, 0x2611, 0x26AA, 0x26AB, 0x25AA, 0x25AB, 0x2B1B, 0x2B1C, 0x25FC, 0x25FB, 0x25FE, 0x25FD, 0x2660, 0x2663, 0x2665, 0x2666, 0x263A, 0x2639, - 0x270C, 0x261D, 0x2764, 0x2603 + 0x270C, 0x261D, 0x2764, 0x2603, 0x23CF }; public static final char[] dataChars = { @@ -44,7 +44,7 @@ public class EmojiData { 0x2620, 0x26B0, 0x26B1, 0x2697, 0x26F1, 0x2709, 0x2702, 0x2712, 0x270F, 0x2708, 0x26F5, 0x26F4, 0x2693, 0x26FD, 0x26F2, 0x26F0, 0x26FA, 
0x26EA, 0x26E9, 0x2618, 0x2B50, 0x2728, 0x2604, 0x2600, 0x26C5, 0x2601, 0x26C8, 0x26A1, 0x2744, 0x2603, - 0x26C4, 0x2602, 0x2614 + 0x26C4, 0x2602, 0x2614, 0x26A7, 0x23CF, 0x267E, 0x265F }; public static final String[] emojiSecret = { @@ -61,24 +61,20 @@ public class EmojiData { }; public static final String[] emojiColored = { - "🤲", "👐", "🙌", "👏", "👍", "👎", "👊", "✊", "🤛", "🤜", "🤞", "✌", "🤟", "🤘", - "👌", "🤏", "👈", "👉", "👆", "👇", "☝", "✋", "🤚", "🖐", "🖖", "👋", "🤙", "💪", - "🖕", "✍", "🙏", "🦶", "🦵", "👂", "🦻", "👃", "👶", "👧", "🧒", "👦", "👩", - "🧑", "👨", "👩‍🦱", "🧑‍🦱", "👨‍🦱", "👩‍🦰", "🧑‍🦰", "👨‍🦰", "👱‍♀", "👱", "👱‍♂", "👩‍🦳", "🧑‍🦳", "👨‍🦳", - "👩‍🦲", "🧑‍🦲", "👨‍🦲", "🧔", "👵", "🧓", "👴", "👲", "👳‍♀", "👳", "👳‍♂", "🧕", "👮‍♀", "👮", "👮‍♂", "👷‍♀", - "👷", "👷‍♂", "💂‍♀", "💂", "💂‍♂", "🕵‍♀", "🕵", "🕵‍♂", "👩‍⚕", "🧑‍⚕", "👨‍⚕", "👩‍🌾", "🧑‍🌾", "👨‍🌾", "👩‍🍳", "🧑‍🍳", - "👨‍🍳", "👩‍🎓", "🧑‍🎓", "👨‍🎓", "👩‍🎤", "🧑‍🎤", "👨‍🎤", "👩‍🏫", "🧑‍🏫", "👨‍🏫", "👩‍🏭", "🧑‍🏭", "👨‍🏭", "👩‍💻", "🧑‍💻", "👨‍💻", - "👩‍💼", "🧑‍💼", "👨‍💼", "👩‍🔧", "🧑‍🔧", "👨‍🔧", "👩‍🔬", "🧑‍🔬", "👨‍🔬", "👩‍🎨", "🧑‍🎨", "👨‍🎨", "👩‍🚒", "🧑‍🚒", "👨‍🚒", "👩‍✈", - "🧑‍✈", "👨‍✈", "👩‍🚀", "🧑‍🚀", "👨‍🚀", "👩‍⚖", "🧑‍⚖", "👨‍⚖", "👰", "🤵", "👸", "🤴", "🦸‍♀", "🦸", "🦸‍♂", "🦹‍♀", - "🦹", "🦹‍♂", "🤶", "🎅", "🧙‍♀", "🧙", "🧙‍♂", "🧝‍♀", "🧝", "🧝‍♂", "🧛‍♀", "🧛", "🧛‍♂", "🧜‍♀", "🧜", - "🧜‍♂", "🧚‍♀", "🧚", "🧚‍♂", "👼", "🤰", "🤱", "🙇‍♀", "🙇", "🙇‍♂", "💁‍♀", "💁", "💁‍♂", "🙅‍♀", "🙅", "🙅‍♂", - "🙆‍♀", "🙆", "🙆‍♂", "🙋‍♀", "🙋", "🙋‍♂", "🧏‍♀", "🧏", "🧏‍♂", "🤦‍♀", "🤦", "🤦‍♂", "🤷‍♀", "🤷", "🤷‍♂", "🙎‍♀", - "🙎", "🙎‍♂", "🙍‍♀", "🙍", "🙍‍♂", "💇‍♀", "💇", "💇‍♂", "💆‍♀", "💆", "💆‍♂", "🧖‍♀", "🧖", "🧖‍♂", "💅", "🤳", - "💃", "🕺", "🕴", "👩‍🦽", "🧑‍🦽", "👨‍🦽", "👩‍🦼", "🧑‍🦼", "👨‍🦼", "🚶‍♀", "🚶", "🚶‍♂", "👩‍🦯", "🧑‍🦯", "👨‍🦯", "🧎‍♀", - "🧎", "🧎‍♂", "🏃‍♀", "🏃", "🏃‍♂", "🧍‍♀", "🧍", "🧍‍♂", "🏋‍♀", "🏋", "🏋‍♂", "🤸‍♀", "🤸", "🤸‍♂", "⛹‍♀", "⛹", - "⛹‍♂", "🤾‍♀", "🤾", "🤾‍♂", "🏌‍♀", "🏌", "🏌‍♂", "🏇", "🧘‍♀", "🧘", "🧘‍♂", "🏄‍♀", "🏄", "🏄‍♂", "🏊‍♀", "🏊", - "🏊‍♂", "🤽‍♀", "🤽", "🤽‍♂", "🚣‍♀", "🚣", "🚣‍♂", "🧗‍♀", "🧗", "🧗‍♂", "🚵‍♀", "🚵", "🚵‍♂", "🚴‍♀", "🚴", "🚴‍♂", - "🤹‍♀", "🤹", 
"🤹‍♂", "🛀" + "🤲","👐","🙌","👏","👍","👎","👊","✊","🤛","🤜","🤞","✌","🤟","🤘","👌","🤌","🤏","👈","👉","👆","👇","☝","✋","🤚", + "🖐","🖖","👋","🤙","💪","🖕","✍","🙏","🦶","🦵","👂","🦻","👃","👶","👧","🧒","👦","👩","🧑","👨","👩‍🦱","🧑‍🦱","👨‍🦱", + "👩‍🦰","🧑‍🦰","👨‍🦰","👱‍♀","👱","👱‍♂","👩‍🦳","🧑‍🦳","🧑‍🦳","👨‍🦳","👩‍🦲","🧑‍🦲","👨‍🦲","🧔","👵","🧓","👴","👲","👳‍♀", + "👳","👳‍♂","🧕","👮‍♀","👮","👮‍♂","👷‍♀","👷","👷‍♂","💂‍♀","💂","💂‍♂","🕵‍♀","🕵","🕵‍♂","👩‍⚕","🧑‍⚕","👨‍⚕","👩‍🌾", + "🧑‍🌾","👨‍🌾","👩‍🍳","🧑‍🍳","👨‍🍳","👩‍🎓","🧑‍🎓","👨‍🎓","👩‍🎤","🧑‍🎤","👨‍🎤","👩‍🏫","🧑‍🏫","👨‍🏫","👩‍🏭","🧑‍🏭","👨‍🏭", + "👩‍💻","🧑‍💻","👨‍💻","👩‍💼","🧑‍💼","👨‍💼","👩‍🔧","🧑‍🔧","👨‍🔧","👩‍🔬","🧑‍🔬","👨‍🔬","👩‍🎨","🧑‍🎨","👨‍🎨","👩‍🚒","🧑‍🚒", + "👨‍🚒","👩‍✈","🧑‍✈","👨‍✈","👩‍🚀","🧑‍🚀","👨‍🚀","👩‍⚖","🧑‍⚖","👨‍⚖","👰‍♀","👰","👰‍♂","🤵‍♀","🤵","🤵‍♂","👸","🤴","🥷", + "🦸‍♀","🦸","🦸‍♂","🦹‍♀","🦹","🦹‍♂","🤶","🧑‍🎄","🎅","🧙‍♀","🧙","🧙‍♂","🧝‍♀","🧝","🧝‍♂","🧛‍♀","🧛","🧛‍♂","🧜‍♀", + "🧜","🧜‍♂","🧚‍♀","🧚","🧚‍♂","👼","🤰","🤱","👩‍🍼","🧑‍🍼","👨‍🍼","🙇‍♀","🙇","🙇‍♂","💁‍♀","💁","💁‍♂","🙅‍♀","🙅","🙅‍♂", + "🙆‍♀","🙆","🙆‍♂","🙋‍♀","🙋","🙋‍♂","🧏‍♀","🧏","🧏‍♂","🤦‍♀","🤦","🤦‍♂","🤷‍♀","🤷","🤷‍♂","🙎‍♀","🙎","🙎‍♂","🙍‍♀","🙍", + "🙍‍♂","💇‍♀","💇","💇‍♂","💆‍♀","💆","💆‍♂","🧖‍♀","🧖","🧖‍♂","💅","🤳","💃","🕺","🕴","👩‍🦽","🧑‍🦽","👨‍🦽","👩‍🦼","🧑‍🦼", + "👨‍🦼","🚶‍♀","🚶","🚶‍♂","👩‍🦯","🧑‍🦯","👨‍🦯","🧎‍♀","🧎","🧎‍♂","🏃‍♀","🏃","🏃‍♂","🧍‍♀","🧍","🧍‍♂","🏋‍♀","🏋","🏋‍♂", + "🤸‍♀","🤸","🤸‍♂","⛹‍♀","⛹","⛹‍♂","🤺","🤾‍♀","🤾","🤾‍♂","🏌‍♀","🏌","🏌‍♂","🏇","🧘‍♀","🧘","🧘‍♂","🏄‍♀","🏄", + "🏄‍♂","🏊‍♀","🏊","🏊‍♂","🤽‍♀","🤽","🤽‍♂","🚣‍♀","🚣","🚣‍♂","🧗‍♀","🧗","🧗‍♂","🚵‍♀","🚵","🚵‍♂","🚴‍♀","🚴","🚴‍♂","🤹‍♀","🤹","🤹‍♂","🛁" }; public static final String[] emojiBigColored = { @@ -87,7 +83,7 @@ public class EmojiData { public static final String[][] dataColored = { new String[]{ - "😀", "😃", "😄", "😁", "😆", "😅", "😂", "🤣", "☺", "😊", "😇", "🙂", "🙃", "😉", "😌", "😍", "🥰", "😘", "😗", "😙", "😚", "😋", "😛", "😝", "😜", "🤪", "🤨", "🧐", "🤓", "😎", "🤩", "🥳", "😏", "😒", "😞", "😔", "😟", "😕", "🙁", "☹", "😣", "😖", "😫", "😩", "🥺", "😢", "😭", "😤", "😠", "😡", "🤬", "🤯", "😳", "🥵", "🥶", "😱", "😨", "😰", "😥", "😓", "🤗", "🤔", "🤭", "🤫", "🤥", "😶", "😐", "😑", "😬", 
"🙄", "😯", "😦", "😧", "😮", "😲", "🥱", "😴", "🤤", "😪", "😵", "🤐", "🥴", "🤢", "🤮", "🤧", "😷", "🤒", "🤕", "🤑", "🤠", "😈", "👿", "👹", "👺", "🤡", "💩", "👻", "💀", "☠", "👽", "👾", "🤖", "🎃", "😺", "😸", "😹", "😻", "😼", "😽", "🙀", "😿", "😾", + "😀","😃","😄","😁","😆","😅","😂","🤣","🥲","☺","😊","😇","🙂","🙃","😉","😌","😍","🥰","😘","😗","😙","😚","😋","😛","😝","😜","🤪","🤨","🧐","🤓","😎","🥸","🤩","🥳","😏","😒","😞","😔","😟","😕","🙁","☹","😣","😖","😫","😩","🥺","😢","😭","😤","😠","😡","🤬","🤯","😳","🥵","🥶","😱","😨","😰","😥","😓","🤗","🤔","🤭","🤫","🤥","😶","😐","😑","😬","🙄","😯","😦","😧","😮","😲","🥱","😴","🤤","😪","😵","🤐","🥴","🤢","🤮","🤧","😷","🤒","🤕","🤑","🤠","😈","👿","👹","👺","🤡","💩","👻","💀","☠","👽","👾","🤖","🎃","😺","😸","😹","😻","😼","😽","🙀","😿","😾", "🤲", "👐", "🙌", @@ -104,6 +100,7 @@ public class EmojiData { "🤟", "🤘", "👌", + "🤌", "🤏", "👈", "👉", @@ -123,11 +120,11 @@ public class EmojiData { "🙏", "🦶", "🦵", - "🦿", "💄", "💋", "👄", "🦷", "👅", + "🦿","💄","💋","👄","🦷","👅", "👂", "🦻", "👃", - "👣", "👁", "👀", "🧠", "🗣", "👤", "👥", + "👣","👁","👀","🫀","🫁","🧠","🗣","👤","👥","🫂", "👶", "👧", "🧒", @@ -146,6 +143,7 @@ public class EmojiData { "👱‍♂", "👩‍🦳", "🧑‍🦳", + "🧑‍🦳", "👨‍🦳", "👩‍🦲", "🧑‍🦲", @@ -219,10 +217,15 @@ public class EmojiData { "👩‍⚖", "🧑‍⚖", "👨‍⚖", + "👰‍♀", "👰", + "👰‍♂", + "🤵‍♀", "🤵", + "🤵‍♂", "👸", "🤴", + "🥷", "🦸‍♀", "🦸", "🦸‍♂", @@ -230,6 +233,7 @@ public class EmojiData { "🦹", "🦹‍♂", "🤶", + "🧑‍🎄", "🎅", "🧙‍♀", "🧙", @@ -240,7 +244,7 @@ public class EmojiData { "🧛‍♀", "🧛", "🧛‍♂", - "🧟‍♀", "🧟", "🧟‍♂", "🧞‍♀", "🧞", "🧞‍♂", + "🧟‍♀","🧟","🧟‍♂","🧞‍♀","🧞","🧞‍♂", "🧜‍♀", "🧜", "🧜‍♂", @@ -250,6 +254,9 @@ public class EmojiData { "👼", "🤰", "🤱", + "👩‍🍼", + "🧑‍🍼", + "👨‍🍼", "🙇‍♀", "🙇", "🙇‍♂", @@ -293,7 +300,7 @@ public class EmojiData { "🤳", "💃", "🕺", - "👯‍♀", "👯", "👯‍♂", + "👯‍♀","👯","👯‍♂", "🕴", "👩‍🦽", "🧑‍🦽", @@ -319,16 +326,16 @@ public class EmojiData { "👫", "👭", "👬", - "👩‍❤‍👨", "👩‍❤‍👩", "👨‍❤‍👨", "👩‍❤‍💋‍👨", "👩‍❤‍💋‍👩", "👨‍❤‍💋‍👨", "👨‍👩‍👦", "👨‍👩‍👧", "👨‍👩‍👧‍👦", "👨‍👩‍👦‍👦", "👨‍👩‍👧‍👧", "👩‍👩‍👦", "👩‍👩‍👧", "👩‍👩‍👧‍👦", "👩‍👩‍👦‍👦", "👩‍👩‍👧‍👧", "👨‍👨‍👦", "👨‍👨‍👧", "👨‍👨‍👧‍👦", "👨‍👨‍👦‍👦", "👨‍👨‍👧‍👧", 
"👩‍👦", "👩‍👧", "👩‍👧‍👦", "👩‍👦‍👦", "👩‍👧‍👧", "👨‍👦", "👨‍👧", "👨‍👧‍👦", "👨‍👦‍👦", "👨‍👧‍👧", "🧶", "🧵", "🧥", "🥼", "🦺", "👚", "👕", "👖", "🩲", "🩳", "👔", "👗", "👙", "👘", "🥻", "🩱", "🥿", "👠", "👡", "👢", "👞", "👟", "🥾", "🧦", "🧤", "🧣", "🎩", "🧢", "👒", "🎓", "⛑", "👑", "💍", "👝", "👛", "👜", "💼", "🎒", "🧳", "👓", "🕶", "🥽", "🌂" + "👩‍❤‍👨","👩‍❤‍👩","👨‍❤‍👨","👩‍❤‍💋‍👨","👩‍❤‍💋‍👩","👨‍❤‍💋‍👨","👨‍👩‍👦","👨‍👩‍👧","👨‍👩‍👧‍👦","👨‍👩‍👦‍👦","👨‍👩‍👧‍👧","👩‍👩‍👦","👩‍👩‍👧","👩‍👩‍👧‍👦","👩‍👩‍👦‍👦","👩‍👩‍👧‍👧","👨‍👨‍👦","👨‍👨‍👧","👨‍👨‍👧‍👦","👨‍👨‍👦‍👦","👨‍👨‍👧‍👧","👩‍👦","👩‍👧","👩‍👧‍👦","👩‍👦‍👦","👩‍👧‍👧","👨‍👦","👨‍👧","👨‍👧‍👦","👨‍👦‍👦","👨‍👧‍👧","🪢","🧶","🧵","🪡","🧥","🥼","🦺","👚","👕","👖","🩲","🩳","👔","👗","👙","🩱","👘","🥻","🩴","🥿","👠","👡","👢","👞","👟","🥾","🧦","🧤","🧣","🎩","🧢","👒","🎓","⛑","🪖","👑","💍","👝","👛","👜","💼","🎒","🧳","👓","🕶","🥽","🌂" }, null, null, new String[]{ - "⚽", "🏀", "🏈", "⚾", "🥎", "🎾", "🏐", "🏉", "🥏", "🎱", "🪀", "🏓", "🏸", "🏒", "🏑", "🥍", "🏏", "🥅", "⛳", "🪁", "🏹", "🎣", "🤿", "🥊", "🥋", "🎽", "🛹", "🛷", "⛸", "🥌", "🎿", "⛷", "🏂", "🪂", + "⚽","🏀","🏈","⚾","🥎","🎾","🏐","🏉","🥏","🎱","🪀","🏓","🏸","🏒","🏑","🥍","🏏","🪃","🥅","⛳","🪁","🏹","🎣","🤿","🥊","🥋","🎽","🛹","🛼","🛷","⛸","🥌","🎿","⛷","🏂","🪂", "🏋‍♀", "🏋", "🏋‍♂", - "🤼‍♀", "🤼", "🤼‍♂", + "🤼‍♀","🤼","🤼‍♂", "🤸‍♀", "🤸", "🤸‍♂", @@ -367,17 +374,17 @@ public class EmojiData { "🚴‍♀", "🚴", "🚴‍♂", - "🏆", "🥇", "🥈", "🥉", "🏅", "🎖", "🏵", "🎗", "🎫", "🎟", "🎪", + "🏆","🥇","🥈","🥉","🏅","🎖","🏵","🎗","🎫","🎟","🎪", "🤹‍♀", "🤹", "🤹‍♂", - "🎭", "🩰", "🎨", "🎬", "🎤", "🎧", "🎼", "🎹", "🥁", "🎷", "🎺", "🎸", "🪕", "🎻", "🎲", "♟", "🎯", "🎳", "🎮", "🎰", "🧩" + "🎭","🩰","🎨","🎬","🎤","🎧","🎼","🎹","🥁","🪘","🎷","🎺","🪗","🎸","🪕","🎻","🎲","♟","🎯","🎳","🎮","🎰","🧩" }, null, new String[]{ - "⌚", "📱", "📲", "💻", "⌨", "🖥", "🖨", "🖱", "🖲", "🕹", "🗜", "💽", "💾", "💿", "📀", "📼", "📷", "📸", "📹", "🎥", "📽", "🎞", "📞", "☎", "📟", "📠", "📺", "📻", "🎙", "🎚", "🎛", "🧭", "⏱", "⏲", "⏰", "🕰", "⌛", "⏳", "📡", "🔋", "🔌", "💡", "🔦", "🕯", "🪔", "🧯", "🛢", "💸", "💵", "💴", "💶", "💷", "💰", "💳", "💎", "⚖", "🧰", "🔧", "🔨", "⚒", "🛠", "⛏", "🔩", "⚙", "🧱", "⛓", "🧲", "🔫", "💣", "🧨", "🪓", "🔪", "🗡", "⚔", "🛡", "🚬", "⚰", "⚱", "🏺", "🔮", "📿", "🧿", 
"💈", "⚗", "🔭", "🔬", "🕳", "🩹", "🩺", "💊", "💉", "🩸", "🧬", "🦠", "🧫", "🧪", "🌡", "🧹", "🧺", "🧻", "🚽", "🚰", "🚿", "🛁", - "🛀", - "🧼", "🪒", "🧽", "🧴", "🛎", "🔑", "🗝", "🚪", "🪑", "🛋", "🛏", "🛌", "🧸", "🖼", "🛍", "🛒", "🎁", "🎈", "🎏", "🎀", "🎊", "🎉", "🎎", "🏮", "🎐", "🧧", "✉", "📩", "📨", "📧", "💌", "📥", "📤", "📦", "🏷", "📪", "📫", "📬", "📭", "📮", "📯", "📜", "📃", "📄", "📑", "🧾", "📊", "📈", "📉", "🗒", "🗓", "📆", "📅", "🗑", "📇", "🗃", "🗳", "🗄", "📋", "📁", "📂", "🗂", "🗞", "📰", "📓", "📔", "📒", "📕", "📗", "📘", "📙", "📚", "📖", "🔖", "🧷", "🔗", "📎", "🖇", "📐", "📏", "🧮", "📌", "📍", "✂", "🖊", "🖋", "✒", "🖌", "🖍", "📝", "✏", "🔍", "🔎", "🔏", "🔐", "🔒", "🔓" + "⌚","📱","📲","💻","⌨","🖥","🖨","🖱","🖲","🕹","🗜","💽","💾","💿","📀","📼","📷","📸","📹","🎥","📽","🎞","📞","☎","📟","📠","📺","📻","🎙","🎚","🎛","🧭","⏱","⏲","⏰","🕰","⌛","⏳","📡","🔋","🔌","💡","🔦","🕯","🪔","🧯","🛢","💸","💵","💴","💶","💷","🪙","💰","💳","💎","⚖","🪜","🧰","🪛","🔧","🔨","⚒","🛠","⛏","🪚","🔩","⚙","🪤","🧱","⛓","🧲","🔫","💣","🧨","🪓","🔪","🗡","⚔","🛡","🚬","⚰","🪦","⚱","🏺","🔮","📿","🧿","💈","⚗","🔭","🔬","🕳","🩹","🩺","💊","💉","🩸","🧬","🦠","🧫","🧪","🌡","🧹","🪠","🧺","🧻","🚽","🚰","🚿", + "🛁", + "🧼","🪥","🪒","🧽","🪣","🧴","🛎","🔑","🗝","🚪","🪑","🛋","🛏","🛌","🧸","🪆","🖼","🪞","🪟","🛍","🛒","🎁","🎈","🎏","🎀","🪄","🪅","🎊","🎉","🎎","🏮","🎐","🧧","✉","📩","📨","📧","💌","📥","📤","📦","🏷","🪧","📪","📫","📬","📭","📮","📯","📜","📃","📄","📑","🧾","📊","📈","📉","🗒","🗓","📆","📅","🗑","📇","🗃","🗳","🗄","📋","📁","📂","🗂","🗞","📰","📓","📔","📒","📕","📗","📘","📙","📚","📖","🔖","🧷","🔗","📎","🖇","📐","📏","🧮","📌","📍","✂","🖊","🖋","✒","🖌","🖍","📝","✏","🔍","🔎","🔏","🔐","🔒","🔓" }, null, null @@ -483,309 +490,320 @@ public class EmojiData { public static final String[][] data = { new String[]{ - "😀", "😃", "😄", "😁", "😆", "😅", "😂", "🤣", "☺", "😊", "😇", "🙂", "🙃", "😉", "😌", "😍", "🥰", "😘", "😗", "😙", "😚", "😋", "😛", "😝", "😜", "🤪", "🤨", "🧐", "🤓", "😎", "🤩", "🥳", "😏", "😒", "😞", "😔", "😟", "😕", "🙁", "☹", "😣", "😖", "😫", "😩", "🥺", "😢", "😭", "😤", "😠", "😡", "🤬", "🤯", "😳", "🥵", "🥶", "😱", "😨", "😰", "😥", "😓", "🤗", "🤔", "🤭", "🤫", "🤥", "😶", "😐", "😑", "😬", "🙄", "😯", "😦", "😧", "😮", "😲", "🥱", "😴", "🤤", "😪", "😵", "🤐", 
"🥴", "🤢", "🤮", "🤧", "😷", "🤒", "🤕", "🤑", "🤠", "😈", "👿", "👹", "👺", "🤡", "💩", "👻", "💀", "☠", "👽", "👾", "🤖", "🎃", "😺", "😸", "😹", "😻", "😼", "😽", "🙀", "😿", "😾", - "🤲", "🤲🏻", "🤲🏼", "🤲🏽", "🤲🏾", "🤲🏿", - "👐", "👐🏻", "👐🏼", "👐🏽", "👐🏾", "👐🏿", - "🙌", "🙌🏻", "🙌🏼", "🙌🏽", "🙌🏾", "🙌🏿", - "👏", "👏🏻", "👏🏼", "👏🏽", "👏🏾", "👏🏿", + "😀","😃","😄","😁","😆","😅","😂","🤣","🥲","☺","😊","😇","🙂","🙃","😉","😌","😍","🥰","😘","😗","😙","😚","😋","😛","😝","😜","🤪","🤨","🧐","🤓","😎","🥸","🤩","🥳","😏","😒","😞","😔","😟","😕","🙁","☹","😣","😖","😫","😩","🥺","😢","😭","😤","😠","😡","🤬","🤯","😳","🥵","🥶","😱","😨","😰","😥","😓","🤗","🤔","🤭","🤫","🤥","😶","😐","😑","😬","🙄","😯","😦","😧","😮","😲","🥱","😴","🤤","😪","😵","🤐","🥴","🤢","🤮","🤧","😷","🤒","🤕","🤑","🤠","😈","👿","👹","👺","🤡","💩","👻","💀","☠","👽","👾","🤖","🎃","😺","😸","😹","😻","😼","😽","🙀","😿","😾", + "🤲","🤲🏻","🤲🏼","🤲🏽","🤲🏾","🤲🏿", + "👐","👐🏻","👐🏼","👐🏽","👐🏾","👐🏿", + "🙌","🙌🏻","🙌🏼","🙌🏽","🙌🏾","🙌🏿", + "👏","👏🏻","👏🏼","👏🏽","👏🏾","👏🏿", "🤝", - "👍", "👍🏻", "👍🏼", "👍🏽", "👍🏾", "👍🏿", - "👎", "👎🏻", "👎🏼", "👎🏽", "👎🏾", "👎🏿", - "👊", "👊🏻", "👊🏼", "👊🏽", "👊🏾", "👊🏿", - "✊", "✊🏻", "✊🏼", "✊🏽", "✊🏾", "✊🏿", - "🤛", "🤛🏻", "🤛🏼", "🤛🏽", "🤛🏾", "🤛🏿", - "🤜", "🤜🏻", "🤜🏼", "🤜🏽", "🤜🏾", "🤜🏿", - "🤞", "🤞🏻", "🤞🏼", "🤞🏽", "🤞🏾", "🤞🏿", - "✌", "✌🏻", "✌🏼", "✌🏽", "✌🏾", "✌🏿", - "🤟", "🤟🏻", "🤟🏼", "🤟🏽", "🤟🏾", "🤟🏿", - "🤘", "🤘🏻", "🤘🏼", "🤘🏽", "🤘🏾", "🤘🏿", - "👌", "👌🏻", "👌🏼", "👌🏽", "👌🏾", "👌🏿", - "🤏", "🤏🏻", "🤏🏼", "🤏🏽", "🤏🏾", "🤏🏿", - "👈", "👈🏻", "👈🏼", "👈🏽", "👈🏾", "👈🏿", - "👉", "👉🏻", "👉🏼", "👉🏽", "👉🏾", "👉🏿", - "👆", "👆🏻", "👆🏼", "👆🏽", "👆🏾", "👆🏿", - "👇", "👇🏻", "👇🏼", "👇🏽", "👇🏾", "👇🏿", - "☝", "☝🏻", "☝🏼", "☝🏽", "☝🏾", "☝🏿", - "✋", "✋🏻", "✋🏼", "✋🏽", "✋🏾", "✋🏿", - "🤚", "🤚🏻", "🤚🏼", "🤚🏽", "🤚🏾", "🤚🏿", - "🖐", "🖐🏻", "🖐🏼", "🖐🏽", "🖐🏾", "🖐🏿", - "🖖", "🖖🏻", "🖖🏼", "🖖🏽", "🖖🏾", "🖖🏿", - "👋", "👋🏻", "👋🏼", "👋🏽", "👋🏾", "👋🏿", - "🤙", "🤙🏻", "🤙🏼", "🤙🏽", "🤙🏾", "🤙🏿", - "💪", "💪🏻", "💪🏼", "💪🏽", "💪🏾", "💪🏿", + "👍","👍🏻","👍🏼","👍🏽","👍🏾","👍🏿", + "👎","👎🏻","👎🏼","👎🏽","👎🏾","👎🏿", + "👊","👊🏻","👊🏼","👊🏽","👊🏾","👊🏿", + "✊","✊🏻","✊🏼","✊🏽","✊🏾","✊🏿", + "🤛","🤛🏻","🤛🏼","🤛🏽","🤛🏾","🤛🏿", + "🤜","🤜🏻","🤜🏼","🤜🏽","🤜🏾","🤜🏿", + 
"🤞","🤞🏻","🤞🏼","🤞🏽","🤞🏾","🤞🏿", + "✌","✌🏻","✌🏼","✌🏽","✌🏾","✌🏿", + "🤟","🤟🏻","🤟🏼","🤟🏽","🤟🏾","🤟🏿", + "🤘","🤘🏻","🤘🏼","🤘🏽","🤘🏾","🤘🏿", + "👌","👌🏻","👌🏼","👌🏽","👌🏾","👌🏿", + "🤌","🤌🏻","🤌🏼","🤌🏽","🤌🏾","🤌🏿", + "🤏","🤏🏻","🤏🏼","🤏🏽","🤏🏾","🤏🏿", + "👈","👈🏻","👈🏼","👈🏽","👈🏾","👈🏿", + "👉","👉🏻","👉🏼","👉🏽","👉🏾","👉🏿", + "👆","👆🏻","👆🏼","👆🏽","👆🏾","👆🏿", + "👇","👇🏻","👇🏼","👇🏽","👇🏾","👇🏿", + "☝","☝🏻","☝🏼","☝🏽","☝🏾","☝🏿", + "✋","✋🏻","✋🏼","✋🏽","✋🏾","✋🏿", + "🤚","🤚🏻","🤚🏼","🤚🏽","🤚🏾","🤚🏿", + "🖐","🖐🏻","🖐🏼","🖐🏽","🖐🏾","🖐🏿", + "🖖","🖖🏻","🖖🏼","🖖🏽","🖖🏾","🖖🏿", + "👋","👋🏻","👋🏼","👋🏽","👋🏾","👋🏿", + "🤙","🤙🏻","🤙🏼","🤙🏽","🤙🏾","🤙🏿", + "💪","💪🏻","💪🏼","💪🏽","💪🏾","💪🏿", "🦾", - "🖕", "🖕🏻", "🖕🏼", "🖕🏽", "🖕🏾", "🖕🏿", - "✍", "✍🏻", "✍🏼", "✍🏽", "✍🏾", "✍🏿", - "🙏", "🙏🏻", "🙏🏼", "🙏🏽", "🙏🏾", "🙏🏿", - "🦶", "🦶🏻", "🦶🏼", "🦶🏽", "🦶🏾", "🦶🏿", - "🦵", "🦵🏻", "🦵🏼", "🦵🏽", "🦵🏾", "🦵🏿", - "🦿", "💄", "💋", "👄", "🦷", "👅", - "👂", "👂🏻", "👂🏼", "👂🏽", "👂🏾", "👂🏿", - "🦻", "🦻🏻", "🦻🏼", "🦻🏽", "🦻🏾", "🦻🏿", - "👃", "👃🏻", "👃🏼", "👃🏽", "👃🏾", "👃🏿", - "👣", "👁", "👀", "🧠", "🗣", "👤", "👥", - "👶", "👶🏻", "👶🏼", "👶🏽", "👶🏾", "👶🏿", - "👧", "👧🏻", "👧🏼", "👧🏽", "👧🏾", "👧🏿", - "🧒", "🧒🏻", "🧒🏼", "🧒🏽", "🧒🏾", "🧒🏿", - "👦", "👦🏻", "👦🏼", "👦🏽", "👦🏾", "👦🏿", - "👩", "👩🏻", "👩🏼", "👩🏽", "👩🏾", "👩🏿", - "🧑", "🧑🏻", "🧑🏼", "🧑🏽", "🧑🏾", "🧑🏿", - "👨", "👨🏻", "👨🏼", "👨🏽", "👨🏾", "👨🏿", - "👩‍🦱", "👩🏻‍🦱", "👩🏼‍🦱", "👩🏽‍🦱", "👩🏾‍🦱", "👩🏿‍🦱", - "🧑‍🦱", "🧑🏻‍🦱", "🧑🏼‍🦱", "🧑🏽‍🦱", "🧑🏾‍🦱", "🧑🏿‍🦱", - "👨‍🦱", "👨🏻‍🦱", "👨🏼‍🦱", "👨🏽‍🦱", "👨🏾‍🦱", "👨🏿‍🦱", - "👩‍🦰", "👩🏻‍🦰", "👩🏼‍🦰", "👩🏽‍🦰", "👩🏾‍🦰", "👩🏿‍🦰", - "🧑‍🦰", "🧑🏻‍🦰", "🧑🏼‍🦰", "🧑🏽‍🦰", "🧑🏾‍🦰", "🧑🏿‍🦰", - "👨‍🦰", "👨🏻‍🦰", "👨🏼‍🦰", "👨🏽‍🦰", "👨🏾‍🦰", "👨🏿‍🦰", - "👱‍♀", "👱🏻‍♀", "👱🏼‍♀", "👱🏽‍♀", "👱🏾‍♀", "👱🏿‍♀", - "👱", "👱🏻", "👱🏼", "👱🏽", "👱🏾", "👱🏿", - "👱‍♂", "👱🏻‍♂", "👱🏼‍♂", "👱🏽‍♂", "👱🏾‍♂", "👱🏿‍♂", - "👩‍🦳", "👩🏻‍🦳", "👩🏼‍🦳", "👩🏽‍🦳", "👩🏾‍🦳", "👩🏿‍🦳", - "🧑‍🦳", "🧑🏻‍🦳", "🧑🏼‍🦳", "🧑🏽‍🦳", "🧑🏾‍🦳", "🧑🏿‍🦳", - "👨‍🦳", "👨🏻‍🦳", "👨🏼‍🦳", "👨🏽‍🦳", "👨🏾‍🦳", "👨🏿‍🦳", - "👩‍🦲", "👩🏻‍🦲", "👩🏼‍🦲", "👩🏽‍🦲", "👩🏾‍🦲", "👩🏿‍🦲", - "🧑‍🦲", "🧑🏻‍🦲", "🧑🏼‍🦲", "🧑🏽‍🦲", "🧑🏾‍🦲", "🧑🏿‍🦲", - "👨‍🦲", "👨🏻‍🦲", "👨🏼‍🦲", "👨🏽‍🦲", "👨🏾‍🦲", "👨🏿‍🦲", - "🧔", "🧔🏻", "🧔🏼", "🧔🏽", "🧔🏾", "🧔🏿", - 
"👵", "👵🏻", "👵🏼", "👵🏽", "👵🏾", "👵🏿", - "🧓", "🧓🏻", "🧓🏼", "🧓🏽", "🧓🏾", "🧓🏿", - "👴", "👴🏻", "👴🏼", "👴🏽", "👴🏾", "👴🏿", - "👲", "👲🏻", "👲🏼", "👲🏽", "👲🏾", "👲🏿", - "👳‍♀", "👳🏻‍♀", "👳🏼‍♀", "👳🏽‍♀", "👳🏾‍♀", "👳🏿‍♀", - "👳", "👳🏻", "👳🏼", "👳🏽", "👳🏾", "👳🏿", - "👳‍♂", "👳🏻‍♂", "👳🏼‍♂", "👳🏽‍♂", "👳🏾‍♂", "👳🏿‍♂", - "🧕", "🧕🏻", "🧕🏼", "🧕🏽", "🧕🏾", "🧕🏿", - "👮‍♀", "👮🏻‍♀", "👮🏼‍♀", "👮🏽‍♀", "👮🏾‍♀", "👮🏿‍♀", - "👮", "👮🏻", "👮🏼", "👮🏽", "👮🏾", "👮🏿", - "👮‍♂", "👮🏻‍♂", "👮🏼‍♂", "👮🏽‍♂", "👮🏾‍♂", "👮🏿‍♂", - "👷‍♀", "👷🏻‍♀", "👷🏼‍♀", "👷🏽‍♀", "👷🏾‍♀", "👷🏿‍♀", - "👷", "👷🏻", "👷🏼", "👷🏽", "👷🏾", "👷🏿", - "👷‍♂", "👷🏻‍♂", "👷🏼‍♂", "👷🏽‍♂", "👷🏾‍♂", "👷🏿‍♂", - "💂‍♀", "💂🏻‍♀", "💂🏼‍♀", "💂🏽‍♀", "💂🏾‍♀", "💂🏿‍♀", - "💂", "💂🏻", "💂🏼", "💂🏽", "💂🏾", "💂🏿", - "💂‍♂", "💂🏻‍♂", "💂🏼‍♂", "💂🏽‍♂", "💂🏾‍♂", "💂🏿‍♂", - "🕵‍♀", "🕵🏻‍♀", "🕵🏼‍♀", "🕵🏽‍♀", "🕵🏾‍♀", "🕵🏿‍♀", - "🕵", "🕵🏻", "🕵🏼", "🕵🏽", "🕵🏾", "🕵🏿", - "🕵‍♂", "🕵🏻‍♂", "🕵🏼‍♂", "🕵🏽‍♂", "🕵🏾‍♂", "🕵🏿‍♂", - "👩‍⚕", "👩🏻‍⚕", "👩🏼‍⚕", "👩🏽‍⚕", "👩🏾‍⚕", "👩🏿‍⚕", - "🧑‍⚕", "🧑🏻‍⚕", "🧑🏼‍⚕", "🧑🏽‍⚕", "🧑🏾‍⚕", "🧑🏿‍⚕", - "👨‍⚕", "👨🏻‍⚕", "👨🏼‍⚕", "👨🏽‍⚕", "👨🏾‍⚕", "👨🏿‍⚕", - "👩‍🌾", "👩🏻‍🌾", "👩🏼‍🌾", "👩🏽‍🌾", "👩🏾‍🌾", "👩🏿‍🌾", - "🧑‍🌾", "🧑🏻‍🌾", "🧑🏼‍🌾", "🧑🏽‍🌾", "🧑🏾‍🌾", "🧑🏿‍🌾", - "👨‍🌾", "👨🏻‍🌾", "👨🏼‍🌾", "👨🏽‍🌾", "👨🏾‍🌾", "👨🏿‍🌾", - "👩‍🍳", "👩🏻‍🍳", "👩🏼‍🍳", "👩🏽‍🍳", "👩🏾‍🍳", "👩🏿‍🍳", - "🧑‍🍳", "🧑🏻‍🍳", "🧑🏼‍🍳", "🧑🏽‍🍳", "🧑🏾‍🍳", "🧑🏿‍🍳", - "👨‍🍳", "👨🏻‍🍳", "👨🏼‍🍳", "👨🏽‍🍳", "👨🏾‍🍳", "👨🏿‍🍳", - "👩‍🎓", "👩🏻‍🎓", "👩🏼‍🎓", "👩🏽‍🎓", "👩🏾‍🎓", "👩🏿‍🎓", - "🧑‍🎓", "🧑🏻‍🎓", "🧑🏼‍🎓", "🧑🏽‍🎓", "🧑🏾‍🎓", "🧑🏿‍🎓", - "👨‍🎓", "👨🏻‍🎓", "👨🏼‍🎓", "👨🏽‍🎓", "👨🏾‍🎓", "👨🏿‍🎓", - "👩‍🎤", "👩🏻‍🎤", "👩🏼‍🎤", "👩🏽‍🎤", "👩🏾‍🎤", "👩🏿‍🎤", - "🧑‍🎤", "🧑🏻‍🎤", "🧑🏼‍🎤", "🧑🏽‍🎤", "🧑🏾‍🎤", "🧑🏿‍🎤", - "👨‍🎤", "👨🏻‍🎤", "👨🏼‍🎤", "👨🏽‍🎤", "👨🏾‍🎤", "👨🏿‍🎤", - "👩‍🏫", "👩🏻‍🏫", "👩🏼‍🏫", "👩🏽‍🏫", "👩🏾‍🏫", "👩🏿‍🏫", - "🧑‍🏫", "🧑🏻‍🏫", "🧑🏼‍🏫", "🧑🏽‍🏫", "🧑🏾‍🏫", "🧑🏿‍🏫", - "👨‍🏫", "👨🏻‍🏫", "👨🏼‍🏫", "👨🏽‍🏫", "👨🏾‍🏫", "👨🏿‍🏫", - "👩‍🏭", "👩🏻‍🏭", "👩🏼‍🏭", "👩🏽‍🏭", "👩🏾‍🏭", "👩🏿‍🏭", - "🧑‍🏭", "🧑🏻‍🏭", "🧑🏼‍🏭", "🧑🏽‍🏭", "🧑🏾‍🏭", "🧑🏿‍🏭", - "👨‍🏭", "👨🏻‍🏭", "👨🏼‍🏭", "👨🏽‍🏭", "👨🏾‍🏭", "👨🏿‍🏭", - "👩‍💻", "👩🏻‍💻", "👩🏼‍💻", "👩🏽‍💻", "👩🏾‍💻", "👩🏿‍💻", - "🧑‍💻", "🧑🏻‍💻", "🧑🏼‍💻", "🧑🏽‍💻", "🧑🏾‍💻", "🧑🏿‍💻", - "👨‍💻", 
"👨🏻‍💻", "👨🏼‍💻", "👨🏽‍💻", "👨🏾‍💻", "👨🏿‍💻", - "👩‍💼", "👩🏻‍💼", "👩🏼‍💼", "👩🏽‍💼", "👩🏾‍💼", "👩🏿‍💼", - "🧑‍💼", "🧑🏻‍💼", "🧑🏼‍💼", "🧑🏽‍💼", "🧑🏾‍💼", "🧑🏿‍💼", - "👨‍💼", "👨🏻‍💼", "👨🏼‍💼", "👨🏽‍💼", "👨🏾‍💼", "👨🏿‍💼", - "👩‍🔧", "👩🏻‍🔧", "👩🏼‍🔧", "👩🏽‍🔧", "👩🏾‍🔧", "👩🏿‍🔧", - "🧑‍🔧", "🧑🏻‍🔧", "🧑🏼‍🔧", "🧑🏽‍🔧", "🧑🏾‍🔧", "🧑🏿‍🔧", - "👨‍🔧", "👨🏻‍🔧", "👨🏼‍🔧", "👨🏽‍🔧", "👨🏾‍🔧", "👨🏿‍🔧", - "👩‍🔬", "👩🏻‍🔬", "👩🏼‍🔬", "👩🏽‍🔬", "👩🏾‍🔬", "👩🏿‍🔬", - "🧑‍🔬", "🧑🏻‍🔬", "🧑🏼‍🔬", "🧑🏽‍🔬", "🧑🏾‍🔬", "🧑🏿‍🔬", - "👨‍🔬", "👨🏻‍🔬", "👨🏼‍🔬", "👨🏽‍🔬", "👨🏾‍🔬", "👨🏿‍🔬", - "👩‍🎨", "👩🏻‍🎨", "👩🏼‍🎨", "👩🏽‍🎨", "👩🏾‍🎨", "👩🏿‍🎨", - "🧑‍🎨", "🧑🏻‍🎨", "🧑🏼‍🎨", "🧑🏽‍🎨", "🧑🏾‍🎨", "🧑🏿‍🎨", - "👨‍🎨", "👨🏻‍🎨", "👨🏼‍🎨", "👨🏽‍🎨", "👨🏾‍🎨", "👨🏿‍🎨", - "👩‍🚒", "👩🏻‍🚒", "👩🏼‍🚒", "👩🏽‍🚒", "👩🏾‍🚒", "👩🏿‍🚒", - "🧑‍🚒", "🧑🏻‍🚒", "🧑🏼‍🚒", "🧑🏽‍🚒", "🧑🏾‍🚒", "🧑🏿‍🚒", - "👨‍🚒", "👨🏻‍🚒", "👨🏼‍🚒", "👨🏽‍🚒", "👨🏾‍🚒", "👨🏿‍🚒", - "👩‍✈", "👩🏻‍✈", "👩🏼‍✈", "👩🏽‍✈", "👩🏾‍✈", "👩🏿‍✈", - "🧑‍✈", "🧑🏻‍✈", "🧑🏼‍✈", "🧑🏽‍✈", "🧑🏾‍✈", "🧑🏿‍✈", - "👨‍✈", "👨🏻‍✈", "👨🏼‍✈", "👨🏽‍✈", "👨🏾‍✈", "👨🏿‍✈", - "👩‍🚀", "👩🏻‍🚀", "👩🏼‍🚀", "👩🏽‍🚀", "👩🏾‍🚀", "👩🏿‍🚀", - "🧑‍🚀", "🧑🏻‍🚀", "🧑🏼‍🚀", "🧑🏽‍🚀", "🧑🏾‍🚀", "🧑🏿‍🚀", - "👨‍🚀", "👨🏻‍🚀", "👨🏼‍🚀", "👨🏽‍🚀", "👨🏾‍🚀", "👨🏿‍🚀", - "👩‍⚖", "👩🏻‍⚖", "👩🏼‍⚖", "👩🏽‍⚖", "👩🏾‍⚖", "👩🏿‍⚖", - "🧑‍⚖", "🧑🏻‍⚖", "🧑🏼‍⚖", "🧑🏽‍⚖", "🧑🏾‍⚖", "🧑🏿‍⚖", - "👨‍⚖", "👨🏻‍⚖", "👨🏼‍⚖", "👨🏽‍⚖", "👨🏾‍⚖", "👨🏿‍⚖", - "👰", "👰🏻", "👰🏼", "👰🏽", "👰🏾", "👰🏿", - "🤵", "🤵🏻", "🤵🏼", "🤵🏽", "🤵🏾", "🤵🏿", - "👸", "👸🏻", "👸🏼", "👸🏽", "👸🏾", "👸🏿", - "🤴", "🤴🏻", "🤴🏼", "🤴🏽", "🤴🏾", "🤴🏿", - "🦸‍♀", "🦸🏻‍♀", "🦸🏼‍♀", "🦸🏽‍♀", "🦸🏾‍♀", "🦸🏿‍♀", - "🦸", "🦸🏻", "🦸🏼", "🦸🏽", "🦸🏾", "🦸🏿", - "🦸‍♂", "🦸🏻‍♂", "🦸🏼‍♂", "🦸🏽‍♂", "🦸🏾‍♂", "🦸🏿‍♂", - "🦹‍♀", "🦹🏻‍♀", "🦹🏼‍♀", "🦹🏽‍♀", "🦹🏾‍♀", "🦹🏿‍♀", - "🦹", "🦹🏻", "🦹🏼", "🦹🏽", "🦹🏾", "🦹🏿", - "🦹‍♂", "🦹🏻‍♂", "🦹🏼‍♂", "🦹🏽‍♂", "🦹🏾‍♂", "🦹🏿‍♂", - "🤶", "🤶🏻", "🤶🏼", "🤶🏽", "🤶🏾", "🤶🏿", - "🎅", "🎅🏻", "🎅🏼", "🎅🏽", "🎅🏾", "🎅🏿", - "🧙‍♀", "🧙🏻‍♀", "🧙🏼‍♀", "🧙🏽‍♀", "🧙🏾‍♀", "🧙🏿‍♀", - "🧙", "🧙🏻", "🧙🏼", "🧙🏽", "🧙🏾", "🧙🏿", - "🧙‍♂", "🧙🏻‍♂", "🧙🏼‍♂", "🧙🏽‍♂", "🧙🏾‍♂", "🧙🏿‍♂", - "🧝‍♀", "🧝🏻‍♀", "🧝🏼‍♀", "🧝🏽‍♀", "🧝🏾‍♀", "🧝🏿‍♀", - "🧝", "🧝🏻", "🧝🏼", "🧝🏽", "🧝🏾", "🧝🏿", - "🧝‍♂", "🧝🏻‍♂", "🧝🏼‍♂", "🧝🏽‍♂", "🧝🏾‍♂", "🧝🏿‍♂", - "🧛‍♀", "🧛🏻‍♀", 
"🧛🏼‍♀", "🧛🏽‍♀", "🧛🏾‍♀", "🧛🏿‍♀", - "🧛", "🧛🏻", "🧛🏼", "🧛🏽", "🧛🏾", "🧛🏿", - "🧛‍♂", "🧛🏻‍♂", "🧛🏼‍♂", "🧛🏽‍♂", "🧛🏾‍♂", "🧛🏿‍♂", - "🧟‍♀", "🧟", "🧟‍♂", "🧞‍♀", "🧞", "🧞‍♂", - "🧜‍♀", "🧜🏻‍♀", "🧜🏼‍♀", "🧜🏽‍♀", "🧜🏾‍♀", "🧜🏿‍♀", - "🧜", "🧜🏻", "🧜🏼", "🧜🏽", "🧜🏾", "🧜🏿", - "🧜‍♂", "🧜🏻‍♂", "🧜🏼‍♂", "🧜🏽‍♂", "🧜🏾‍♂", "🧜🏿‍♂", - "🧚‍♀", "🧚🏻‍♀", "🧚🏼‍♀", "🧚🏽‍♀", "🧚🏾‍♀", "🧚🏿‍♀", - "🧚", "🧚🏻", "🧚🏼", "🧚🏽", "🧚🏾", "🧚🏿", - "🧚‍♂", "🧚🏻‍♂", "🧚🏼‍♂", "🧚🏽‍♂", "🧚🏾‍♂", "🧚🏿‍♂", - "👼", "👼🏻", "👼🏼", "👼🏽", "👼🏾", "👼🏿", - "🤰", "🤰🏻", "🤰🏼", "🤰🏽", "🤰🏾", "🤰🏿", - "🤱", "🤱🏻", "🤱🏼", "🤱🏽", "🤱🏾", "🤱🏿", - "🙇‍♀", "🙇🏻‍♀", "🙇🏼‍♀", "🙇🏽‍♀", "🙇🏾‍♀", "🙇🏿‍♀", - "🙇", "🙇🏻", "🙇🏼", "🙇🏽", "🙇🏾", "🙇🏿", - "🙇‍♂", "🙇🏻‍♂", "🙇🏼‍♂", "🙇🏽‍♂", "🙇🏾‍♂", "🙇🏿‍♂", - "💁‍♀", "💁🏻‍♀", "💁🏼‍♀", "💁🏽‍♀", "💁🏾‍♀", "💁🏿‍♀", - "💁", "💁🏻", "💁🏼", "💁🏽", "💁🏾", "💁🏿", - "💁‍♂", "💁🏻‍♂", "💁🏼‍♂", "💁🏽‍♂", "💁🏾‍♂", "💁🏿‍♂", - "🙅‍♀", "🙅🏻‍♀", "🙅🏼‍♀", "🙅🏽‍♀", "🙅🏾‍♀", "🙅🏿‍♀", - "🙅", "🙅🏻", "🙅🏼", "🙅🏽", "🙅🏾", "🙅🏿", - "🙅‍♂", "🙅🏻‍♂", "🙅🏼‍♂", "🙅🏽‍♂", "🙅🏾‍♂", "🙅🏿‍♂", - "🙆‍♀", "🙆🏻‍♀", "🙆🏼‍♀", "🙆🏽‍♀", "🙆🏾‍♀", "🙆🏿‍♀", - "🙆", "🙆🏻", "🙆🏼", "🙆🏽", "🙆🏾", "🙆🏿", - "🙆‍♂", "🙆🏻‍♂", "🙆🏼‍♂", "🙆🏽‍♂", "🙆🏾‍♂", "🙆🏿‍♂", - "🙋‍♀", "🙋🏻‍♀", "🙋🏼‍♀", "🙋🏽‍♀", "🙋🏾‍♀", "🙋🏿‍♀", - "🙋", "🙋🏻", "🙋🏼", "🙋🏽", "🙋🏾", "🙋🏿", - "🙋‍♂", "🙋🏻‍♂", "🙋🏼‍♂", "🙋🏽‍♂", "🙋🏾‍♂", "🙋🏿‍♂", - "🧏‍♀", "🧏🏻‍♀", "🧏🏼‍♀", "🧏🏽‍♀", "🧏🏾‍♀", "🧏🏿‍♀", - "🧏", "🧏🏻", "🧏🏼", "🧏🏽", "🧏🏾", "🧏🏿", - "🧏‍♂", "🧏🏻‍♂", "🧏🏼‍♂", "🧏🏽‍♂", "🧏🏾‍♂", "🧏🏿‍♂", - "🤦‍♀", "🤦🏻‍♀", "🤦🏼‍♀", "🤦🏽‍♀", "🤦🏾‍♀", "🤦🏿‍♀", - "🤦", "🤦🏻", "🤦🏼", "🤦🏽", "🤦🏾", "🤦🏿", - "🤦‍♂", "🤦🏻‍♂", "🤦🏼‍♂", "🤦🏽‍♂", "🤦🏾‍♂", "🤦🏿‍♂", - "🤷‍♀", "🤷🏻‍♀", "🤷🏼‍♀", "🤷🏽‍♀", "🤷🏾‍♀", "🤷🏿‍♀", - "🤷", "🤷🏻", "🤷🏼", "🤷🏽", "🤷🏾", "🤷🏿", - "🤷‍♂", "🤷🏻‍♂", "🤷🏼‍♂", "🤷🏽‍♂", "🤷🏾‍♂", "🤷🏿‍♂", - "🙎‍♀", "🙎🏻‍♀", "🙎🏼‍♀", "🙎🏽‍♀", "🙎🏾‍♀", "🙎🏿‍♀", - "🙎", "🙎🏻", "🙎🏼", "🙎🏽", "🙎🏾", "🙎🏿", - "🙎‍♂", "🙎🏻‍♂", "🙎🏼‍♂", "🙎🏽‍♂", "🙎🏾‍♂", "🙎🏿‍♂", - "🙍‍♀", "🙍🏻‍♀", "🙍🏼‍♀", "🙍🏽‍♀", "🙍🏾‍♀", "🙍🏿‍♀", - "🙍", "🙍🏻", "🙍🏼", "🙍🏽", "🙍🏾", "🙍🏿", - "🙍‍♂", "🙍🏻‍♂", "🙍🏼‍♂", "🙍🏽‍♂", "🙍🏾‍♂", "🙍🏿‍♂", - "💇‍♀", "💇🏻‍♀", "💇🏼‍♀", "💇🏽‍♀", "💇🏾‍♀", "💇🏿‍♀", - "💇", "💇🏻", "💇🏼", "💇🏽", "💇🏾", "💇🏿", - "💇‍♂", "💇🏻‍♂", "💇🏼‍♂", 
"💇🏽‍♂", "💇🏾‍♂", "💇🏿‍♂", - "💆‍♀", "💆🏻‍♀", "💆🏼‍♀", "💆🏽‍♀", "💆🏾‍♀", "💆🏿‍♀", - "💆", "💆🏻", "💆🏼", "💆🏽", "💆🏾", "💆🏿", - "💆‍♂", "💆🏻‍♂", "💆🏼‍♂", "💆🏽‍♂", "💆🏾‍♂", "💆🏿‍♂", - "🧖‍♀", "🧖🏻‍♀", "🧖🏼‍♀", "🧖🏽‍♀", "🧖🏾‍♀", "🧖🏿‍♀", - "🧖", "🧖🏻", "🧖🏼", "🧖🏽", "🧖🏾", "🧖🏿", - "🧖‍♂", "🧖🏻‍♂", "🧖🏼‍♂", "🧖🏽‍♂", "🧖🏾‍♂", "🧖🏿‍♂", - "💅", "💅🏻", "💅🏼", "💅🏽", "💅🏾", "💅🏿", - "🤳", "🤳🏻", "🤳🏼", "🤳🏽", "🤳🏾", "🤳🏿", - "💃", "💃🏻", "💃🏼", "💃🏽", "💃🏾", "💃🏿", - "🕺", "🕺🏻", "🕺🏼", "🕺🏽", "🕺🏾", "🕺🏿", - "👯‍♀", "👯", "👯‍♂", - "🕴", "🕴🏻", "🕴🏼", "🕴🏽", "🕴🏾", "🕴🏿", - "👩‍🦽", "👩🏻‍🦽", "👩🏼‍🦽", "👩🏽‍🦽", "👩🏾‍🦽", "👩🏿‍🦽", - "🧑‍🦽", "🧑🏻‍🦽", "🧑🏼‍🦽", "🧑🏽‍🦽", "🧑🏾‍🦽", "🧑🏿‍🦽", - "👨‍🦽", "👨🏻‍🦽", "👨🏼‍🦽", "👨🏽‍🦽", "👨🏾‍🦽", "👨🏿‍🦽", - "👩‍🦼", "👩🏻‍🦼", "👩🏼‍🦼", "👩🏽‍🦼", "👩🏾‍🦼", "👩🏿‍🦼", - "🧑‍🦼", "🧑🏻‍🦼", "🧑🏼‍🦼", "🧑🏽‍🦼", "🧑🏾‍🦼", "🧑🏿‍🦼", - "👨‍🦼", "👨🏻‍🦼", "👨🏼‍🦼", "👨🏽‍🦼", "👨🏾‍🦼", "👨🏿‍🦼", - "🚶‍♀", "🚶🏻‍♀", "🚶🏼‍♀", "🚶🏽‍♀", "🚶🏾‍♀", "🚶🏿‍♀", - "🚶", "🚶🏻", "🚶🏼", "🚶🏽", "🚶🏾", "🚶🏿", - "🚶‍♂", "🚶🏻‍♂", "🚶🏼‍♂", "🚶🏽‍♂", "🚶🏾‍♂", "🚶🏿‍♂", - "👩‍🦯", "👩🏻‍🦯", "👩🏼‍🦯", "👩🏽‍🦯", "👩🏾‍🦯", "👩🏿‍🦯", - "🧑‍🦯", "🧑🏻‍🦯", "🧑🏼‍🦯", "🧑🏽‍🦯", "🧑🏾‍🦯", "🧑🏿‍🦯", - "👨‍🦯", "👨🏻‍🦯", "👨🏼‍🦯", "👨🏽‍🦯", "👨🏾‍🦯", "👨🏿‍🦯", - "🧎‍♀", "🧎🏻‍♀", "🧎🏼‍♀", "🧎🏽‍♀", "🧎🏾‍♀", "🧎🏿‍♀", - "🧎", "🧎🏻", "🧎🏼", "🧎🏽", "🧎🏾", "🧎🏿", - "🧎‍♂", "🧎🏻‍♂", "🧎🏼‍♂", "🧎🏽‍♂", "🧎🏾‍♂", "🧎🏿‍♂", - "🏃‍♀", "🏃🏻‍♀", "🏃🏼‍♀", "🏃🏽‍♀", "🏃🏾‍♀", "🏃🏿‍♀", - "🏃", "🏃🏻", "🏃🏼", "🏃🏽", "🏃🏾", "🏃🏿", - "🏃‍♂", "🏃🏻‍♂", "🏃🏼‍♂", "🏃🏽‍♂", "🏃🏾‍♂", "🏃🏿‍♂", - "🧍‍♀", "🧍🏻‍♀", "🧍🏼‍♀", "🧍🏽‍♀", "🧍🏾‍♀", "🧍🏿‍♀", - "🧍", "🧍🏻", "🧍🏼", "🧍🏽", "🧍🏾", "🧍🏿", - "🧍‍♂", "🧍🏻‍♂", "🧍🏼‍♂", "🧍🏽‍♂", "🧍🏾‍♂", "🧍🏿‍♂", - "👫", "👫🏻", "👩🏻‍🤝‍👨🏼", "👩🏻‍🤝‍👨🏽", "👩🏻‍🤝‍👨🏾", "👩🏻‍🤝‍👨🏿", "👩🏼‍🤝‍👨🏻", "👫🏼", "👩🏼‍🤝‍👨🏽", "👩🏼‍🤝‍👨🏾", "👩🏼‍🤝‍👨🏿", "👩🏽‍🤝‍👨🏻", "👩🏽‍🤝‍👨🏼", "👫🏽", "👩🏽‍🤝‍👨🏾", "👩🏽‍🤝‍👨🏿", "👩🏾‍🤝‍👨🏻", "👩🏾‍🤝‍👨🏼", "👩🏾‍🤝‍👨🏽", "👫🏾", "👩🏾‍🤝‍👨🏿", "👩🏿‍🤝‍👨🏻", "👩🏿‍🤝‍👨🏼", "👩🏿‍🤝‍👨🏽", "👩🏿‍🤝‍👨🏾", "👫🏿", - "👭", "👭🏻", "👩🏻‍🤝‍👩🏼", "👩🏻‍🤝‍👩🏽", "👩🏻‍🤝‍👩🏾", "👩🏻‍🤝‍👩🏿", "👩🏼‍🤝‍👩🏻", "👭🏼", "👩🏼‍🤝‍👩🏽", "👩🏼‍🤝‍👩🏾", "👩🏼‍🤝‍👩🏿", "👩🏽‍🤝‍👩🏻", "👩🏽‍🤝‍👩🏼", "👭🏽", "👩🏽‍🤝‍👩🏾", "👩🏽‍🤝‍👩🏿", "👩🏾‍🤝‍👩🏻", "👩🏾‍🤝‍👩🏼", "👩🏾‍🤝‍👩🏽", "👭🏾", "👩🏾‍🤝‍👩🏿", "👩🏿‍🤝‍👩🏻", "👩🏿‍🤝‍👩🏼", "👩🏿‍🤝‍👩🏽", "👩🏿‍🤝‍👩🏾", "👭🏿", - 
"👬", "👬🏻", "👨🏻‍🤝‍👨🏼", "👨🏻‍🤝‍👨🏽", "👨🏻‍🤝‍👨🏾", "👨🏻‍🤝‍👨🏿", "👨🏼‍🤝‍👨🏻", "👬🏼", "👨🏼‍🤝‍👨🏽", "👨🏼‍🤝‍👨🏾", "👨🏼‍🤝‍👨🏿", "👨🏽‍🤝‍👨🏻", "👨🏽‍🤝‍👨🏼", "👬🏽", "👨🏽‍🤝‍👨🏾", "👨🏽‍🤝‍👨🏿", "👨🏾‍🤝‍👨🏻", "👨🏾‍🤝‍👨🏼", "👨🏾‍🤝‍👨🏽", "👬🏾", "👨🏾‍🤝‍👨🏿", "👨🏿‍🤝‍👨🏻", "👨🏿‍🤝‍👨🏼", "👨🏿‍🤝‍👨🏽", "👨🏿‍🤝‍👨🏾", "👬🏿", - "👩‍❤‍👨", "👩‍❤‍👩", "👨‍❤‍👨", "👩‍❤‍💋‍👨", "👩‍❤‍💋‍👩", "👨‍❤‍💋‍👨", "👨‍👩‍👦", "👨‍👩‍👧", "👨‍👩‍👧‍👦", "👨‍👩‍👦‍👦", "👨‍👩‍👧‍👧", "👩‍👩‍👦", "👩‍👩‍👧", "👩‍👩‍👧‍👦", "👩‍👩‍👦‍👦", "👩‍👩‍👧‍👧", "👨‍👨‍👦", "👨‍👨‍👧", "👨‍👨‍👧‍👦", "👨‍👨‍👦‍👦", "👨‍👨‍👧‍👧", "👩‍👦", "👩‍👧", "👩‍👧‍👦", "👩‍👦‍👦", "👩‍👧‍👧", "👨‍👦", "👨‍👧", "👨‍👧‍👦", "👨‍👦‍👦", "👨‍👧‍👧", "🧶", "🧵", "🧥", "🥼", "🦺", "👚", "👕", "👖", "🩲", "🩳", "👔", "👗", "👙", "👘", "🥻", "🩱", "🥿", "👠", "👡", "👢", "👞", "👟", "🥾", "🧦", "🧤", "🧣", "🎩", "🧢", "👒", "🎓", "⛑", "👑", "💍", "👝", "👛", "👜", "💼", "🎒", "🧳", "👓", "🕶", "🥽", "🌂" + "🖕","🖕🏻","🖕🏼","🖕🏽","🖕🏾","🖕🏿", + "✍","✍🏻","✍🏼","✍🏽","✍🏾","✍🏿", + "🙏","🙏🏻","🙏🏼","🙏🏽","🙏🏾","🙏🏿", + "🦶","🦶🏻","🦶🏼","🦶🏽","🦶🏾","🦶🏿", + "🦵","🦵🏻","🦵🏼","🦵🏽","🦵🏾","🦵🏿", + "🦿","💄","💋","👄","🦷","👅", + "👂","👂🏻","👂🏼","👂🏽","👂🏾","👂🏿", + "🦻","🦻🏻","🦻🏼","🦻🏽","🦻🏾","🦻🏿", + "👃","👃🏻","👃🏼","👃🏽","👃🏾","👃🏿", + "👣","👁","👀","🫀","🫁","🧠","🗣","👤","👥","🫂", + "👶","👶🏻","👶🏼","👶🏽","👶🏾","👶🏿", + "👧","👧🏻","👧🏼","👧🏽","👧🏾","👧🏿", + "🧒","🧒🏻","🧒🏼","🧒🏽","🧒🏾","🧒🏿", + "👦","👦🏻","👦🏼","👦🏽","👦🏾","👦🏿", + "👩","👩🏻","👩🏼","👩🏽","👩🏾","👩🏿", + "🧑","🧑🏻","🧑🏼","🧑🏽","🧑🏾","🧑🏿", + "👨","👨🏻","👨🏼","👨🏽","👨🏾","👨🏿", + "👩‍🦱","👩🏻‍🦱","👩🏼‍🦱","👩🏽‍🦱","👩🏾‍🦱","👩🏿‍🦱", + "🧑‍🦱","🧑🏻‍🦱","🧑🏼‍🦱","🧑🏽‍🦱","🧑🏾‍🦱","🧑🏿‍🦱", + "👨‍🦱","👨🏻‍🦱","👨🏼‍🦱","👨🏽‍🦱","👨🏾‍🦱","👨🏿‍🦱", + "👩‍🦰","👩🏻‍🦰","👩🏼‍🦰","👩🏽‍🦰","👩🏾‍🦰","👩🏿‍🦰", + "🧑‍🦰","🧑🏻‍🦰","🧑🏼‍🦰","🧑🏽‍🦰","🧑🏾‍🦰","🧑🏿‍🦰", + "👨‍🦰","👨🏻‍🦰","👨🏼‍🦰","👨🏽‍🦰","👨🏾‍🦰","👨🏿‍🦰", + "👱‍♀","👱🏻‍♀","👱🏼‍♀","👱🏽‍♀","👱🏾‍♀","👱🏿‍♀", + "👱","👱🏻","👱🏼","👱🏽","👱🏾","👱🏿", + "👱‍♂","👱🏻‍♂","👱🏼‍♂","👱🏽‍♂","👱🏾‍♂","👱🏿‍♂", + "👩‍🦳","👩🏻‍🦳","👩🏼‍🦳","👩🏽‍🦳","👩🏾‍🦳","👩🏿‍🦳", + "🧑‍🦳","🧑🏻‍🦳","🧑🏼‍🦳","🧑🏽‍🦳","🧑🏾‍🦳","🧑🏿‍🦳", + "🧑‍🦳","🧑🏻‍🦳","🧑🏼‍🦳","🧑🏽‍🦳","🧑🏾‍🦳","🧑🏿‍🦳", + "👨‍🦳","👨🏻‍🦳","👨🏼‍🦳","👨🏽‍🦳","👨🏾‍🦳","👨🏿‍🦳", + "👩‍🦲","👩🏻‍🦲","👩🏼‍🦲","👩🏽‍🦲","👩🏾‍🦲","👩🏿‍🦲", + "🧑‍🦲","🧑🏻‍🦲","🧑🏼‍🦲","🧑🏽‍🦲","🧑🏾‍🦲","🧑🏿‍🦲", + 
"👨‍🦲","👨🏻‍🦲","👨🏼‍🦲","👨🏽‍🦲","👨🏾‍🦲","👨🏿‍🦲", + "🧔","🧔🏻","🧔🏼","🧔🏽","🧔🏾","🧔🏿", + "👵","👵🏻","👵🏼","👵🏽","👵🏾","👵🏿", + "🧓","🧓🏻","🧓🏼","🧓🏽","🧓🏾","🧓🏿", + "👴","👴🏻","👴🏼","👴🏽","👴🏾","👴🏿", + "👲","👲🏻","👲🏼","👲🏽","👲🏾","👲🏿", + "👳‍♀","👳🏻‍♀","👳🏼‍♀","👳🏽‍♀","👳🏾‍♀","👳🏿‍♀", + "👳","👳🏻","👳🏼","👳🏽","👳🏾","👳🏿", + "👳‍♂","👳🏻‍♂","👳🏼‍♂","👳🏽‍♂","👳🏾‍♂","👳🏿‍♂", + "🧕","🧕🏻","🧕🏼","🧕🏽","🧕🏾","🧕🏿", + "👮‍♀","👮🏻‍♀","👮🏼‍♀","👮🏽‍♀","👮🏾‍♀","👮🏿‍♀", + "👮","👮🏻","👮🏼","👮🏽","👮🏾","👮🏿", + "👮‍♂","👮🏻‍♂","👮🏼‍♂","👮🏽‍♂","👮🏾‍♂","👮🏿‍♂", + "👷‍♀","👷🏻‍♀","👷🏼‍♀","👷🏽‍♀","👷🏾‍♀","👷🏿‍♀", + "👷","👷🏻","👷🏼","👷🏽","👷🏾","👷🏿", + "👷‍♂","👷🏻‍♂","👷🏼‍♂","👷🏽‍♂","👷🏾‍♂","👷🏿‍♂", + "💂‍♀","💂🏻‍♀","💂🏼‍♀","💂🏽‍♀","💂🏾‍♀","💂🏿‍♀", + "💂","💂🏻","💂🏼","💂🏽","💂🏾","💂🏿", + "💂‍♂","💂🏻‍♂","💂🏼‍♂","💂🏽‍♂","💂🏾‍♂","💂🏿‍♂", + "🕵‍♀","🕵🏻‍♀","🕵🏼‍♀","🕵🏽‍♀","🕵🏾‍♀","🕵🏿‍♀", + "🕵","🕵🏻","🕵🏼","🕵🏽","🕵🏾","🕵🏿", + "🕵‍♂","🕵🏻‍♂","🕵🏼‍♂","🕵🏽‍♂","🕵🏾‍♂","🕵🏿‍♂", + "👩‍⚕","👩🏻‍⚕","👩🏼‍⚕","👩🏽‍⚕","👩🏾‍⚕","👩🏿‍⚕", + "🧑‍⚕","🧑🏻‍⚕","🧑🏼‍⚕","🧑🏽‍⚕","🧑🏾‍⚕","🧑🏿‍⚕", + "👨‍⚕","👨🏻‍⚕","👨🏼‍⚕","👨🏽‍⚕","👨🏾‍⚕","👨🏿‍⚕", + "👩‍🌾","👩🏻‍🌾","👩🏼‍🌾","👩🏽‍🌾","👩🏾‍🌾","👩🏿‍🌾", + "🧑‍🌾","🧑🏻‍🌾","🧑🏼‍🌾","🧑🏽‍🌾","🧑🏾‍🌾","🧑🏿‍🌾", + "👨‍🌾","👨🏻‍🌾","👨🏼‍🌾","👨🏽‍🌾","👨🏾‍🌾","👨🏿‍🌾", + "👩‍🍳","👩🏻‍🍳","👩🏼‍🍳","👩🏽‍🍳","👩🏾‍🍳","👩🏿‍🍳", + "🧑‍🍳","🧑🏻‍🍳","🧑🏼‍🍳","🧑🏽‍🍳","🧑🏾‍🍳","🧑🏿‍🍳", + "👨‍🍳","👨🏻‍🍳","👨🏼‍🍳","👨🏽‍🍳","👨🏾‍🍳","👨🏿‍🍳", + "👩‍🎓","👩🏻‍🎓","👩🏼‍🎓","👩🏽‍🎓","👩🏾‍🎓","👩🏿‍🎓", + "🧑‍🎓","🧑🏻‍🎓","🧑🏼‍🎓","🧑🏽‍🎓","🧑🏾‍🎓","🧑🏿‍🎓", + "👨‍🎓","👨🏻‍🎓","👨🏼‍🎓","👨🏽‍🎓","👨🏾‍🎓","👨🏿‍🎓", + "👩‍🎤","👩🏻‍🎤","👩🏼‍🎤","👩🏽‍🎤","👩🏾‍🎤","👩🏿‍🎤", + "🧑‍🎤","🧑🏻‍🎤","🧑🏼‍🎤","🧑🏽‍🎤","🧑🏾‍🎤","🧑🏿‍🎤", + "👨‍🎤","👨🏻‍🎤","👨🏼‍🎤","👨🏽‍🎤","👨🏾‍🎤","👨🏿‍🎤", + "👩‍🏫","👩🏻‍🏫","👩🏼‍🏫","👩🏽‍🏫","👩🏾‍🏫","👩🏿‍🏫", + "🧑‍🏫","🧑🏻‍🏫","🧑🏼‍🏫","🧑🏽‍🏫","🧑🏾‍🏫","🧑🏿‍🏫", + "👨‍🏫","👨🏻‍🏫","👨🏼‍🏫","👨🏽‍🏫","👨🏾‍🏫","👨🏿‍🏫", + "👩‍🏭","👩🏻‍🏭","👩🏼‍🏭","👩🏽‍🏭","👩🏾‍🏭","👩🏿‍🏭", + "🧑‍🏭","🧑🏻‍🏭","🧑🏼‍🏭","🧑🏽‍🏭","🧑🏾‍🏭","🧑🏿‍🏭", + "👨‍🏭","👨🏻‍🏭","👨🏼‍🏭","👨🏽‍🏭","👨🏾‍🏭","👨🏿‍🏭", + "👩‍💻","👩🏻‍💻","👩🏼‍💻","👩🏽‍💻","👩🏾‍💻","👩🏿‍💻", + "🧑‍💻","🧑🏻‍💻","🧑🏼‍💻","🧑🏽‍💻","🧑🏾‍💻","🧑🏿‍💻", + "👨‍💻","👨🏻‍💻","👨🏼‍💻","👨🏽‍💻","👨🏾‍💻","👨🏿‍💻", + "👩‍💼","👩🏻‍💼","👩🏼‍💼","👩🏽‍💼","👩🏾‍💼","👩🏿‍💼", + "🧑‍💼","🧑🏻‍💼","🧑🏼‍💼","🧑🏽‍💼","🧑🏾‍💼","🧑🏿‍💼", + 
"👨‍💼","👨🏻‍💼","👨🏼‍💼","👨🏽‍💼","👨🏾‍💼","👨🏿‍💼", + "👩‍🔧","👩🏻‍🔧","👩🏼‍🔧","👩🏽‍🔧","👩🏾‍🔧","👩🏿‍🔧", + "🧑‍🔧","🧑🏻‍🔧","🧑🏼‍🔧","🧑🏽‍🔧","🧑🏾‍🔧","🧑🏿‍🔧", + "👨‍🔧","👨🏻‍🔧","👨🏼‍🔧","👨🏽‍🔧","👨🏾‍🔧","👨🏿‍🔧", + "👩‍🔬","👩🏻‍🔬","👩🏼‍🔬","👩🏽‍🔬","👩🏾‍🔬","👩🏿‍🔬", + "🧑‍🔬","🧑🏻‍🔬","🧑🏼‍🔬","🧑🏽‍🔬","🧑🏾‍🔬","🧑🏿‍🔬", + "👨‍🔬","👨🏻‍🔬","👨🏼‍🔬","👨🏽‍🔬","👨🏾‍🔬","👨🏿‍🔬", + "👩‍🎨","👩🏻‍🎨","👩🏼‍🎨","👩🏽‍🎨","👩🏾‍🎨","👩🏿‍🎨", + "🧑‍🎨","🧑🏻‍🎨","🧑🏼‍🎨","🧑🏽‍🎨","🧑🏾‍🎨","🧑🏿‍🎨", + "👨‍🎨","👨🏻‍🎨","👨🏼‍🎨","👨🏽‍🎨","👨🏾‍🎨","👨🏿‍🎨", + "👩‍🚒","👩🏻‍🚒","👩🏼‍🚒","👩🏽‍🚒","👩🏾‍🚒","👩🏿‍🚒", + "🧑‍🚒","🧑🏻‍🚒","🧑🏼‍🚒","🧑🏽‍🚒","🧑🏾‍🚒","🧑🏿‍🚒", + "👨‍🚒","👨🏻‍🚒","👨🏼‍🚒","👨🏽‍🚒","👨🏾‍🚒","👨🏿‍🚒", + "👩‍✈","👩🏻‍✈","👩🏼‍✈","👩🏽‍✈","👩🏾‍✈","👩🏿‍✈", + "🧑‍✈","🧑🏻‍✈","🧑🏼‍✈","🧑🏽‍✈","🧑🏾‍✈","🧑🏿‍✈", + "👨‍✈","👨🏻‍✈","👨🏼‍✈","👨🏽‍✈","👨🏾‍✈","👨🏿‍✈", + "👩‍🚀","👩🏻‍🚀","👩🏼‍🚀","👩🏽‍🚀","👩🏾‍🚀","👩🏿‍🚀", + "🧑‍🚀","🧑🏻‍🚀","🧑🏼‍🚀","🧑🏽‍🚀","🧑🏾‍🚀","🧑🏿‍🚀", + "👨‍🚀","👨🏻‍🚀","👨🏼‍🚀","👨🏽‍🚀","👨🏾‍🚀","👨🏿‍🚀", + "👩‍⚖","👩🏻‍⚖","👩🏼‍⚖","👩🏽‍⚖","👩🏾‍⚖","👩🏿‍⚖", + "🧑‍⚖","🧑🏻‍⚖","🧑🏼‍⚖","🧑🏽‍⚖","🧑🏾‍⚖","🧑🏿‍⚖", + "👨‍⚖","👨🏻‍⚖","👨🏼‍⚖","👨🏽‍⚖","👨🏾‍⚖","👨🏿‍⚖", + "👰‍♀","👰🏻‍♀","👰🏼‍♀","👰🏽‍♀","👰🏾‍♀","👰🏿‍♀", + "👰","👰🏻","👰🏼","👰🏽","👰🏾","👰🏿", + "👰‍♂","👰🏻‍♂","👰🏼‍♂","👰🏽‍♂","👰🏾‍♂","👰🏿‍♂", + "🤵‍♀","🤵🏻‍♀","🤵🏼‍♀","🤵🏽‍♀","🤵🏾‍♀","🤵🏿‍♀", + "🤵","🤵🏻","🤵🏼","🤵🏽","🤵🏾","🤵🏿", + "🤵‍♂","🤵🏻‍♂","🤵🏼‍♂","🤵🏽‍♂","🤵🏾‍♂","🤵🏿‍♂", + "👸","👸🏻","👸🏼","👸🏽","👸🏾","👸🏿", + "🤴","🤴🏻","🤴🏼","🤴🏽","🤴🏾","🤴🏿", + "🥷","🥷🏻","🥷🏼","🥷🏽","🥷🏾","🥷🏿", + "🦸‍♀","🦸🏻‍♀","🦸🏼‍♀","🦸🏽‍♀","🦸🏾‍♀","🦸🏿‍♀", + "🦸","🦸🏻","🦸🏼","🦸🏽","🦸🏾","🦸🏿", + "🦸‍♂","🦸🏻‍♂","🦸🏼‍♂","🦸🏽‍♂","🦸🏾‍♂","🦸🏿‍♂", + "🦹‍♀","🦹🏻‍♀","🦹🏼‍♀","🦹🏽‍♀","🦹🏾‍♀","🦹🏿‍♀", + "🦹","🦹🏻","🦹🏼","🦹🏽","🦹🏾","🦹🏿", + "🦹‍♂","🦹🏻‍♂","🦹🏼‍♂","🦹🏽‍♂","🦹🏾‍♂","🦹🏿‍♂", + "🤶","🤶🏻","🤶🏼","🤶🏽","🤶🏾","🤶🏿", + "🧑‍🎄","🧑🏻‍🎄","🧑🏼‍🎄","🧑🏽‍🎄","🧑🏾‍🎄","🧑🏿‍🎄", + "🎅","🎅🏻","🎅🏼","🎅🏽","🎅🏾","🎅🏿", + "🧙‍♀","🧙🏻‍♀","🧙🏼‍♀","🧙🏽‍♀","🧙🏾‍♀","🧙🏿‍♀", + "🧙","🧙🏻","🧙🏼","🧙🏽","🧙🏾","🧙🏿", + "🧙‍♂","🧙🏻‍♂","🧙🏼‍♂","🧙🏽‍♂","🧙🏾‍♂","🧙🏿‍♂", + "🧝‍♀","🧝🏻‍♀","🧝🏼‍♀","🧝🏽‍♀","🧝🏾‍♀","🧝🏿‍♀", + "🧝","🧝🏻","🧝🏼","🧝🏽","🧝🏾","🧝🏿", + "🧝‍♂","🧝🏻‍♂","🧝🏼‍♂","🧝🏽‍♂","🧝🏾‍♂","🧝🏿‍♂", + "🧛‍♀","🧛🏻‍♀","🧛🏼‍♀","🧛🏽‍♀","🧛🏾‍♀","🧛🏿‍♀", + "🧛","🧛🏻","🧛🏼","🧛🏽","🧛🏾","🧛🏿", + 
"🧛‍♂","🧛🏻‍♂","🧛🏼‍♂","🧛🏽‍♂","🧛🏾‍♂","🧛🏿‍♂", + "🧟‍♀","🧟","🧟‍♂","🧞‍♀","🧞","🧞‍♂", + "🧜‍♀","🧜🏻‍♀","🧜🏼‍♀","🧜🏽‍♀","🧜🏾‍♀","🧜🏿‍♀", + "🧜","🧜🏻","🧜🏼","🧜🏽","🧜🏾","🧜🏿", + "🧜‍♂","🧜🏻‍♂","🧜🏼‍♂","🧜🏽‍♂","🧜🏾‍♂","🧜🏿‍♂", + "🧚‍♀","🧚🏻‍♀","🧚🏼‍♀","🧚🏽‍♀","🧚🏾‍♀","🧚🏿‍♀", + "🧚","🧚🏻","🧚🏼","🧚🏽","🧚🏾","🧚🏿", + "🧚‍♂","🧚🏻‍♂","🧚🏼‍♂","🧚🏽‍♂","🧚🏾‍♂","🧚🏿‍♂", + "👼","👼🏻","👼🏼","👼🏽","👼🏾","👼🏿", + "🤰","🤰🏻","🤰🏼","🤰🏽","🤰🏾","🤰🏿", + "🤱","🤱🏻","🤱🏼","🤱🏽","🤱🏾","🤱🏿", + "👩‍🍼","👩🏻‍🍼","👩🏼‍🍼","👩🏽‍🍼","👩🏾‍🍼","👩🏿‍🍼", + "🧑‍🍼","🧑🏻‍🍼","🧑🏼‍🍼","🧑🏽‍🍼","🧑🏾‍🍼","🧑🏿‍🍼", + "👨‍🍼","👨🏻‍🍼","👨🏼‍🍼","👨🏽‍🍼","👨🏾‍🍼","👨🏿‍🍼", + "🙇‍♀","🙇🏻‍♀","🙇🏼‍♀","🙇🏽‍♀","🙇🏾‍♀","🙇🏿‍♀", + "🙇","🙇🏻","🙇🏼","🙇🏽","🙇🏾","🙇🏿", + "🙇‍♂","🙇🏻‍♂","🙇🏼‍♂","🙇🏽‍♂","🙇🏾‍♂","🙇🏿‍♂", + "💁‍♀","💁🏻‍♀","💁🏼‍♀","💁🏽‍♀","💁🏾‍♀","💁🏿‍♀", + "💁","💁🏻","💁🏼","💁🏽","💁🏾","💁🏿", + "💁‍♂","💁🏻‍♂","💁🏼‍♂","💁🏽‍♂","💁🏾‍♂","💁🏿‍♂", + "🙅‍♀","🙅🏻‍♀","🙅🏼‍♀","🙅🏽‍♀","🙅🏾‍♀","🙅🏿‍♀", + "🙅","🙅🏻","🙅🏼","🙅🏽","🙅🏾","🙅🏿", + "🙅‍♂","🙅🏻‍♂","🙅🏼‍♂","🙅🏽‍♂","🙅🏾‍♂","🙅🏿‍♂", + "🙆‍♀","🙆🏻‍♀","🙆🏼‍♀","🙆🏽‍♀","🙆🏾‍♀","🙆🏿‍♀", + "🙆","🙆🏻","🙆🏼","🙆🏽","🙆🏾","🙆🏿", + "🙆‍♂","🙆🏻‍♂","🙆🏼‍♂","🙆🏽‍♂","🙆🏾‍♂","🙆🏿‍♂", + "🙋‍♀","🙋🏻‍♀","🙋🏼‍♀","🙋🏽‍♀","🙋🏾‍♀","🙋🏿‍♀", + "🙋","🙋🏻","🙋🏼","🙋🏽","🙋🏾","🙋🏿", + "🙋‍♂","🙋🏻‍♂","🙋🏼‍♂","🙋🏽‍♂","🙋🏾‍♂","🙋🏿‍♂", + "🧏‍♀","🧏🏻‍♀","🧏🏼‍♀","🧏🏽‍♀","🧏🏾‍♀","🧏🏿‍♀", + "🧏","🧏🏻","🧏🏼","🧏🏽","🧏🏾","🧏🏿", + "🧏‍♂","🧏🏻‍♂","🧏🏼‍♂","🧏🏽‍♂","🧏🏾‍♂","🧏🏿‍♂", + "🤦‍♀","🤦🏻‍♀","🤦🏼‍♀","🤦🏽‍♀","🤦🏾‍♀","🤦🏿‍♀", + "🤦","🤦🏻","🤦🏼","🤦🏽","🤦🏾","🤦🏿", + "🤦‍♂","🤦🏻‍♂","🤦🏼‍♂","🤦🏽‍♂","🤦🏾‍♂","🤦🏿‍♂", + "🤷‍♀","🤷🏻‍♀","🤷🏼‍♀","🤷🏽‍♀","🤷🏾‍♀","🤷🏿‍♀", + "🤷","🤷🏻","🤷🏼","🤷🏽","🤷🏾","🤷🏿", + "🤷‍♂","🤷🏻‍♂","🤷🏼‍♂","🤷🏽‍♂","🤷🏾‍♂","🤷🏿‍♂", + "🙎‍♀","🙎🏻‍♀","🙎🏼‍♀","🙎🏽‍♀","🙎🏾‍♀","🙎🏿‍♀", + "🙎","🙎🏻","🙎🏼","🙎🏽","🙎🏾","🙎🏿", + "🙎‍♂","🙎🏻‍♂","🙎🏼‍♂","🙎🏽‍♂","🙎🏾‍♂","🙎🏿‍♂", + "🙍‍♀","🙍🏻‍♀","🙍🏼‍♀","🙍🏽‍♀","🙍🏾‍♀","🙍🏿‍♀", + "🙍","🙍🏻","🙍🏼","🙍🏽","🙍🏾","🙍🏿", + "🙍‍♂","🙍🏻‍♂","🙍🏼‍♂","🙍🏽‍♂","🙍🏾‍♂","🙍🏿‍♂", + "💇‍♀","💇🏻‍♀","💇🏼‍♀","💇🏽‍♀","💇🏾‍♀","💇🏿‍♀", + "💇","💇🏻","💇🏼","💇🏽","💇🏾","💇🏿", + "💇‍♂","💇🏻‍♂","💇🏼‍♂","💇🏽‍♂","💇🏾‍♂","💇🏿‍♂", + "💆‍♀","💆🏻‍♀","💆🏼‍♀","💆🏽‍♀","💆🏾‍♀","💆🏿‍♀", + "💆","💆🏻","💆🏼","💆🏽","💆🏾","💆🏿", + "💆‍♂","💆🏻‍♂","💆🏼‍♂","💆🏽‍♂","💆🏾‍♂","💆🏿‍♂", + 
"🧖‍♀","🧖🏻‍♀","🧖🏼‍♀","🧖🏽‍♀","🧖🏾‍♀","🧖🏿‍♀", + "🧖","🧖🏻","🧖🏼","🧖🏽","🧖🏾","🧖🏿", + "🧖‍♂","🧖🏻‍♂","🧖🏼‍♂","🧖🏽‍♂","🧖🏾‍♂","🧖🏿‍♂", + "💅","💅🏻","💅🏼","💅🏽","💅🏾","💅🏿", + "🤳","🤳🏻","🤳🏼","🤳🏽","🤳🏾","🤳🏿", + "💃","💃🏻","💃🏼","💃🏽","💃🏾","💃🏿", + "🕺","🕺🏻","🕺🏼","🕺🏽","🕺🏾","🕺🏿", + "👯‍♀","👯","👯‍♂", + "🕴","🕴🏻","🕴🏼","🕴🏽","🕴🏾","🕴🏿", + "👩‍🦽","👩🏻‍🦽","👩🏼‍🦽","👩🏽‍🦽","👩🏾‍🦽","👩🏿‍🦽", + "🧑‍🦽","🧑🏻‍🦽","🧑🏼‍🦽","🧑🏽‍🦽","🧑🏾‍🦽","🧑🏿‍🦽", + "👨‍🦽","👨🏻‍🦽","👨🏼‍🦽","👨🏽‍🦽","👨🏾‍🦽","👨🏿‍🦽", + "👩‍🦼","👩🏻‍🦼","👩🏼‍🦼","👩🏽‍🦼","👩🏾‍🦼","👩🏿‍🦼", + "🧑‍🦼","🧑🏻‍🦼","🧑🏼‍🦼","🧑🏽‍🦼","🧑🏾‍🦼","🧑🏿‍🦼", + "👨‍🦼","👨🏻‍🦼","👨🏼‍🦼","👨🏽‍🦼","👨🏾‍🦼","👨🏿‍🦼", + "🚶‍♀","🚶🏻‍♀","🚶🏼‍♀","🚶🏽‍♀","🚶🏾‍♀","🚶🏿‍♀", + "🚶","🚶🏻","🚶🏼","🚶🏽","🚶🏾","🚶🏿", + "🚶‍♂","🚶🏻‍♂","🚶🏼‍♂","🚶🏽‍♂","🚶🏾‍♂","🚶🏿‍♂", + "👩‍🦯","👩🏻‍🦯","👩🏼‍🦯","👩🏽‍🦯","👩🏾‍🦯","👩🏿‍🦯", + "🧑‍🦯","🧑🏻‍🦯","🧑🏼‍🦯","🧑🏽‍🦯","🧑🏾‍🦯","🧑🏿‍🦯", + "👨‍🦯","👨🏻‍🦯","👨🏼‍🦯","👨🏽‍🦯","👨🏾‍🦯","👨🏿‍🦯", + "🧎‍♀","🧎🏻‍♀","🧎🏼‍♀","🧎🏽‍♀","🧎🏾‍♀","🧎🏿‍♀", + "🧎","🧎🏻","🧎🏼","🧎🏽","🧎🏾","🧎🏿", + "🧎‍♂","🧎🏻‍♂","🧎🏼‍♂","🧎🏽‍♂","🧎🏾‍♂","🧎🏿‍♂", + "🏃‍♀","🏃🏻‍♀","🏃🏼‍♀","🏃🏽‍♀","🏃🏾‍♀","🏃🏿‍♀", + "🏃","🏃🏻","🏃🏼","🏃🏽","🏃🏾","🏃🏿", + "🏃‍♂","🏃🏻‍♂","🏃🏼‍♂","🏃🏽‍♂","🏃🏾‍♂","🏃🏿‍♂", + "🧍‍♀","🧍🏻‍♀","🧍🏼‍♀","🧍🏽‍♀","🧍🏾‍♀","🧍🏿‍♀", + "🧍","🧍🏻","🧍🏼","🧍🏽","🧍🏾","🧍🏿", + "🧍‍♂","🧍🏻‍♂","🧍🏼‍♂","🧍🏽‍♂","🧍🏾‍♂","🧍🏿‍♂", + "👫","👫🏻","👩🏻‍🤝‍👨🏼","👩🏻‍🤝‍👨🏽","👩🏻‍🤝‍👨🏾","👩🏻‍🤝‍👨🏿","👩🏼‍🤝‍👨🏻","👫🏼","👩🏼‍🤝‍👨🏽","👩🏼‍🤝‍👨🏾","👩🏼‍🤝‍👨🏿","👩🏽‍🤝‍👨🏻","👩🏽‍🤝‍👨🏼","👫🏽","👩🏽‍🤝‍👨🏾","👩🏽‍🤝‍👨🏿","👩🏾‍🤝‍👨🏻","👩🏾‍🤝‍👨🏼","👩🏾‍🤝‍👨🏽","👫🏾","👩🏾‍🤝‍👨🏿","👩🏿‍🤝‍👨🏻","👩🏿‍🤝‍👨🏼","👩🏿‍🤝‍👨🏽","👩🏿‍🤝‍👨🏾","👫🏿", + "👭","👭🏻","👩🏻‍🤝‍👩🏼","👩🏻‍🤝‍👩🏽","👩🏻‍🤝‍👩🏾","👩🏻‍🤝‍👩🏿","👩🏼‍🤝‍👩🏻","👭🏼","👩🏼‍🤝‍👩🏽","👩🏼‍🤝‍👩🏾","👩🏼‍🤝‍👩🏿","👩🏽‍🤝‍👩🏻","👩🏽‍🤝‍👩🏼","👭🏽","👩🏽‍🤝‍👩🏾","👩🏽‍🤝‍👩🏿","👩🏾‍🤝‍👩🏻","👩🏾‍🤝‍👩🏼","👩🏾‍🤝‍👩🏽","👭🏾","👩🏾‍🤝‍👩🏿","👩🏿‍🤝‍👩🏻","👩🏿‍🤝‍👩🏼","👩🏿‍🤝‍👩🏽","👩🏿‍🤝‍👩🏾","👭🏿", + "👬","👬🏻","👨🏻‍🤝‍👨🏼","👨🏻‍🤝‍👨🏽","👨🏻‍🤝‍👨🏾","👨🏻‍🤝‍👨🏿","👨🏼‍🤝‍👨🏻","👬🏼","👨🏼‍🤝‍👨🏽","👨🏼‍🤝‍👨🏾","👨🏼‍🤝‍👨🏿","👨🏽‍🤝‍👨🏻","👨🏽‍🤝‍👨🏼","👬🏽","👨🏽‍🤝‍👨🏾","👨🏽‍🤝‍👨🏿","👨🏾‍🤝‍👨🏻","👨🏾‍🤝‍👨🏼","👨🏾‍🤝‍👨🏽","👬🏾","👨🏾‍🤝‍👨🏿","👨🏿‍🤝‍👨🏻","👨🏿‍🤝‍👨🏼","👨🏿‍🤝‍👨🏽","👨🏿‍🤝‍👨🏾","👬🏿", + 
"👩‍❤‍👨","👩‍❤‍👩","👨‍❤‍👨","👩‍❤‍💋‍👨","👩‍❤‍💋‍👩","👨‍❤‍💋‍👨","👨‍👩‍👦","👨‍👩‍👧","👨‍👩‍👧‍👦","👨‍👩‍👦‍👦","👨‍👩‍👧‍👧","👩‍👩‍👦","👩‍👩‍👧","👩‍👩‍👧‍👦","👩‍👩‍👦‍👦","👩‍👩‍👧‍👧","👨‍👨‍👦","👨‍👨‍👧","👨‍👨‍👧‍👦","👨‍👨‍👦‍👦","👨‍👨‍👧‍👧","👩‍👦","👩‍👧","👩‍👧‍👦","👩‍👦‍👦","👩‍👧‍👧","👨‍👦","👨‍👧","👨‍👧‍👦","👨‍👦‍👦","👨‍👧‍👧","🪢","🧶","🧵","🪡","🧥","🥼","🦺","👚","👕","👖","🩲","🩳","👔","👗","👙","🩱","👘","🥻","🩴","🥿","👠","👡","👢","👞","👟","🥾","🧦","🧤","🧣","🎩","🧢","👒","🎓","⛑","🪖","👑","💍","👝","👛","👜","💼","🎒","🧳","👓","🕶","🥽","🌂" }, new String[]{ - "🐶", "🐱", "🐭", "🐹", "🐰", "🦊", "🐻", "🐼", "🐨", "🐯", "🦁", "🐮", "🐷", "🐽", "🐸", "🐵", "🙈", "🙉", "🙊", "🐒", "🐔", "🐧", "🐦", "🐤", "🐣", "🐥", "🦆", "🦅", "🦉", "🦇", "🐺", "🐗", "🐴", "🦄", "🐝", "🐛", "🦋", "🐌", "🐞", "🐜", "🦟", "🦗", "🕷", "🕸", "🦂", "🐢", "🐍", "🦎", "🦖", "🦕", "🐙", "🦑", "🦐", "🦞", "🦀", "🐡", "🐠", "🐟", "🐬", "🐳", "🐋", "🦈", "🐊", "🐅", "🐆", "🦓", "🦍", "🦧", "🐘", "🦛", "🦏", "🐪", "🐫", "🦒", "🦘", "🐃", "🐂", "🐄", "🐎", "🐖", "🐏", "🐑", "🦙", "🐐", "🦌", "🐕", "🐩", "🦮", "🐕‍🦺", "🐈", "🐓", "🦃", "🦚", "🦜", "🦢", "🦩", "🕊", "🐇", "🦝", "🦨", "🦡", "🦦", "🦥", "🐁", "🐀", "🐿", "🦔", "🐾", "🐉", "🐲", "🌵", "🎄", "🌲", "🌳", "🌴", "🌱", "🌿", "☘", "🍀", "🎍", "🎋", "🍃", "🍂", "🍁", "🍄", "🐚", "🌾", "💐", "🌷", "🌹", "🥀", "🌺", "🌸", "🌼", "🌻", "🌞", "🌝", "🌛", "🌜", "🌚", "🌕", "🌖", "🌗", "🌘", "🌑", "🌒", "🌓", "🌔", "🌙", "🌎", "🌍", "🌏", "🪐", "💫", "⭐", "🌟", "✨", "⚡", "☄", "💥", "🔥", "🌪", "🌈", "☀", "🌤", "⛅", "🌥", "☁", "🌦", "🌧", "⛈", "🌩", "🌨", "❄", "☃", "⛄", "🌬", "💨", "💧", "💦", "☔", "☂", "🌊", "🌫" + 
"🐶","🐱","🐭","🐹","🐰","🦊","🐻","🐼","🐻‍❄","🐨","🐯","🦁","🐮","🐷","🐽","🐸","🐵","🙈","🙉","🙊","🐒","🐔","🐧","🐦","🐤","🐣","🐥","🦆","🦅","🦉","🦇","🐺","🐗","🐴","🦄","🐝","🪱","🐛","🦋","🐌","🐞","🐜","🪰","🪲","🪳","🦟","🦗","🕷","🕸","🦂","🐢","🐍","🦎","🦖","🦕","🐙","🦑","🦐","🦞","🦀","🐡","🐠","🐟","🐬","🐳","🐋","🦈","🦭","🐊","🐅","🐆","🦓","🦍","🦧","🦣","🐘","🦛","🦏","🐪","🐫","🦒","🦘","🦬","🐃","🐂","🐄","🐎","🐖","🐏","🐑","🦙","🐐","🦌","🐕","🐩","🦮","🐕‍🦺","🐈","🐈‍⬛","🪶","🐓","🦃","🦤","🦚","🦜","🦢","🦩","🕊","🐇","🦝","🦨","🦡","🦫","🦦","🦥","🐁","🐀","🐿","🦔","🐾","🐉","🐲","🌵","🎄","🌲","🌳","🌴","🪵","🌱","🌿","☘","🍀","🎍","🪴","🎋","🍃","🍂","🍁","🍄","🐚","🪨","🌾","💐","🌷","🌹","🥀","🌺","🌸","🌼","🌻","🌞","🌝","🌛","🌜","🌚","🌕","🌖","🌗","🌘","🌑","🌒","🌓","🌔","🌙","🌎","🌍","🌏","🪐","💫","⭐","🌟","✨","⚡","☄","💥","🔥","🌪","🌈","☀","🌤","⛅","🌥","☁","🌦","🌧","⛈","🌩","🌨","❄","☃","⛄","🌬","💨","💧","💦","☔","☂","🌊","🌫" }, new String[]{ - "🍏", "🍎", "🍐", "🍊", "🍋", "🍌", "🍉", "🍇", "🍓", "🍈", "🍒", "🍑", "🥭", "🍍", "🥥", "🥝", "🍅", "🍆", "🥑", "🥦", "🥬", "🥒", "🌶", "🌽", "🥕", "🧄", "🧅", "🥔", "🍠", "🥐", "🥯", "🍞", "🥖", "🥨", "🧀", "🥚", "🍳", "🧈", "🥞", "🧇", "🥓", "🥩", "🍗", "🍖", "🦴", "🌭", "🍔", "🍟", "🍕", "🥪", "🥙", "🧆", "🌮", "🌯", "🥗", "🥘", "🥫", "🍝", "🍜", "🍲", "🍛", "🍣", "🍱", "🥟", "🦪", "🍤", "🍙", "🍚", "🍘", "🍥", "🥠", "🥮", "🍢", "🍡", "🍧", "🍨", "🍦", "🥧", "🧁", "🍰", "🎂", "🍮", "🍭", "🍬", "🍫", "🍿", "🍩", "🍪", "🌰", "🥜", "🍯", "🥛", "🍼", "☕", "🍵", "🧃", "🥤", "🍶", "🍺", "🍻", "🥂", "🍷", "🥃", "🍸", "🍹", "🧉", "🍾", "🧊", "🥄", "🍴", "🍽", "🥣", "🥡", "🥢", "🧂" + "🍏","🍎","🍐","🍊","🍋","🍌","🍉","🍇","🍓","🫐","🍈","🍒","🍑","🥭","🍍","🥥","🥝","🍅","🍆","🥑","🥦","🥬","🥒","🌶","🫑","🌽","🥕","🫒","🧄","🧅","🥔","🍠","🥐","🥯","🍞","🥖","🥨","🧀","🥚","🍳","🧈","🥞","🧇","🥓","🥩","🍗","🍖","🦴","🌭","🍔","🍟","🍕","🫓","🥪","🥙","🧆","🌮","🌯","🫔","🥗","🥘","🫕","🥫","🍝","🍜","🍲","🍛","🍣","🍱","🥟","🦪","🍤","🍙","🍚","🍘","🍥","🥠","🥮","🍢","🍡","🍧","🍨","🍦","🥧","🧁","🍰","🎂","🍮","🍭","🍬","🍫","🍿","🍩","🍪","🌰","🥜","🍯","🥛","🍼","🫖","☕","🍵","🧃","🥤","🧋","🍶","🍺","🍻","🥂","🍷","🥃","🍸","🍹","🧉","🍾","🧊","🥄","🍴","🍽","🥣","🥡","🥢","🧂" }, new String[]{ - "⚽", "🏀", "🏈", "⚾", "🥎", "🎾", "🏐", "🏉", "🥏", "🎱", "🪀", "🏓", "🏸", "🏒", "🏑", "🥍", "🏏", "🥅", 
"⛳", "🪁", "🏹", "🎣", "🤿", "🥊", "🥋", "🎽", "🛹", "🛷", "⛸", "🥌", "🎿", "⛷", "🏂", "🪂", - "🏋‍♀", "🏋🏻‍♀", "🏋🏼‍♀", "🏋🏽‍♀", "🏋🏾‍♀", "🏋🏿‍♀", - "🏋", "🏋🏻", "🏋🏼", "🏋🏽", "🏋🏾", "🏋🏿", - "🏋‍♂", "🏋🏻‍♂", "🏋🏼‍♂", "🏋🏽‍♂", "🏋🏾‍♂", "🏋🏿‍♂", - "🤼‍♀", "🤼", "🤼‍♂", - "🤸‍♀", "🤸🏻‍♀", "🤸🏼‍♀", "🤸🏽‍♀", "🤸🏾‍♀", "🤸🏿‍♀", - "🤸", "🤸🏻", "🤸🏼", "🤸🏽", "🤸🏾", "🤸🏿", - "🤸‍♂", "🤸🏻‍♂", "🤸🏼‍♂", "🤸🏽‍♂", "🤸🏾‍♂", "🤸🏿‍♂", - "⛹‍♀", "⛹🏻‍♀", "⛹🏼‍♀", "⛹🏽‍♀", "⛹🏾‍♀", "⛹🏿‍♀", - "⛹", "⛹🏻", "⛹🏼", "⛹🏽", "⛹🏾", "⛹🏿", - "⛹‍♂", "⛹🏻‍♂", "⛹🏼‍♂", "⛹🏽‍♂", "⛹🏾‍♂", "⛹🏿‍♂", + "⚽","🏀","🏈","⚾","🥎","🎾","🏐","🏉","🥏","🎱","🪀","🏓","🏸","🏒","🏑","🥍","🏏","🪃","🥅","⛳","🪁","🏹","🎣","🤿","🥊","🥋","🎽","🛹","🛼","🛷","⛸","🥌","🎿","⛷","🏂","🪂", + "🏋‍♀","🏋🏻‍♀","🏋🏼‍♀","🏋🏽‍♀","🏋🏾‍♀","🏋🏿‍♀", + "🏋","🏋🏻","🏋🏼","🏋🏽","🏋🏾","🏋🏿", + "🏋‍♂","🏋🏻‍♂","🏋🏼‍♂","🏋🏽‍♂","🏋🏾‍♂","🏋🏿‍♂", + "🤼‍♀","🤼","🤼‍♂", + "🤸‍♀","🤸🏻‍♀","🤸🏼‍♀","🤸🏽‍♀","🤸🏾‍♀","🤸🏿‍♀", + "🤸","🤸🏻","🤸🏼","🤸🏽","🤸🏾","🤸🏿", + "🤸‍♂","🤸🏻‍♂","🤸🏼‍♂","🤸🏽‍♂","🤸🏾‍♂","🤸🏿‍♂", + "⛹‍♀","⛹🏻‍♀","⛹🏼‍♀","⛹🏽‍♀","⛹🏾‍♀","⛹🏿‍♀", + "⛹","⛹🏻","⛹🏼","⛹🏽","⛹🏾","⛹🏿", + "⛹‍♂","⛹🏻‍♂","⛹🏼‍♂","⛹🏽‍♂","⛹🏾‍♂","⛹🏿‍♂", "🤺", - "🤾‍♀", "🤾🏻‍♀", "🤾🏼‍♀", "🤾🏽‍♀", "🤾🏾‍♀", "🤾🏿‍♀", - "🤾", "🤾🏻", "🤾🏼", "🤾🏽", "🤾🏾", "🤾🏿", - "🤾‍♂", "🤾🏻‍♂", "🤾🏼‍♂", "🤾🏽‍♂", "🤾🏾‍♂", "🤾🏿‍♂", - "🏌‍♀", "🏌🏻‍♀", "🏌🏼‍♀", "🏌🏽‍♀", "🏌🏾‍♀", "🏌🏿‍♀", - "🏌", "🏌🏻", "🏌🏼", "🏌🏽", "🏌🏾", "🏌🏿", - "🏌‍♂", "🏌🏻‍♂", "🏌🏼‍♂", "🏌🏽‍♂", "🏌🏾‍♂", "🏌🏿‍♂", - "🏇", "🏇🏻", "🏇🏼", "🏇🏽", "🏇🏾", "🏇🏿", - "🧘‍♀", "🧘🏻‍♀", "🧘🏼‍♀", "🧘🏽‍♀", "🧘🏾‍♀", "🧘🏿‍♀", - "🧘", "🧘🏻", "🧘🏼", "🧘🏽", "🧘🏾", "🧘🏿", - "🧘‍♂", "🧘🏻‍♂", "🧘🏼‍♂", "🧘🏽‍♂", "🧘🏾‍♂", "🧘🏿‍♂", - "🏄‍♀", "🏄🏻‍♀", "🏄🏼‍♀", "🏄🏽‍♀", "🏄🏾‍♀", "🏄🏿‍♀", - "🏄", "🏄🏻", "🏄🏼", "🏄🏽", "🏄🏾", "🏄🏿", - "🏄‍♂", "🏄🏻‍♂", "🏄🏼‍♂", "🏄🏽‍♂", "🏄🏾‍♂", "🏄🏿‍♂", - "🏊‍♀", "🏊🏻‍♀", "🏊🏼‍♀", "🏊🏽‍♀", "🏊🏾‍♀", "🏊🏿‍♀", - "🏊", "🏊🏻", "🏊🏼", "🏊🏽", "🏊🏾", "🏊🏿", - "🏊‍♂", "🏊🏻‍♂", "🏊🏼‍♂", "🏊🏽‍♂", "🏊🏾‍♂", "🏊🏿‍♂", - "🤽‍♀", "🤽🏻‍♀", "🤽🏼‍♀", "🤽🏽‍♀", "🤽🏾‍♀", "🤽🏿‍♀", - "🤽", "🤽🏻", "🤽🏼", "🤽🏽", "🤽🏾", "🤽🏿", - "🤽‍♂", "🤽🏻‍♂", "🤽🏼‍♂", "🤽🏽‍♂", "🤽🏾‍♂", "🤽🏿‍♂", - "🚣‍♀", "🚣🏻‍♀", "🚣🏼‍♀", "🚣🏽‍♀", "🚣🏾‍♀", "🚣🏿‍♀", - "🚣", "🚣🏻", "🚣🏼", "🚣🏽", "🚣🏾", "🚣🏿", - "🚣‍♂", "🚣🏻‍♂", "🚣🏼‍♂", 
"🚣🏽‍♂", "🚣🏾‍♂", "🚣🏿‍♂", - "🧗‍♀", "🧗🏻‍♀", "🧗🏼‍♀", "🧗🏽‍♀", "🧗🏾‍♀", "🧗🏿‍♀", - "🧗", "🧗🏻", "🧗🏼", "🧗🏽", "🧗🏾", "🧗🏿", - "🧗‍♂", "🧗🏻‍♂", "🧗🏼‍♂", "🧗🏽‍♂", "🧗🏾‍♂", "🧗🏿‍♂", - "🚵‍♀", "🚵🏻‍♀", "🚵🏼‍♀", "🚵🏽‍♀", "🚵🏾‍♀", "🚵🏿‍♀", - "🚵", "🚵🏻", "🚵🏼", "🚵🏽", "🚵🏾", "🚵🏿", - "🚵‍♂", "🚵🏻‍♂", "🚵🏼‍♂", "🚵🏽‍♂", "🚵🏾‍♂", "🚵🏿‍♂", - "🚴‍♀", "🚴🏻‍♀", "🚴🏼‍♀", "🚴🏽‍♀", "🚴🏾‍♀", "🚴🏿‍♀", - "🚴", "🚴🏻", "🚴🏼", "🚴🏽", "🚴🏾", "🚴🏿", - "🚴‍♂", "🚴🏻‍♂", "🚴🏼‍♂", "🚴🏽‍♂", "🚴🏾‍♂", "🚴🏿‍♂", - "🏆", "🥇", "🥈", "🥉", "🏅", "🎖", "🏵", "🎗", "🎫", "🎟", "🎪", - "🤹‍♀", "🤹🏻‍♀", "🤹🏼‍♀", "🤹🏽‍♀", "🤹🏾‍♀", "🤹🏿‍♀", - "🤹", "🤹🏻", "🤹🏼", "🤹🏽", "🤹🏾", "🤹🏿", - "🤹‍♂", "🤹🏻‍♂", "🤹🏼‍♂", "🤹🏽‍♂", "🤹🏾‍♂", "🤹🏿‍♂", - "🎭", "🩰", "🎨", "🎬", "🎤", "🎧", "🎼", "🎹", "🥁", "🎷", "🎺", "🎸", "🪕", "🎻", "🎲", "♟", "🎯", "🎳", "🎮", "🎰", "🧩" + "🤾‍♀","🤾🏻‍♀","🤾🏼‍♀","🤾🏽‍♀","🤾🏾‍♀","🤾🏿‍♀", + "🤾","🤾🏻","🤾🏼","🤾🏽","🤾🏾","🤾🏿", + "🤾‍♂","🤾🏻‍♂","🤾🏼‍♂","🤾🏽‍♂","🤾🏾‍♂","🤾🏿‍♂", + "🏌‍♀","🏌🏻‍♀","🏌🏼‍♀","🏌🏽‍♀","🏌🏾‍♀","🏌🏿‍♀", + "🏌","🏌🏻","🏌🏼","🏌🏽","🏌🏾","🏌🏿", + "🏌‍♂","🏌🏻‍♂","🏌🏼‍♂","🏌🏽‍♂","🏌🏾‍♂","🏌🏿‍♂", + "🏇","🏇🏻","🏇🏼","🏇🏽","🏇🏾","🏇🏿", + "🧘‍♀","🧘🏻‍♀","🧘🏼‍♀","🧘🏽‍♀","🧘🏾‍♀","🧘🏿‍♀", + "🧘","🧘🏻","🧘🏼","🧘🏽","🧘🏾","🧘🏿", + "🧘‍♂","🧘🏻‍♂","🧘🏼‍♂","🧘🏽‍♂","🧘🏾‍♂","🧘🏿‍♂", + "🏄‍♀","🏄🏻‍♀","🏄🏼‍♀","🏄🏽‍♀","🏄🏾‍♀","🏄🏿‍♀", + "🏄","🏄🏻","🏄🏼","🏄🏽","🏄🏾","🏄🏿", + "🏄‍♂","🏄🏻‍♂","🏄🏼‍♂","🏄🏽‍♂","🏄🏾‍♂","🏄🏿‍♂", + "🏊‍♀","🏊🏻‍♀","🏊🏼‍♀","🏊🏽‍♀","🏊🏾‍♀","🏊🏿‍♀", + "🏊","🏊🏻","🏊🏼","🏊🏽","🏊🏾","🏊🏿", + "🏊‍♂","🏊🏻‍♂","🏊🏼‍♂","🏊🏽‍♂","🏊🏾‍♂","🏊🏿‍♂", + "🤽‍♀","🤽🏻‍♀","🤽🏼‍♀","🤽🏽‍♀","🤽🏾‍♀","🤽🏿‍♀", + "🤽","🤽🏻","🤽🏼","🤽🏽","🤽🏾","🤽🏿", + "🤽‍♂","🤽🏻‍♂","🤽🏼‍♂","🤽🏽‍♂","🤽🏾‍♂","🤽🏿‍♂", + "🚣‍♀","🚣🏻‍♀","🚣🏼‍♀","🚣🏽‍♀","🚣🏾‍♀","🚣🏿‍♀", + "🚣","🚣🏻","🚣🏼","🚣🏽","🚣🏾","🚣🏿", + "🚣‍♂","🚣🏻‍♂","🚣🏼‍♂","🚣🏽‍♂","🚣🏾‍♂","🚣🏿‍♂", + "🧗‍♀","🧗🏻‍♀","🧗🏼‍♀","🧗🏽‍♀","🧗🏾‍♀","🧗🏿‍♀", + "🧗","🧗🏻","🧗🏼","🧗🏽","🧗🏾","🧗🏿", + "🧗‍♂","🧗🏻‍♂","🧗🏼‍♂","🧗🏽‍♂","🧗🏾‍♂","🧗🏿‍♂", + "🚵‍♀","🚵🏻‍♀","🚵🏼‍♀","🚵🏽‍♀","🚵🏾‍♀","🚵🏿‍♀", + "🚵","🚵🏻","🚵🏼","🚵🏽","🚵🏾","🚵🏿", + "🚵‍♂","🚵🏻‍♂","🚵🏼‍♂","🚵🏽‍♂","🚵🏾‍♂","🚵🏿‍♂", + "🚴‍♀","🚴🏻‍♀","🚴🏼‍♀","🚴🏽‍♀","🚴🏾‍♀","🚴🏿‍♀", + "🚴","🚴🏻","🚴🏼","🚴🏽","🚴🏾","🚴🏿", + "🚴‍♂","🚴🏻‍♂","🚴🏼‍♂","🚴🏽‍♂","🚴🏾‍♂","🚴🏿‍♂", + 
"🏆","🥇","🥈","🥉","🏅","🎖","🏵","🎗","🎫","🎟","🎪", + "🤹‍♀","🤹🏻‍♀","🤹🏼‍♀","🤹🏽‍♀","🤹🏾‍♀","🤹🏿‍♀", + "🤹","🤹🏻","🤹🏼","🤹🏽","🤹🏾","🤹🏿", + "🤹‍♂","🤹🏻‍♂","🤹🏼‍♂","🤹🏽‍♂","🤹🏾‍♂","🤹🏿‍♂", + "🎭","🩰","🎨","🎬","🎤","🎧","🎼","🎹","🥁","🪘","🎷","🎺","🪗","🎸","🪕","🎻","🎲","♟","🎯","🎳","🎮","🎰","🧩" }, new String[]{ - "🚗", "🚕", "🚙", "🚌", "🚎", "🏎", "🚓", "🚑", "🚒", "🚐", "🚚", "🚛", "🚜", "🦯", "🦽", "🦼", "🛴", "🚲", "🛵", "🏍", "🛺", "🚨", "🚔", "🚍", "🚘", "🚖", "🚡", "🚠", "🚟", "🚃", "🚋", "🚞", "🚝", "🚄", "🚅", "🚈", "🚂", "🚆", "🚇", "🚊", "🚉", "✈", "🛫", "🛬", "🛩", "💺", "🛰", "🚀", "🛸", "🚁", "🛶", "⛵", "🚤", "🛥", "🛳", "⛴", "🚢", "⚓", "⛽", "🚧", "🚦", "🚥", "🚏", "🗺", "🗿", "🗽", "🗼", "🏰", "🏯", "🏟", "🎡", "🎢", "🎠", "⛲", "⛱", "🏖", "🏝", "🏜", "🌋", "⛰", "🏔", "🗻", "🏕", "⛺", "🏠", "🏡", "🏘", "🏚", "🏗", "🏭", "🏢", "🏬", "🏣", "🏤", "🏥", "🏦", "🏨", "🏪", "🏫", "🏩", "💒", "🏛", "⛪", "🕌", "🕍", "🛕", "🕋", "⛩", "🛤", "🛣", "🗾", "🎑", "🏞", "🌅", "🌄", "🌠", "🎇", "🎆", "🌇", "🌆", "🏙", "🌃", "🌌", "🌉", "🌁" + "🚗","🚕","🚙","🚌","🚎","🏎","🚓","🚑","🚒","🚐","🛻","🚚","🚛","🚜","🦯","🦽","🦼","🛴","🚲","🛵","🏍","🛺","🚨","🚔","🚍","🚘","🚖","🚡","🚠","🚟","🚃","🚋","🚞","🚝","🚄","🚅","🚈","🚂","🚆","🚇","🚊","🚉","✈","🛫","🛬","🛩","💺","🛰","🚀","🛸","🚁","🛶","⛵","🚤","🛥","🛳","⛴","🚢","⚓","🪝","⛽","🚧","🚦","🚥","🚏","🗺","🗿","🗽","🗼","🏰","🏯","🏟","🎡","🎢","🎠","⛲","⛱","🏖","🏝","🏜","🌋","⛰","🏔","🗻","🏕","⛺","🛖","🏠","🏡","🏘","🏚","🏗","🏭","🏢","🏬","🏣","🏤","🏥","🏦","🏨","🏪","🏫","🏩","💒","🏛","⛪","🕌","🕍","🛕","🕋","⛩","🛤","🛣","🗾","🎑","🏞","🌅","🌄","🌠","🎇","🎆","🌇","🌆","🏙","🌃","🌌","🌉","🌁" }, new String[]{ - "⌚", "📱", "📲", "💻", "⌨", "🖥", "🖨", "🖱", "🖲", "🕹", "🗜", "💽", "💾", "💿", "📀", "📼", "📷", "📸", "📹", "🎥", "📽", "🎞", "📞", "☎", "📟", "📠", "📺", "📻", "🎙", "🎚", "🎛", "🧭", "⏱", "⏲", "⏰", "🕰", "⌛", "⏳", "📡", "🔋", "🔌", "💡", "🔦", "🕯", "🪔", "🧯", "🛢", "💸", "💵", "💴", "💶", "💷", "💰", "💳", "💎", "⚖", "🧰", "🔧", "🔨", "⚒", "🛠", "⛏", "🔩", "⚙", "🧱", "⛓", "🧲", "🔫", "💣", "🧨", "🪓", "🔪", "🗡", "⚔", "🛡", "🚬", "⚰", "⚱", "🏺", "🔮", "📿", "🧿", "💈", "⚗", "🔭", "🔬", "🕳", "🩹", "🩺", "💊", "💉", "🩸", "🧬", "🦠", "🧫", "🧪", "🌡", "🧹", "🧺", "🧻", "🚽", "🚰", "🚿", "🛁", - "🛀", "🛀🏻", "🛀🏼", "🛀🏽", "🛀🏾", "🛀🏿", - "🧼", 
"🪒", "🧽", "🧴", "🛎", "🔑", "🗝", "🚪", "🪑", "🛋", "🛏", "🛌", "🧸", "🖼", "🛍", "🛒", "🎁", "🎈", "🎏", "🎀", "🎊", "🎉", "🎎", "🏮", "🎐", "🧧", "✉", "📩", "📨", "📧", "💌", "📥", "📤", "📦", "🏷", "📪", "📫", "📬", "📭", "📮", "📯", "📜", "📃", "📄", "📑", "🧾", "📊", "📈", "📉", "🗒", "🗓", "📆", "📅", "🗑", "📇", "🗃", "🗳", "🗄", "📋", "📁", "📂", "🗂", "🗞", "📰", "📓", "📔", "📒", "📕", "📗", "📘", "📙", "📚", "📖", "🔖", "🧷", "🔗", "📎", "🖇", "📐", "📏", "🧮", "📌", "📍", "✂", "🖊", "🖋", "✒", "🖌", "🖍", "📝", "✏", "🔍", "🔎", "🔏", "🔐", "🔒", "🔓" + "⌚","📱","📲","💻","⌨","🖥","🖨","🖱","🖲","🕹","🗜","💽","💾","💿","📀","📼","📷","📸","📹","🎥","📽","🎞","📞","☎","📟","📠","📺","📻","🎙","🎚","🎛","🧭","⏱","⏲","⏰","🕰","⌛","⏳","📡","🔋","🔌","💡","🔦","🕯","🪔","🧯","🛢","💸","💵","💴","💶","💷","🪙","💰","💳","💎","⚖","🪜","🧰","🪛","🔧","🔨","⚒","🛠","⛏","🪚","🔩","⚙","🪤","🧱","⛓","🧲","🔫","💣","🧨","🪓","🔪","🗡","⚔","🛡","🚬","⚰","🪦","⚱","🏺","🔮","📿","🧿","💈","⚗","🔭","🔬","🕳","🩹","🩺","💊","💉","🩸","🧬","🦠","🧫","🧪","🌡","🧹","🪠","🧺","🧻","🚽","🚰","🚿", + "🛁","🛀","🛀🏻","🛀🏼","🛀🏽","🛀🏾","🛀🏿", + "🧼","🪥","🪒","🧽","🪣","🧴","🛎","🔑","🗝","🚪","🪑","🛋","🛏","🛌","🧸","🪆","🖼","🪞","🪟","🛍","🛒","🎁","🎈","🎏","🎀","🪄","🪅","🎊","🎉","🎎","🏮","🎐","🧧","✉","📩","📨","📧","💌","📥","📤","📦","🏷","🪧","📪","📫","📬","📭","📮","📯","📜","📃","📄","📑","🧾","📊","📈","📉","🗒","🗓","📆","📅","🗑","📇","🗃","🗳","🗄","📋","📁","📂","🗂","🗞","📰","📓","📔","📒","📕","📗","📘","📙","📚","📖","🔖","🧷","🔗","📎","🖇","📐","📏","🧮","📌","📍","✂","🖊","🖋","✒","🖌","🖍","📝","✏","🔍","🔎","🔏","🔐","🔒","🔓" }, new String[]{ - "❤", "🧡", "💛", "💚", "💙", "💜", "🖤", "🤍", "🤎", "💔", "❣", "💕", "💞", "💓", "💗", "💖", "💘", "💝", "💟", "☮", "✝", "☪", "🕉", "☸", "✡", "🔯", "🕎", "☯", "☦", "🛐", "⛎", "♈", "♉", "♊", "♋", "♌", "♍", "♎", "♏", "♐", "♑", "♒", "♓", "🆔", "⚛", "🉑", "☢", "☣", "📴", "📳", "🈶", "🈚", "🈸", "🈺", "🈷", "✴", "🆚", "💮", "🉐", "㊙", "㊗", "🈴", "🈵", "🈹", "🈲", "🅰", "🅱", "🆎", "🆑", "🅾", "🆘", "❌", "⭕", "🛑", "⛔", "📛", "🚫", "💯", "💢", "♨", "🚷", "🚯", "🚳", "🚱", "🔞", "📵", "🚭", "❗", "❕", "❓", "❔", "‼", "⁉", "🔅", "🔆", "〽", "⚠", "🚸", "🔱", "⚜", "🔰", "♻", "✅", "🈯", "💹", "❇", "✳", "❎", "🌐", "💠", "Ⓜ", "🌀", "💤", "🏧", "🚾", "♿", "🅿", "🈳", "🈂", "🛂", 
"🛃", "🛄", "🛅", "🚹", "🚺", "🚼", "🚻", "🚮", "🎦", "📶", "🈁", "🔣", "ℹ", "🔤", "🔡", "🔠", "🆖", "🆗", "🆙", "🆒", "🆕", "🆓", "0⃣", "1⃣", "2⃣", "3⃣", "4⃣", "5⃣", "6⃣", "7⃣", "8⃣", "9⃣", "🔟", "🔢", "#⃣", "*⃣", "⏏", "▶", "⏸", "⏯", "⏹", "⏺", "⏭", "⏮", "⏩", "⏪", "⏫", "⏬", "◀", "🔼", "🔽", "➡", "⬅", "⬆", "⬇", "↗", "↘", "↙", "↖", "↕", "↔", "↪", "↩", "⤴", "⤵", "🔀", "🔁", "🔂", "🔄", "🔃", "🎵", "🎶", "➕", "➖", "➗", "✖", "♾", "💲", "💱", "™", "©", "®", "👁‍🗨", "🔚", "🔙", "🔛", "🔝", "🔜", "〰", "➰", "➿", "✔", "☑", "🔘", "🔴", "🟠", "🟡", "🟢", "🔵", "🟣", "⚫", "⚪", "🟤", "🔺", "🔻", "🔸", "🔹", "🔶", "🔷", "🔳", "🔲", "▪", "▫", "◾", "◽", "◼", "◻", "🟥", "🟧", "🟨", "🟩", "🟦", "🟪", "⬛", "⬜", "🟫", "🔈", "🔇", "🔉", "🔊", "🔔", "🔕", "📣", "📢", "💬", "💭", "🗯", "♠", "♣", "♥", "♦", "🃏", "🎴", "🀄", "🕐", "🕑", "🕒", "🕓", "🕔", "🕕", "🕖", "🕗", "🕘", "🕙", "🕚", "🕛", "🕜", "🕝", "🕞", "🕟", "🕠", "🕡", "🕢", "🕣", "🕤", "🕥", "🕦", "🕧" + "❤","🧡","💛","💚","💙","💜","🖤","🤍","🤎","💔","❣","💕","💞","💓","💗","💖","💘","💝","💟","☮","✝","☪","🕉","☸","✡","🔯","🕎","☯","☦","🛐","⛎","♈","♉","♊","♋","♌","♍","♎","♏","♐","♑","♒","♓","🆔","⚛","🉑","☢","☣","📴","📳","🈶","🈚","🈸","🈺","🈷","✴","🆚","💮","🉐","㊙","㊗","🈴","🈵","🈹","🈲","🅰","🅱","🆎","🆑","🅾","🆘","❌","⭕","🛑","⛔","📛","🚫","💯","💢","♨","🚷","🚯","🚳","🚱","🔞","📵","🚭","❗","❕","❓","❔","‼","⁉","🔅","🔆","〽","⚠","🚸","🔱","⚜","🔰","♻","✅","🈯","💹","❇","✳","❎","🌐","💠","Ⓜ","🌀","💤","🏧","🚾","♿","🅿","🛗","🈳","🈂","🛂","🛃","🛄","🛅","🚹","🚺","🚼","⚧","🚻","🚮","🎦","📶","🈁","🔣","ℹ","🔤","🔡","🔠","🆖","🆗","🆙","🆒","🆕","🆓","0⃣","1⃣","2⃣","3⃣","4⃣","5⃣","6⃣","7⃣","8⃣","9⃣","🔟","🔢","#⃣","*⃣","⏏","▶","⏸","⏯","⏹","⏺","⏭","⏮","⏩","⏪","⏫","⏬","◀","🔼","🔽","➡","⬅","⬆","⬇","↗","↘","↙","↖","↕","↔","↪","↩","⤴","⤵","🔀","🔁","🔂","🔄","🔃","🎵","🎶","➕","➖","➗","✖","♾","💲","💱","™","©","®","👁‍🗨","🔚","🔙","🔛","🔝","🔜","〰","➰","➿","✔","☑","🔘","🔴","🟠","🟡","🟢","🔵","🟣","⚫","⚪","🟤","🔺","🔻","🔸","🔹","🔶","🔷","🔳","🔲","▪","▫","◾","◽","◼","◻","🟥","🟧","🟨","🟩","🟦","🟪","⬛","⬜","🟫","🔈","🔇","🔉","🔊","🔔","🔕","📣","📢","💬","💭","🗯","♠","♣","♥","♦","🃏","🎴","🀄","🕐","🕑","🕒","🕓","🕔","🕕","🕖","🕗","🕘","🕙","🕚","🕛","🕜","🕝","🕞","🕟","🕠"
,"🕡","🕢","🕣","🕤","🕥","🕦","🕧" }, new String[]{ - "🏳", "🏴", "🏴‍☠", "🏁", "🚩", "🏳‍🌈", "🇺🇳", "🇦🇫", "🇦🇽", "🇦🇱", "🇩🇿", "🇦🇸", "🇦🇩", "🇦🇴", "🇦🇮", "🇦🇶", "🇦🇬", "🇦🇷", "🇦🇲", "🇦🇼", "🇦🇺", "🇦🇹", "🇦🇿", "🇧🇸", "🇧🇭", "🇧🇩", "🇧🇧", "🇧🇾", "🇧🇪", "🇧🇿", "🇧🇯", "🇧🇲", "🇧🇹", "🇧🇴", "🇧🇦", "🇧🇼", "🇧🇷", "🇮🇴", "🇻🇬", "🇧🇳", "🇧🇬", "🇧🇫", "🇧🇮", "🇰🇭", "🇨🇲", "🇨🇦", "🇮🇨", "🇨🇻", "🇧🇶", "🇰🇾", "🇨🇫", "🇹🇩", "🇨🇱", "🇨🇳", "🇨🇽", "🇨🇨", "🇨🇴", "🇰🇲", "🇨🇬", "🇨🇩", "🇨🇰", "🇨🇷", "🇨🇮", "🇭🇷", "🇨🇺", "🇨🇼", "🇨🇾", "🇨🇿", "🇩🇰", "🇩🇯", "🇩🇲", "🇩🇴", "🇪🇨", "🇪🇬", "🇸🇻", "🇬🇶", "🇪🇷", "🇪🇪", "🇸🇿", "🇪🇹", "🇪🇺", "🇫🇰", "🇫🇴", "🇫🇯", "🇫🇮", "🇫🇷", "🇬🇫", "🇵🇫", "🇹🇫", "🇬🇦", "🇬🇲", "🇬🇪", "🇩🇪", "🇬🇭", "🇬🇮", "🇬🇷", "🇬🇱", "🇬🇩", "🇬🇵", "🇬🇺", "🇬🇹", "🇬🇬", "🇬🇳", "🇬🇼", "🇬🇾", "🇭🇹", "🇭🇳", "🇭🇰", "🇭🇺", "🇮🇸", "🇮🇳", "🇮🇩", "🇮🇷", "🇮🇶", "🇮🇪", "🇮🇲", "🇮🇱", "🇮🇹", "🇯🇲", "🇯🇵", "🎌", "🇯🇪", "🇯🇴", "🇰🇿", "🇰🇪", "🇰🇮", "🇽🇰", "🇰🇼", "🇰🇬", "🇱🇦", "🇱🇻", "🇱🇧", "🇱🇸", "🇱🇷", "🇱🇾", "🇱🇮", "🇱🇹", "🇱🇺", "🇲🇴", "🇲🇬", "🇲🇼", "🇲🇾", "🇲🇻", "🇲🇱", "🇲🇹", "🇲🇭", "🇲🇶", "🇲🇷", "🇲🇺", "🇾🇹", "🇲🇽", "🇫🇲", "🇲🇩", "🇲🇨", "🇲🇳", "🇲🇪", "🇲🇸", "🇲🇦", "🇲🇿", "🇲🇲", "🇳🇦", "🇳🇷", "🇳🇵", "🇳🇱", "🇳🇨", "🇳🇿", "🇳🇮", "🇳🇪", "🇳🇬", "🇳🇺", "🇳🇫", "🇰🇵", "🇲🇰", "🇲🇵", "🇳🇴", "🇴🇲", "🇵🇰", "🇵🇼", "🇵🇸", "🇵🇦", "🇵🇬", "🇵🇾", "🇵🇪", "🇵🇭", "🇵🇳", "🇵🇱", "🇵🇹", "🇵🇷", "🇶🇦", "🇷🇪", "🇷🇴", "🇷🇺", "🇷🇼", "🇼🇸", "🇸🇲", "🇸🇹", "🇸🇦", "🇸🇳", "🇷🇸", "🇸🇨", "🇸🇱", "🇸🇬", "🇸🇽", "🇸🇰", "🇸🇮", "🇬🇸", "🇸🇧", "🇸🇴", "🇿🇦", "🇰🇷", "🇸🇸", "🇪🇸", "🇱🇰", "🇧🇱", "🇸🇭", "🇰🇳", "🇱🇨", "🇵🇲", "🇻🇨", "🇸🇩", "🇸🇷", "🇸🇪", "🇨🇭", "🇸🇾", "🇹🇼", "🇹🇯", "🇹🇿", "🇹🇭", "🇹🇱", "🇹🇬", "🇹🇰", "🇹🇴", "🇹🇹", "🇹🇳", "🇹🇷", "🇹🇲", "🇹🇨", "🇹🇻", "🇻🇮", "🇺🇬", "🇺🇦", "🇦🇪", "🇬🇧", "🏴󠁧󠁢󠁥󠁮󠁧󠁿", "🏴󠁧󠁢󠁳󠁣󠁴󠁿", "🏴󠁧󠁢󠁷󠁬󠁳󠁿", "🇺🇸", "🇺🇾", "🇺🇿", "🇻🇺", "🇻🇦", "🇻🇪", "🇻🇳", "🇼🇫", "🇪🇭", "🇾🇪", "🇿🇲", "🇿🇼" + 
"🏳","🏴","🏴‍☠","🏁","🚩","🏳‍🌈","🏳‍⚧","🇺🇳","🇦🇫","🇦🇽","🇦🇱","🇩🇿","🇦🇸","🇦🇩","🇦🇴","🇦🇮","🇦🇶","🇦🇬","🇦🇷","🇦🇲","🇦🇼","🇦🇺","🇦🇹","🇦🇿","🇧🇸","🇧🇭","🇧🇩","🇧🇧","🇧🇾","🇧🇪","🇧🇿","🇧🇯","🇧🇲","🇧🇹","🇧🇴","🇧🇦","🇧🇼","🇧🇷","🇮🇴","🇻🇬","🇧🇳","🇧🇬","🇧🇫","🇧🇮","🇰🇭","🇨🇲","🇨🇦","🇮🇨","🇨🇻","🇧🇶","🇰🇾","🇨🇫","🇹🇩","🇨🇱","🇨🇳","🇨🇽","🇨🇨","🇨🇴","🇰🇲","🇨🇬","🇨🇩","🇨🇰","🇨🇷","🇨🇮","🇭🇷","🇨🇺","🇨🇼","🇨🇾","🇨🇿","🇩🇰","🇩🇯","🇩🇲","🇩🇴","🇪🇨","🇪🇬","🇸🇻","🇬🇶","🇪🇷","🇪🇪","🇸🇿","🇪🇹","🇪🇺","🇫🇰","🇫🇴","🇫🇯","🇫🇮","🇫🇷","🇬🇫","🇵🇫","🇹🇫","🇬🇦","🇬🇲","🇬🇪","🇩🇪","🇬🇭","🇬🇮","🇬🇷","🇬🇱","🇬🇩","🇬🇵","🇬🇺","🇬🇹","🇬🇬","🇬🇳","🇬🇼","🇬🇾","🇭🇹","🇭🇳","🇭🇰","🇭🇺","🇮🇸","🇮🇳","🇮🇩","🇮🇷","🇮🇶","🇮🇪","🇮🇲","🇮🇱","🇮🇹","🇯🇲","🇯🇵","🎌","🇯🇪","🇯🇴","🇰🇿","🇰🇪","🇰🇮","🇽🇰","🇰🇼","🇰🇬","🇱🇦","🇱🇻","🇱🇧","🇱🇸","🇱🇷","🇱🇾","🇱🇮","🇱🇹","🇱🇺","🇲🇴","🇲🇬","🇲🇼","🇲🇾","🇲🇻","🇲🇱","🇲🇹","🇲🇭","🇲🇶","🇲🇷","🇲🇺","🇾🇹","🇲🇽","🇫🇲","🇲🇩","🇲🇨","🇲🇳","🇲🇪","🇲🇸","🇲🇦","🇲🇿","🇲🇲","🇳🇦","🇳🇷","🇳🇵","🇳🇱","🇳🇨","🇳🇿","🇳🇮","🇳🇪","🇳🇬","🇳🇺","🇳🇫","🇰🇵","🇲🇰","🇲🇵","🇳🇴","🇴🇲","🇵🇰","🇵🇼","🇵🇸","🇵🇦","🇵🇬","🇵🇾","🇵🇪","🇵🇭","🇵🇳","🇵🇱","🇵🇹","🇵🇷","🇶🇦","🇷🇪","🇷🇴","🇷🇺","🇷🇼","🇼🇸","🇸🇲","🇸🇹","🇸🇦","🇸🇳","🇷🇸","🇸🇨","🇸🇱","🇸🇬","🇸🇽","🇸🇰","🇸🇮","🇬🇸","🇸🇧","🇸🇴","🇿🇦","🇰🇷","🇸🇸","🇪🇸","🇱🇰","🇧🇱","🇸🇭","🇰🇳","🇱🇨","🇵🇲","🇻🇨","🇸🇩","🇸🇷","🇸🇪","🇨🇭","🇸🇾","🇹🇼","🇹🇯","🇹🇿","🇹🇭","🇹🇱","🇹🇬","🇹🇰","🇹🇴","🇹🇹","🇹🇳","🇹🇷","🇹🇲","🇹🇨","🇹🇻","🇺🇬","🇺🇦","🇦🇪","🇬🇧","🏴󠁧󠁢󠁥󠁮󠁧󠁿","🏴󠁧󠁢󠁳󠁣󠁴󠁿","🏴󠁧󠁢󠁷󠁬󠁳󠁿","🇺🇸","🇺🇾","🇻🇮","🇺🇿","🇻🇺","🇻🇦","🇻🇪","🇻🇳","🇼🇫","🇪🇭","🇾🇪","🇿🇲","🇿🇼" } }; @@ -804,7 +822,7 @@ public class EmojiData { } public static boolean isCofinEmoji(String emoji) { - return "⚰️".equals(emoji); + return "⚰".equals(emoji); } static { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java b/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java index adf89aba8..2f3c38a99 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java @@ -191,6 +191,9 @@ public class EmuDetector { if (!detectResult) { detectResult = checkPackageName(); } + if (!detectResult) { + detectResult = EmuInputDevicesDetector.detect(); + } 
return detectResult; } catch (Exception ignore) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/EmuInputDevicesDetector.java b/TMessagesProj/src/main/java/org/telegram/messenger/EmuInputDevicesDetector.java new file mode 100644 index 000000000..b57991d87 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/EmuInputDevicesDetector.java @@ -0,0 +1,62 @@ +package org.telegram.messenger; + +import android.text.TextUtils; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.List; + +public final class EmuInputDevicesDetector { + + private static final String NAME_PREFIX = "N: Name=\""; + private static final String INPUT_DEVICES_FILE = "/proc/bus/input/devices"; + private static final String[] RESTRICTED_DEVICES = {"bluestacks", "memuhyperv", "virtualbox"}; + + private EmuInputDevicesDetector() { + } + + public static boolean detect() { + final List deviceNames = getInputDevicesNames(); + + if (deviceNames != null) { + for (String deviceName : deviceNames) { + for (String restrictedDeviceName : RESTRICTED_DEVICES) { + if (deviceName.toLowerCase().contains(restrictedDeviceName)) { + return true; + } + } + } + } + + return false; + } + + private static List getInputDevicesNames() { + final File devicesFile = new File(INPUT_DEVICES_FILE); + + if (!devicesFile.canRead()) { + return null; + } + + try { + final List lines = new ArrayList<>(); + final BufferedReader r = new BufferedReader(new InputStreamReader(new FileInputStream(devicesFile))); + for (String line; (line = r.readLine()) != null; ) { + if (line.startsWith(NAME_PREFIX)) { + final String name = line.substring(NAME_PREFIX.length(), line.length() - 1); + if (!TextUtils.isEmpty(name)) { + lines.add(name); + } + } + } + return lines; + } catch (IOException e) { + FileLog.e(e); + return null; + } + } +} diff --git 
a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java index f622123a0..29a26edc2 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java @@ -982,7 +982,7 @@ public class FileLoader extends BaseController { return getPathToAttach(photoSize, ext, false); } else if (attach instanceof TLRPC.PhotoSize) { TLRPC.PhotoSize photoSize = (TLRPC.PhotoSize) attach; - if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + if (photoSize instanceof TLRPC.TL_photoStrippedSize || photoSize instanceof TLRPC.TL_photoPathSize) { dir = null; } else if (photoSize.location == null || photoSize.location.key != null || photoSize.location.volume_id == Integer.MIN_VALUE && photoSize.location.local_id < 0 || photoSize.size < 0) { dir = getDirectory(MEDIA_DIR_CACHE); @@ -1036,7 +1036,7 @@ public class FileLoader extends BaseController { TLRPC.PhotoSize closestObject = null; for (int a = 0; a < sizes.size(); a++) { TLRPC.PhotoSize obj = sizes.get(a); - if (obj == null || obj instanceof TLRPC.TL_photoSizeEmpty) { + if (obj == null || obj instanceof TLRPC.TL_photoSizeEmpty || obj instanceof TLRPC.TL_photoPathSize) { continue; } if (byMinSide) { @@ -1056,6 +1056,20 @@ public class FileLoader extends BaseController { return closestObject; } + public static TLRPC.TL_photoPathSize getPathPhotoSize(ArrayList sizes) { + if (sizes == null || sizes.isEmpty()) { + return null; + } + for (int a = 0; a < sizes.size(); a++) { + TLRPC.PhotoSize obj = sizes.get(a); + if (obj instanceof TLRPC.TL_photoPathSize) { + continue; + } + return (TLRPC.TL_photoPathSize) obj; + } + return null; + } + public static String getFileExtension(File file) { String name = file.getName(); try { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java 
b/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java index 0f9f6fc0a..4b74a055b 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java @@ -658,17 +658,6 @@ public class FileRefController extends BaseController { } if (result != null) { if (cache) { - if (message.peer_id != null && message.peer_id.channel_id != 0) { - for (int a = 0, N2 = res.chats.size(); a < N2; a++) { - TLRPC.Chat chat = res.chats.get(a); - if (chat.id == message.peer_id.channel_id) { - if (chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } - break; - } - } - } getMessagesStorage().replaceMessageIfExists(message, res.users, res.chats, false); } break; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/GcmPushListenerService.java b/TMessagesProj/src/main/java/org/telegram/messenger/GcmPushListenerService.java index 0cc0219d7..8c3739424 100755 --- a/TMessagesProj/src/main/java/org/telegram/messenger/GcmPushListenerService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/GcmPushListenerService.java @@ -695,6 +695,22 @@ public class GcmPushListenerService extends FirebaseMessagingService { messageText = LocaleController.formatString("NotificationGroupAddMember", R.string.NotificationGroupAddMember, args[0], args[1], args[2]); break; } + case "CHAT_VOICECHAT_START": { + messageText = LocaleController.formatString("NotificationGroupCreatedCall", R.string.NotificationGroupCreatedCall, args[0], args[1]); + break; + } + case "CHAT_VOICECHAT_INVITE": { + messageText = LocaleController.formatString("NotificationGroupInvitedToCall", R.string.NotificationGroupInvitedToCall, args[0], args[1], args[2]); + break; + } + case "CHAT_VOICECHAT_END": { + messageText = LocaleController.formatString("NotificationGroupEndedCall", R.string.NotificationGroupEndedCall, args[0], args[1]); + break; + } + case "CHAT_VOICECHAT_INVITE_YOU": { + 
messageText = LocaleController.formatString("NotificationGroupInvitedYouToCall", R.string.NotificationGroupInvitedYouToCall, args[0], args[1]); + break; + } case "CHAT_DELETE_MEMBER": { messageText = LocaleController.formatString("NotificationGroupKickMember", R.string.NotificationGroupKickMember, args[0], args[1]); break; @@ -996,7 +1012,7 @@ public class GcmPushListenerService extends FirebaseMessagingService { messageOwner.action = new TLRPC.TL_messageActionPinMessage(); } if (supergroup) { - messageOwner.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; + messageOwner.flags |= 0x80000000; } messageOwner.dialog_id = dialogId; if (channel_id != 0) { @@ -1026,7 +1042,7 @@ public class GcmPushListenerService extends FirebaseMessagingService { messageOwner.silent = silent; messageOwner.from_scheduled = scheduled; - MessageObject messageObject = new MessageObject(currentAccount, messageOwner, messageText, name, userName, localMessage, channel, edited); + MessageObject messageObject = new MessageObject(currentAccount, messageOwner, messageText, name, userName, localMessage, channel, supergroup, edited); ArrayList arrayList = new ArrayList<>(); arrayList.add(messageObject); canRelease = false; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java index 03ad700a0..4a18dab26 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java @@ -39,7 +39,6 @@ import org.telegram.ui.Components.AnimatedFileDrawable; import org.telegram.ui.Components.Point; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.SlotsDrawable; -import org.telegram.ui.Components.SvgHelper; import org.telegram.ui.Components.ThemePreviewDrawable; import java.io.ByteArrayOutputStream; @@ -854,7 +853,19 @@ public class ImageLoader { int size = document != null ? 
cacheImage.size : cacheImage.imageLocation.currentSize; fileDrawable = new AnimatedFileDrawable(cacheImage.finalFilePath, false, size, document, document == null ? cacheImage.imageLocation : null, cacheImage.parentObject, seekTo, cacheImage.currentAccount, false); } else { - fileDrawable = new AnimatedFileDrawable(cacheImage.finalFilePath, "d".equals(cacheImage.filter), 0, null, null, null, seekTo, cacheImage.currentAccount, false); + + int w = 0; + int h = 0; + if (cacheImage.filter != null) { + String[] args = cacheImage.filter.split("_"); + if (args.length >= 2) { + float w_filter = Float.parseFloat(args[0]); + float h_filter = Float.parseFloat(args[1]); + w = (int) (w_filter * AndroidUtilities.density); + h = (int) (h_filter * AndroidUtilities.density); + } + } + fileDrawable = new AnimatedFileDrawable(cacheImage.finalFilePath, "d".equals(cacheImage.filter), 0, null, null, null, seekTo, cacheImage.currentAccount, false , w, h); } Thread.interrupted(); onPostExecute(fileDrawable); @@ -1800,8 +1811,33 @@ public class ImageLoader { try { if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) { - telegramPath = new File(Environment.getExternalStorageDirectory(), "Telegram"); + File path = Environment.getExternalStorageDirectory(); + if (Build.VERSION.SDK_INT >= 19 && !TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + ArrayList dirs = AndroidUtilities.getRootDirs(); + for (int a = 0, N = dirs.size(); a < N; a++) { + File dir = dirs.get(a); + if (dir.getAbsolutePath().startsWith(SharedConfig.storageCacheDir)) { + path = dir; + break; + } + } + } + telegramPath = new File(path, "Telegram"); telegramPath.mkdirs(); + if (Build.VERSION.SDK_INT >= 19 && !telegramPath.isDirectory()) { + ArrayList dirs = AndroidUtilities.getDataDirs(); + if (dirs != null) { + for (int a = 0, N = dirs.size(); a < N; a++) { + File dir = dirs.get(a); + if (dir.getAbsolutePath().startsWith(SharedConfig.storageCacheDir)) { + path = dir; + telegramPath = new File(path, 
"Telegram"); + telegramPath.mkdirs(); + break; + } + } + } + } if (telegramPath.isDirectory()) { try { @@ -2323,7 +2359,7 @@ public class ImageLoader { if (cacheFile == null) { int fileSize = 0; - if (imageLocation.photoSize instanceof TLRPC.TL_photoStrippedSize) { + if (imageLocation.photoSize instanceof TLRPC.TL_photoStrippedSize || imageLocation.photoSize instanceof TLRPC.TL_photoPathSize) { onlyCache = true; } else if (imageLocation.secureDocument != null) { img.secureDocument = imageLocation.secureDocument; @@ -2648,7 +2684,7 @@ public class ImageLoader { String url = object.getKey(parentObject, mediaLocation != null ? mediaLocation : imageLocation, true); if (object.path != null) { url = url + "." + getHttpUrlExtension(object.path, "jpg"); - } else if (object.photoSize instanceof TLRPC.TL_photoStrippedSize) { + } else if (object.photoSize instanceof TLRPC.TL_photoStrippedSize || object.photoSize instanceof TLRPC.TL_photoPathSize) { url = url + "." + ext; } else if (object.location != null) { url = url + "." + ext; @@ -2712,7 +2748,7 @@ public class ImageLoader { thumbUrl = thumbLocation.getKey(parentObject, strippedLoc, true); if (thumbLocation.path != null) { thumbUrl = thumbUrl + "." + getHttpUrlExtension(thumbLocation.path, "jpg"); - } else if (thumbLocation.photoSize instanceof TLRPC.TL_photoStrippedSize) { + } else if (thumbLocation.photoSize instanceof TLRPC.TL_photoStrippedSize || thumbLocation.photoSize instanceof TLRPC.TL_photoPathSize) { thumbUrl = thumbUrl + "." + thumbExt; } else if (thumbLocation.location != null) { thumbUrl = thumbUrl + "." 
+ thumbExt; @@ -2764,8 +2800,10 @@ public class ImageLoader { return; } HttpImageTask oldTask = img.httpTask; - img.httpTask = new HttpImageTask(oldTask.cacheImage, oldTask.imageSize); - httpTasks.add(img.httpTask); + if (oldTask != null) { + img.httpTask = new HttpImageTask(oldTask.cacheImage, oldTask.imageSize); + httpTasks.add(img.httpTask); + } runHttpTasks(false); }); } @@ -2777,8 +2815,10 @@ public class ImageLoader { return; } ArtworkLoadTask oldTask = img.artworkTask; - img.artworkTask = new ArtworkLoadTask(oldTask.cacheImage); - artworkTasks.add(img.artworkTask); + if (oldTask != null) { + img.artworkTask = new ArtworkLoadTask(oldTask.cacheImage); + artworkTasks.add(img.artworkTask); + } runArtworkTasks(false); }); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java index 2489aca77..2b42979fc 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java @@ -86,7 +86,7 @@ public class ImageLocation { } public static ImageLocation getForPhoto(TLRPC.PhotoSize photoSize, TLRPC.Photo photo) { - if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + if (photoSize instanceof TLRPC.TL_photoStrippedSize || photoSize instanceof TLRPC.TL_photoPathSize) { ImageLocation imageLocation = new ImageLocation(); imageLocation.photoSize = photoSize; return imageLocation; @@ -152,7 +152,7 @@ public class ImageLocation { } public static ImageLocation getForSticker(TLRPC.PhotoSize photoSize, TLRPC.Document sticker) { - if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + if (photoSize instanceof TLRPC.TL_photoStrippedSize || photoSize instanceof TLRPC.TL_photoPathSize) { ImageLocation imageLocation = new ImageLocation(); imageLocation.photoSize = photoSize; return imageLocation; @@ -192,7 +192,7 @@ public class ImageLocation { } public static ImageLocation 
getForDocument(TLRPC.PhotoSize photoSize, TLRPC.Document document) { - if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + if (photoSize instanceof TLRPC.TL_photoStrippedSize || photoSize instanceof TLRPC.TL_photoPathSize) { ImageLocation imageLocation = new ImageLocation(); imageLocation.photoSize = photoSize; return imageLocation; @@ -291,7 +291,7 @@ public class ImageLocation { public String getKey(Object parentObject, Object fullObject, boolean url) { if (secureDocument != null) { return secureDocument.secureFile.dc_id + "_" + secureDocument.secureFile.id; - } else if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + } else if (photoSize instanceof TLRPC.TL_photoStrippedSize || photoSize instanceof TLRPC.TL_photoPathSize) { if (photoSize.bytes.length > 0) { return getStippedKey(parentObject, fullObject, photoSize); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java index 8c9aaeeec..071afd27a 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java @@ -354,6 +354,10 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg currentAlpha = 1.0f; currentSize = 0; + if (staticThumbDrawable instanceof SvgHelper.SvgDrawable) { + ((SvgHelper.SvgDrawable) staticThumbDrawable).setParent(this); + } + ImageLoader.getInstance().cancelLoadingForImageReceiver(this, true); if (parentView != null) { if (invalidateAll) { @@ -492,6 +496,10 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg mediaShader = null; currentAlpha = 1.0f; + if (staticThumbDrawable instanceof SvgHelper.SvgDrawable) { + ((SvgHelper.SvgDrawable) staticThumbDrawable).setParent(this); + } + if (delegate != null) { delegate.didSetImage(this, currentImageDrawable != null || currentThumbDrawable != null || staticThumbDrawable != null || 
currentMediaDrawable != null, currentImageDrawable == null && currentMediaDrawable == null, false); } @@ -1199,7 +1207,13 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } } if (thumbDrawable != null) { - drawDrawable(canvas, thumbDrawable, (int) (overrideAlpha * 255), thumbShaderToUse, thumbOrientation); + int alpha; + if (thumbDrawable instanceof SvgHelper.SvgDrawable) { + alpha = (int) (overrideAlpha * 255 * (1.0f - currentAlpha)); + } else { + alpha = (int) (overrideAlpha * 255); + } + drawDrawable(canvas, thumbDrawable, alpha, thumbShaderToUse, thumbOrientation); } } drawDrawable(canvas, drawable, (int) (overrideAlpha * currentAlpha * 255), shaderToUse, orientation); @@ -1391,15 +1405,29 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg return; } isVisible = value; - if (invalidate && parentView != null) { - if (invalidateAll) { - parentView.invalidate(); - } else { - parentView.invalidate((int) imageX, (int) imageY, (int) (imageX + imageW), (int) (imageY + imageH)); - } + if (invalidate) { + invalidate(); } } + public void invalidate() { + if (parentView == null) { + return; + } + if (invalidateAll) { + parentView.invalidate(); + } else { + parentView.invalidate((int) imageX, (int) imageY, (int) (imageX + imageW), (int) (imageY + imageH)); + } + } + + public void getParentPosition(int[] position) { + if (parentView == null) { + return; + } + parentView.getLocationInWindow(position); + } + public boolean getVisible() { return isVisible; } @@ -1809,7 +1837,7 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg if (currentMediaDrawable instanceof AnimatedFileDrawable && ((AnimatedFileDrawable) currentMediaDrawable).hasBitmap()) { allowCorssfade = false; } else if (currentImageDrawable instanceof RLottieDrawable) { - allowCorssfade = staticThumbDrawable instanceof LoadingStickerDrawable; + allowCorssfade = staticThumbDrawable instanceof LoadingStickerDrawable || 
staticThumbDrawable instanceof SvgHelper.SvgDrawable; } if (allowCorssfade && (currentThumbDrawable == null && staticThumbDrawable == null || currentAlpha == 1.0f || forceCrossfade)) { currentAlpha = 0.0f; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java index 443519e59..0628e9bbd 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java @@ -235,8 +235,8 @@ public class LocaleController { "tk", "ur", "zu", "mn", "gsw", "chr", "rm", "pt", "an", "ast"}, new PluralRules_One()); addRules(new String[]{"cs", "sk"}, new PluralRules_Czech()); addRules(new String[]{"ff", "fr", "kab"}, new PluralRules_French()); - addRules(new String[]{"ru", "uk", "be", "sh"}, new PluralRules_Balkan()); - addRules(new String[]{"sr", "hr", "bs"}, new PluralRules_Serbian()); + addRules(new String[]{"ru", "uk", "be"}, new PluralRules_Balkan()); + addRules(new String[]{"sr", "hr", "bs", "sh"}, new PluralRules_Serbian()); addRules(new String[]{"lv"}, new PluralRules_Latvian()); addRules(new String[]{"lt"}, new PluralRules_Lithuanian()); addRules(new String[]{"pl"}, new PluralRules_Polish()); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/LocationController.java b/TMessagesProj/src/main/java/org/telegram/messenger/LocationController.java index ea2444026..3bfdc41e6 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/LocationController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/LocationController.java @@ -564,6 +564,9 @@ public class LocationController extends BaseController implements NotificationCe } private void setLastKnownLocation(Location location) { + if (location != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 && (SystemClock.elapsedRealtimeNanos() - location.getElapsedRealtimeNanos()) / 1000000000 > 60 * 
5) { + return; + } lastKnownLocation = location; if (lastKnownLocation != null) { AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.newLocationAvailable)); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java index afe8df997..bd1c14d21 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java @@ -51,6 +51,7 @@ import android.view.HapticFeedbackConstants; import android.view.TextureView; import android.view.View; import android.view.WindowManager; +import android.webkit.MimeTypeMap; import android.widget.FrameLayout; import com.google.android.exoplayer2.C; @@ -86,6 +87,7 @@ import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.Locale; import java.util.Timer; @@ -1295,13 +1297,16 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, playlistMap.put(object.getId(), object); playlistMaxId[0] = Math.min(playlistMaxId[0], object.getId()); } - playlistClassGuid = ConnectionsManager.generateClassGuid(); + sortPlaylist(); if (SharedConfig.shuffleMusic) { buildShuffledPlayList(); - currentPlaylistNum = 0; - } else { - currentPlaylistNum += arrayListBegin.size(); + } else if (playingMessageObject != null) { + int newIndex = playlist.indexOf(playingMessageObject); + if (newIndex >= 0) { + currentPlaylistNum = newIndex; + } } + playlistClassGuid = ConnectionsManager.generateClassGuid(); } } else if (id == NotificationCenter.mediaDidLoad) { int guid = (Integer) args[3]; @@ -1326,6 +1331,13 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, playlistMap.put(message.getId(), message); playlistMaxId[loadIndex] 
= Math.min(playlistMaxId[loadIndex], message.getId()); } + sortPlaylist(); + if (playingMessageObject != null) { + int newIndex = playlist.indexOf(playingMessageObject); + if (newIndex >= 0) { + currentPlaylistNum = newIndex; + } + } loadingPlaylist = false; if (SharedConfig.shuffleMusic) { buildShuffledPlayList(); @@ -1915,7 +1927,6 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, MessageObject messageObject = playlist.get(currentPlaylistNum); all.remove(currentPlaylistNum); - shuffledPlaylist.add(messageObject); int count = all.size(); for (int a = 0; a < count; a++) { @@ -1923,6 +1934,8 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, shuffledPlaylist.add(all.get(index)); all.remove(index); } + shuffledPlaylist.add(messageObject); + currentPlaylistNum = shuffledPlaylist.size() - 1; } public void loadMoreMusic() { @@ -1977,42 +1990,41 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, request = req; } loadingPlaylist = true; - ConnectionsManager.getInstance(currentAccount).sendRequest(request, (response, error) -> { - AndroidUtilities.runOnUIThread(() -> { - if (playlistClassGuid != finalPlaylistGuid || playlistGlobalSearchParams == null || playingMessageObject == null) { - return; - } - if (error != null) { - return; - } - loadingPlaylist = false; + ConnectionsManager.getInstance(currentAccount).sendRequest(request, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (playlistClassGuid != finalPlaylistGuid || playlistGlobalSearchParams == null || playingMessageObject == null) { + return; + } + if (error != null) { + return; + } + loadingPlaylist = false; - TLRPC.messages_Messages res = (TLRPC.messages_Messages) response; - playlistGlobalSearchParams.nextSearchRate = res.next_rate; - MessagesStorage.getInstance(currentAccount).putUsersAndChats(res.users, res.chats, true, true); - MessagesController.getInstance(currentAccount).putUsers(res.users, 
false); - MessagesController.getInstance(currentAccount).putChats(res.chats, false); - int n = res.messages.size(); - int addedCount = 0; - for (int i = 0; i < n; i++) { - MessageObject messageObject = new MessageObject(currentAccount, res.messages.get(i), false, true); - if (playlistMap.containsKey(messageObject.getId())) { - continue; - } - playlist.add(0, messageObject); - playlistMap.put(messageObject.getId(), messageObject); - addedCount++; + TLRPC.messages_Messages res = (TLRPC.messages_Messages) response; + playlistGlobalSearchParams.nextSearchRate = res.next_rate; + MessagesStorage.getInstance(currentAccount).putUsersAndChats(res.users, res.chats, true, true); + MessagesController.getInstance(currentAccount).putUsers(res.users, false); + MessagesController.getInstance(currentAccount).putChats(res.chats, false); + int n = res.messages.size(); + int addedCount = 0; + for (int i = 0; i < n; i++) { + MessageObject messageObject = new MessageObject(currentAccount, res.messages.get(i), false, true); + if (playlistMap.containsKey(messageObject.getId())) { + continue; } - loadingPlaylist = false; - playlistGlobalSearchParams.endReached = playlist.size() == playlistGlobalSearchParams.totalCount; - if (SharedConfig.shuffleMusic) { - buildShuffledPlayList(); - } - if (addedCount != 0) { - NotificationCenter.getInstance(playingMessageObject.currentAccount).postNotificationName(NotificationCenter.moreMusicDidLoad, addedCount); - } - }); - }); + playlist.add(0, messageObject); + playlistMap.put(messageObject.getId(), messageObject); + addedCount++; + } + sortPlaylist(); + loadingPlaylist = false; + playlistGlobalSearchParams.endReached = playlist.size() == playlistGlobalSearchParams.totalCount; + if (SharedConfig.shuffleMusic) { + buildShuffledPlayList(); + } + if (addedCount != 0) { + NotificationCenter.getInstance(playingMessageObject.currentAccount).postNotificationName(NotificationCenter.moreMusicDidLoad, addedCount); + } + })); } return; } @@ -2035,6 +2047,10 @@ 
public class MediaController implements AudioManager.OnAudioFocusChangeListener, public boolean setPlaylist(ArrayList messageObjects, MessageObject current, long mergeDialogId, boolean loadMusic, PlaylistGlobalSearchParams params) { if (playingMessageObject == current) { + int newIdx = playlist.indexOf(current); + if (newIdx >= 0) { + currentPlaylistNum = newIdx; + } return playMessage(current); } forceLoopCurrentPlaylist = !loadMusic; @@ -2042,13 +2058,22 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, playMusicAgain = !playlist.isEmpty(); clearPlaylist(); playlistGlobalSearchParams = params; + boolean isSecretChat = !messageObjects.isEmpty() && (int) messageObjects.get(0).getDialogId() == 0; + long minId = Long.MAX_VALUE; + long maxId = Long.MIN_VALUE; for (int a = messageObjects.size() - 1; a >= 0; a--) { MessageObject messageObject = messageObjects.get(a); if (messageObject.isMusic()) { + long id = messageObject.getIdWithChannel(); + if (id > 0 || isSecretChat) { + minId = Math.min(minId, id); + maxId = Math.max(maxId, id); + } playlist.add(messageObject); playlistMap.put(messageObject.getId(), messageObject); } } + sortPlaylist(); currentPlaylistNum = playlist.indexOf(current); if (currentPlaylistNum == -1) { clearPlaylist(); @@ -2059,11 +2084,10 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, if (current.isMusic() && !current.scheduled) { if (SharedConfig.shuffleMusic) { buildShuffledPlayList(); - currentPlaylistNum = 0; } if (loadMusic) { if (playlistGlobalSearchParams == null) { - MediaDataController.getInstance(current.currentAccount).loadMusic(current.getDialogId(), playlist.get(0).getIdWithChannel(), playlist.get(playlist.size() - 1).getIdWithChannel()); + MediaDataController.getInstance(current.currentAccount).loadMusic(current.getDialogId(), minId, maxId); } else { playlistClassGuid = ConnectionsManager.generateClassGuid(); } @@ -2072,6 +2096,26 @@ public class MediaController 
implements AudioManager.OnAudioFocusChangeListener, return playMessage(current); } + private void sortPlaylist() { + Collections.sort(playlist, (o1, o2) -> { + int mid1 = o1.getId(); + int mid2 = o2.getId(); + long group1 = o1.messageOwner.grouped_id; + long group2 = o2.messageOwner.grouped_id; + if (mid1 < 0 && mid2 < 0) { + if (group1 != 0 && group1 == group2) { + return Integer.compare(mid1, mid2); + } + return Integer.compare(mid2, mid1); + } else { + if (group1 != 0 && group1 == group2) { + return Integer.compare(mid2, mid1); + } + return Integer.compare(mid1, mid2); + } + }); + } + public void playNextMessage() { playNextMessageWithoutOrder(false); } @@ -3110,7 +3154,6 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, if (oldShuffle != SharedConfig.shuffleMusic) { if (SharedConfig.shuffleMusic) { buildShuffledPlayList(); - currentPlaylistNum = 0; } else { if (playingMessageObject != null) { currentPlaylistNum = playlist.indexOf(playingMessageObject); @@ -3590,13 +3633,39 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, break; } destination.transferFrom(source, a, Math.min(4096, size - a)); + if (a + 4096 >= size || lastProgress <= SystemClock.elapsedRealtime() - 500) { + lastProgress = SystemClock.elapsedRealtime(); + final int progress = (int) (finishedProgress + 100.0f / messageObjects.size() * a / size); + AndroidUtilities.runOnUIThread(() -> { + try { + progressDialog.setProgress(progress); + } catch (Exception e) { + FileLog.e(e); + } + }); + } } if (!cancelled) { if (isMusic) { AndroidUtilities.addMediaToGallery(Uri.fromFile(destFile)); } else { DownloadManager downloadManager = (DownloadManager) ApplicationLoader.applicationContext.getSystemService(Context.DOWNLOAD_SERVICE); - downloadManager.addCompletedDownload(destFile.getName(), destFile.getName(), false, mime, destFile.getAbsolutePath(), destFile.length(), true); + String mimeType = mime; + if (TextUtils.isEmpty(mimeType)) { + 
MimeTypeMap myMime = MimeTypeMap.getSingleton(); + String name = destFile.getName(); + int idx = name.lastIndexOf('.'); + if (idx != -1) { + String ext = name.substring(idx + 1); + mimeType = myMime.getMimeTypeFromExtension(ext.toLowerCase()); + if (TextUtils.isEmpty(mimeType)) { + mimeType = "text/plain"; + } + } else { + mimeType = "text/plain"; + } + } + downloadManager.addCompletedDownload(destFile.getName(), destFile.getName(), false, mimeType, destFile.getAbsolutePath(), destFile.length(), true); } finishedProgress += 100.0f / messageObjects.size(); final int progress = (int) (finishedProgress); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java index 4396fb4c0..e98001998 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java @@ -57,6 +57,7 @@ import org.telegram.ui.LaunchActivity; import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -1786,16 +1787,18 @@ public class MediaDataController extends BaseController { } public void preloadStickerSetThumb(TLRPC.TL_messages_stickerSet stickerSet) { - if (stickerSet.set.thumb instanceof TLRPC.TL_photoSize || stickerSet.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { + TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(stickerSet.set.thumbs, 90); + if (thumb != null) { final ArrayList documents = stickerSet.documents; if (documents != null && !documents.isEmpty()) { - loadStickerSetThumbInternal(stickerSet.set.thumb, stickerSet, documents.get(0)); + loadStickerSetThumbInternal(thumb, stickerSet, documents.get(0)); } } } public void preloadStickerSetThumb(TLRPC.StickerSetCovered stickerSet) { - if (stickerSet.set.thumb instanceof TLRPC.TL_photoSize || 
stickerSet.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { + TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(stickerSet.set.thumbs, 90); + if (thumb != null) { final TLRPC.Document sticker; if (stickerSet.cover != null) { sticker = stickerSet.cover; @@ -1804,7 +1807,7 @@ public class MediaDataController extends BaseController { } else { return; } - loadStickerSetThumbInternal(stickerSet.set.thumb, stickerSet, sticker); + loadStickerSetThumbInternal(thumb, stickerSet, sticker); } } @@ -2347,56 +2350,27 @@ public class MediaDataController extends BaseController { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.mediaCountsDidLoad, uid, counts)); } else { boolean missing = false; + TLRPC.TL_messages_getSearchCounters req = new TLRPC.TL_messages_getSearchCounters(); + req.peer = getMessagesController().getInputPeer(lower_part); for (int a = 0; a < counts.length; a++) { + if (req.peer == null) { + counts[a] = 0; + continue; + } if (counts[a] == -1 || old[a] == 1) { - final int type = a; - - TLRPC.TL_messages_search req = new TLRPC.TL_messages_search(); - req.limit = 1; - req.offset_id = 0; if (a == MEDIA_PHOTOVIDEO) { - req.filter = new TLRPC.TL_inputMessagesFilterPhotoVideo(); + req.filters.add(new TLRPC.TL_inputMessagesFilterPhotoVideo()); } else if (a == MEDIA_FILE) { - req.filter = new TLRPC.TL_inputMessagesFilterDocument(); + req.filters.add(new TLRPC.TL_inputMessagesFilterDocument()); } else if (a == MEDIA_AUDIO) { - req.filter = new TLRPC.TL_inputMessagesFilterRoundVoice(); + req.filters.add(new TLRPC.TL_inputMessagesFilterRoundVoice()); } else if (a == MEDIA_URL) { - req.filter = new TLRPC.TL_inputMessagesFilterUrl(); + req.filters.add(new TLRPC.TL_inputMessagesFilterUrl()); } else if (a == MEDIA_MUSIC) { - req.filter = new TLRPC.TL_inputMessagesFilterMusic(); - } else if (a == MEDIA_GIF) { - req.filter = new TLRPC.TL_inputMessagesFilterGif(); + req.filters.add(new 
TLRPC.TL_inputMessagesFilterMusic()); + } else { + req.filters.add(new TLRPC.TL_inputMessagesFilterGif()); } - req.q = ""; - req.peer = getMessagesController().getInputPeer(lower_part); - if (req.peer == null) { - counts[a] = 0; - continue; - } - int reqId = getConnectionsManager().sendRequest(req, (response, error) -> { - if (error == null) { - final TLRPC.messages_Messages res = (TLRPC.messages_Messages) response; - if (res instanceof TLRPC.TL_messages_messages) { - counts[type] = res.messages.size(); - } else { - counts[type] = res.count; - } - putMediaCountDatabase(uid, type, counts[type]); - } else { - counts[type] = 0; - } - boolean finished = true; - for (int b = 0; b < counts.length; b++) { - if (counts[b] == -1) { - finished = false; - break; - } - } - if (finished) { - AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.mediaCountsDidLoad, uid, counts)); - } - }); - getConnectionsManager().bindRequestToGuid(reqId, classGuid); if (counts[a] == -1) { missing = true; } else if (old[a] == 1) { @@ -2404,6 +2378,37 @@ public class MediaDataController extends BaseController { } } } + if (!req.filters.isEmpty()) { + int reqId = getConnectionsManager().sendRequest(req, (response, error) -> { + Arrays.fill(counts, 0); + if (response != null) { + TLRPC.Vector res = (TLRPC.Vector) response; + for (int a = 0, N = res.objects.size(); a < N; a++) { + TLRPC.TL_messages_searchCounter searchCounter = (TLRPC.TL_messages_searchCounter) res.objects.get(a); + int type; + if (searchCounter.filter instanceof TLRPC.TL_inputMessagesFilterPhotoVideo) { + type = MEDIA_PHOTOVIDEO; + } else if (searchCounter.filter instanceof TLRPC.TL_inputMessagesFilterDocument) { + type = MEDIA_FILE; + } else if (searchCounter.filter instanceof TLRPC.TL_inputMessagesFilterRoundVoice) { + type = MEDIA_AUDIO; + } else if (searchCounter.filter instanceof TLRPC.TL_inputMessagesFilterUrl) { + type = MEDIA_URL; + } else if (searchCounter.filter instanceof 
TLRPC.TL_inputMessagesFilterMusic) { + type = MEDIA_MUSIC; + } else if (searchCounter.filter instanceof TLRPC.TL_inputMessagesFilterGif) { + type = MEDIA_GIF; + } else { + continue; + } + counts[type] = searchCounter.count; + putMediaCountDatabase(uid, type, counts[type]); + } + } + AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.mediaCountsDidLoad, uid, counts)); + }); + getConnectionsManager().bindRequestToGuid(reqId, classGuid); + } if (!missing) { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.mediaCountsDidLoad, uid, countsFinal)); } @@ -2419,43 +2424,31 @@ public class MediaDataController extends BaseController { if (fromCache || lower_part == 0) { getMediaCountDatabase(uid, type, classGuid); } else { - TLRPC.TL_messages_search req = new TLRPC.TL_messages_search(); - req.limit = 1; - req.offset_id = 0; + TLRPC.TL_messages_getSearchCounters req = new TLRPC.TL_messages_getSearchCounters(); if (type == MEDIA_PHOTOVIDEO) { - req.filter = new TLRPC.TL_inputMessagesFilterPhotoVideo(); + req.filters.add(new TLRPC.TL_inputMessagesFilterPhotoVideo()); } else if (type == MEDIA_FILE) { - req.filter = new TLRPC.TL_inputMessagesFilterDocument(); + req.filters.add(new TLRPC.TL_inputMessagesFilterDocument()); } else if (type == MEDIA_AUDIO) { - req.filter = new TLRPC.TL_inputMessagesFilterRoundVoice(); + req.filters.add(new TLRPC.TL_inputMessagesFilterRoundVoice()); } else if (type == MEDIA_URL) { - req.filter = new TLRPC.TL_inputMessagesFilterUrl(); + req.filters.add(new TLRPC.TL_inputMessagesFilterUrl()); } else if (type == MEDIA_MUSIC) { - req.filter = new TLRPC.TL_inputMessagesFilterMusic(); + req.filters.add(new TLRPC.TL_inputMessagesFilterMusic()); } else if (type == MEDIA_GIF) { - req.filter = new TLRPC.TL_inputMessagesFilterGif(); + req.filters.add(new TLRPC.TL_inputMessagesFilterGif()); } - req.q = ""; req.peer = 
getMessagesController().getInputPeer(lower_part); if (req.peer == null) { return; } int reqId = getConnectionsManager().sendRequest(req, (response, error) -> { - if (error == null) { - final TLRPC.messages_Messages res = (TLRPC.messages_Messages) response; - getMessagesStorage().putUsersAndChats(res.users, res.chats, true, true); - int count; - if (res instanceof TLRPC.TL_messages_messages) { - count = res.messages.size(); - } else { - count = res.count; + if (response != null) { + final TLRPC.Vector res = (TLRPC.Vector) response; + if (!res.objects.isEmpty()) { + TLRPC.TL_messages_searchCounter counter = (TLRPC.TL_messages_searchCounter) res.objects.get(0); + processLoadedMediaCount(counter.count, uid, type, classGuid, false, 0); } - AndroidUtilities.runOnUIThread(() -> { - getMessagesController().putUsers(res.users, false); - getMessagesController().putChats(res.chats, false); - }); - - processLoadedMediaCount(count, uid, type, classGuid, false, 0); } }); getConnectionsManager().bindRequestToGuid(reqId, classGuid); @@ -2469,15 +2462,39 @@ public class MediaDataController extends BaseController { if (message.media instanceof TLRPC.TL_messageMediaPhoto) { return MEDIA_PHOTOVIDEO; } else if (message.media instanceof TLRPC.TL_messageMediaDocument) { - if (MessageObject.isVoiceMessage(message) || MessageObject.isRoundVideoMessage(message)) { - return MEDIA_AUDIO; - } else if (MessageObject.isVideoMessage(message)) { - return MEDIA_PHOTOVIDEO; - } else if (MessageObject.isStickerMessage(message) || MessageObject.isAnimatedStickerMessage(message)) { + TLRPC.Document document = message.media.document; + if (document == null) { return -1; - } else if (MessageObject.isNewGifMessage(message)) { + } + boolean isAnimated = false; + boolean isVideo = false; + boolean isVoice = false; + boolean isMusic = false; + boolean isSticker = false; + + for (int a = 0; a < document.attributes.size(); a++) { + TLRPC.DocumentAttribute attribute = document.attributes.get(a); + if (attribute 
instanceof TLRPC.TL_documentAttributeVideo) { + isVoice = attribute.round_message; + isVideo = !attribute.round_message; + } else if (attribute instanceof TLRPC.TL_documentAttributeAnimated) { + isAnimated = true; + } else if (attribute instanceof TLRPC.TL_documentAttributeAudio) { + isVoice = attribute.voice; + isMusic = !attribute.voice; + } else if (attribute instanceof TLRPC.TL_documentAttributeSticker) { + isSticker = true; + } + } + if (isVoice) { + return MEDIA_AUDIO; + } else if (isVideo && !isAnimated) { + return MEDIA_PHOTOVIDEO; + } else if (isSticker) { + return -1; + } else if (isAnimated) { return MEDIA_GIF; - } else if (MessageObject.isMusicMessage(message)) { + } else if (isMusic) { return MEDIA_MUSIC; } else { return MEDIA_FILE; @@ -2498,18 +2515,9 @@ public class MediaDataController extends BaseController { return false; } else if (!(message instanceof TLRPC.TL_message_secret) && message instanceof TLRPC.TL_message && (message.media instanceof TLRPC.TL_messageMediaPhoto || message.media instanceof TLRPC.TL_messageMediaDocument) && message.media.ttl_seconds != 0) { return false; - } else if (message.media instanceof TLRPC.TL_messageMediaPhoto || - message.media instanceof TLRPC.TL_messageMediaDocument && !MessageObject.isGifDocument(message.media.document)) { - return true; - } else if (!message.entities.isEmpty()) { - for (int a = 0; a < message.entities.size(); a++) { - TLRPC.MessageEntity entity = message.entities.get(a); - if (entity instanceof TLRPC.TL_messageEntityUrl || entity instanceof TLRPC.TL_messageEntityTextUrl || entity instanceof TLRPC.TL_messageEntityEmail) { - return true; - } - } + } else { + return getMediaType(message) != -1; } - return MediaDataController.getMediaType(message) != -1; } private void processLoadedMedia(final TLRPC.messages_Messages res, final long uid, int count, int max_id, final int type, final int fromCache, final int classGuid, final boolean isChannel, final boolean topReached) { @@ -2758,7 +2766,6 @@ public 
class MediaDataController extends BaseController { SQLitePreparedStatement state2 = getMessagesStorage().getDatabase().executeFast("REPLACE INTO media_v2 VALUES(?, ?, ?, ?, ?)"); for (TLRPC.Message message : messages) { if (canAddMessageToMedia(message)) { - long messageId = message.id; if (message.peer_id.channel_id != 0) { messageId |= ((long) message.peer_id.channel_id) << 32; @@ -3783,12 +3790,6 @@ public class MediaDataController extends BaseController { removeEmptyMessages(messagesRes.messages); if (!messagesRes.messages.isEmpty()) { TLRPC.Chat chat = getMessagesController().getChat(channelId); - if (chat != null && chat.megagroup) { - for (int a = 0, N = messagesRes.messages.size(); a < N; a++) { - TLRPC.Message message = messagesRes.messages.get(a); - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } - } ImageLoader.saveMessagesThumbs(messagesRes.messages); broadcastPinnedMessage(messagesRes.messages, messagesRes.users, messagesRes.chats, false, false); getMessagesStorage().putUsersAndChats(messagesRes.users, messagesRes.chats, true, true); @@ -3968,9 +3969,6 @@ public class MediaDataController extends BaseController { object.replyMessageObject = messageObject; object.messageOwner.reply_to = new TLRPC.TL_messageReplyHeader(); object.messageOwner.reply_to.reply_to_msg_id = messageObject.getId(); - if (object.isMegagroup()) { - object.replyMessageObject.messageOwner.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } } } @@ -4204,9 +4202,6 @@ public class MediaDataController extends BaseController { } else if (m.messageOwner.action instanceof TLRPC.TL_messageActionPaymentSent) { m.generatePaymentSentMessageText(null); } - if (m.isMegagroup()) { - m.replyMessageObject.messageOwner.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } changed = true; } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java index 4bf027608..29e7f5eee 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java @@ -77,6 +77,8 @@ public class MessageObject { public long localGroupId; public long localSentGroupId; public boolean localChannel; + public boolean localSupergroup; + public Boolean cachedIsSupergroup; public boolean localEdit; public TLRPC.Message messageOwner; public TLRPC.Document emojiAnimatedSticker; @@ -137,12 +139,15 @@ public class MessageObject { public CharSequence editingMessage; public ArrayList editingMessageEntities; + public boolean editingMessageSearchWebPage; - public String previousCaption; + public String previousMessage; public TLRPC.MessageMedia previousMedia; - public ArrayList previousCaptionEntities; + public ArrayList previousMessageEntities; public String previousAttachPath; + public SvgHelper.SvgDrawable pathThumb; + public int currentAccount; public TLRPC.TL_channelAdminLogEvent currentEvent; @@ -834,7 +839,7 @@ public class MessageObject { public ArrayList textLayoutBlocks; - public MessageObject(int accountNum, TLRPC.Message message, String formattedMessage, String name, String userName, boolean localMessage, boolean isChannel, boolean edit) { + public MessageObject(int accountNum, TLRPC.Message message, String formattedMessage, String name, String userName, boolean localMessage, boolean isChannel, boolean supergroup, boolean edit) { localType = localMessage ? 
2 : 1; currentAccount = accountNum; localName = name; @@ -842,6 +847,7 @@ public class MessageObject { messageText = formattedMessage; messageOwner = message; localChannel = isChannel; + localSupergroup = supergroup; localEdit = edit; } @@ -888,14 +894,7 @@ public class MessageObject { TLRPC.User fromUser = null; if (message.from_id instanceof TLRPC.TL_peerUser) { - if (users != null) { - fromUser = users.get(message.from_id.user_id); - } else if (sUsers != null) { - fromUser = sUsers.get(message.from_id.user_id); - } - if (fromUser == null) { - fromUser = MessagesController.getInstance(currentAccount).getUser(message.from_id.user_id); - } + fromUser = getUser(users, sUsers, message.from_id.user_id); } updateMessageText(users, chats, sUsers, sChats); @@ -958,6 +957,7 @@ public class MessageObject { type = TYPE_ANIMATED_STICKER; } } + createPathThumb(); } layoutCreated = generateLayout; generateThumbs(false); @@ -966,6 +966,14 @@ public class MessageObject { } } + private void createPathThumb() { + TLRPC.Document document = getDocument(); + if (document == null) { + return; + } + pathThumb = DocumentObject.getSvgThumb(document, Theme.key_chat_serviceBackground, 1.0f); + } + private void createDateArray(int accountNum, TLRPC.TL_channelAdminLogEvent event, ArrayList messageObjects, HashMap> messagesByDays) { ArrayList dayArray = messagesByDays.get(dateKey); if (dayArray == null) { @@ -1047,8 +1055,9 @@ public class MessageObject { messageText = replaceWithLink(LocaleController.formatString("EventLogEditedChannelTitle", R.string.EventLogEditedChannelTitle, title), "un1", fromUser); } } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionChangePhoto) { + TLRPC.TL_channelAdminLogEventActionChangePhoto action = (TLRPC.TL_channelAdminLogEventActionChangePhoto) event.action; messageOwner = new TLRPC.TL_messageService(); - if (event.action.new_photo instanceof TLRPC.TL_photoEmpty) { + if (action.new_photo instanceof TLRPC.TL_photoEmpty) { messageOwner.action 
= new TLRPC.TL_messageActionChatDeletePhoto(); if (chat.megagroup) { messageText = replaceWithLink(LocaleController.getString("EventLogRemovedWGroupPhoto", R.string.EventLogRemovedWGroupPhoto), "un1", fromUser); @@ -1057,7 +1066,7 @@ public class MessageObject { } } else { messageOwner.action = new TLRPC.TL_messageActionChatEditPhoto(); - messageOwner.action.photo = event.action.new_photo; + messageOwner.action.photo = action.new_photo; if (chat.megagroup) { if (isVideoAvatar()) { @@ -1089,10 +1098,11 @@ public class MessageObject { messageText = replaceWithLink(LocaleController.getString("EventLogLeftChannel", R.string.EventLogLeftChannel), "un1", fromUser); } } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantInvite) { + TLRPC.TL_channelAdminLogEventActionParticipantInvite action = (TLRPC.TL_channelAdminLogEventActionParticipantInvite) event.action; messageOwner = new TLRPC.TL_messageService(); messageOwner.action = new TLRPC.TL_messageActionChatAddUser(); - TLRPC.User whoUser = MessagesController.getInstance(currentAccount).getUser(event.action.participant.user_id); - if (messageOwner.from_id instanceof TLRPC.TL_peerUser && event.action.participant.user_id == messageOwner.from_id.user_id) { + TLRPC.User whoUser = MessagesController.getInstance(currentAccount).getUser(action.participant.user_id); + if (messageOwner.from_id instanceof TLRPC.TL_peerUser && action.participant.user_id == messageOwner.from_id.user_id) { if (chat.megagroup) { messageText = replaceWithLink(LocaleController.getString("EventLogGroupJoined", R.string.EventLogGroupJoined), "un1", fromUser); } else { @@ -1103,11 +1113,22 @@ public class MessageObject { messageText = replaceWithLink(messageText, "un1", fromUser); } } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantToggleAdmin || - event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantToggleBan && event.action.prev_participant instanceof 
TLRPC.TL_channelParticipantAdmin && event.action.new_participant instanceof TLRPC.TL_channelParticipant) { + event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantToggleBan && ((TLRPC.TL_channelAdminLogEventActionParticipantToggleBan) event.action).prev_participant instanceof TLRPC.TL_channelParticipantAdmin && ((TLRPC.TL_channelAdminLogEventActionParticipantToggleBan) event.action).new_participant instanceof TLRPC.TL_channelParticipant) { + TLRPC.ChannelParticipant prev_participant; + TLRPC.ChannelParticipant new_participant; + if (event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantToggleAdmin) { + TLRPC.TL_channelAdminLogEventActionParticipantToggleAdmin action = (TLRPC.TL_channelAdminLogEventActionParticipantToggleAdmin) event.action; + prev_participant = action.prev_participant; + new_participant = action.new_participant; + } else { + TLRPC.TL_channelAdminLogEventActionParticipantToggleBan action = (TLRPC.TL_channelAdminLogEventActionParticipantToggleBan) event.action; + prev_participant = action.prev_participant; + new_participant = action.new_participant; + } messageOwner = new TLRPC.TL_message(); - TLRPC.User whoUser = MessagesController.getInstance(currentAccount).getUser(event.action.prev_participant.user_id); + TLRPC.User whoUser = MessagesController.getInstance(currentAccount).getUser(prev_participant.user_id); StringBuilder rights; - if (!(event.action.prev_participant instanceof TLRPC.TL_channelParticipantCreator) && event.action.new_participant instanceof TLRPC.TL_channelParticipantCreator) { + if (!(prev_participant instanceof TLRPC.TL_channelParticipantCreator) && new_participant instanceof TLRPC.TL_channelParticipantCreator) { String str = LocaleController.getString("EventLogChangedOwnership", R.string.EventLogChangedOwnership); int offset = str.indexOf("%1$s"); rights = new StringBuilder(String.format(str, getUserName(whoUser, messageOwner.entities, offset))); @@ -1117,21 +1138,21 @@ public class MessageObject { 
rights = new StringBuilder(String.format(str, getUserName(whoUser, messageOwner.entities, offset))); rights.append("\n"); - TLRPC.TL_chatAdminRights o = event.action.prev_participant.admin_rights; - TLRPC.TL_chatAdminRights n = event.action.new_participant.admin_rights; + TLRPC.TL_chatAdminRights o = prev_participant.admin_rights; + TLRPC.TL_chatAdminRights n = new_participant.admin_rights; if (o == null) { o = new TLRPC.TL_chatAdminRights(); } if (n == null) { n = new TLRPC.TL_chatAdminRights(); } - if (!TextUtils.equals(event.action.prev_participant.rank, event.action.new_participant.rank)) { - if (TextUtils.isEmpty(event.action.new_participant.rank)) { + if (!TextUtils.equals(prev_participant.rank, new_participant.rank)) { + if (TextUtils.isEmpty(new_participant.rank)) { rights.append('\n').append('-').append(' '); rights.append(LocaleController.getString("EventLogPromotedRemovedTitle", R.string.EventLogPromotedRemovedTitle)); } else { rights.append('\n').append('+').append(' '); - rights.append(LocaleController.formatString("EventLogPromotedTitle", R.string.EventLogPromotedTitle, event.action.new_participant.rank)); + rights.append(LocaleController.formatString("EventLogPromotedTitle", R.string.EventLogPromotedTitle, new_participant.rank)); } } if (o.change_info != n.change_info) { @@ -1165,6 +1186,10 @@ public class MessageObject { rights.append('\n').append(n.ban_users ? '+' : '-').append(' '); rights.append(LocaleController.getString("EventLogPromotedBanUsers", R.string.EventLogPromotedBanUsers)); } + if (o.manage_call != n.manage_call) { + rights.append('\n').append(n.manage_call ? '+' : '-').append(' '); + rights.append(LocaleController.getString("EventLogPromotedManageCall", R.string.EventLogPromotedManageCall)); + } } if (o.invite_users != n.invite_users) { rights.append('\n').append(n.invite_users ?
'+' : '-').append(' '); @@ -1256,10 +1281,11 @@ public class MessageObject { } messageText = rights.toString(); } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantToggleBan) { + TLRPC.TL_channelAdminLogEventActionParticipantToggleBan action = (TLRPC.TL_channelAdminLogEventActionParticipantToggleBan) event.action; messageOwner = new TLRPC.TL_message(); - TLRPC.User whoUser = MessagesController.getInstance(currentAccount).getUser(event.action.prev_participant.user_id); - TLRPC.TL_chatBannedRights o = event.action.prev_participant.banned_rights; - TLRPC.TL_chatBannedRights n = event.action.new_participant.banned_rights; + TLRPC.User whoUser = MessagesController.getInstance(currentAccount).getUser(action.prev_participant.user_id); + TLRPC.TL_chatBannedRights o = action.prev_participant.banned_rights; + TLRPC.TL_chatBannedRights n = action.new_participant.banned_rights; if (chat.megagroup && (n == null || !n.view_messages || o != null && n.until_date != o.until_date)) { StringBuilder rights; StringBuilder bannedDuration; @@ -1398,23 +1424,26 @@ public class MessageObject { messageText = String.format(str, getUserName(whoUser, messageOwner.entities, offset)); } } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionUpdatePinned) { - if (fromUser != null && fromUser.id == 136817688 && event.action.message.fwd_from != null && event.action.message.fwd_from.from_id instanceof TLRPC.TL_peerChannel) { - TLRPC.Chat channel = MessagesController.getInstance(currentAccount).getChat(event.action.message.fwd_from.from_id.channel_id); - if (event.action.message instanceof TLRPC.TL_messageEmpty || !event.action.message.pinned) { + TLRPC.TL_channelAdminLogEventActionUpdatePinned action = (TLRPC.TL_channelAdminLogEventActionUpdatePinned) event.action; + message = action.message; + if (fromUser != null && fromUser.id == 136817688 && action.message.fwd_from != null && action.message.fwd_from.from_id instanceof TLRPC.TL_peerChannel) { + 
TLRPC.Chat channel = MessagesController.getInstance(currentAccount).getChat(action.message.fwd_from.from_id.channel_id); + if (action.message instanceof TLRPC.TL_messageEmpty || !action.message.pinned) { messageText = replaceWithLink(LocaleController.getString("EventLogUnpinnedMessages", R.string.EventLogUnpinnedMessages), "un1", channel); } else { messageText = replaceWithLink(LocaleController.getString("EventLogPinnedMessages", R.string.EventLogPinnedMessages), "un1", channel); } } else { - if (event.action.message instanceof TLRPC.TL_messageEmpty || !event.action.message.pinned) { + if (action.message instanceof TLRPC.TL_messageEmpty || !action.message.pinned) { messageText = replaceWithLink(LocaleController.getString("EventLogUnpinnedMessages", R.string.EventLogUnpinnedMessages), "un1", fromUser); } else { messageText = replaceWithLink(LocaleController.getString("EventLogPinnedMessages", R.string.EventLogPinnedMessages), "un1", fromUser); } } } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionStopPoll) { - TLRPC.Message m = event.action.message; - if (m.media instanceof TLRPC.TL_messageMediaPoll && ((TLRPC.TL_messageMediaPoll) m.media).poll.quiz) { + TLRPC.TL_channelAdminLogEventActionStopPoll action = (TLRPC.TL_channelAdminLogEventActionStopPoll) event.action; + message = action.message; + if (message.media instanceof TLRPC.TL_messageMediaPoll && ((TLRPC.TL_messageMediaPoll) message.media).poll.quiz) { messageText = replaceWithLink(LocaleController.getString("EventLogStopQuiz", R.string.EventLogStopQuiz), "un1", fromUser); } else { messageText = replaceWithLink(LocaleController.getString("EventLogStopPoll", R.string.EventLogStopPoll), "un1", fromUser); @@ -1432,6 +1461,7 @@ public class MessageObject { messageText = replaceWithLink(LocaleController.getString("EventLogToggledInvitesOff", R.string.EventLogToggledInvitesOff), "un1", fromUser); } } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionDeleteMessage) { + message = 
((TLRPC.TL_channelAdminLogEventActionDeleteMessage) event.action).message; messageText = replaceWithLink(LocaleController.getString("EventLogDeletedMessages", R.string.EventLogDeletedMessages), "un1", fromUser); } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionChangeLinkedChat) { int newChatId = ((TLRPC.TL_channelAdminLogEventActionChangeLinkedChat) event.action).new_value; @@ -1562,14 +1592,19 @@ public class MessageObject { } } else { messageText = replaceWithLink(LocaleController.getString("EventLogEditedMessages", R.string.EventLogEditedMessages), "un1", fromUser); - message.message = newMessage.message; - message.media = new TLRPC.TL_messageMediaWebPage(); - message.media.webpage = new TLRPC.TL_webPage(); - message.media.webpage.site_name = LocaleController.getString("EventLogOriginalMessages", R.string.EventLogOriginalMessages); - if (TextUtils.isEmpty(oldMessage.message)) { - message.media.webpage.description = LocaleController.getString("EventLogOriginalCaptionEmpty", R.string.EventLogOriginalCaptionEmpty); + if (newMessage.action instanceof TLRPC.TL_messageActionGroupCall) { + message = newMessage; + message.media = new TLRPC.TL_messageMediaEmpty(); } else { - message.media.webpage.description = oldMessage.message; + message.message = newMessage.message; + message.media = new TLRPC.TL_messageMediaWebPage(); + message.media.webpage = new TLRPC.TL_webPage(); + message.media.webpage.site_name = LocaleController.getString("EventLogOriginalMessages", R.string.EventLogOriginalMessages); + if (TextUtils.isEmpty(oldMessage.message)) { + message.media.webpage.description = LocaleController.getString("EventLogOriginalCaptionEmpty", R.string.EventLogOriginalCaptionEmpty); + } else { + message.media.webpage.description = oldMessage.message; + } } } message.reply_markup = newMessage.reply_markup; @@ -1609,9 +1644,31 @@ public class MessageObject { } messageText = replaceWithLink(LocaleController.formatString("EventLogToggledSlowmodeOn", 
R.string.EventLogToggledSlowmodeOn, string), "un1", fromUser); } + } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionStartGroupCall) { + messageText = replaceWithLink(LocaleController.getString("EventLogStartedVoiceChat", R.string.EventLogStartedVoiceChat), "un1", fromUser); + } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionDiscardGroupCall) { + messageText = replaceWithLink(LocaleController.getString("EventLogEndedVoiceChat", R.string.EventLogEndedVoiceChat), "un1", fromUser); + } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantMute) { + TLRPC.TL_channelAdminLogEventActionParticipantMute action = (TLRPC.TL_channelAdminLogEventActionParticipantMute) event.action; + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(action.participant.user_id); + messageText = replaceWithLink(LocaleController.getString("EventLogVoiceChatMuted", R.string.EventLogVoiceChatMuted), "un1", fromUser); + messageText = replaceWithLink(messageText, "un2", user); + } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantUnmute) { + TLRPC.TL_channelAdminLogEventActionParticipantUnmute action = (TLRPC.TL_channelAdminLogEventActionParticipantUnmute) event.action; + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(action.participant.user_id); + messageText = replaceWithLink(LocaleController.getString("EventLogVoiceChatUnmuted", R.string.EventLogVoiceChatUnmuted), "un1", fromUser); + messageText = replaceWithLink(messageText, "un2", user); + } else if (event.action instanceof TLRPC.TL_channelAdminLogEventActionToggleGroupCallSetting) { + TLRPC.TL_channelAdminLogEventActionToggleGroupCallSetting action = (TLRPC.TL_channelAdminLogEventActionToggleGroupCallSetting) event.action; + if (action.join_muted) { + messageText = replaceWithLink(LocaleController.getString("EventLogVoiceChatNotAllowedToSpeak", R.string.EventLogVoiceChatNotAllowedToSpeak), "un1", 
fromUser); + } else { + messageText = replaceWithLink(LocaleController.getString("EventLogVoiceChatAllowedToSpeak", R.string.EventLogVoiceChatAllowedToSpeak), "un1", fromUser); + } } else { messageText = "unsupported " + event.action; } + if (messageOwner == null) { messageOwner = new TLRPC.TL_messageService(); } @@ -1625,13 +1682,10 @@ public class MessageObject { messageOwner.peer_id = new TLRPC.TL_peerChannel(); messageOwner.peer_id.channel_id = chat.id; messageOwner.unread = false; - if (chat.megagroup) { - messageOwner.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } MediaController mediaController = MediaController.getInstance(); - if (event.action.message != null && !(event.action.message instanceof TLRPC.TL_messageEmpty)) { - message = event.action.message; + if (message instanceof TLRPC.TL_messageEmpty) { + message = null; } if (message != null) { @@ -1640,9 +1694,6 @@ public class MessageObject { message.flags &=~ TLRPC.MESSAGE_FLAG_REPLY; message.reply_to = null; message.flags = message.flags &~ TLRPC.MESSAGE_FLAG_EDITED; - if (chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } MessageObject messageObject = new MessageObject(currentAccount, message, null, null, true, true, eventId); if (messageObject.contentType >= 0) { if (mediaController.isPlayingMessage(messageObject)) { @@ -2043,6 +2094,26 @@ public class MessageObject { return null; } + public boolean isSupergroup() { + if (localSupergroup) { + return true; + } + if (cachedIsSupergroup != null) { + return cachedIsSupergroup; + } + if (messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0) { + TLRPC.Chat chat = getChat(null, null, messageOwner.peer_id.channel_id); + if (chat != null) { + return (cachedIsSupergroup = chat.megagroup); + } else { + return false; + } + } else { + cachedIsSupergroup = false; + } + return false; + } + private MessageObject getMessageObjectForBlock(TLRPC.WebPage webPage, TLRPC.PageBlock pageBlock) { TLRPC.TL_message message = null; if (pageBlock 
instanceof TLRPC.TL_pageBlockPhoto) { @@ -2116,14 +2187,14 @@ public class MessageObject { SerializedData serializedData = new SerializedData(Base64.decode(param, Base64.DEFAULT)); int constructor = serializedData.readInt32(false); previousMedia = TLRPC.MessageMedia.TLdeserialize(serializedData, constructor, false); - previousCaption = serializedData.readString(false); + previousMessage = serializedData.readString(false); previousAttachPath = serializedData.readString(false); int count = serializedData.readInt32(false); - previousCaptionEntities = new ArrayList<>(count); + previousMessageEntities = new ArrayList<>(count); for (int a = 0; a < count; a++) { constructor = serializedData.readInt32(false); TLRPC.MessageEntity entity = TLRPC.MessageEntity.TLdeserialize(serializedData, constructor, false); - previousCaptionEntities.add(entity); + previousMessageEntities.add(entity); } serializedData.cleanup(); } @@ -2237,7 +2308,58 @@ public class MessageObject { if (messageOwner instanceof TLRPC.TL_messageService) { if (messageOwner.action != null) { - if (messageOwner.action instanceof TLRPC.TL_messageActionGeoProximityReached) { + if (messageOwner.action instanceof TLRPC.TL_messageActionGroupCall) { + if (messageOwner.action.duration != 0) { + String time; + int days = messageOwner.action.duration / (3600 * 24); + if (days > 0) { + time = LocaleController.formatPluralString("Days", days); + } else { + int hours = messageOwner.action.duration / 3600; + if (hours > 0) { + time = LocaleController.formatPluralString("Hours", hours); + } else { + int minutes = messageOwner.action.duration / 60; + if (minutes > 0) { + time = LocaleController.formatPluralString("Minutes", minutes); + } else { + time = LocaleController.formatPluralString("Seconds", messageOwner.action.duration); + } + } + } + messageText = LocaleController.formatString("ActionGroupCallEnded", R.string.ActionGroupCallEnded, time); + } else { + if (isOut()) { + messageText = 
LocaleController.getString("ActionGroupCallStartedByYou", R.string.ActionGroupCallStartedByYou); + } else { + messageText = replaceWithLink(LocaleController.getString("ActionGroupCallStarted", R.string.ActionGroupCallStarted), "un1", fromObject); + } + } + } else if (messageOwner.action instanceof TLRPC.TL_messageActionInviteToGroupCall) { + int singleUserId = messageOwner.action.user_id; + if (singleUserId == 0 && messageOwner.action.users.size() == 1) { + singleUserId = messageOwner.action.users.get(0); + } + if (singleUserId != 0) { + TLRPC.User whoUser = getUser(users, sUsers, singleUserId); + + if (isOut()) { + messageText = replaceWithLink(LocaleController.getString("ActionGroupCallYouInvited", R.string.ActionGroupCallYouInvited), "un2", whoUser); + } else if (singleUserId == UserConfig.getInstance(currentAccount).getClientUserId()) { + messageText = replaceWithLink(LocaleController.getString("ActionGroupCallInvitedYou", R.string.ActionGroupCallInvitedYou), "un1", fromObject); + } else { + messageText = replaceWithLink(LocaleController.getString("ActionGroupCallInvited", R.string.ActionGroupCallInvited), "un2", whoUser); + messageText = replaceWithLink(messageText, "un1", fromObject); + } + } else { + if (isOut()) { + messageText = replaceWithLink(LocaleController.getString("ActionGroupCallYouInvited", R.string.ActionGroupCallYouInvited), "un2", messageOwner.action.users, users, sUsers); + } else { + messageText = replaceWithLink(LocaleController.getString("ActionGroupCallInvited", R.string.ActionGroupCallInvited), "un2", messageOwner.action.users, users, sUsers); + messageText = replaceWithLink(messageText, "un1", fromObject); + } + } + } else if (messageOwner.action instanceof TLRPC.TL_messageActionGeoProximityReached) { TLRPC.TL_messageActionGeoProximityReached action = (TLRPC.TL_messageActionGeoProximityReached) messageOwner.action; int fromId = getPeerId(action.from_id); TLObject from; @@ -2280,15 +2402,7 @@ public class MessageObject { messageText = 
replaceWithLink(LocaleController.getString("ActionLeftUser", R.string.ActionLeftUser), "un1", fromObject); } } else { - TLRPC.User whoUser = null; - if (users != null) { - whoUser = users.get(messageOwner.action.user_id); - } else if (sUsers != null) { - whoUser = sUsers.get(messageOwner.action.user_id); - } - if (whoUser == null) { - whoUser = MessagesController.getInstance(currentAccount).getUser(messageOwner.action.user_id); - } + TLRPC.User whoUser = getUser(users, sUsers, messageOwner.action.user_id); if (isOut()) { messageText = replaceWithLink(LocaleController.getString("ActionYouKickUser", R.string.ActionYouKickUser), "un2", whoUser); } else if (messageOwner.action.user_id == UserConfig.getInstance(currentAccount).getClientUserId()) { @@ -2304,20 +2418,16 @@ public class MessageObject { singleUserId = messageOwner.action.users.get(0); } if (singleUserId != 0) { - TLRPC.User whoUser = null; - if (users != null) { - whoUser = users.get(singleUserId); - } else if (sUsers != null) { - whoUser = sUsers.get(singleUserId); - } - if (whoUser == null) { - whoUser = MessagesController.getInstance(currentAccount).getUser(singleUserId); + TLRPC.User whoUser = getUser(users, sUsers, singleUserId); + TLRPC.Chat chat = null; + if (messageOwner.peer_id.channel_id != 0) { + chat = getChat(chats, sChats, messageOwner.peer_id.channel_id); } if (messageOwner.from_id != null && singleUserId == messageOwner.from_id.user_id) { - if (messageOwner.peer_id.channel_id != 0 && !isMegagroup()) { + if (ChatObject.isChannel(chat) && !chat.megagroup) { messageText = LocaleController.getString("ChannelJoined", R.string.ChannelJoined); } else { - if (messageOwner.peer_id.channel_id != 0 && isMegagroup()) { + if (messageOwner.peer_id.channel_id != 0) { if (singleUserId == UserConfig.getInstance(currentAccount).getClientUserId()) { messageText = LocaleController.getString("ChannelMegaJoined", R.string.ChannelMegaJoined); } else { @@ -2334,7 +2444,7 @@ public class MessageObject { messageText 
= replaceWithLink(LocaleController.getString("ActionYouAddUser", R.string.ActionYouAddUser), "un2", whoUser); } else if (singleUserId == UserConfig.getInstance(currentAccount).getClientUserId()) { if (messageOwner.peer_id.channel_id != 0) { - if (isMegagroup()) { + if (chat != null && chat.megagroup) { messageText = replaceWithLink(LocaleController.getString("MegaAddedBy", R.string.MegaAddedBy), "un1", fromObject); } else { messageText = replaceWithLink(LocaleController.getString("ChannelAddedBy", R.string.ChannelAddedBy), "un1", fromObject); @@ -2362,7 +2472,8 @@ public class MessageObject { messageText = replaceWithLink(LocaleController.getString("ActionInviteUser", R.string.ActionInviteUser), "un1", fromObject); } } else if (messageOwner.action instanceof TLRPC.TL_messageActionChatEditPhoto) { - if (messageOwner.peer_id.channel_id != 0 && !isMegagroup()) { + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? getChat(chats, sChats, messageOwner.peer_id.channel_id) : null; + if (ChatObject.isChannel(chat) && !chat.megagroup) { if (isVideoAvatar()) { messageText = LocaleController.getString("ActionChannelChangedVideo", R.string.ActionChannelChangedVideo); } else { @@ -2384,7 +2495,8 @@ public class MessageObject { } } } else if (messageOwner.action instanceof TLRPC.TL_messageActionChatEditTitle) { - if (messageOwner.peer_id.channel_id != 0 && !isMegagroup()) { + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? 
getChat(chats, sChats, messageOwner.peer_id.channel_id) : null; + if (ChatObject.isChannel(chat) && !chat.megagroup) { messageText = LocaleController.getString("ActionChannelChangedTitle", R.string.ActionChannelChangedTitle).replace("un2", messageOwner.action.title); } else { if (isOut()) { @@ -2394,7 +2506,8 @@ public class MessageObject { } } } else if (messageOwner.action instanceof TLRPC.TL_messageActionChatDeletePhoto) { - if (messageOwner.peer_id.channel_id != 0 && !isMegagroup()) { + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? getChat(chats, sChats, messageOwner.peer_id.channel_id) : null; + if (ChatObject.isChannel(chat) && !chat.megagroup) { messageText = LocaleController.getString("ActionChannelRemovedPhoto", R.string.ActionChannelRemovedPhoto); } else { if (isOut()) { @@ -2427,14 +2540,7 @@ public class MessageObject { } TLRPC.User to_user = UserConfig.getInstance(currentAccount).getCurrentUser(); if (to_user == null) { - if (users != null) { - to_user = users.get(messageOwner.peer_id.user_id); - } else if (sUsers != null) { - to_user = sUsers.get(messageOwner.peer_id.user_id); - } - if (to_user == null) { - to_user = MessagesController.getInstance(currentAccount).getUser(messageOwner.peer_id.user_id); - } + to_user = getUser(users, sUsers, messageOwner.peer_id.user_id); } String name = to_user != null ? 
UserObject.getFirstName(to_user) : ""; messageText = LocaleController.formatString("NotificationUnrecognizedDevice", R.string.NotificationUnrecognizedDevice, name, date, messageOwner.action.title, messageOwner.action.address); @@ -2474,7 +2580,8 @@ public class MessageObject { } else if (messageOwner.action instanceof TLRPC.TL_messageActionCreatedBroadcastList) { messageText = LocaleController.formatString("YouCreatedBroadcastList", R.string.YouCreatedBroadcastList); } else if (messageOwner.action instanceof TLRPC.TL_messageActionChannelCreate) { - if (isMegagroup()) { + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? getChat(chats, sChats, messageOwner.peer_id.channel_id) : null; + if (ChatObject.isChannel(chat) && chat.megagroup) { messageText = LocaleController.getString("ActionCreateMega", R.string.ActionCreateMega); } else { messageText = LocaleController.getString("ActionCreateChannel", R.string.ActionCreateChannel); @@ -2486,13 +2593,7 @@ public class MessageObject { } else if (messageOwner.action instanceof TLRPC.TL_messageActionPinMessage) { TLRPC.Chat chat; if (fromUser == null) { - if (chats != null) { - chat = chats.get(messageOwner.peer_id.channel_id); - } else if (sChats != null) { - chat = sChats.get(messageOwner.peer_id.channel_id); - } else { - chat = null; - } + chat = getChat(chats, sChats, messageOwner.peer_id.channel_id); } else { chat = null; } @@ -2558,16 +2659,7 @@ public class MessageObject { } } } else if (messageOwner.action instanceof TLRPC.TL_messageActionPaymentSent) { - TLRPC.User user = null; - int uid = (int) getDialogId(); - if (users != null) { - user = users.get(uid); - } else if (sUsers != null) { - user = sUsers.get(uid); - } - if (user == null) { - user = MessagesController.getInstance(currentAccount).getUser(uid); - } + TLRPC.User user = getUser(users, sUsers, (int) getDialogId()); generatePaymentSentMessageText(user); } else if (messageOwner.action instanceof 
TLRPC.TL_messageActionBotAllowed) { String domain = ((TLRPC.TL_messageActionBotAllowed) messageOwner.action).domain; @@ -2616,14 +2708,7 @@ public class MessageObject { } TLRPC.User user = null; if (messageOwner.peer_id != null) { - if (users != null) { - user = users.get(messageOwner.peer_id.user_id); - } else if (sUsers != null) { - user = sUsers.get(messageOwner.peer_id.user_id); - } - if (user == null) { - user = MessagesController.getInstance(currentAccount).getUser(messageOwner.peer_id.user_id); - } + user = getUser(users, sUsers, messageOwner.peer_id.user_id); } messageText = LocaleController.formatString("ActionBotDocuments", R.string.ActionBotDocuments, UserObject.getFirstName(user), str.toString()); } @@ -2894,7 +2979,7 @@ public class MessageObject { public static boolean canPreviewDocument(TLRPC.Document document) { if (document != null && document.mime_type != null) { String mime = document.mime_type.toLowerCase(); - if (isDocumentHasThumb(document) && (mime.equals("image/png") || mime.equals("image/jpg") || mime.equals("image/jpeg"))) { + if (isDocumentHasThumb(document) && (mime.equals("image/png") || mime.equals("image/jpg") || mime.equals("image/jpeg")) || (Build.VERSION.SDK_INT >= 26 && (mime.equals("image/heic")))) { for (int a = 0; a < document.attributes.size(); a++) { TLRPC.DocumentAttribute attribute = document.attributes.get(a); if (attribute instanceof TLRPC.TL_documentAttributeImageSize) { @@ -3155,7 +3240,7 @@ public class MessageObject { return source; } - public CharSequence replaceWithLink(CharSequence source, String param, TLObject object) { + public static CharSequence replaceWithLink(CharSequence source, String param, TLObject object) { int start = TextUtils.indexOf(source, param); if (start >= 0) { String name; @@ -3831,13 +3916,13 @@ public class MessageObject { if (messageOwner.media instanceof TLRPC.TL_messageMediaGame || messageOwner.media instanceof TLRPC.TL_messageMediaInvoice) { return true; } - if (isMegagroup()) { - 
TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(messageOwner.peer_id.channel_id); + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? getChat(null, null, messageOwner.peer_id.channel_id) : null; + if (ChatObject.isChannel(chat) && chat.megagroup) { return chat != null && chat.username != null && chat.username.length() > 0 && !(messageOwner.media instanceof TLRPC.TL_messageMediaContact) && !(messageOwner.media instanceof TLRPC.TL_messageMediaGeo); } } } else if (messageOwner.from_id instanceof TLRPC.TL_peerChannel || messageOwner.post) { - if (isMegagroup()) { + if (isSupergroup()) { return false; } if (messageOwner.peer_id.channel_id != 0 && (messageOwner.via_bot_id == 0 && messageOwner.reply_to == null || type != TYPE_STICKER && type != TYPE_ANIMATED_STICKER)) { @@ -4160,7 +4245,8 @@ public class MessageObject { } public boolean isOutOwner() { - if (!messageOwner.out || !(messageOwner.from_id instanceof TLRPC.TL_peerUser) && (!(messageOwner.from_id instanceof TLRPC.TL_peerChannel) || !isMegagroup()) || messageOwner.post) { + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? getChat(null, null, messageOwner.peer_id.channel_id) : null; + if (!messageOwner.out || !(messageOwner.from_id instanceof TLRPC.TL_peerUser) && (!(messageOwner.from_id instanceof TLRPC.TL_peerChannel) || ChatObject.isChannel(chat) && !chat.megagroup) || messageOwner.post) { return false; } if (messageOwner.fwd_from == null) { @@ -4185,11 +4271,11 @@ public class MessageObject { if (getDialogId() == UserConfig.getInstance(currentAccount).clientUserId) { return true; } - if (isMegagroup() || messageOwner.peer_id != null && messageOwner.peer_id.chat_id != 0) { + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? 
getChat(null, null, messageOwner.peer_id.channel_id) : null; + if (ChatObject.isChannel(chat) && chat.megagroup || messageOwner.peer_id != null && messageOwner.peer_id.chat_id != 0) { return true; } if (messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0) { - TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(messageOwner.peer_id.channel_id); return chat != null && chat.megagroup; } return false; @@ -4230,7 +4316,8 @@ public class MessageObject { } public boolean isFromGroup() { - return messageOwner.from_id instanceof TLRPC.TL_peerChannel && isMegagroup(); + TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? getChat(null, null, messageOwner.peer_id.channel_id) : null; + return messageOwner.from_id instanceof TLRPC.TL_peerChannel && ChatObject.isChannel(chat) && chat.megagroup; } public boolean isForwardedChannelPost() { @@ -4389,10 +4476,6 @@ public class MessageObject { return message.media_unread; } - public boolean isMegagroup() { - return isMegagroup(messageOwner); - } - public boolean isSavedFromMegagroup() { if (messageOwner.fwd_from != null && messageOwner.fwd_from.saved_from_peer != null && messageOwner.fwd_from.saved_from_peer.channel_id != 0) { TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(messageOwner.fwd_from.saved_from_peer.channel_id); @@ -4401,10 +4484,6 @@ public class MessageObject { return false; } - public static boolean isMegagroup(TLRPC.Message message) { - return (message.flags & TLRPC.MESSAGE_FLAG_MEGAGROUP) != 0; - } - public static boolean isOut(TLRPC.Message message) { return message.out; } @@ -4456,6 +4535,15 @@ public class MessageObject { return messageOwner.send_state == MESSAGE_SEND_STATE_EDITING && messageOwner.id > 0; } + public boolean isEditingMedia() { + if (messageOwner.media instanceof TLRPC.TL_messageMediaPhoto) { + return messageOwner.media.photo.id == 0; + } else if (messageOwner.media instanceof 
TLRPC.TL_messageMediaDocument) { + return messageOwner.media.document.dc_id == 0; + } + return false; + } + public boolean isSendError() { return messageOwner.send_state == MESSAGE_SEND_STATE_SEND_ERROR && messageOwner.id < 0 || scheduled && messageOwner.id > 0 && messageOwner.date < ConnectionsManager.getInstance(currentAccount).getCurrentTime() - 60; } @@ -5593,7 +5681,6 @@ public class MessageObject { if (TextUtils.isEmpty(query)) { return; } - ArrayList foundWords = new ArrayList<>(); query = query.trim().toLowerCase(); String[] queryWord = query.split("\\P{L}+"); @@ -5603,6 +5690,8 @@ public class MessageObject { String message = messageOwner.message.trim().toLowerCase(); if (message.contains(query) && !foundWords.contains(query)) { foundWords.add(query); + handleFoundWords(foundWords, queryWord); + return; } String[] words = message.split("\\P{L}+"); searchForWords.addAll(Arrays.asList(words)); @@ -5673,13 +5762,49 @@ public class MessageObject { } } } + handleFoundWords(foundWords, queryWord); + } + + private void handleFoundWords(ArrayList foundWords, String[] queryWord) { if (!foundWords.isEmpty()) { + boolean foundExactly = false; + for (int i = 0; i < foundWords.size(); i++) { + for (int j = 0; j < queryWord.length; j++) { + if (foundWords.get(i).contains(queryWord[j])) { + foundExactly = true; + break; + } + } + if (foundExactly) { + break; + } + } + if (foundExactly) { + for (int i = 0; i < foundWords.size(); i++) { + boolean findMatch = false; + for (int j = 0; j < queryWord.length; j++) { + if (foundWords.get(i).contains(queryWord[j])) { + findMatch = true; + break; + } + } + if (!findMatch) { + foundWords.remove(i--); + } + } + if (foundWords.size() > 0) { + Collections.sort(foundWords, (s, s1) -> s1.length() - s.length()); + String s = foundWords.get(0); + foundWords.clear(); + foundWords.add(s); + } + } highlightedWords = foundWords; if (messageOwner.message != null) { String str = messageOwner.message.replace('\n', ' ').replaceAll(" +", " 
").trim(); int lastIndex = str.length(); int startHighlightedIndex = str.toLowerCase().indexOf(foundWords.get(0)); - int maxSymbols = 130; + int maxSymbols = 200; if (startHighlightedIndex < 0) { startHighlightedIndex = 0; } @@ -5690,7 +5815,6 @@ public class MessageObject { messageTrimmedToHighlight = str; } } - } public boolean hasHighlightedWords() { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java index 26523462d..1415ead84 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java @@ -133,6 +133,7 @@ public class MessagesController extends BaseController implements NotificationCe private SparseLongArray updatesStartWaitTimeChannels = new SparseLongArray(); private SparseIntArray channelsPts = new SparseIntArray(); private SparseBooleanArray gettingDifferenceChannels = new SparseBooleanArray(); + private SparseBooleanArray gettingChatInviters = new SparseBooleanArray(); private SparseBooleanArray gettingUnknownChannels = new SparseBooleanArray(); private LongSparseArray gettingUnknownDialogs = new LongSparseArray<>(); @@ -146,9 +147,12 @@ public class MessagesController extends BaseController implements NotificationCe private long updatesStartWaitTimeQts; private SparseArray fullUsers = new SparseArray<>(); private SparseArray fullChats = new SparseArray<>(); + private LongSparseArray groupCalls = new LongSparseArray<>(); + private SparseArray groupCallsByChatId = new SparseArray<>(); private ArrayList loadingFullUsers = new ArrayList<>(); private ArrayList loadedFullUsers = new ArrayList<>(); private ArrayList loadingFullChats = new ArrayList<>(); + private ArrayList loadingGroupCalls = new ArrayList<>(); private ArrayList loadingFullParticipants = new ArrayList<>(); private ArrayList loadedFullParticipants = new ArrayList<>(); private 
ArrayList loadedFullChats = new ArrayList<>(); @@ -361,6 +365,11 @@ public class MessagesController extends BaseController implements NotificationCe } } + public void clearQueryTime() { + lastServerQueryTime.clear(); + lastScheduledServerQueryTime.clear(); + } + public static class DiceFrameSuccess { public int frame; public int num; @@ -437,7 +446,6 @@ public class MessagesController extends BaseController implements NotificationCe public static int DIALOG_FILTER_FLAG_ONLY_ARCHIVED = 0x00000100; public static int DIALOG_FILTER_FLAG_ALL_CHATS = DIALOG_FILTER_FLAG_CONTACTS | DIALOG_FILTER_FLAG_NON_CONTACTS | DIALOG_FILTER_FLAG_GROUPS | DIALOG_FILTER_FLAG_CHANNELS | DIALOG_FILTER_FLAG_BOTS; - public static class DialogFilter { public int id; public String name; @@ -450,6 +458,9 @@ public class MessagesController extends BaseController implements NotificationCe public LongSparseArray pinnedDialogs = new LongSparseArray<>(); public ArrayList dialogs = new ArrayList<>(); + private static int dialogFilterPointer = 10; + public int localId = dialogFilterPointer++; + public boolean includesDialog(AccountInstance accountInstance, int lowerId) { MessagesController messagesController = accountInstance.getMessagesController(); TLRPC.Dialog dialog = messagesController.dialogs_dict.get(lowerId); @@ -918,9 +929,6 @@ public class MessagesController extends BaseController implements NotificationCe if (chat != null && chat.left && (promoDialogId == 0 || promoDialogId != -chat.id)) { continue; } - if (chat != null && chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } else if (message.peer_id.chat_id != 0) { TLRPC.Chat chat = chatsDict.get(message.peer_id.chat_id); if (chat != null && chat.migrated_to != null) { @@ -2168,6 +2176,7 @@ public class MessagesController extends BaseController implements NotificationCe 
editor.remove("archivehint").remove("proximityhint").remove("archivehint_l").remove("gifhint").remove("soundHint").remove("dcDomainName2").remove("webFileDatacenterId").remove("themehint").commit(); lastScheduledServerQueryTime.clear(); + lastServerQueryTime.clear(); reloadingWebpages.clear(); reloadingWebpagesPending.clear(); reloadingScheduledWebpages.clear(); @@ -2179,6 +2188,8 @@ public class MessagesController extends BaseController implements NotificationCe exportedChats.clear(); fullUsers.clear(); fullChats.clear(); + loadingGroupCalls.clear(); + groupCallsByChatId.clear(); dialogsByFolder.clear(); unreadUnmutedDialogs = 0; joiningToChannels.clear(); @@ -2291,6 +2302,7 @@ public class MessagesController extends BaseController implements NotificationCe uploadingWallpaper = null; uploadingWallpaperInfo = null; uploadingThemes.clear(); + gettingChatInviters.clear(); statusRequest = 0; statusSettingState = 0; @@ -2531,6 +2543,8 @@ public class MessagesController extends BaseController implements NotificationCe oldChat.broadcast = chat.broadcast; oldChat.verified = chat.verified; oldChat.megagroup = chat.megagroup; + oldChat.call_not_empty = chat.call_not_empty; + oldChat.call_active = chat.call_active; if (chat.default_banned_rights != null) { oldChat.default_banned_rights = chat.default_banned_rights; oldChat.flags |= 262144; @@ -2682,6 +2696,46 @@ public class MessagesController extends BaseController implements NotificationCe return fullChats.get(chatId); } + public void putGroupCall(int chatId, ChatObject.Call call) { + groupCalls.put(call.call.id, call); + groupCallsByChatId.put(chatId, call); + TLRPC.ChatFull chatFull = getChatFull(chatId); + if (chatFull != null) { + chatFull.call = call.getInputGroupCall(); + } + getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.call.id, false); + loadFullChat(chatId, 0, true); + } + + public ChatObject.Call getGroupCall(int chatId, boolean load) { + TLRPC.ChatFull chatFull = 
getChatFull(chatId); + if (chatFull == null || chatFull.call == null) { + return null; + } + ChatObject.Call result = groupCalls.get(chatFull.call.id); + if (result == null && load && !loadingGroupCalls.contains(chatId)) { + loadingGroupCalls.add(chatId); + if (chatFull.call != null) { + TLRPC.TL_phone_getGroupCall req = new TLRPC.TL_phone_getGroupCall(); + req.call = chatFull.call; + getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (response != null) { + TLRPC.TL_phone_groupCall groupCall = (TLRPC.TL_phone_groupCall) response; + putUsers(groupCall.users, false); + + ChatObject.Call call = new ChatObject.Call(); + call.setCall(currentAccount, chatId, groupCall); + groupCalls.put(groupCall.call.id, call); + groupCallsByChatId.put(chatId, call); + getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, chatId, groupCall.call.id, false); + } + loadingGroupCalls.remove((Integer) chatId); + })); + } + } + return result; + } + public void cancelLoadFullUser(int uid) { loadingFullUsers.remove((Integer) uid); } @@ -2845,6 +2899,7 @@ public class MessagesController extends BaseController implements NotificationCe if (cache) { loadingChannelAdmins.delete(chatId); loadChannelAdmins(chatId, false); + getNotificationCenter().postNotificationName(NotificationCenter.didLoadChatAdmins, chatId); } }); } @@ -2911,6 +2966,10 @@ public class MessagesController extends BaseController implements NotificationCe } AndroidUtilities.runOnUIThread(() -> { + TLRPC.ChatFull old = fullChats.get(chat_id); + if (old != null) { + res.full_chat.inviterId = old.inviterId; + } fullChats.put(chat_id, res.full_chat); applyDialogNotificationsSettings(-chat_id, res.full_chat.notify_settings); for (int a = 0; a < res.full_chat.bot_info.size(); a++) { @@ -3089,9 +3148,6 @@ public class MessagesController extends BaseController implements NotificationCe final ArrayList objects = new ArrayList<>(); for (int a = 0; a < 
messagesRes.messages.size(); a++) { TLRPC.Message message = messagesRes.messages.get(a); - if (chat != null && chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } message.dialog_id = dialog_id; if (!scheduled) { message.unread = (message.out ? outboxValue : inboxValue) < message.id; @@ -3173,20 +3229,28 @@ public class MessagesController extends BaseController implements NotificationCe }, ConnectionsManager.RequestFlagFailOnServerErrors); } else { - TLRPC.TL_account_reportPeer req = new TLRPC.TL_account_reportPeer(); - if (currentChat != null) { - req.peer = getInputPeer(-currentChat.id); - } else if (currentUser != null) { - req.peer = getInputPeer(currentUser.id); - } if (geo) { + TLRPC.TL_account_reportPeer req = new TLRPC.TL_account_reportPeer(); + if (currentChat != null) { + req.peer = getInputPeer(-currentChat.id); + } else if (currentUser != null) { + req.peer = getInputPeer(currentUser.id); + } req.reason = new TLRPC.TL_inputReportReasonGeoIrrelevant(); - } else { - req.reason = new TLRPC.TL_inputReportReasonSpam(); - } - getConnectionsManager().sendRequest(req, (response, error) -> { + getConnectionsManager().sendRequest(req, (response, error) -> { - }, ConnectionsManager.RequestFlagFailOnServerErrors); + }, ConnectionsManager.RequestFlagFailOnServerErrors); + } else { + TLRPC.TL_messages_reportSpam req = new TLRPC.TL_messages_reportSpam(); + if (currentChat != null) { + req.peer = getInputPeer(-currentChat.id); + } else if (currentUser != null) { + req.peer = getInputPeer(currentUser.id); + } + getConnectionsManager().sendRequest(req, (response, error) -> { + + }, ConnectionsManager.RequestFlagFailOnServerErrors); + } } } @@ -3584,7 +3648,7 @@ public class MessagesController extends BaseController implements NotificationCe TLRPC.TL_messages_editChatAdmin req = new TLRPC.TL_messages_editChatAdmin(); req.chat_id = chatId; req.user_id = getInputUser(user); - req.is_admin = rights.change_info || rights.delete_messages || rights.ban_users 
|| rights.invite_users || rights.pin_messages || rights.add_admins; + req.is_admin = rights.change_info || rights.delete_messages || rights.ban_users || rights.invite_users || rights.pin_messages || rights.add_admins || rights.manage_call; RequestDelegate requestDelegate = (response, error) -> { if (error == null) { AndroidUtilities.runOnUIThread(() -> loadFullChat(chatId, 0, true), 1000); @@ -3593,7 +3657,7 @@ public class MessagesController extends BaseController implements NotificationCe } }; if (req.is_admin && addingNew) { - addUserToChat(chatId, user, null, 0, null, parentFragment, () -> getConnectionsManager().sendRequest(req, requestDelegate)); + addUserToChat(chatId, user, 0, null, parentFragment, () -> getConnectionsManager().sendRequest(req, requestDelegate)); } else { getConnectionsManager().sendRequest(req, requestDelegate); } @@ -5482,16 +5546,16 @@ public class MessagesController extends BaseController implements NotificationCe } public void loadMessages(long dialogId, long mergeDialogId, boolean loadInfo, int count, int max_id, int offset_date, boolean fromCache, int midDate, int classGuid, int load_type, int last_message_id, boolean isChannel, int mode, int threadMessageId, int loadIndex, int first_unread, int unread_count, int last_date, boolean queryFromServer, int mentionsCount) { - loadMessagesInternal(dialogId, mergeDialogId, loadInfo, count, max_id, offset_date, fromCache, midDate, classGuid, load_type, last_message_id, isChannel, mode, threadMessageId, loadIndex, first_unread, unread_count, last_date, queryFromServer, mentionsCount, true); + loadMessagesInternal(dialogId, mergeDialogId, loadInfo, count, max_id, offset_date, fromCache, midDate, classGuid, load_type, last_message_id, isChannel, mode, threadMessageId, loadIndex, first_unread, unread_count, last_date, queryFromServer, mentionsCount, true, true); } - private void loadMessagesInternal(long dialogId, long mergeDialogId, boolean loadInfo, int count, int max_id, int offset_date, 
boolean fromCache, int minDate, int classGuid, int load_type, int last_message_id, boolean isChannel, int mode, int threadMessageId, int loadIndex, int first_unread, int unread_count, int last_date, boolean queryFromServer, int mentionsCount, boolean loadDialog) { + private void loadMessagesInternal(long dialogId, long mergeDialogId, boolean loadInfo, int count, int max_id, int offset_date, boolean fromCache, int minDate, int classGuid, int load_type, int last_message_id, boolean isChannel, int mode, int threadMessageId, int loadIndex, int first_unread, int unread_count, int last_date, boolean queryFromServer, int mentionsCount, boolean loadDialog, boolean processMessages) { if (BuildVars.LOGS_ENABLED) { FileLog.d("load messages in chat " + dialogId + " count " + count + " max_id " + max_id + " cache " + fromCache + " mindate = " + minDate + " guid " + classGuid + " load_type " + load_type + " last_message_id " + last_message_id + " mode " + mode + " index " + loadIndex + " firstUnread " + first_unread + " unread_count " + unread_count + " last_date " + last_date + " queryFromServer " + queryFromServer); } int lower_part = (int) dialogId; if (threadMessageId == 0 && mode != 2 && (fromCache || lower_part == 0)) { - getMessagesStorage().getMessages(dialogId, mergeDialogId, loadInfo, count, max_id, offset_date, minDate, classGuid, load_type, isChannel, mode == 1, threadMessageId, loadIndex); + getMessagesStorage().getMessages(dialogId, mergeDialogId, loadInfo, count, max_id, offset_date, minDate, classGuid, load_type, isChannel, mode == 1, threadMessageId, loadIndex, processMessages); } else { if (threadMessageId != 0) { if (mode != 0) { @@ -5548,7 +5612,7 @@ public class MessagesController extends BaseController implements NotificationCe } } } - processLoadedMessages(res, dialogId, mergeDialogId, count, mid, offset_date, false, classGuid, fnid, last_message_id, unread_count, last_date, load_type, isChannel, false, 0, threadMessageId, loadIndex, queryFromServer, 
mentionsCount); + processLoadedMessages(res, res.messages.size(), dialogId, mergeDialogId, count, mid, offset_date, false, classGuid, fnid, last_message_id, unread_count, last_date, load_type, isChannel, false, 0, threadMessageId, loadIndex, queryFromServer, mentionsCount, processMessages); } else { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.loadingMessagesFailed, classGuid, req, error)); } @@ -5577,7 +5641,7 @@ public class MessagesController extends BaseController implements NotificationCe } } } - processLoadedMessages(res, dialogId, mergeDialogId, count, mid, offset_date, false, classGuid, first_unread, last_message_id, unread_count, last_date, load_type, isChannel, false, mode, threadMessageId, loadIndex, queryFromServer, mentionsCount); + processLoadedMessages(res, res.messages.size(), dialogId, mergeDialogId, count, mid, offset_date, false, classGuid, first_unread, last_message_id, unread_count, last_date, load_type, isChannel, false, mode, threadMessageId, loadIndex, queryFromServer, mentionsCount, processMessages); } }); getConnectionsManager().bindRequestToGuid(reqId, classGuid); @@ -5601,10 +5665,10 @@ public class MessagesController extends BaseController implements NotificationCe dialogs.users = res.users; dialogs.dialogs = res.dialogs; dialogs.messages = res.messages; - getMessagesStorage().putDialogs(dialogs, 0); + getMessagesStorage().putDialogs(dialogs, 2); } - loadMessagesInternal(dialogId, mergeDialogId, loadInfo, count, max_id, offset_date, false, minDate, classGuid, load_type, dialog.top_message, isChannel, 0, threadMessageId, loadIndex, first_unread, dialog.unread_count, last_date, queryFromServer, dialog.unread_mentions_count, false); + loadMessagesInternal(dialogId, mergeDialogId, loadInfo, count, max_id, offset_date, false, minDate, classGuid, load_type, dialog.top_message, isChannel, 0, threadMessageId, loadIndex, first_unread, dialog.unread_count, last_date, queryFromServer, 
dialog.unread_mentions_count, false, processMessages); } } else { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.loadingMessagesFailed, classGuid, req, error)); @@ -5652,7 +5716,7 @@ public class MessagesController extends BaseController implements NotificationCe } } } - processLoadedMessages(res, dialogId, mergeDialogId, count, mid, offset_date, false, classGuid, first_unread, last_message_id, unread_count, last_date, load_type, isChannel, false, 0, threadMessageId, loadIndex, queryFromServer, mentionsCount); + processLoadedMessages(res, res.messages.size(), dialogId, mergeDialogId, count, mid, offset_date, false, classGuid, first_unread, last_message_id, unread_count, last_date, load_type, isChannel, false, 0, threadMessageId, loadIndex, queryFromServer, mentionsCount, processMessages); } else { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.loadingMessagesFailed, classGuid, req, error)); } @@ -5710,13 +5774,12 @@ public class MessagesController extends BaseController implements NotificationCe } } - public void processLoadedMessages(TLRPC.messages_Messages messagesRes, long dialogId, long mergeDialogId, int count, int max_id, int offset_date, boolean isCache, int classGuid, - int first_unread, int last_message_id, int unread_count, int last_date, int load_type, boolean isChannel, boolean isEnd, int mode, int threadMessageId, int loadIndex, boolean queryFromServer, int mentionsCount) { + public void processLoadedMessages(TLRPC.messages_Messages messagesRes, int resCount, long dialogId, long mergeDialogId, int count, int max_id, int offset_date, boolean isCache, int classGuid, + int first_unread, int last_message_id, int unread_count, int last_date, int load_type, boolean isChannel, boolean isEnd, int mode, int threadMessageId, int loadIndex, boolean queryFromServer, int mentionsCount, boolean needProcess) { if (BuildVars.LOGS_ENABLED) { 
FileLog.d("processLoadedMessages size " + messagesRes.messages.size() + " in chat " + dialogId + " count " + count + " max_id " + max_id + " cache " + isCache + " guid " + classGuid + " load_type " + load_type + " last_message_id " + last_message_id + " isChannel " + isChannel + " index " + loadIndex + " firstUnread " + first_unread + " unread_count " + unread_count + " last_date " + last_date + " queryFromServer " + queryFromServer); } boolean createDialog = false; - boolean isMegagroup = false; if (messagesRes instanceof TLRPC.TL_messages_channelMessages) { int channelId = -(int) dialogId; if (mode == 0 && threadMessageId == 0) { @@ -5734,13 +5797,6 @@ public class MessagesController extends BaseController implements NotificationCe } } } - for (int a = 0; a < messagesRes.chats.size(); a++) { - TLRPC.Chat chat = messagesRes.chats.get(a); - if (chat.id == channelId) { - isMegagroup = chat.megagroup; - break; - } - } } int lower_id = (int) dialogId; int high_id = (int) (dialogId >> 32); @@ -5748,8 +5804,15 @@ public class MessagesController extends BaseController implements NotificationCe ImageLoader.saveMessagesThumbs(messagesRes.messages); } boolean isInitialLoading = offset_date == 0 && max_id == 0; - - if (high_id != 1 && lower_id != 0 && isCache && ((messagesRes.messages.size() == 0 && (!isInitialLoading || (SystemClock.elapsedRealtime() - lastServerQueryTime.get(dialogId, 0L)) > 60 * 1000)) || mode == 1 && (SystemClock.elapsedRealtime() - lastScheduledServerQueryTime.get(dialogId, 0L)) > 60 * 1000)) { + boolean requestByTime; + if (mode == 1) { + requestByTime = ((SystemClock.elapsedRealtime() - lastScheduledServerQueryTime.get(dialogId, 0L)) > 60 * 1000); + } else if (mode == 2) { + requestByTime = false; + } else { + requestByTime = (SystemClock.elapsedRealtime() - lastServerQueryTime.get(dialogId, 0L)) > 60 * 1000; + } + if (high_id != 1 && lower_id != 0 && isCache && (resCount == 0 && (!isInitialLoading || requestByTime))) { int hash; if (mode == 2) { hash 
= 0; @@ -5770,7 +5833,7 @@ public class MessagesController extends BaseController implements NotificationCe lastServerQueryTime.put(dialogId, SystemClock.elapsedRealtime()); hash = 0; } - AndroidUtilities.runOnUIThread(() -> loadMessages(dialogId, mergeDialogId, false, count, load_type == 2 && queryFromServer ? first_unread : max_id, offset_date, false, hash, classGuid, load_type, last_message_id, isChannel, mode, threadMessageId, loadIndex, first_unread, unread_count, last_date, queryFromServer, mentionsCount)); + AndroidUtilities.runOnUIThread(() -> loadMessagesInternal(dialogId, mergeDialogId, false, count, load_type == 2 && queryFromServer ? first_unread : max_id, offset_date, false, hash, classGuid, load_type, last_message_id, isChannel, mode, threadMessageId, loadIndex, first_unread, unread_count, last_date, queryFromServer, mentionsCount, true, needProcess)); if (messagesRes.messages.isEmpty()) { return; } @@ -5801,9 +5864,6 @@ public class MessagesController extends BaseController implements NotificationCe for (int a = 0; a < size; a++) { TLRPC.Message message = messagesRes.messages.get(a); - if (isMegagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } if (mode == 0) { if (message.action instanceof TLRPC.TL_messageActionChatDeleteUser) { @@ -5829,6 +5889,10 @@ public class MessagesController extends BaseController implements NotificationCe } } + if (!needProcess && (int) dialogId == 0) { + AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.messagesDidLoadWithoutProcess, classGuid, messagesRes.messages.size(), isCache, isEnd, last_message_id)); + return; + } final ArrayList objects = new ArrayList<>(); final ArrayList messagesToReload = new ArrayList<>(); final HashMap> webpagesToReload = new HashMap<>(); @@ -5887,7 +5951,13 @@ public class MessagesController extends BaseController implements NotificationCe if ((int) dialogId != 0) { int finalFirst_unread_final = first_unread_final; - 
getMediaDataController().loadReplyMessagesForMessages(objects, dialogId, mode == 1, () -> getNotificationCenter().postNotificationName(NotificationCenter.messagesDidLoad, dialogId, count, objects, isCache, finalFirst_unread_final, last_message_id, unread_count, last_date, load_type, isEnd, classGuid, loadIndex, max_id, mentionsCount, mode)); + getMediaDataController().loadReplyMessagesForMessages(objects, dialogId, mode == 1, () -> { + if (!needProcess) { + getNotificationCenter().postNotificationName(NotificationCenter.messagesDidLoadWithoutProcess, classGuid, resCount, isCache, isEnd, last_message_id); + } else { + getNotificationCenter().postNotificationName(NotificationCenter.messagesDidLoad, dialogId, count, objects, isCache, finalFirst_unread_final, last_message_id, unread_count, last_date, load_type, isEnd, classGuid, loadIndex, max_id, mentionsCount, mode); + } + }); } else { getNotificationCenter().postNotificationName(NotificationCenter.messagesDidLoad, dialogId, count, objects, isCache, first_unread_final, last_message_id, unread_count, last_date, load_type, isEnd, classGuid, loadIndex, max_id, mentionsCount, mode); } @@ -6476,9 +6546,6 @@ public class MessagesController extends BaseController implements NotificationCe if (chat != null && chat.left) { continue; } - if (chat != null && chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } else if (message.peer_id.chat_id != 0) { TLRPC.Chat chat = chatsDict.get(message.peer_id.chat_id); if (chat != null && chat.migrated_to != null) { @@ -6910,9 +6977,6 @@ public class MessagesController extends BaseController implements NotificationCe if (chat != null && chat.left && (promoDialogId == 0 || promoDialogId != -chat.id)) { continue; } - if (chat != null && chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } else if (message.peer_id.chat_id != 0) { TLRPC.Chat chat = chatsDict.get(message.peer_id.chat_id); if (chat != null && chat.migrated_to != null) { @@ -7074,7 +7138,7 
@@ public class MessagesController extends BaseController implements NotificationCe if (loadType == DIALOGS_LOAD_TYPE_CHANNEL) { TLRPC.Chat chat = dialogsRes.chats.get(0); getChannelDifference(chat.id); - checkChannelInviter(chat.id); + AndroidUtilities.runOnUIThread(() -> checkChatInviter(chat.id, true)); } TLRPC.Message lastMessageFinal = lastMessage; @@ -8455,116 +8519,91 @@ public class MessagesController extends BaseController implements NotificationCe return joiningToChannels.contains(chat_id); } - public void addUserToChat(final int chat_id, final TLRPC.User user, final TLRPC.ChatFull info, int count_fwd, String botHash, final BaseFragment fragment, final Runnable onFinishRunnable) { + public void addUserToChat(final int chat_id, final TLRPC.User user, int count_fwd, String botHash, final BaseFragment fragment, final Runnable onFinishRunnable) { if (user == null) { return; } - if (chat_id > 0) { - final TLObject request; + final TLObject request; - final boolean isChannel = ChatObject.isChannel(chat_id, currentAccount); - final boolean isMegagroup = isChannel && getChat(chat_id).megagroup; - final TLRPC.InputUser inputUser = getInputUser(user); - if (botHash == null || isChannel && !isMegagroup) { - if (isChannel) { - if (inputUser instanceof TLRPC.TL_inputUserSelf) { - if (joiningToChannels.contains(chat_id)) { - return; - } - TLRPC.TL_channels_joinChannel req = new TLRPC.TL_channels_joinChannel(); - req.channel = getInputChannel(chat_id); - request = req; - joiningToChannels.add(chat_id); - } else { - TLRPC.TL_channels_inviteToChannel req = new TLRPC.TL_channels_inviteToChannel(); - req.channel = getInputChannel(chat_id); - req.users.add(inputUser); - request = req; + final boolean isChannel = ChatObject.isChannel(chat_id, currentAccount); + final boolean isMegagroup = isChannel && getChat(chat_id).megagroup; + final TLRPC.InputUser inputUser = getInputUser(user); + if (botHash == null || isChannel && !isMegagroup) { + if (isChannel) { + if (inputUser 
instanceof TLRPC.TL_inputUserSelf) { + if (joiningToChannels.contains(chat_id)) { + return; } + TLRPC.TL_channels_joinChannel req = new TLRPC.TL_channels_joinChannel(); + req.channel = getInputChannel(chat_id); + request = req; + joiningToChannels.add(chat_id); } else { - TLRPC.TL_messages_addChatUser req = new TLRPC.TL_messages_addChatUser(); - req.chat_id = chat_id; - req.fwd_limit = count_fwd; - req.user_id = inputUser; + TLRPC.TL_channels_inviteToChannel req = new TLRPC.TL_channels_inviteToChannel(); + req.channel = getInputChannel(chat_id); + req.users.add(inputUser); request = req; } } else { - TLRPC.TL_messages_startBot req = new TLRPC.TL_messages_startBot(); - req.bot = inputUser; - if (isChannel) { - req.peer = getInputPeer(-chat_id); - } else { - req.peer = new TLRPC.TL_inputPeerChat(); - req.peer.chat_id = chat_id; - } - req.start_param = botHash; - req.random_id = Utilities.random.nextLong(); + TLRPC.TL_messages_addChatUser req = new TLRPC.TL_messages_addChatUser(); + req.chat_id = chat_id; + req.fwd_limit = count_fwd; + req.user_id = inputUser; request = req; } - - getConnectionsManager().sendRequest(request, (response, error) -> { - if (isChannel && inputUser instanceof TLRPC.TL_inputUserSelf) { - AndroidUtilities.runOnUIThread(() -> joiningToChannels.remove((Integer) chat_id)); - } - if (error != null) { - AndroidUtilities.runOnUIThread(() -> { - AlertsCreator.processError(currentAccount, error, fragment, request, isChannel && !isMegagroup); - if (isChannel && inputUser instanceof TLRPC.TL_inputUserSelf) { - getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, UPDATE_MASK_CHAT); - } - }); - return; - } - boolean hasJoinMessage = false; - TLRPC.Updates updates = (TLRPC.Updates) response; - for (int a = 0; a < updates.updates.size(); a++) { - TLRPC.Update update = updates.updates.get(a); - if (update instanceof TLRPC.TL_updateNewChannelMessage) { - if (((TLRPC.TL_updateNewChannelMessage) update).message.action instanceof 
TLRPC.TL_messageActionChatAddUser) { - hasJoinMessage = true; - break; - } - } - } - processUpdates(updates, false); - if (isChannel) { - if (!hasJoinMessage && inputUser instanceof TLRPC.TL_inputUserSelf) { - generateJoinMessage(chat_id, true); - } - AndroidUtilities.runOnUIThread(() -> loadFullChat(chat_id, 0, true), 1000); - } - if (isChannel && inputUser instanceof TLRPC.TL_inputUserSelf) { - getMessagesStorage().updateDialogsWithDeletedMessages(new ArrayList<>(), null, true, chat_id); - } - if (onFinishRunnable != null) { - AndroidUtilities.runOnUIThread(onFinishRunnable); - } - }); } else { - if (info instanceof TLRPC.TL_chatFull) { - for (int a = 0; a < info.participants.participants.size(); a++) { - if (info.participants.participants.get(a).user_id == user.id) { - return; + TLRPC.TL_messages_startBot req = new TLRPC.TL_messages_startBot(); + req.bot = inputUser; + if (isChannel) { + req.peer = getInputPeer(-chat_id); + } else { + req.peer = new TLRPC.TL_inputPeerChat(); + req.peer.chat_id = chat_id; + } + req.start_param = botHash; + req.random_id = Utilities.random.nextLong(); + request = req; + } + + getConnectionsManager().sendRequest(request, (response, error) -> { + if (isChannel && inputUser instanceof TLRPC.TL_inputUserSelf) { + AndroidUtilities.runOnUIThread(() -> joiningToChannels.remove((Integer) chat_id)); + } + if (error != null) { + AndroidUtilities.runOnUIThread(() -> { + AlertsCreator.processError(currentAccount, error, fragment, request, isChannel && !isMegagroup); + if (isChannel && inputUser instanceof TLRPC.TL_inputUserSelf) { + getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, UPDATE_MASK_CHAT); + } + }); + return; + } + boolean hasJoinMessage = false; + TLRPC.Updates updates = (TLRPC.Updates) response; + for (int a = 0; a < updates.updates.size(); a++) { + TLRPC.Update update = updates.updates.get(a); + if (update instanceof TLRPC.TL_updateNewChannelMessage) { + if (((TLRPC.TL_updateNewChannelMessage) 
update).message.action instanceof TLRPC.TL_messageActionChatAddUser) { + hasJoinMessage = true; + break; } } - - TLRPC.Chat chat = getChat(chat_id); - chat.participants_count++; - ArrayList chatArrayList = new ArrayList<>(); - chatArrayList.add(chat); - getMessagesStorage().putUsersAndChats(null, chatArrayList, true, true); - - TLRPC.TL_chatParticipant newPart = new TLRPC.TL_chatParticipant(); - newPart.user_id = user.id; - newPart.inviter_id = getUserConfig().getClientUserId(); - newPart.date = getConnectionsManager().getCurrentTime(); - info.participants.participants.add(0, newPart); - getMessagesStorage().updateChatInfo(info, true); - getNotificationCenter().postNotificationName(NotificationCenter.chatInfoDidLoad, info, 0, false); - getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, UPDATE_MASK_CHAT_MEMBERS); } - } + processUpdates(updates, false); + if (isChannel) { + if (!hasJoinMessage && inputUser instanceof TLRPC.TL_inputUserSelf) { + generateJoinMessage(chat_id, true); + } + AndroidUtilities.runOnUIThread(() -> loadFullChat(chat_id, 0, true), 1000); + } + if (isChannel && inputUser instanceof TLRPC.TL_inputUserSelf) { + getMessagesStorage().updateDialogsWithDeletedMessages(new ArrayList<>(), null, true, chat_id); + } + if (onFinishRunnable != null) { + AndroidUtilities.runOnUIThread(onFinishRunnable); + } + }); } public void deleteUserFromChat(final int chat_id, final TLRPC.User user, final TLRPC.ChatFull info) { @@ -9347,9 +9386,6 @@ public class MessagesController extends BaseController implements NotificationCe for (int a = 0; a < res.new_messages.size(); a++) { TLRPC.Message message = res.new_messages.get(a); message.unread = !(channelFinal != null && channelFinal.left || (message.out ? 
outboxValue : inboxValue) >= message.id || message.action instanceof TLRPC.TL_messageActionChannelCreate); - if (channelFinal != null && channelFinal.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } boolean isDialogCreated = createdDialogIds.contains(dialog_id); MessageObject obj = new MessageObject(currentAccount, message, usersDict, isDialogCreated, isDialogCreated); @@ -9406,9 +9442,6 @@ public class MessagesController extends BaseController implements NotificationCe TLRPC.Message message = res.messages.get(a); message.dialog_id = -channelId; message.unread = !(message.action instanceof TLRPC.TL_messageActionChannelCreate || channelFinal != null && channelFinal.left || (message.out ? outboxValue : inboxValue) >= message.id); - if (channelFinal != null && channelFinal.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } getMessagesStorage().overwriteChannel(channelId, (TLRPC.TL_updates_channelDifferenceTooLong) res, newDialogType); } @@ -9798,6 +9831,8 @@ public class MessagesController extends BaseController implements NotificationCe } int size = 0; + ArrayList dids = new ArrayList<>(); + ArrayList pinned = new ArrayList<>(); for (int a = 0, N = dialogs.size(); a < N; a++) { TLRPC.Dialog dialog = dialogs.get(a); if (dialog instanceof TLRPC.TL_dialogFolder) { @@ -9809,7 +9844,8 @@ public class MessagesController extends BaseController implements NotificationCe } continue; } - getMessagesStorage().setDialogPinned(dialog.id, dialog.pinnedNum); + dids.add(dialog.id); + pinned.add(dialog.pinnedNum); if ((int) dialog.id != 0) { TLRPC.InputPeer inputPeer = getInputPeer((int) dialogs.get(a).id); TLRPC.TL_inputDialogPeer inputDialogPeer = new TLRPC.TL_inputDialogPeer(); @@ -9818,6 +9854,7 @@ public class MessagesController extends BaseController implements NotificationCe size += inputDialogPeer.getObjectSize(); } } + getMessagesStorage().setDialogsPinned(dids, pinned); NativeByteBuffer data = null; try { @@ -10043,6 +10080,8 @@ public 
class MessagesController extends BaseController implements NotificationCe if (!newPinnedDialogs.isEmpty()) { putUsers(res.users, false); putChats(res.chats, false); + ArrayList dids = new ArrayList<>(); + ArrayList pinned = new ArrayList<>(); for (int a = 0, N = newPinnedDialogs.size(); a < N; a++) { TLRPC.Dialog dialog = newPinnedDialogs.get(a); dialog.pinnedNum = (N - a) + maxPinnedNum; @@ -10052,7 +10091,8 @@ public class MessagesController extends BaseController implements NotificationCe if (d != null) { d.pinned = true; d.pinnedNum = dialog.pinnedNum; - getMessagesStorage().setDialogPinned(dialog.id, dialog.pinnedNum); + dids.add(dialog.id); + pinned.add(dialog.pinnedNum); } else { added = true; dialogs_dict.put(dialog.id, dialog); @@ -10069,6 +10109,7 @@ public class MessagesController extends BaseController implements NotificationCe changed = true; } + getMessagesStorage().setDialogsPinned(dids, pinned); } if (changed) { if (added) { @@ -10107,9 +10148,6 @@ public class MessagesController extends BaseController implements NotificationCe message.post = true; message.action = new TLRPC.TL_messageActionChatAddUser(); message.action.users.add(getUserConfig().getClientUserId()); - if (chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } getUserConfig().saveConfig(false); final ArrayList pushMessages = new ArrayList<>(); @@ -10158,32 +10196,31 @@ public class MessagesController extends BaseController implements NotificationCe }); } - public void checkChannelInviter(final int chat_id) { - AndroidUtilities.runOnUIThread(() -> { - final TLRPC.Chat chat = getChat(chat_id); - if (chat == null || !ChatObject.isChannel(chat_id, currentAccount) || chat.creator) { - return; - } - TLRPC.TL_channels_getParticipant req = new TLRPC.TL_channels_getParticipant(); - req.channel = getInputChannel(chat_id); - req.user_id = new TLRPC.TL_inputUserSelf(); - getConnectionsManager().sendRequest(req, (response, error) -> { - final TLRPC.TL_channels_channelParticipant 
res = (TLRPC.TL_channels_channelParticipant) response; - if (res != null && res.participant instanceof TLRPC.TL_channelParticipantSelf && res.participant.inviter_id != getUserConfig().getClientUserId()) { - if (chat.megagroup && getMessagesStorage().isMigratedChat(chat.id)) { - return; - } - AndroidUtilities.runOnUIThread(() -> putUsers(res.users, false)); - getMessagesStorage().putUsersAndChats(res.users, null, true, true); + public void checkChatInviter(final int chatId, boolean createMessage) { + final TLRPC.Chat chat = getChat(chatId); + if (!ChatObject.isChannel(chat) || chat.creator || gettingChatInviters.indexOfKey(chatId) >= 0) { + return; + } + gettingChatInviters.put(chatId, true); + TLRPC.TL_channels_getParticipant req = new TLRPC.TL_channels_getParticipant(); + req.channel = getInputChannel(chatId); + req.user_id = new TLRPC.TL_inputUserSelf(); + getConnectionsManager().sendRequest(req, (response, error) -> { + final TLRPC.TL_channels_channelParticipant res = (TLRPC.TL_channels_channelParticipant) response; + if (res != null && res.participant instanceof TLRPC.TL_channelParticipantSelf && res.participant.inviter_id != getUserConfig().getClientUserId()) { + if (chat.megagroup && getMessagesStorage().isMigratedChat(chat.id)) { + return; + } + AndroidUtilities.runOnUIThread(() -> putUsers(res.users, false)); + getMessagesStorage().putUsersAndChats(res.users, null, true, true); + ArrayList pushMessages; + if (createMessage && !getMessagesStorage().hasInviteMeMessage(chatId)) { TLRPC.TL_messageService message = new TLRPC.TL_messageService(); message.media_unread = true; message.unread = true; message.flags = TLRPC.MESSAGE_FLAG_HAS_FROM_ID; message.post = true; - if (chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } message.local_id = message.id = getUserConfig().getNewMessageId(); message.date = res.participant.date; message.action = new TLRPC.TL_messageActionChatAddUser(); @@ -10191,11 +10228,11 @@ public class MessagesController 
extends BaseController implements NotificationCe message.from_id.user_id = res.participant.inviter_id; message.action.users.add(getUserConfig().getClientUserId()); message.peer_id = new TLRPC.TL_peerChannel(); - message.peer_id.channel_id = chat_id; - message.dialog_id = -chat_id; + message.peer_id.channel_id = chatId; + message.dialog_id = -chatId; getUserConfig().saveConfig(false); - final ArrayList pushMessages = new ArrayList<>(); + pushMessages = new ArrayList<>(); final ArrayList messagesArr = new ArrayList<>(); ConcurrentHashMap usersDict = new ConcurrentHashMap<>(); @@ -10207,23 +10244,30 @@ public class MessagesController extends BaseController implements NotificationCe messagesArr.add(message); MessageObject obj = new MessageObject(currentAccount, message, usersDict, true, false); pushMessages.add(obj); - getMessagesStorage().getStorageQueue().postRunnable(() -> AndroidUtilities.runOnUIThread(() -> getNotificationsController().processNewMessages(pushMessages, true, false, null))); getMessagesStorage().putMessages(messagesArr, true, true, false, 0, false); - - AndroidUtilities.runOnUIThread(() -> { - updateInterfaceWithMessages(-chat_id, pushMessages, false); - getNotificationCenter().postNotificationName(NotificationCenter.dialogsNeedReload); - }); + } else { + pushMessages = null; } - }); + + getMessagesStorage().saveChatInviter(chatId, res.participant.inviter_id); + + AndroidUtilities.runOnUIThread(() -> { + gettingChatInviters.delete(chatId); + if (pushMessages != null) { + updateInterfaceWithMessages(-chatId, pushMessages, false); + getNotificationCenter().postNotificationName(NotificationCenter.dialogsNeedReload); + } + getNotificationCenter().postNotificationName(NotificationCenter.didLoadChatInviter, chatId, res.participant.inviter_id); + }); + } }); } private int getUpdateType(TLRPC.Update update) { if (update instanceof TLRPC.TL_updateNewMessage || update instanceof TLRPC.TL_updateReadMessagesContents || update instanceof 
TLRPC.TL_updateReadHistoryInbox || update instanceof TLRPC.TL_updateReadHistoryOutbox || update instanceof TLRPC.TL_updateDeleteMessages || update instanceof TLRPC.TL_updateWebPage || - update instanceof TLRPC.TL_updateEditMessage || update instanceof TLRPC.TL_updateFolderPeers) { + update instanceof TLRPC.TL_updateEditMessage || update instanceof TLRPC.TL_updateFolderPeers || update instanceof TLRPC.TL_updatePinnedMessages) { return 0; } else if (update instanceof TLRPC.TL_updateNewEncryptedMessage) { return 1; @@ -10917,6 +10961,7 @@ public class MessagesController extends BaseController implements NotificationCe SparseIntArray markAsReadEncrypted = null; SparseArray> deletedMessages = null; SparseArray> scheduledDeletedMessages = null; + SparseArray> groupSpeakingActions = null; SparseIntArray clearHistoryMessages = null; ArrayList chatInfoToUpdate = null; ArrayList updatesOnMainThread = null; @@ -11053,9 +11098,6 @@ public class MessagesController extends BaseController implements NotificationCe } } } - if (chat != null && chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } if (message.action instanceof TLRPC.TL_messageActionChatDeleteUser) { TLRPC.User user = usersDict.get(message.action.user_id); @@ -11282,54 +11324,68 @@ public class MessagesController extends BaseController implements NotificationCe if (uid == 0) { uid = userId; } - ConcurrentHashMap> threads = printingUsers.get(uid); - ArrayList arr = threads != null ? 
threads.get(threadId) : null; - if (action instanceof TLRPC.TL_sendMessageCancelAction) { - if (arr != null) { - for (int a = 0, size = arr.size(); a < size; a++) { - PrintingUser pu = arr.get(a); - if (pu.userId == userId) { - arr.remove(a); - printChanged = true; + if (action instanceof TLRPC.TL_speakingInGroupCallAction) { + if (chatId != 0) { + if (groupSpeakingActions == null) { + groupSpeakingActions = new SparseArray<>(); + } + ArrayList uids = groupSpeakingActions.get(chatId); + if (uids == null) { + uids = new ArrayList<>(); + groupSpeakingActions.put(chatId, uids); + } + uids.add(userId); + } + } else { + ConcurrentHashMap> threads = printingUsers.get(uid); + ArrayList arr = threads != null ? threads.get(threadId) : null; + if (action instanceof TLRPC.TL_sendMessageCancelAction) { + if (arr != null) { + for (int a = 0, size = arr.size(); a < size; a++) { + PrintingUser pu = arr.get(a); + if (pu.userId == userId) { + arr.remove(a); + printChanged = true; + break; + } + } + if (arr.isEmpty()) { + threads.remove(threadId); + if (threads.isEmpty()) { + printingUsers.remove(uid); + } + } + } + } else { + if (threads == null) { + threads = new ConcurrentHashMap<>(); + printingUsers.put(uid, threads); + } + if (arr == null) { + arr = new ArrayList<>(); + threads.put(threadId, arr); + } + boolean exist = false; + for (PrintingUser u : arr) { + if (u.userId == userId) { + exist = true; + u.lastTime = currentTime; + if (u.action.getClass() != action.getClass()) { + printChanged = true; + } + u.action = action; break; } } - if (arr.isEmpty()) { - threads.remove(threadId); - if (threads.isEmpty()) { - printingUsers.remove(uid); - } + if (!exist) { + PrintingUser newUser = new PrintingUser(); + newUser.userId = userId; + newUser.lastTime = currentTime; + newUser.action = action; + arr.add(newUser); + printChanged = true; } } - } else { - if (threads == null) { - threads = new ConcurrentHashMap<>(); - printingUsers.put(uid, threads); - } - if (arr == null) { - arr = 
new ArrayList<>(); - threads.put(threadId, arr); - } - boolean exist = false; - for (PrintingUser u : arr) { - if (u.userId == userId) { - exist = true; - u.lastTime = currentTime; - if (u.action.getClass() != action.getClass()) { - printChanged = true; - } - u.action = action; - break; - } - } - if (!exist) { - PrintingUser newUser = new PrintingUser(); - newUser.userId = userId; - newUser.lastTime = currentTime; - newUser.action = action; - arr.add(newUser); - printChanged = true; - } } if (Math.abs(getConnectionsManager().getCurrentTime() - date) < 30) { onlinePrivacy.put(userId, date); @@ -11671,6 +11727,11 @@ public class MessagesController extends BaseController implements NotificationCe updatesOnMainThread = new ArrayList<>(); } updatesOnMainThread.add(baseUpdate); + } else if (baseUpdate instanceof TLRPC.TL_updateChat) { + if (updatesOnMainThread == null) { + updatesOnMainThread = new ArrayList<>(); + } + updatesOnMainThread.add(baseUpdate); } else if (baseUpdate instanceof TLRPC.TL_updateChannelMessageViews) { TLRPC.TL_updateChannelMessageViews update = (TLRPC.TL_updateChannelMessageViews) baseUpdate; if (BuildVars.LOGS_ENABLED) { @@ -11752,9 +11813,6 @@ public class MessagesController extends BaseController implements NotificationCe chat = getMessagesStorage().getChatSync(message.peer_id.channel_id); putChat(chat, true); } - if (chat != null && chat.megagroup) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } else { message = ((TLRPC.TL_updateEditMessage) baseUpdate).message; if (message.dialog_id == clientUserId) { @@ -11843,6 +11901,16 @@ public class MessagesController extends BaseController implements NotificationCe updatesOnMainThread = new ArrayList<>(); } updatesOnMainThread.add(baseUpdate); + } else if (baseUpdate instanceof TLRPC.TL_updateGroupCallParticipants) { + if (updatesOnMainThread == null) { + updatesOnMainThread = new ArrayList<>(); + } + updatesOnMainThread.add(baseUpdate); + } else if (baseUpdate instanceof 
TLRPC.TL_updateGroupCall) { + if (updatesOnMainThread == null) { + updatesOnMainThread = new ArrayList<>(); + } + updatesOnMainThread.add(baseUpdate); } else if (baseUpdate instanceof TLRPC.TL_updatePhoneCallSignalingData) { if (updatesOnMainThread == null) { updatesOnMainThread = new ArrayList<>(); @@ -12011,6 +12079,7 @@ public class MessagesController extends BaseController implements NotificationCe final ArrayList updatesOnMainThreadFinal = updatesOnMainThread; final ArrayList updateMessageThumbs = messageThumbs; final ArrayList folderUpdatesFinal = folderUpdates; + final SparseArray> groupSpeakingActionsFinal = groupSpeakingActions; AndroidUtilities.runOnUIThread(() -> { int updateMask = interfaceUpdateMaskFinal; @@ -12228,6 +12297,7 @@ public class MessagesController extends BaseController implements NotificationCe } if ((update.notify_settings.flags & 4) != 0) { editor.putInt("EnableGroup2", update.notify_settings.mute_until); + getNotificationsController().deleteNotificationChannelGlobal(NotificationsController.TYPE_GROUP); } } else if (update.peer instanceof TLRPC.TL_notifyUsers) { if ((update.notify_settings.flags & 1) != 0) { @@ -12242,6 +12312,7 @@ public class MessagesController extends BaseController implements NotificationCe } if ((update.notify_settings.flags & 4) != 0) { editor.putInt("EnableAll2", update.notify_settings.mute_until); + getNotificationsController().deleteNotificationChannelGlobal(NotificationsController.TYPE_PRIVATE); } } else if (update.peer instanceof TLRPC.TL_notifyBroadcasts) { if ((update.notify_settings.flags & 1) != 0) { @@ -12256,6 +12327,7 @@ public class MessagesController extends BaseController implements NotificationCe } if ((update.notify_settings.flags & 4) != 0) { editor.putInt("EnableChannel2", update.notify_settings.mute_until); + getNotificationsController().deleteNotificationChannelGlobal(NotificationsController.TYPE_CHANNEL); } } getMessagesStorage().updateMutedDialogsFiltersCounters(); @@ -12270,9 +12342,42 @@ 
public class MessagesController extends BaseController implements NotificationCe } else if (chat.left && dialog != null && (promoDialog == null || promoDialog.id != dialog.id)) { deleteDialog(dialog.id, 0); } + if (chat instanceof TLRPC.TL_channelForbidden || chat.kicked) { + ChatObject.Call call = getGroupCall(chat.id, false); + if (call != null) { + TLRPC.TL_updateGroupCall updateGroupCall = new TLRPC.TL_updateGroupCall(); + updateGroupCall.chat_id = chat.id; + updateGroupCall.call = new TLRPC.TL_groupCallDiscarded(); + updateGroupCall.call.id = call.call.id; + updateGroupCall.call.access_hash = call.call.access_hash; + call.processGroupCallUpdate(getAccountInstance(), updateGroupCall); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().onGroupCallUpdated(updateGroupCall.call); + } + } + } } updateMask |= UPDATE_MASK_CHAT; loadFullChat(update.channel_id, 0, true); + } else if (baseUpdate instanceof TLRPC.TL_updateChat) { + final TLRPC.TL_updateChat update = (TLRPC.TL_updateChat) baseUpdate; + TLRPC.Chat chat = getChat(update.chat_id); + if (chat != null && (chat instanceof TLRPC.TL_chatForbidden || chat.kicked)) { + ChatObject.Call call = getGroupCall(chat.id, false); + if (call != null) { + TLRPC.TL_updateGroupCall updateGroupCall = new TLRPC.TL_updateGroupCall(); + updateGroupCall.chat_id = chat.id; + updateGroupCall.call = new TLRPC.TL_groupCallDiscarded(); + updateGroupCall.call.id = call.call.id; + updateGroupCall.call.access_hash = call.call.access_hash; + call.processGroupCallUpdate(getAccountInstance(), updateGroupCall); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().onGroupCallUpdated(updateGroupCall.call); + } + } + } + updateMask |= UPDATE_MASK_CHAT; + loadFullChat(update.chat_id, 0, true); } else if (baseUpdate instanceof TLRPC.TL_updateChatDefaultBannedRights) { TLRPC.TL_updateChatDefaultBannedRights update = (TLRPC.TL_updateChatDefaultBannedRights) baseUpdate; int chatId; @@ 
-12326,6 +12431,33 @@ public class MessagesController extends BaseController implements NotificationCe if (svc != null) { svc.onSignalingData(data); } + } else if (baseUpdate instanceof TLRPC.TL_updateGroupCallParticipants) { + TLRPC.TL_updateGroupCallParticipants update = (TLRPC.TL_updateGroupCallParticipants) baseUpdate; + ChatObject.Call call = groupCalls.get(update.call.id); + if (call != null) { + call.processParticipantsUpdate(getAccountInstance(), update); + } + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().onGroupCallParticipantsUpdate(update); + } + } else if (baseUpdate instanceof TLRPC.TL_updateGroupCall) { + TLRPC.TL_updateGroupCall update = (TLRPC.TL_updateGroupCall) baseUpdate; + ChatObject.Call call = groupCalls.get(update.call.id); + if (call != null) { + call.processGroupCallUpdate(getAccountInstance(), update); + TLRPC.Chat chat = getChat(call.chatId); + if (chat != null) { + chat.call_active = update.call instanceof TLRPC.TL_groupCall; + } + } else { + TLRPC.ChatFull chatFull = getChatFull(update.chat_id); + if (chatFull != null && (chatFull.call == null || chatFull.call != null && chatFull.call.id != update.call.id)) { + loadFullChat(update.chat_id, 0, true); + } + } + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().onGroupCallUpdated(update.call); + } } else if (baseUpdate instanceof TLRPC.TL_updatePhoneCall) { TLRPC.TL_updatePhoneCall upd = (TLRPC.TL_updatePhoneCall) baseUpdate; TLRPC.PhoneCall call = upd.phone_call; @@ -12487,6 +12619,15 @@ public class MessagesController extends BaseController implements NotificationCe getMessagesStorage().updateUsers(dbUsersStatus, true, true, true); getMessagesStorage().updateUsers(dbUsers, false, true, true); } + if (groupSpeakingActionsFinal != null) { + for (int a = 0, N = groupSpeakingActionsFinal.size(); a < N; a++) { + int chatId = groupSpeakingActionsFinal.keyAt(a); + ChatObject.Call call = groupCallsByChatId.get(chatId); + if (call 
!= null) { + call.processTypingsUpdate(getAccountInstance(), groupSpeakingActionsFinal.valueAt(a), date); + } + } + } if (webPagesFinal != null) { getNotificationCenter().postNotificationName(NotificationCenter.didReceivedWebpagesInUpdates, webPagesFinal); for (int i = 0; i < 2; i++) { @@ -12749,7 +12890,7 @@ public class MessagesController extends BaseController implements NotificationCe getMessagesStorage().putWebPages(webPages); } if (markAsReadMessagesInbox != null || markAsReadMessagesOutbox != null || markAsReadEncrypted != null || markAsReadMessages != null) { - if (markAsReadMessagesInbox != null || markAsReadMessages != null) { + if (markAsReadMessagesInbox != null || markAsReadMessagesOutbox != null || markAsReadMessages != null) { getMessagesStorage().updateDialogsWithReadMessages(markAsReadMessagesInbox, markAsReadMessagesOutbox, markAsReadMessages, true); } getMessagesStorage().markMessagesAsRead(markAsReadMessagesInbox, markAsReadMessagesOutbox, markAsReadEncrypted, true); @@ -12914,6 +13055,12 @@ public class MessagesController extends BaseController implements NotificationCe channelId = message.messageOwner.peer_id.channel_id; } } + if (message.messageOwner.action instanceof TLRPC.TL_messageActionGroupCall) { + TLRPC.ChatFull chatFull = getChatFull(message.messageOwner.peer_id.channel_id); + if (chatFull != null && (chatFull.call == null || chatFull.call != null && chatFull.call.id != message.messageOwner.action.call.id)) { + loadFullChat(message.messageOwner.peer_id.channel_id, 0, true); + } + } if (!hasNotOutMessage && !message.isOut()) { hasNotOutMessage = true; } @@ -12982,6 +13129,16 @@ public class MessagesController extends BaseController implements NotificationCe getNotificationsController().removeNotificationsForDialog(dialog.id); getNotificationCenter().postNotificationName(NotificationCenter.needReloadRecentDialogsSearch); } + int lowerId = (int) uid; + if (lowerId < 0) { + ChatObject.Call call = getGroupCall(-lowerId, false); + if (call 
!= null) { + TLRPC.Chat chat = getChat(lastMessage.messageOwner.action.channel_id); + if (chat != null) { + call.migrateToChat(chat); + } + } + } return false; } @@ -13312,7 +13469,7 @@ public class MessagesController extends BaseController implements NotificationCe if (user == null && chat == null) { return true; } - String reason = null; + String reason; if (chat != null) { reason = getRestrictionReason(chat.restriction_reason); } else { @@ -13373,7 +13530,7 @@ public class MessagesController extends BaseController implements NotificationCe if (user == null && chat == null || fragment == null) { return; } - String reason = null; + String reason; if (chat != null) { reason = getRestrictionReason(chat.restriction_reason); } else { @@ -13470,49 +13627,94 @@ public class MessagesController extends BaseController implements NotificationCe } } - public void ensureMessagesLoaded(long dialog_id, boolean isChannel, int messageId, Runnable callback, Runnable doOnError) { + public void ensureMessagesLoaded(long dialogId, int messageId, MessagesLoadedCallback callback) { SharedPreferences sharedPreferences = MessagesController.getNotificationsSettings(currentAccount); if (messageId == 0) { - messageId = sharedPreferences.getInt("diditem" + dialog_id, 0); - } - if (messageId != 0 && getMessagesStorage().checkMessageId(dialog_id, isChannel, messageId)) { - if (callback != null) { - callback.run(); - } - return; + messageId = sharedPreferences.getInt("diditem" + dialogId, 0); } int finalMessageId = messageId; final int classGuid = ConnectionsManager.generateClassGuid(); - if (callback != null) { - NotificationCenter.NotificationCenterDelegate delegate = new NotificationCenter.NotificationCenterDelegate() { - @SuppressWarnings("unchecked") - @Override - public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.messagesDidLoad && (Integer) args[10] == classGuid) { - ArrayList messArr = (ArrayList) args[2]; - boolean isCache = (Boolean) args[3]; - if (messArr.isEmpty() && isCache) { - loadMessages(dialog_id, 0, false, 20, 3, 0, false, 0, classGuid, 3, 0, false, 0, 0, 0, 0); - } else { - getNotificationCenter().removeObserver(this, NotificationCenter.messagesDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.loadingMessagesFailed); - callback.run(); - } - } - if (id == NotificationCenter.loadingMessagesFailed && (Integer) args[0] == classGuid) { - getNotificationCenter().removeObserver(this, NotificationCenter.messagesDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.loadingMessagesFailed); - if (doOnError != null) { - doOnError.run(); - } - } - } - }; - getNotificationCenter().addObserver(delegate, NotificationCenter.messagesDidLoad); - getNotificationCenter().addObserver(delegate, NotificationCenter.loadingMessagesFailed); + int lower_part = (int) dialogId; + int chatId = 0; + + + if (lower_part < 0) { + chatId = -lower_part; } - loadMessages(dialog_id, 0, false, 1, finalMessageId, 0, true, 0, classGuid, 3, 0, false, 0, 0, 0, 0); + TLRPC.Chat currentChat = null; + + if (chatId != 0) { + currentChat = getMessagesController().getChat(chatId); + if (currentChat == null) { + final MessagesStorage messagesStorage = getMessagesStorage(); + int finalChatId = chatId; + messagesStorage.getStorageQueue().postRunnable(() -> { + TLRPC.Chat chat = messagesStorage.getChat(finalChatId); + AndroidUtilities.runOnUIThread(() -> { + if (chat != null) { + getMessagesController().putChat(chat, true); + ensureMessagesLoaded(dialogId, finalMessageId, callback); + } else { + if (callback != null) { + callback.onError(); + } + } + }); + }); + return; + } + } + + final boolean isChannel = ChatObject.isChannel(currentChat); + + final int count = AndroidUtilities.isTablet() ? 
30 : 20; + + NotificationCenter.NotificationCenterDelegate delegate = new NotificationCenter.NotificationCenterDelegate() { + @Override + public void didReceivedNotification(int id, int account, Object... args) { + if (id == NotificationCenter.messagesDidLoadWithoutProcess && (Integer) args[0] == classGuid) { + int size = (int) args[1]; + boolean isCache = (boolean) args[2]; + boolean isEnd = (boolean) args[3]; + int lastMessageId = (int) args[4]; + if ((size < count / 2 && !isEnd) && isCache) { + if (finalMessageId != 0) { + loadMessagesInternal(dialogId, 0, false, count, finalMessageId, 0, false, 0, classGuid, 3, lastMessageId, isChannel, 0, 0, 0, 0, 0, 0, false, 0, true, false); + } else { + loadMessagesInternal(dialogId, 0, false, count, finalMessageId, 0, false, 0, classGuid, 2, lastMessageId, isChannel, 0, 0, 0, 0, 0, 0, false, 0, true, false); + } + } else { + getNotificationCenter().removeObserver(this, NotificationCenter.messagesDidLoadWithoutProcess); + getNotificationCenter().removeObserver(this, NotificationCenter.loadingMessagesFailed); + if (callback != null) { + callback.onMessagesLoaded(isCache); + } + } + } else if (id == NotificationCenter.loadingMessagesFailed && (Integer) args[0] == classGuid) { + getNotificationCenter().removeObserver(this, NotificationCenter.messagesDidLoadWithoutProcess); + getNotificationCenter().removeObserver(this, NotificationCenter.loadingMessagesFailed); + if (callback != null) { + callback.onError(); + } + } + } + }; + + + getNotificationCenter().addObserver(delegate, NotificationCenter.messagesDidLoadWithoutProcess); + getNotificationCenter().addObserver(delegate, NotificationCenter.loadingMessagesFailed); + + if (messageId != 0) { + loadMessagesInternal(dialogId, 0, true, count, finalMessageId, 0, true, 0, classGuid, 3, 0, isChannel, 0, 0, 0, 0, 0, 0, false, 0, true, false); + } else { + loadMessagesInternal(dialogId, 0, true, count, finalMessageId, 0, true, 0, classGuid, 2, 0, isChannel, 0, 0, 0, 0, 0, 0, false, 0, 
true, false); + } + } + + public interface MessagesLoadedCallback { + void onMessagesLoaded(boolean fromCache); + void onError(); } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java index f3f83758a..cc42a9434 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java @@ -13,6 +13,7 @@ import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.TextUtils; import android.text.style.ForegroundColorSpan; +import android.util.Log; import android.util.LongSparseArray; import android.util.Pair; import android.util.SparseArray; @@ -86,7 +87,7 @@ public class MessagesStorage extends BaseController { private CountDownLatch openSync = new CountDownLatch(1); private static volatile MessagesStorage[] Instance = new MessagesStorage[UserConfig.MAX_ACCOUNT_COUNT]; - private final static int LAST_DB_VERSION = 73; + private final static int LAST_DB_VERSION = 74; public static MessagesStorage getInstance(int num) { MessagesStorage localInstance = Instance[num]; @@ -269,6 +270,7 @@ public class MessagesStorage extends BaseController { database.executeFast("PRAGMA secure_delete = ON").stepThis().dispose(); database.executeFast("PRAGMA temp_store = MEMORY").stepThis().dispose(); database.executeFast("PRAGMA journal_mode = WAL").stepThis().dispose(); + database.executeFast("PRAGMA journal_size_limit = 52428800").stepThis().dispose(); if (createTable) { if (BuildVars.LOGS_ENABLED) { @@ -291,7 +293,6 @@ public class MessagesStorage extends BaseController { database.executeFast("CREATE INDEX IF NOT EXISTS task_idx_messages ON messages(uid, out, read_state, ttl, date, send_state);").stepThis().dispose(); database.executeFast("CREATE INDEX IF NOT EXISTS send_state_idx_messages2 ON messages(mid, send_state, date);").stepThis().dispose(); 
database.executeFast("CREATE INDEX IF NOT EXISTS uid_mention_idx_messages ON messages(uid, mention, read_state);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_thread_reply_id_mid_idx_messages ON messages(uid, thread_reply_id, mid) WHERE thread_reply_id != 0;").stepThis().dispose(); database.executeFast("CREATE TABLE download_queue(uid INTEGER, type INTEGER, date INTEGER, data BLOB, parent TEXT, PRIMARY KEY (uid, type));").stepThis().dispose(); database.executeFast("CREATE INDEX IF NOT EXISTS type_date_idx_download_queue ON download_queue(type, date);").stepThis().dispose(); @@ -331,7 +332,7 @@ public class MessagesStorage extends BaseController { database.executeFast("CREATE TABLE bot_keyboard(uid INTEGER PRIMARY KEY, mid INTEGER, info BLOB)").stepThis().dispose(); database.executeFast("CREATE INDEX IF NOT EXISTS bot_keyboard_idx_mid ON bot_keyboard(mid);").stepThis().dispose(); - database.executeFast("CREATE TABLE chat_settings_v2(uid INTEGER PRIMARY KEY, info BLOB, pinned INTEGER, online INTEGER)").stepThis().dispose(); + database.executeFast("CREATE TABLE chat_settings_v2(uid INTEGER PRIMARY KEY, info BLOB, pinned INTEGER, online INTEGER, inviter INTEGER)").stepThis().dispose(); database.executeFast("CREATE INDEX IF NOT EXISTS chat_settings_pinned_idx ON chat_settings_v2(uid, pinned) WHERE pinned != 0;").stepThis().dispose(); database.executeFast("CREATE TABLE user_settings(uid INTEGER PRIMARY KEY, info BLOB, pinned INTEGER)").stepThis().dispose(); @@ -899,7 +900,6 @@ public class MessagesStorage extends BaseController { if (version == 69) { executeNoException("ALTER TABLE messages ADD COLUMN replies_data BLOB default NULL"); executeNoException("ALTER TABLE messages ADD COLUMN thread_reply_id INTEGER default 0"); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_thread_reply_id_mid_idx_messages ON messages(uid, thread_reply_id, mid) WHERE thread_reply_id != 0;").stepThis().dispose(); database.executeFast("PRAGMA user_version 
= 70").stepThis().dispose(); version = 70; } @@ -919,6 +919,11 @@ public class MessagesStorage extends BaseController { version = 73; } if (version == 73) { + executeNoException("ALTER TABLE chat_settings_v2 ADD COLUMN inviter INTEGER default 0"); + database.executeFast("PRAGMA user_version = 74").stepThis().dispose(); + version = 74; + } + if (version == 74) { } } catch (Exception e) { @@ -1514,9 +1519,9 @@ public class MessagesStorage extends BaseController { NativeByteBuffer data = cursor.byteBufferValue(4); if (data != null) { TLRPC.Message message = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); - message.readAttachPath(data, getUserConfig().clientUserId); - data.reuse(); if (message != null) { + message.readAttachPath(data, getUserConfig().clientUserId); + data.reuse(); MessageObject.setUnreadFlags(message, cursor.intValue(5)); message.id = cursor.intValue(6); int date = cursor.intValue(9); @@ -1535,15 +1540,12 @@ public class MessagesStorage extends BaseController { message.action instanceof TLRPC.TL_messageActionPaymentSent || message.action instanceof TLRPC.TL_messageActionGameScore)) { if (!cursor.isNull(13)) { - data = cursor.byteBufferValue(13); - if (data != null) { - message.replyMessage = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); - message.replyMessage.readAttachPath(data, getUserConfig().clientUserId); - data.reuse(); + NativeByteBuffer data2 = cursor.byteBufferValue(13); + if (data2 != null) { + message.replyMessage = TLRPC.Message.TLdeserialize(data2, data2.readInt32(false), false); + message.replyMessage.readAttachPath(data2, getUserConfig().clientUserId); + data2.reuse(); if (message.replyMessage != null) { - if (MessageObject.isMegagroup(message)) { - message.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } addUsersAndChatsFromMessage(message.replyMessage, usersToLoad, chatsToLoad); } } @@ -1566,6 +1568,8 @@ public class MessagesStorage extends BaseController { } catch (Exception e) { 
FileLog.e(e); } + } else { + data.reuse(); } } @@ -1607,9 +1611,6 @@ public class MessagesStorage extends BaseController { if (owner != null) { owner.replyMessage = message; message.dialog_id = owner.dialog_id; - if (MessageObject.isMegagroup(owner)) { - owner.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } } } @@ -1656,36 +1657,34 @@ public class MessagesStorage extends BaseController { while (cursor2.next()) { long did = cursor2.longValue(0); int lower_id = (int) did; - if (a == 0 || a == 1) { - if (a == 0) { - if (lower_id != 0) { - filter.alwaysShow.add(lower_id); - } - int pin = cursor2.intValue(1); - if (pin != Integer.MIN_VALUE) { - filter.pinnedDialogs.put(did, pin); - if (!dialogsToLoad.contains(did)) { - dialogsToLoad.add(did); - } - } - } else if (a == 1) { - if (lower_id != 0) { - filter.neverShow.add(lower_id); + if (a == 0) { + if (lower_id != 0) { + filter.alwaysShow.add(lower_id); + } + int pin = cursor2.intValue(1); + if (pin != Integer.MIN_VALUE) { + filter.pinnedDialogs.put(did, pin); + if (!dialogsToLoad.contains(did)) { + dialogsToLoad.add(did); } } - if (lower_id < 0) { - if (!chatsToLoad.contains(-lower_id)) { - chatsToLoad.add(-lower_id); - } - } else if (lower_id > 0) { - if (!usersToLoad.contains(lower_id)) { - usersToLoad.add(lower_id); - } - } else { - int high_id = (int) (did >> 32); - if (!encryptedToLoad.contains(high_id)) { - encryptedToLoad.add(high_id); - } + } else { + if (lower_id != 0) { + filter.neverShow.add(lower_id); + } + } + if (lower_id < 0) { + if (!chatsToLoad.contains(-lower_id)) { + chatsToLoad.add(-lower_id); + } + } else if (lower_id > 0) { + if (!usersToLoad.contains(lower_id)) { + usersToLoad.add(lower_id); + } + } else { + int high_id = (int) (did >> 32); + if (!encryptedToLoad.contains(high_id)) { + encryptedToLoad.add(high_id); } } } @@ -2682,9 +2681,6 @@ public class MessagesStorage extends BaseController { message.replyMessage.readAttachPath(data, getUserConfig().clientUserId); data.reuse(); if 
(message.replyMessage != null) { - if (MessageObject.isMegagroup(message)) { - message.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } addUsersAndChatsFromMessage(message.replyMessage, usersToLoad, chatsToLoad); } } @@ -2749,7 +2745,7 @@ public class MessagesStorage extends BaseController { } } - pushMessages.add(new MessageObject(currentAccount, message, messageText, name, userName, (flags & 1) != 0, (flags & 2) != 0, false)); + pushMessages.add(new MessageObject(currentAccount, message, messageText, name, userName, (flags & 1) != 0, (flags & 2) != 0, (message.flags & 0x80000000) != 0, false)); addUsersAndChatsFromMessage(message, usersToLoad, chatsToLoad); } } @@ -2774,9 +2770,6 @@ public class MessagesStorage extends BaseController { for (int a = 0; a < arrayList.size(); a++) { TLRPC.Message m = arrayList.get(a); m.replyMessage = message; - if (MessageObject.isMegagroup(m)) { - m.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } } } @@ -3563,19 +3556,21 @@ public class MessagesStorage extends BaseController { NativeByteBuffer data = cursor.byteBufferValue(0); if (data != null) { TLRPC.Message message = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); - message.readAttachPath(data, getUserConfig().clientUserId); - data.reuse(); if (message != null) { + message.readAttachPath(data, getUserConfig().clientUserId); + data.reuse(); MessageObject.updateReactions(message, reactions); SQLitePreparedStatement state = database.executeFast("UPDATE messages SET data = ? 
WHERE mid = ?"); - data = new NativeByteBuffer(message.getObjectSize()); - message.serializeToStream(data); + NativeByteBuffer data2 = new NativeByteBuffer(message.getObjectSize()); + message.serializeToStream(data2); state.requery(); - state.bindByteBuffer(1, data); + state.bindByteBuffer(1, data2); state.bindLong(2, mid); state.step(); - data.reuse(); + data2.reuse(); state.dispose(); + } else { + data.reuse(); } } } @@ -4578,7 +4573,7 @@ public class MessagesStorage extends BaseController { } public void updateDialogsWithReadMessages(final SparseLongArray inbox, final SparseLongArray outbox, final ArrayList mentions, boolean useQueue) { - if (isEmpty(inbox) && isEmpty(mentions)) { + if (isEmpty(inbox) && isEmpty(outbox) && isEmpty(mentions)) { return; } if (useQueue) { @@ -4594,7 +4589,7 @@ public class MessagesStorage extends BaseController { } storageQueue.postRunnable(() -> { try { - SQLiteCursor cursor = database.queryFinalized("SELECT info, pinned, online FROM chat_settings_v2 WHERE uid = " + participants.chat_id); + SQLiteCursor cursor = database.queryFinalized("SELECT info, pinned, online, inviter FROM chat_settings_v2 WHERE uid = " + participants.chat_id); TLRPC.ChatFull info = null; ArrayList loadedUsers = new ArrayList<>(); if (cursor.next()) { @@ -4604,6 +4599,7 @@ public class MessagesStorage extends BaseController { data.reuse(); info.pinned_msg_id = cursor.intValue(1); info.online_count = cursor.intValue(2); + info.inviterId = cursor.intValue(3); } } cursor.dispose(); @@ -4612,13 +4608,14 @@ public class MessagesStorage extends BaseController { final TLRPC.ChatFull finalInfo = info; AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.chatInfoDidLoad, finalInfo, 0, false)); - SQLitePreparedStatement state = database.executeFast("REPLACE INTO chat_settings_v2 VALUES(?, ?, ?, ?)"); + SQLitePreparedStatement state = database.executeFast("REPLACE INTO chat_settings_v2 VALUES(?, ?, ?, ?, ?)"); 
NativeByteBuffer data = new NativeByteBuffer(info.getObjectSize()); info.serializeToStream(data); state.bindInteger(1, info.id); state.bindByteBuffer(2, data); state.bindInteger(3, info.pinned_msg_id); state.bindInteger(4, info.online_count); + state.bindInteger(5, info.inviterId); state.step(); state.dispose(); data.reuse(); @@ -4856,13 +4853,30 @@ public class MessagesStorage extends BaseController { }); } + public void saveChatInviter(int chatId, int inviterId) { + storageQueue.postRunnable(() -> { + try { + SQLitePreparedStatement state = database.executeFast("UPDATE chat_settings_v2 SET inviter = ? WHERE uid = ?"); + state.requery(); + state.bindInteger(1, inviterId); + state.bindInteger(2, chatId); + state.step(); + state.dispose(); + } catch (Exception e) { + FileLog.e(e); + } + }); + } + public void updateChatInfo(final TLRPC.ChatFull info, final boolean ifExist) { storageQueue.postRunnable(() -> { try { int currentOnline = -1; - SQLiteCursor cursor = database.queryFinalized("SELECT online FROM chat_settings_v2 WHERE uid = " + info.id); + int inviter = 0; + SQLiteCursor cursor = database.queryFinalized("SELECT online, inviter FROM chat_settings_v2 WHERE uid = " + info.id); if (cursor.next()) { currentOnline = cursor.intValue(0); + info.inviterId = cursor.intValue(1); } cursor.dispose(); if (ifExist && currentOnline == -1) { @@ -4873,13 +4887,14 @@ public class MessagesStorage extends BaseController { info.online_count = currentOnline; } - SQLitePreparedStatement state = database.executeFast("REPLACE INTO chat_settings_v2 VALUES(?, ?, ?, ?)"); + SQLitePreparedStatement state = database.executeFast("REPLACE INTO chat_settings_v2 VALUES(?, ?, ?, ?, ?)"); NativeByteBuffer data = new NativeByteBuffer(info.getObjectSize()); info.serializeToStream(data); state.bindInteger(1, info.id); state.bindByteBuffer(2, data); state.bindInteger(3, info.pinned_msg_id); state.bindInteger(4, info.online_count); + state.bindInteger(5, info.inviterId); state.step(); 
state.dispose(); data.reuse(); @@ -5065,7 +5080,7 @@ public class MessagesStorage extends BaseController { public void updateChatInfo(final int chat_id, final int user_id, final int what, final int invited_id, final int version) { storageQueue.postRunnable(() -> { try { - SQLiteCursor cursor = database.queryFinalized("SELECT info, pinned, online FROM chat_settings_v2 WHERE uid = " + chat_id); + SQLiteCursor cursor = database.queryFinalized("SELECT info, pinned, online, inviter FROM chat_settings_v2 WHERE uid = " + chat_id); TLRPC.ChatFull info = null; ArrayList loadedUsers = new ArrayList<>(); if (cursor.next()) { @@ -5075,6 +5090,7 @@ public class MessagesStorage extends BaseController { data.reuse(); info.pinned_msg_id = cursor.intValue(1); info.online_count = cursor.intValue(2); + info.inviterId = cursor.intValue(3); } } cursor.dispose(); @@ -5121,13 +5137,14 @@ public class MessagesStorage extends BaseController { final TLRPC.ChatFull finalInfo = info; AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.chatInfoDidLoad, finalInfo, 0, false)); - SQLitePreparedStatement state = database.executeFast("REPLACE INTO chat_settings_v2 VALUES(?, ?, ?, ?)"); + SQLitePreparedStatement state = database.executeFast("REPLACE INTO chat_settings_v2 VALUES(?, ?, ?, ?, ?)"); NativeByteBuffer data = new NativeByteBuffer(info.getObjectSize()); info.serializeToStream(data); state.bindInteger(1, chat_id); state.bindByteBuffer(2, data); state.bindInteger(3, info.pinned_msg_id); state.bindInteger(4, info.online_count); + state.bindInteger(5, info.inviterId); state.step(); state.dispose(); data.reuse(); @@ -5155,15 +5172,44 @@ public class MessagesStorage extends BaseController { } cursor.dispose(); result[0] = info instanceof TLRPC.TL_channelFull && info.migrated_from_chat_id != 0; - if (countDownLatch != null) { - countDownLatch.countDown(); - } + countDownLatch.countDown(); } catch (Exception e) { FileLog.e(e); } finally { - if 
(countDownLatch != null) { - countDownLatch.countDown(); + countDownLatch.countDown(); + } + }); + try { + countDownLatch.await(); + } catch (Exception e) { + FileLog.e(e); + } + return result[0]; + } + + public boolean hasInviteMeMessage(final int chat_id) { + final CountDownLatch countDownLatch = new CountDownLatch(1); + final boolean[] result = new boolean[1]; + storageQueue.postRunnable(() -> { + try { + int selfId = getUserConfig().getClientUserId(); + SQLiteCursor cursor = database.queryFinalized("SELECT data FROM messages WHERE uid = " + -chat_id + " AND out = 0 ORDER BY mid DESC LIMIT 100"); + while (cursor.next()) { + NativeByteBuffer data = cursor.byteBufferValue(0); + if (data != null) { + TLRPC.Message message = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); + data.reuse(); + if (message.action instanceof TLRPC.TL_messageActionChatAddUser && message.action.users.contains(selfId)) { + result[0] = true; + break; + } + } } + cursor.dispose(); + } catch (Exception e) { + FileLog.e(e); + } finally { + countDownLatch.countDown(); } }); try { @@ -5184,7 +5230,7 @@ public class MessagesStorage extends BaseController { boolean pinnedEndReached = false; try { - SQLiteCursor cursor = database.queryFinalized("SELECT info, pinned, online FROM chat_settings_v2 WHERE uid = " + chatId); + SQLiteCursor cursor = database.queryFinalized("SELECT info, pinned, online, inviter FROM chat_settings_v2 WHERE uid = " + chatId); if (cursor.next()) { NativeByteBuffer data = cursor.byteBufferValue(0); if (data != null) { @@ -5192,6 +5238,7 @@ public class MessagesStorage extends BaseController { data.reuse(); info.pinned_msg_id = cursor.intValue(1); info.online_count = cursor.intValue(2); + info.inviterId = cursor.intValue(3); } } cursor.dispose(); @@ -5255,6 +5302,9 @@ public class MessagesStorage extends BaseController { getUsersInternal(usersToLoad.toString(), loadedUsers); } } + if (info != null && info.inviterId != 0) { + getUsersInternal("" + info.inviterId, 
loadedUsers); + } cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT mid FROM chat_pinned_v2 WHERE uid = %d ORDER BY mid DESC", -chatId)); while (cursor.next()) { @@ -5942,7 +5992,7 @@ public class MessagesStorage extends BaseController { }); } - public Runnable getMessagesInternal(long dialogId, long mergeDialogId, int count, int max_id, int offset_date, int minDate, int classGuid, int load_type, boolean isChannel, boolean scheduled, int replyMessageId, int loadIndex) { + public Runnable getMessagesInternal(long dialogId, long mergeDialogId, int count, int max_id, int offset_date, int minDate, int classGuid, int load_type, boolean isChannel, boolean scheduled, int replyMessageId, int loadIndex, boolean processMessages) { TLRPC.TL_messages_messages res = new TLRPC.TL_messages_messages(); int currentUserId = getUserConfig().clientUserId; int count_unread = 0; @@ -5966,6 +6016,7 @@ public class MessagesStorage extends BaseController { } boolean isEnd = false; int num = dialogId == 777000 ? 
10 : 1; + int messagesCount = 0; try { ArrayList usersToLoad = new ArrayList<>(); ArrayList chatsToLoad = new ArrayList<>(); @@ -6012,9 +6063,6 @@ public class MessagesStorage extends BaseController { message.replyMessage.readAttachPath(data, currentUserId); data.reuse(); if (message.replyMessage != null) { - if (MessageObject.isMegagroup(message)) { - message.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } addUsersAndChatsFromMessage(message.replyMessage, usersToLoad, chatsToLoad); } } @@ -6421,6 +6469,10 @@ public class MessagesStorage extends BaseController { int maxId = Integer.MIN_VALUE; if (cursor != null) { while (cursor.next()) { + messagesCount++; + if (!processMessages) { + continue; + } NativeByteBuffer data = cursor.byteBufferValue(1); if (data != null) { TLRPC.Message message = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); @@ -6477,9 +6529,6 @@ public class MessagesStorage extends BaseController { message.replyMessage.readAttachPath(data, currentUserId); data.reuse(); if (message.replyMessage != null) { - if (MessageObject.isMegagroup(message)) { - message.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } addUsersAndChatsFromMessage(message.replyMessage, usersToLoad, chatsToLoad); } } @@ -6607,9 +6656,6 @@ public class MessagesStorage extends BaseController { for (int a = 0; a < arrayList.size(); a++) { TLRPC.Message object = arrayList.get(a); object.replyMessage = message; - if (MessageObject.isMegagroup(object)) { - object.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } } } else { @@ -6623,9 +6669,6 @@ public class MessagesStorage extends BaseController { if (object.reply_to != null) { object.reply_to.reply_to_msg_id = message.id; } - if (MessageObject.isMegagroup(object)) { - object.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } } } @@ -6672,11 +6715,12 @@ public class MessagesStorage extends BaseController { runnable.run(); }; } else {*/ - return () -> 
getMessagesController().processLoadedMessages(res, dialogId, mergeDialogId, countQueryFinal, maxIdOverrideFinal, offset_date, true, classGuid, minUnreadIdFinal, lastMessageIdFinal, countUnreadFinal, maxUnreadDateFinal, load_type, isChannel, isEndFinal, scheduled ? 1 : 0, replyMessageId, loadIndex, queryFromServerFinal, mentionsUnreadFinal); + int finalMessagesCount = scheduled ? res.messages.size() : messagesCount; + return () -> getMessagesController().processLoadedMessages(res, finalMessagesCount, dialogId, mergeDialogId, countQueryFinal, maxIdOverrideFinal, offset_date, true, classGuid, minUnreadIdFinal, lastMessageIdFinal, countUnreadFinal, maxUnreadDateFinal, load_type, isChannel, isEndFinal, scheduled ? 1 : 0, replyMessageId, loadIndex, queryFromServerFinal, mentionsUnreadFinal, processMessages); //} } - public void getMessages(long dialogId, long mergeDialogId, boolean loadInfo, int count, int max_id, int offset_date, int minDate, int classGuid, int load_type, boolean isChannel, boolean scheduled, int replyMessageId, int loadIndex) { + public void getMessages(long dialogId, long mergeDialogId, boolean loadInfo, int count, int max_id, int offset_date, int minDate, int classGuid, int load_type, boolean isChannel, boolean scheduled, int replyMessageId, int loadIndex, boolean processMessages) { storageQueue.postRunnable(() -> { long mergeDialogIdFinal = mergeDialogId; int lowerId = (int) dialogId; @@ -6688,7 +6732,7 @@ public class MessagesStorage extends BaseController { } } }*/ - Utilities.stageQueue.postRunnable(getMessagesInternal(dialogId, mergeDialogIdFinal, count, max_id, offset_date, minDate, classGuid, load_type, isChannel, scheduled, replyMessageId, loadIndex)); + Utilities.stageQueue.postRunnable(getMessagesInternal(dialogId, mergeDialogIdFinal, count, max_id, offset_date, minDate, classGuid, load_type, isChannel, scheduled, replyMessageId, loadIndex, processMessages)); }); } @@ -6940,28 +6984,6 @@ public class MessagesStorage extends BaseController { 
}); } - public boolean isDialogHasMessages(final long did) { - final CountDownLatch countDownLatch = new CountDownLatch(1); - final boolean[] result = new boolean[1]; - storageQueue.postRunnable(() -> { - try { - SQLiteCursor cursor = database.queryFinalized(String.format(Locale.US, "SELECT mid FROM messages WHERE uid = %d LIMIT 1", did)); - result[0] = cursor.next(); - cursor.dispose(); - } catch (Exception e) { - FileLog.e(e); - } finally { - countDownLatch.countDown(); - } - }); - try { - countDownLatch.await(); - } catch (Exception e) { - FileLog.e(e); - } - return result[0]; - } - public boolean hasAuthMessage(final int date) { final CountDownLatch countDownLatch = new CountDownLatch(1); final boolean[] result = new boolean[1]; @@ -7229,6 +7251,8 @@ public class MessagesStorage extends BaseController { oldChat.broadcast = chat.broadcast; oldChat.verified = chat.verified; oldChat.megagroup = chat.megagroup; + oldChat.call_not_empty = chat.call_not_empty; + oldChat.call_active = chat.call_active; if (chat.default_banned_rights != null) { oldChat.default_banned_rights = chat.default_banned_rights; oldChat.flags |= 262144; @@ -7632,7 +7656,7 @@ public class MessagesStorage extends BaseController { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.removeAllMessagesFromDialog, did, true)); if (checkInvite) { if (newDialogType == 1) { - getMessagesController().checkChannelInviter(channel_id); + getMessagesController().checkChatInviter(channel_id, true); } else { getMessagesController().generateJoinMessage(channel_id, false); } @@ -8333,7 +8357,7 @@ public class MessagesStorage extends BaseController { last_mid = cursor.intValue(2); old_mentions_count = Math.max(0, cursor.intValue(3)); } else if (channelId != 0) { - getMessagesController().checkChannelInviter(channelId); + getMessagesController().checkChatInviter(channelId, true); } cursor.dispose(); @@ -9610,12 +9634,8 @@ public class MessagesStorage extends 
BaseController { storageQueue.postRunnable(() -> { try { long messageId = message.id; - int channelId = 0; - if (channelId == 0) { - channelId = message.peer_id.channel_id; - } if (message.peer_id.channel_id != 0) { - messageId |= ((long) channelId) << 32; + messageId |= ((long) message.peer_id.channel_id) << 32; } SQLiteCursor cursor = null; @@ -9725,6 +9745,7 @@ public class MessagesStorage extends BaseController { public void putMessages(final TLRPC.messages_Messages messages, final long dialog_id, final int load_type, final int max_id, final boolean createDialog, final boolean scheduled) { storageQueue.postRunnable(() -> { try { + FileLog.d("put messages to " + dialog_id); if (scheduled) { database.executeFast(String.format(Locale.US, "DELETE FROM scheduled_messages WHERE uid = %d AND mid > 0", dialog_id)).stepThis().dispose(); SQLitePreparedStatement state_messages = database.executeFast("REPLACE INTO scheduled_messages VALUES(?, ?, ?, ?, ?, ?, NULL)"); @@ -9825,7 +9846,7 @@ public class MessagesStorage extends BaseController { oldMessage.readAttachPath(data, getUserConfig().clientUserId); data.reuse(); int send_state = cursor.intValue(5); - if (oldMessage != null && send_state != 3) { + if (send_state != 3) { message.attachPath = oldMessage.attachPath; message.ttl = cursor.intValue(2); } @@ -10225,11 +10246,11 @@ public class MessagesStorage extends BaseController { NativeByteBuffer data = cursor.byteBufferValue(18); if (data != null) { dialogFolder.folder = TLRPC.TL_folder.TLdeserialize(data, data.readInt32(false), false); + data.reuse(); } else { dialogFolder.folder = new TLRPC.TL_folder(); dialogFolder.folder.id = (int) dialogId; } - data.reuse(); } dialog = dialogFolder; if (a == 0) { @@ -10270,9 +10291,9 @@ public class MessagesStorage extends BaseController { NativeByteBuffer data = cursor.byteBufferValue(4); if (data != null) { TLRPC.Message message = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); - message.readAttachPath(data, 
getUserConfig().clientUserId); - data.reuse(); if (message != null) { + message.readAttachPath(data, getUserConfig().clientUserId); + data.reuse(); MessageObject.setUnreadFlags(message, cursor.intValue(5)); message.id = cursor.intValue(6); int date = cursor.intValue(9); @@ -10291,15 +10312,12 @@ public class MessagesStorage extends BaseController { message.action instanceof TLRPC.TL_messageActionPaymentSent || message.action instanceof TLRPC.TL_messageActionGameScore)) { if (!cursor.isNull(13)) { - data = cursor.byteBufferValue(13); - if (data != null) { - message.replyMessage = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); - message.replyMessage.readAttachPath(data, getUserConfig().clientUserId); - data.reuse(); + NativeByteBuffer data2 = cursor.byteBufferValue(13); + if (data2 != null) { + message.replyMessage = TLRPC.Message.TLdeserialize(data2, data2.readInt32(false), false); + message.replyMessage.readAttachPath(data2, getUserConfig().clientUserId); + data2.reuse(); if (message.replyMessage != null) { - if (MessageObject.isMegagroup(message)) { - message.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } addUsersAndChatsFromMessage(message.replyMessage, usersToLoad, chatsToLoad); } } @@ -10322,6 +10340,8 @@ public class MessagesStorage extends BaseController { } catch (Exception e) { FileLog.e(e); } + } else { + data.reuse(); } } @@ -10364,9 +10384,6 @@ public class MessagesStorage extends BaseController { if (owner != null) { owner.replyMessage = message; message.dialog_id = owner.dialog_id; - if (MessageObject.isMegagroup(owner)) { - owner.replyMessage.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } } } @@ -10849,6 +10866,23 @@ public class MessagesStorage extends BaseController { }); } + public void setDialogsPinned(ArrayList dids, ArrayList pinned) { + storageQueue.postRunnable(() -> { + try { + SQLitePreparedStatement state = database.executeFast("UPDATE dialogs SET pinned = ? 
WHERE did = ?"); + for (int a = 0, N = dids.size(); a < N; a++) { + state.requery(); + state.bindInteger(1, pinned.get(a)); + state.bindLong(2, dids.get(a)); + state.step(); + } + state.dispose(); + } catch (Exception e) { + FileLog.e(e); + } + }); + } + public void putDialogs(final TLRPC.messages_Dialogs dialogs, final int check) { if (dialogs.dialogs.isEmpty()) { return; @@ -10932,9 +10966,7 @@ public class MessagesStorage extends BaseController { } } try { - if (countDownLatch != null) { - countDownLatch.countDown(); - } + countDownLatch.countDown(); } catch (Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java index 6b452af77..57f6ee050 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java @@ -22,7 +22,7 @@ import java.util.zip.ZipFile; public class NativeLoader { - private final static int LIB_VERSION = 34; + private final static int LIB_VERSION = 35; private final static String LIB_NAME = "tmessages." 
+ LIB_VERSION; private final static String LIB_SO_NAME = "lib" + LIB_NAME + ".so"; private final static String LOCALE_LIB_SO_NAME = "lib" + LIB_NAME + "loc.so"; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java index 605a45a60..66c73eb36 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java @@ -30,6 +30,7 @@ public class NotificationCenter { public static final int commentsRead = totalEvents++; public static final int changeRepliesCounter = totalEvents++; public static final int messagesDidLoad = totalEvents++; + public static final int messagesDidLoadWithoutProcess = totalEvents++; public static final int loadingMessagesFailed = totalEvents++; public static final int messageReceivedByAck = totalEvents++; public static final int messageReceivedByServer = totalEvents++; @@ -108,6 +109,8 @@ public class NotificationCenter { public static final int didVerifyMessagesStickers = totalEvents++; public static final int scheduledMessagesUpdated = totalEvents++; public static final int newSuggestionsAvailable = totalEvents++; + public static final int didLoadChatInviter = totalEvents++; + public static final int didLoadChatAdmins = totalEvents++; public static final int walletPendingTransactionsChanged = totalEvents++; public static final int walletSyncProgressChanged = totalEvents++; @@ -146,8 +149,11 @@ public class NotificationCenter { public static final int audioRouteChanged = totalEvents++; public static final int didStartedCall = totalEvents++; + public static final int groupCallUpdated = totalEvents++; + public static final int groupCallTypingsUpdated = totalEvents++; public static final int didEndCall = totalEvents++; public static final int closeInCallActivity = totalEvents++; + public static final int groupCallVisibilityChanged = 
totalEvents++; public static final int appDidLogout = totalEvents++; @@ -197,7 +203,10 @@ public class NotificationCenter { public static final int closeSearchByActiveAction = totalEvents++; public static final int messagePlayingSpeedChanged = totalEvents++; public static final int screenStateChanged = totalEvents++; + public static final int didDatabaseCleared = totalEvents++; public static final int voipServiceCreated = totalEvents++; + public static final int webRtcMicAmplitudeEvent = totalEvents++; + public static final int webRtcSpeakerAmplitudeEvent = totalEvents++; private SparseArray> observers = new SparseArray<>(); private SparseArray> removeAfterBroadcast = new SparseArray<>(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java index 59f530a88..8d9d04f7b 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java @@ -15,6 +15,7 @@ import android.app.ActivityManager; import android.app.AlarmManager; import android.app.Notification; import android.app.NotificationChannel; +import android.app.NotificationChannelGroup; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; @@ -33,6 +34,8 @@ import android.graphics.PorterDuffXfermode; import android.graphics.drawable.BitmapDrawable; import android.media.AudioAttributes; import android.media.AudioManager; +import android.media.Ringtone; +import android.media.RingtoneManager; import android.media.SoundPool; import android.net.Uri; import android.os.Build; @@ -64,6 +67,7 @@ import org.telegram.ui.PopupNotificationActivity; import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.Calendar; import java.util.HashSet; import java.util.List; @@ -100,6 +104,8 @@ public class 
NotificationsController extends BaseController { private int lastBadgeCount = -1; private String launcherClassName; + private Boolean groupsCreated; + public static long globalSecretChatId = -(1L << 32); public boolean showBadgeNumber; @@ -222,7 +228,7 @@ public class NotificationsController extends BaseController { preferences.edit().putString("OtherKey", OTHER_NOTIFICATIONS_CHANNEL).commit(); } if (notificationChannel == null) { - notificationChannel = new NotificationChannel(OTHER_NOTIFICATIONS_CHANNEL, "Other", NotificationManager.IMPORTANCE_DEFAULT); + notificationChannel = new NotificationChannel(OTHER_NOTIFICATIONS_CHANNEL, "Internal notifications", NotificationManager.IMPORTANCE_DEFAULT); notificationChannel.enableLights(false); notificationChannel.enableVibration(false); notificationChannel.setSound(null, null); @@ -267,6 +273,11 @@ public class NotificationsController extends BaseController { if (Build.VERSION.SDK_INT >= 26) { try { + systemNotificationManager.deleteNotificationChannelGroup("channels" + currentAccount); + systemNotificationManager.deleteNotificationChannelGroup("groups" + currentAccount); + systemNotificationManager.deleteNotificationChannelGroup("private" + currentAccount); + systemNotificationManager.deleteNotificationChannelGroup("other" + currentAccount); + String keyStart = currentAccount + "channel"; List list = systemNotificationManager.getNotificationChannels(); int count = list.size(); @@ -323,7 +334,7 @@ public class NotificationsController extends BaseController { MessageObject messageObject = pushMessages.get(a); long dialog_id = messageObject.getDialogId(); if (messageObject.messageOwner.mentioned && messageObject.messageOwner.action instanceof TLRPC.TL_messageActionPinMessage || - (int) dialog_id == 0 || messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isMegagroup()) { + (int) dialog_id == 0 || messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isSupergroup()) { continue; } return true; 
@@ -338,7 +349,7 @@ public class NotificationsController extends BaseController { MessageObject messageObject = pushMessages.get(a); long dialog_id = messageObject.getDialogId(); if (messageObject.messageOwner.mentioned && messageObject.messageOwner.action instanceof TLRPC.TL_messageActionPinMessage || - (int) dialog_id == 0 || messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isMegagroup()) { + (int) dialog_id == 0 || messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isSupergroup()) { continue; } popupArray.add(0, messageObject); @@ -598,7 +609,7 @@ public class NotificationsController extends BaseController { popup = 0; } } - if (popup != 0 && messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isMegagroup()) { + if (popup != 0 && messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isSupergroup()) { popup = 0; } if (popup != 0) { @@ -1224,7 +1235,7 @@ public class NotificationsController extends BaseController { return LocaleController.getString("Message", R.string.Message); } } else if (chat_id != 0) { - if (messageObject.messageOwner.peer_id.channel_id == 0 || messageObject.isMegagroup()) { + if (messageObject.messageOwner.peer_id.channel_id == 0 || messageObject.isSupergroup()) { userName[0] = messageObject.localUserName; } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.O_MR1) { userName[0] = messageObject.localName; @@ -1233,7 +1244,7 @@ public class NotificationsController extends BaseController { if (preview != null) { preview[0] = false; } - if (!messageObject.isMegagroup() && messageObject.messageOwner.peer_id.channel_id != 0) { + if (messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isSupergroup()) { return LocaleController.formatString("ChannelMessageNoText", R.string.ChannelMessageNoText, messageObject.localName); } else { return LocaleController.formatString("NotificationMessageGroupNoText", R.string.NotificationMessageGroupNoText, 
messageObject.localUserName, messageObject.localName); @@ -1380,6 +1391,37 @@ public class NotificationsController extends BaseController { } return LocaleController.formatString("NotificationGroupAddMember", R.string.NotificationGroupAddMember, name, chat.title, names.toString()); } + } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionGroupCall) { + return LocaleController.formatString("NotificationGroupCreatedCall", R.string.NotificationGroupCreatedCall, name, chat.title); + } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionInviteToGroupCall) { + int singleUserId = messageObject.messageOwner.action.user_id; + if (singleUserId == 0 && messageObject.messageOwner.action.users.size() == 1) { + singleUserId = messageObject.messageOwner.action.users.get(0); + } + if (singleUserId != 0) { + if (singleUserId == selfUsedId) { + return LocaleController.formatString("NotificationGroupInvitedYouToCall", R.string.NotificationGroupInvitedYouToCall, name, chat.title); + } else { + TLRPC.User u2 = getMessagesController().getUser(singleUserId); + if (u2 == null) { + return null; + } + return LocaleController.formatString("NotificationGroupInvitedToCall", R.string.NotificationGroupInvitedToCall, name, chat.title, UserObject.getUserName(u2)); + } + } else { + StringBuilder names = new StringBuilder(); + for (int a = 0; a < messageObject.messageOwner.action.users.size(); a++) { + TLRPC.User user = getMessagesController().getUser(messageObject.messageOwner.action.users.get(a)); + if (user != null) { + String name2 = UserObject.getUserName(user); + if (names.length() != 0) { + names.append(", "); + } + names.append(name2); + } + } + return LocaleController.formatString("NotificationGroupInvitedToCall", R.string.NotificationGroupInvitedToCall, name, chat.title, names.toString()); + } } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionChatJoinedByLink) { return 
LocaleController.formatString("NotificationInvitedToGroupByLink", R.string.NotificationInvitedToGroupByLink, name, chat.title); } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionChatEditTitle) { @@ -1744,7 +1786,7 @@ public class NotificationsController extends BaseController { if (preview != null) { preview[0] = false; } - if (!messageObject.isMegagroup() && messageObject.messageOwner.peer_id.channel_id != 0) { + if (messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isSupergroup()) { return LocaleController.formatString("ChannelMessageNoText", R.string.ChannelMessageNoText, messageObject.localName); } else { return LocaleController.formatString("NotificationMessageGroupNoText", R.string.NotificationMessageGroupNoText, messageObject.localUserName, messageObject.localName); @@ -1968,6 +2010,37 @@ public class NotificationsController extends BaseController { } msg = LocaleController.formatString("NotificationGroupAddMember", R.string.NotificationGroupAddMember, name, chat.title, names.toString()); } + } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionGroupCall) { + msg = LocaleController.formatString("NotificationGroupCreatedCall", R.string.NotificationGroupCreatedCall, name, chat.title); + } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionInviteToGroupCall) { + int singleUserId = messageObject.messageOwner.action.user_id; + if (singleUserId == 0 && messageObject.messageOwner.action.users.size() == 1) { + singleUserId = messageObject.messageOwner.action.users.get(0); + } + if (singleUserId != 0) { + if (singleUserId == selfUsedId) { + msg = LocaleController.formatString("NotificationGroupInvitedYouToCall", R.string.NotificationGroupInvitedYouToCall, name, chat.title); + } else { + TLRPC.User u2 = getMessagesController().getUser(singleUserId); + if (u2 == null) { + return null; + } + msg = LocaleController.formatString("NotificationGroupInvitedToCall", 
R.string.NotificationGroupInvitedToCall, name, chat.title, UserObject.getUserName(u2)); + } + } else { + StringBuilder names = new StringBuilder(); + for (int a = 0; a < messageObject.messageOwner.action.users.size(); a++) { + TLRPC.User user = getMessagesController().getUser(messageObject.messageOwner.action.users.get(a)); + if (user != null) { + String name2 = UserObject.getUserName(user); + if (names.length() != 0) { + names.append(", "); + } + names.append(name2); + } + } + msg = LocaleController.formatString("NotificationGroupInvitedToCall", R.string.NotificationGroupInvitedToCall, name, chat.title, names.toString()); + } } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionChatJoinedByLink) { msg = LocaleController.formatString("NotificationInvitedToGroupByLink", R.string.NotificationInvitedToGroupByLink, name, chat.title); } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionChatEditTitle) { @@ -2560,32 +2633,96 @@ public class NotificationsController extends BaseController { return true; } - @TargetApi(26) public void deleteNotificationChannel(long dialogId) { + deleteNotificationChannel(dialogId, -1); + } + + public void deleteNotificationChannel(long dialogId, int what) { + if (Build.VERSION.SDK_INT < 26) { + return; + } notificationsQueue.postRunnable(() -> { - if (Build.VERSION.SDK_INT < 26) { - return; - } try { SharedPreferences preferences = getAccountInstance().getNotificationsSettings(); - String key = "org.telegram.key" + dialogId; - String channelId = preferences.getString(key, null); - if (channelId != null) { - preferences.edit().remove(key).remove(key + "_s").commit(); - systemNotificationManager.deleteNotificationChannel(channelId); + SharedPreferences.Editor editor = preferences.edit(); + if (what == 0 || what == -1) { + String key = "org.telegram.key" + dialogId; + String channelId = preferences.getString(key, null); + if (channelId != null) { + editor.remove(key).remove(key + "_s"); + 
systemNotificationManager.deleteNotificationChannel(channelId); + } } + if (what == 1 || what == -1) { + String key = "org.telegram.keyia" + dialogId; + String channelId = preferences.getString(key, null); + if (channelId != null) { + editor.remove(key).remove(key + "_s"); + systemNotificationManager.deleteNotificationChannel(channelId); + } + } + editor.commit(); } catch (Exception e) { FileLog.e(e); } }); } - @TargetApi(26) - public void deleteAllNotificationChannels() { + public void deleteNotificationChannelGlobal(int type) { + deleteNotificationChannelGlobal(type, -1); + } + + public void deleteNotificationChannelGlobal(int type, int what) { + if (Build.VERSION.SDK_INT < 26) { + return; + } notificationsQueue.postRunnable(() -> { - if (Build.VERSION.SDK_INT < 26) { - return; + try { + SharedPreferences preferences = getAccountInstance().getNotificationsSettings(); + SharedPreferences.Editor editor = preferences.edit(); + if (what == 0 || what == -1) { + String key; + if (type == TYPE_CHANNEL) { + key = "channels"; + } else if (type == TYPE_GROUP) { + key = "groups"; + } else { + key = "private"; + } + String channelId = preferences.getString(key, null); + if (channelId != null) { + editor.remove(key).remove(key + "_s"); + systemNotificationManager.deleteNotificationChannel(channelId); + } + } + + if (what == 1 || what == -1) { + String key; + if (type == TYPE_CHANNEL) { + key = "channels_ia"; + } else if (type == TYPE_GROUP) { + key = "groups_ia"; + } else { + key = "private_ia"; + } + String channelId = preferences.getString(key, null); + if (channelId != null) { + editor.remove(key).remove(key + "_s"); + systemNotificationManager.deleteNotificationChannel(channelId); + } + } + editor.commit(); + } catch (Exception e) { + FileLog.e(e); } + }); + } + + public void deleteAllNotificationChannels() { + if (Build.VERSION.SDK_INT < 26) { + return; + } + notificationsQueue.postRunnable(() -> { try { SharedPreferences preferences = 
getAccountInstance().getNotificationsSettings(); Map values = preferences.getAll(); @@ -2664,131 +2801,304 @@ public class NotificationsController extends BaseController { } @TargetApi(26) - private String validateChannelId(long dialogId, String name, long[] vibrationPattern, int ledColor, Uri sound, int importance, long[] configVibrationPattern, Uri configSound, int configImportance) { + protected void ensureGroupsCreated() { SharedPreferences preferences = getAccountInstance().getNotificationsSettings(); - String key = "org.telegram.key" + dialogId; + if (groupsCreated == null) { + groupsCreated = preferences.getBoolean("groupsCreated", false); + } + if (!groupsCreated) { + try { + String keyStart = currentAccount + "channel"; + List list = systemNotificationManager.getNotificationChannels(); + int count = list.size(); + for (int a = 0; a < count; a++) { + NotificationChannel channel = list.get(a); + String id = channel.getId(); + if (id.startsWith(keyStart)) { + systemNotificationManager.deleteNotificationChannel(id); + } + } + } catch (Exception e) { + FileLog.e(e); + } + TLRPC.User user = getMessagesController().getUser(getUserConfig().getClientUserId()); + if (user == null) { + getUserConfig().getCurrentUser(); + } + String userName; + if (user != null) { + userName = " (" + ContactsController.formatName(user.first_name, user.last_name) + ")"; + } else { + userName = ""; + } + systemNotificationManager.createNotificationChannelGroups(Arrays.asList( + new NotificationChannelGroup("channels" + currentAccount, LocaleController.getString("NotificationsChannels", R.string.NotificationsChannels) + userName), + new NotificationChannelGroup("groups" + currentAccount, LocaleController.getString("NotificationsGroups", R.string.NotificationsGroups) + userName), + new NotificationChannelGroup("private" + currentAccount, LocaleController.getString("NotificationsPrivateChats", R.string.NotificationsPrivateChats) + userName), + new NotificationChannelGroup("other" + 
currentAccount, LocaleController.getString("NotificationsOther", R.string.NotificationsOther) + userName) + )); + preferences.edit().putBoolean("groupsCreated", true).commit(); + groupsCreated = true; + } + } + + @TargetApi(26) + private String validateChannelId(long dialogId, String name, long[] vibrationPattern, int ledColor, Uri sound, int importance, boolean isDefault, boolean isInApp, boolean isSilent, int type) { + ensureGroupsCreated(); + + SharedPreferences preferences = getAccountInstance().getNotificationsSettings(); + + String key; + String groupId; + if (isSilent) { + groupId = "other" + currentAccount; + } else { + if (type == TYPE_CHANNEL) { + groupId = "channels" + currentAccount; + } else if (type == TYPE_GROUP) { + groupId = "groups" + currentAccount; + } else { + groupId = "private" + currentAccount; + } + } + + boolean secretChat = !isDefault && (int) dialogId == 0; + + if (isSilent) { + name = LocaleController.getString("NotificationsSilent", R.string.NotificationsSilent); + key = "silent"; + } else if (isDefault) { + name = isInApp ? LocaleController.getString("NotificationsInAppDefault", R.string.NotificationsInAppDefault) : LocaleController.getString("NotificationsDefault", R.string.NotificationsDefault); + if (type == TYPE_CHANNEL) { + key = isInApp ? "channels_ia" : "channels"; + } else if (type == TYPE_GROUP) { + key = isInApp ? "groups_ia" : "groups"; + } else { + key = isInApp ? "private_ia" : "private"; + } + } else { + if (isInApp) { + name = LocaleController.formatString("NotificationsChatInApp", R.string.NotificationsChatInApp, name); + } + key = (isInApp ? 
"org.telegram.keyia" : "org.telegram.key") + dialogId; + } String channelId = preferences.getString(key, null); String settings = preferences.getString(key + "_s", null); boolean edited = false; StringBuilder newSettings = new StringBuilder(); - String newSettingsHash; + String newSettingsHash = null; - /*NotificationChannel existingChannel = systemNotificationManager.getNotificationChannel(channelId); - if (existingChannel != null) { - int channelImportance = existingChannel.getImportance(); - Uri channelSound = existingChannel.getSound(); - long[] channelVibrationPattern = existingChannel.getVibrationPattern(); - int channelLedColor = existingChannel.getLightColor(); - if (channelVibrationPattern != null) { - for (int a = 0; a < channelVibrationPattern.length; a++) { - newSettings.append(channelVibrationPattern[a]); + if (!isSilent && channelId != null) { + NotificationChannel existingChannel = systemNotificationManager.getNotificationChannel(channelId); + if (existingChannel != null) { + int channelImportance = existingChannel.getImportance(); + Uri channelSound = existingChannel.getSound(); + long[] channelVibrationPattern = existingChannel.getVibrationPattern(); + int channelLedColor = existingChannel.getLightColor(); + if (channelVibrationPattern != null) { + for (int a = 0; a < channelVibrationPattern.length; a++) { + newSettings.append(channelVibrationPattern[a]); + } } - } - newSettings.append(channelLedColor); - if (channelSound != null) { - newSettings.append(channelSound.toString()); - } - newSettings.append(channelImportance); - newSettingsHash = Utilities.MD5(newSettings.toString()); - newSettings.setLength(0); - if (!settings.equals(newSettingsHash)) { - SharedPreferences.Editor editor = null; - if (channelImportance != configImportance) { - if (editor == null) { - editor = preferences.edit(); - } - int priority; - if (channelImportance == NotificationManager.IMPORTANCE_HIGH || channelImportance == NotificationManager.IMPORTANCE_MAX) { - priority = 
1; - } else if (channelImportance == NotificationManager.IMPORTANCE_MIN) { - priority = 4; - } else if (channelImportance == NotificationManager.IMPORTANCE_LOW) { - priority = 5; - } else { - priority = 0; - } - editor.putInt("priority_" + dialogId, priority); - if (configImportance == importance) { - importance = channelImportance; + newSettings.append(channelLedColor); + if (channelSound != null) { + newSettings.append(channelSound.toString()); + } + newSettings.append(channelImportance); + if (secretChat) { + newSettings.append("secret"); + } + newSettingsHash = Utilities.MD5(newSettings.toString()); + newSettings.setLength(0); + if (!settings.equals(newSettingsHash)) { + SharedPreferences.Editor editor = null; + if (channelImportance == NotificationManager.IMPORTANCE_NONE) { + if (isInApp) { + editor = preferences.edit(); + if (isDefault) { + editor.putInt(getGlobalNotificationsKey(type), Integer.MAX_VALUE); + } else { + editor.putInt("notify2_" + dialogId, 2); + } + } + edited = true; + } else if (channelImportance != importance) { + if (!isInApp) { + editor = preferences.edit(); + int priority; + if (channelImportance == NotificationManager.IMPORTANCE_HIGH || channelImportance == NotificationManager.IMPORTANCE_MAX) { + priority = 1; + } else if (channelImportance == NotificationManager.IMPORTANCE_MIN) { + priority = 4; + } else if (channelImportance == NotificationManager.IMPORTANCE_LOW) { + priority = 5; + } else { + priority = 0; + } + if (isDefault) { + editor.putInt(getGlobalNotificationsKey(type), 0).commit(); + if (type == TYPE_CHANNEL) { + editor.putInt("priority_channel", priority); + } else if (type == TYPE_GROUP) { + editor.putInt("priority_group", priority); + } else { + editor.putInt("priority_messages", priority); + } + } else { + editor.putInt("notify2_" + dialogId, 0); + editor.remove("notifyuntil_" + dialogId); + editor.putInt("priority_" + dialogId, priority); + } + } edited = true; } - } - if (configSound == null || channelSound != null || 
configSound != null && channelSound == null || !configSound.equals(channelSound)) { - if (editor == null) { - editor = preferences.edit(); - } - String newSound; - if (channelSound == null) { - newSound = "NoSound"; - editor.putString("sound_" + dialogId, "NoSound"); - } else { - newSound = channelSound.toString(); - Ringtone rng = RingtoneManager.getRingtone(ApplicationLoader.applicationContext, channelSound); - String ringtoneName = null; - if (rng != null) { - if (channelSound.equals(Settings.System.DEFAULT_RINGTONE_URI)) { - ringtoneName = LocaleController.getString("DefaultRingtone", R.string.DefaultRingtone); - } else { - ringtoneName = rng.getTitle(ApplicationLoader.applicationContext); + if (channelSound == null && sound != null || channelSound != null && (sound == null || !TextUtils.equals(channelSound.toString(), sound.toString()))) { + if (!isInApp) { + if (editor == null) { + editor = preferences.edit(); + } + String newSound; + if (channelSound == null) { + newSound = "NoSound"; + if (isDefault) { + if (type == TYPE_CHANNEL) { + editor.putString("ChannelSound", "NoSound"); + } else if (type == TYPE_GROUP) { + editor.putString("GroupSound", "NoSound"); + } else { + editor.putString("GlobalSound", "NoSound"); + } + } else { + editor.putString("sound_" + dialogId, "NoSound"); + } + } else { + newSound = channelSound.toString(); + Ringtone rng = RingtoneManager.getRingtone(ApplicationLoader.applicationContext, channelSound); + String ringtoneName = null; + if (rng != null) { + if (channelSound.equals(Settings.System.DEFAULT_RINGTONE_URI)) { + ringtoneName = LocaleController.getString("DefaultRingtone", R.string.DefaultRingtone); + } else { + ringtoneName = rng.getTitle(ApplicationLoader.applicationContext); + } + rng.stop(); + } + if (ringtoneName != null) { + if (isDefault) { + if (type == TYPE_CHANNEL) { + editor.putString("ChannelSound", ringtoneName); + } else if (type == TYPE_GROUP) { + editor.putString("GroupSound", ringtoneName); + } else { + 
editor.putString("GlobalSound", ringtoneName); + } + } else { + editor.putString("sound_" + dialogId, ringtoneName); + } + } + } + if (isDefault) { + if (type == TYPE_CHANNEL) { + editor.putString("ChannelSoundPath", newSound); + } else if (type == TYPE_GROUP) { + editor.putString("GroupSoundPath", newSound); + } else { + editor.putString("GlobalSoundPath", newSound); + } + } else { + editor.putString("sound_path_" + dialogId, newSound); } - rng.stop(); } - if (ringtoneName != null) { - editor.putString("sound_" + dialogId, ringtoneName); - } - } - editor.putString("sound_path_" + dialogId, newSound); - if (configSound == null && sound == null || configSound != null && sound != null || configSound.equals(sound)) { sound = channelSound; edited = true; } - } - boolean vibrate = existingChannel.shouldVibrate(); - if (isEmptyVibration(configVibrationPattern) != vibrate) { - if (editor == null) { - editor = preferences.edit(); + boolean vibrate = existingChannel.shouldVibrate(); + boolean hasVibration = !isEmptyVibration(vibrationPattern); + if (hasVibration != vibrate) { + if (!isInApp) { + if (editor == null) { + editor = preferences.edit(); + } + if (isDefault) { + if (type == TYPE_CHANNEL) { + editor.putInt("vibrate_channel", vibrate ? 0 : 2); + } else if (type == TYPE_GROUP) { + editor.putInt("vibrate_group", vibrate ? 0 : 2); + } else { + editor.putInt("vibrate_messages", vibrate ? 0 : 2); + } + } else { + editor.putInt("vibrate_" + dialogId, vibrate ? 
0 : 2); + } + } + vibrationPattern = new long[]{}; + edited = true; + } + if (channelLedColor != ledColor) { + if (!isInApp) { + if (editor == null) { + editor = preferences.edit(); + } + if (isDefault) { + if (type == TYPE_CHANNEL) { + editor.putInt("ChannelLed", channelLedColor); + } else if (type == TYPE_GROUP) { + editor.putInt("GroupLed", channelLedColor); + } else { + editor.putInt("MessagesLed", channelLedColor); + } + } else { + editor.putInt("color_" + dialogId, channelLedColor); + } + } + ledColor = channelLedColor; + edited = true; + } + if (editor != null) { + editor.commit(); } - editor.putInt("vibrate_" + dialogId, vibrate ? 0 : 2); - } - if (editor != null) { - editor.putBoolean("custom_" + dialogId, true); - editor.commit(); } } - }*/ - - boolean secretChat = (int) dialogId == 0; - for (int a = 0; a < vibrationPattern.length; a++) { - newSettings.append(vibrationPattern[a]); - } - newSettings.append(ledColor); - if (sound != null) { - newSettings.append(sound.toString()); - } - newSettings.append(importance); - if (secretChat) { - newSettings.append("secret"); } - newSettingsHash = Utilities.MD5(newSettings.toString()); - if (channelId != null && !settings.equals(newSettingsHash)) { - if (edited) { - preferences.edit().putString(key, channelId).putString(key + "_s", newSettingsHash).commit(); - } else { + if (edited && newSettingsHash != null) { + preferences.edit().putString(key, channelId).putString(key + "_s", newSettingsHash).commit(); + } else { + for (int a = 0; a < vibrationPattern.length; a++) { + newSettings.append(vibrationPattern[a]); + } + newSettings.append(ledColor); + if (sound != null) { + newSettings.append(sound.toString()); + } + newSettings.append(importance); + if (secretChat) { + newSettings.append("secret"); + } + newSettingsHash = Utilities.MD5(newSettings.toString()); + + if (channelId != null && !settings.equals(newSettingsHash)) { systemNotificationManager.deleteNotificationChannel(channelId); channelId = null; } } if 
(channelId == null) { - channelId = currentAccount + "channel" + dialogId + "_" + Utilities.random.nextLong(); + if (isDefault) { + channelId = currentAccount + "channel_" + key + "_" + Utilities.random.nextLong(); + } else { + channelId = currentAccount + "channel_" + dialogId + "_" + Utilities.random.nextLong(); + } NotificationChannel notificationChannel = new NotificationChannel(channelId, secretChat ? LocaleController.getString("SecretChatName", R.string.SecretChatName) : name, importance); + notificationChannel.setGroup(groupId); if (ledColor != 0) { notificationChannel.enableLights(true); notificationChannel.setLightColor(ledColor); + } else { + notificationChannel.enableLights(false); } if (!isEmptyVibration(vibrationPattern)) { notificationChannel.enableVibration(true); - if (vibrationPattern != null && vibrationPattern.length > 0) { + if (vibrationPattern.length > 0) { notificationChannel.setVibrationPattern(vibrationPattern); } } else { @@ -2850,10 +3160,10 @@ public class NotificationsController extends BaseController { TLRPC.FileLocation photoPath = null; boolean notifyDisabled = false; - int needVibrate = 0; - String choosenSoundPath; + int vibrate = 0; + String soundPath = null; int ledColor = 0xff0000ff; - int priority = 0; + int importance = 0; int notifyOverride = getNotifyOverride(preferences, override_dialog_id); boolean value; @@ -2899,131 +3209,101 @@ public class NotificationsController extends BaseController { String defaultPath = Settings.System.DEFAULT_NOTIFICATION_URI.getPath(); - boolean inAppSounds = preferences.getBoolean("EnableInAppSounds", true); - boolean inAppVibrate = preferences.getBoolean("EnableInAppVibrate", true); - boolean inAppPreview = preferences.getBoolean("EnableInAppPreview", true); - boolean inAppPriority = preferences.getBoolean("EnableInAppPriority", false); - boolean custom; - int vibrateOverride; - int priorityOverride; - if (custom = preferences.getBoolean("custom_" + dialog_id, false)) { - vibrateOverride = 
preferences.getInt("vibrate_" + dialog_id, 0); - priorityOverride = preferences.getInt("priority_" + dialog_id, 3); - choosenSoundPath = preferences.getString("sound_path_" + dialog_id, null); + boolean isDefault = true; + boolean isInApp = !ApplicationLoader.mainInterfacePaused; + int chatType = TYPE_PRIVATE; + + String customSoundPath; + int customVibrate; + int customImportance; + Integer customLedColor; + if (preferences.getBoolean("custom_" + dialog_id, false)) { + customVibrate = preferences.getInt("vibrate_" + dialog_id, 0); + customImportance = preferences.getInt("priority_" + dialog_id, 3); + customSoundPath = preferences.getString("sound_path_" + dialog_id, null); + if (preferences.contains("color_" + dialog_id)) { + customLedColor = preferences.getInt("color_" + dialog_id, 0); + } else { + customLedColor = null; + } } else { - vibrateOverride = 0; - priorityOverride = 3; - choosenSoundPath = null; + customVibrate = 0; + customImportance = 3; + customSoundPath = null; + customLedColor = null; } boolean vibrateOnlyIfSilent = false; if (chat_id != 0) { if (isChannel) { - if (choosenSoundPath != null && choosenSoundPath.equals(defaultPath)) { - choosenSoundPath = null; - } else if (choosenSoundPath == null) { - choosenSoundPath = preferences.getString("ChannelSoundPath", defaultPath); - } - needVibrate = preferences.getInt("vibrate_channel", 0); - priority = preferences.getInt("priority_channel", 1); + soundPath = preferences.getString("ChannelSoundPath", defaultPath); + vibrate = preferences.getInt("vibrate_channel", 0); + importance = preferences.getInt("priority_channel", 1); ledColor = preferences.getInt("ChannelLed", 0xff0000ff); + chatType = TYPE_CHANNEL; } else { - if (choosenSoundPath != null && choosenSoundPath.equals(defaultPath)) { - choosenSoundPath = null; - } else if (choosenSoundPath == null) { - choosenSoundPath = preferences.getString("GroupSoundPath", defaultPath); - } - needVibrate = preferences.getInt("vibrate_group", 0); - priority = 
preferences.getInt("priority_group", 1); + soundPath = preferences.getString("GroupSoundPath", defaultPath); + vibrate = preferences.getInt("vibrate_group", 0); + importance = preferences.getInt("priority_group", 1); ledColor = preferences.getInt("GroupLed", 0xff0000ff); + chatType = TYPE_GROUP; } } else if (user_id != 0) { - if (choosenSoundPath != null && choosenSoundPath.equals(defaultPath)) { - choosenSoundPath = null; - } else if (choosenSoundPath == null) { - choosenSoundPath = preferences.getString("GlobalSoundPath", defaultPath); - } - needVibrate = preferences.getInt("vibrate_messages", 0); - priority = preferences.getInt("priority_messages", 1); + soundPath = preferences.getString("GlobalSoundPath", defaultPath); + vibrate = preferences.getInt("vibrate_messages", 0); + importance = preferences.getInt("priority_messages", 1); ledColor = preferences.getInt("MessagesLed", 0xff0000ff); + chatType = TYPE_PRIVATE; } - if (custom) { - if (preferences.contains("color_" + dialog_id)) { - ledColor = preferences.getInt("color_" + dialog_id, 0); - } - } - - if (priorityOverride != 3) { - priority = priorityOverride; - } - - if (needVibrate == 4) { + if (vibrate == 4) { vibrateOnlyIfSilent = true; - needVibrate = 0; + vibrate = 0; } - if (needVibrate == 2 && (vibrateOverride == 1 || vibrateOverride == 3) || needVibrate != 2 && vibrateOverride == 2 || vibrateOverride != 0 && vibrateOverride != 4) { - needVibrate = vibrateOverride; + if (customSoundPath != null && !TextUtils.equals(soundPath, customSoundPath)) { + soundPath = customSoundPath; + isDefault = false; } - if (!ApplicationLoader.mainInterfacePaused) { - if (!inAppSounds) { - choosenSoundPath = null; + if (customImportance != 3 && importance != customImportance) { + importance = customImportance; + isDefault = false; + } + if (customLedColor != null && customLedColor != ledColor) { + ledColor = customLedColor; + isDefault = false; + } + if (customVibrate != 0 && customVibrate != 4 && customVibrate != vibrate) 
{ + vibrate = customVibrate; + isDefault = false; + } + if (isInApp) { + if (!preferences.getBoolean("EnableInAppSounds", true)) { + soundPath = null; } - if (!inAppVibrate) { - needVibrate = 2; + if (!preferences.getBoolean("EnableInAppVibrate", true)) { + vibrate = 2; } - if (!inAppPriority) { - priority = 0; - } else if (priority == 2) { - priority = 1; + if (!preferences.getBoolean("EnableInAppPriority", false)) { + importance = 0; + } else if (importance == 2) { + importance = 1; } } - if (vibrateOnlyIfSilent && needVibrate != 2) { + if (vibrateOnlyIfSilent && vibrate != 2) { try { int mode = audioManager.getRingerMode(); if (mode != AudioManager.RINGER_MODE_SILENT && mode != AudioManager.RINGER_MODE_VIBRATE) { - needVibrate = 2; + vibrate = 2; } } catch (Exception e) { FileLog.e(e); } } - Uri configSound = null; - long[] configVibrationPattern = null; - int configImportance = 0; - if (Build.VERSION.SDK_INT >= 26) { - if (needVibrate == 2) { - configVibrationPattern = new long[]{0, 0}; - } else if (needVibrate == 1) { - configVibrationPattern = new long[]{0, 100, 0, 100}; - } else if (needVibrate == 0 || needVibrate == 4) { - configVibrationPattern = new long[]{}; - } else if (needVibrate == 3) { - configVibrationPattern = new long[]{0, 1000}; - } - if (choosenSoundPath != null && !choosenSoundPath.equals("NoSound")) { - if (choosenSoundPath.equals(defaultPath)) { - configSound = Settings.System.DEFAULT_NOTIFICATION_URI; - } else { - configSound = Uri.parse(choosenSoundPath); - } - } - if (priority == 0) { - configImportance = NotificationManager.IMPORTANCE_DEFAULT; - } else if (priority == 1 || priority == 2) { - configImportance = NotificationManager.IMPORTANCE_HIGH; - } else if (priority == 4) { - configImportance = NotificationManager.IMPORTANCE_MIN; - } else if (priority == 5) { - configImportance = NotificationManager.IMPORTANCE_LOW; - } - } - if (notifyDisabled) { - needVibrate = 0; - priority = 0; + vibrate = 0; + importance = 0; ledColor = 0; - 
choosenSoundPath = null; + soundPath = null; } Intent intent = new Intent(ApplicationLoader.applicationContext, LaunchActivity.class); @@ -3117,7 +3397,6 @@ public class NotificationsController extends BaseController { .setColor(0xff11acfa); long[] vibrationPattern = null; - int importance = 0; Uri sound = null; mBuilder.setCategory(NotificationCompat.CATEGORY_MESSAGE); @@ -3211,64 +3490,65 @@ public class NotificationsController extends BaseController { } } + int configImportance = 0; if (!notifyAboutLast || silent == 1) { mBuilder.setPriority(NotificationCompat.PRIORITY_LOW); if (Build.VERSION.SDK_INT >= 26) { - importance = NotificationManager.IMPORTANCE_LOW; + configImportance = NotificationManager.IMPORTANCE_LOW; } } else { - if (priority == 0) { + if (importance == 0) { mBuilder.setPriority(NotificationCompat.PRIORITY_DEFAULT); if (Build.VERSION.SDK_INT >= 26) { - importance = NotificationManager.IMPORTANCE_DEFAULT; + configImportance = NotificationManager.IMPORTANCE_DEFAULT; } - } else if (priority == 1 || priority == 2) { + } else if (importance == 1 || importance == 2) { mBuilder.setPriority(NotificationCompat.PRIORITY_HIGH); if (Build.VERSION.SDK_INT >= 26) { - importance = NotificationManager.IMPORTANCE_HIGH; + configImportance = NotificationManager.IMPORTANCE_HIGH; } - } else if (priority == 4) { + } else if (importance == 4) { mBuilder.setPriority(NotificationCompat.PRIORITY_MIN); if (Build.VERSION.SDK_INT >= 26) { - importance = NotificationManager.IMPORTANCE_MIN; + configImportance = NotificationManager.IMPORTANCE_MIN; } - } else if (priority == 5) { + } else if (importance == 5) { mBuilder.setPriority(NotificationCompat.PRIORITY_LOW); if (Build.VERSION.SDK_INT >= 26) { - importance = NotificationManager.IMPORTANCE_LOW; + configImportance = NotificationManager.IMPORTANCE_LOW; } } } if (silent != 1 && !notifyDisabled) { - if (ApplicationLoader.mainInterfacePaused || inAppPreview) { + if (!isInApp || preferences.getBoolean("EnableInAppPreview", true)) 
{ if (lastMessage.length() > 100) { lastMessage = lastMessage.substring(0, 100).replace('\n', ' ').trim() + "..."; } mBuilder.setTicker(lastMessage); } if (!MediaController.getInstance().isRecordingAudio()) { - if (choosenSoundPath != null && !choosenSoundPath.equals("NoSound")) { + if (soundPath != null && !soundPath.equals("NoSound")) { if (Build.VERSION.SDK_INT >= 26) { - if (choosenSoundPath.equals(defaultPath)) { + if (soundPath.equals(defaultPath)) { sound = Settings.System.DEFAULT_NOTIFICATION_URI; } else { - sound = Uri.parse(choosenSoundPath); + sound = Uri.parse(soundPath); } } else { - if (choosenSoundPath.equals(defaultPath)) { + if (soundPath.equals(defaultPath)) { mBuilder.setSound(Settings.System.DEFAULT_NOTIFICATION_URI, AudioManager.STREAM_NOTIFICATION); } else { - if (Build.VERSION.SDK_INT >= 24 && choosenSoundPath.startsWith("file://") && !AndroidUtilities.isInternalUri(Uri.parse(choosenSoundPath))) { + if (Build.VERSION.SDK_INT >= 24 && soundPath.startsWith("file://") && !AndroidUtilities.isInternalUri(Uri.parse(soundPath))) { try { - Uri uri = FileProvider.getUriForFile(ApplicationLoader.applicationContext, BuildConfig.APPLICATION_ID + ".provider", new File(choosenSoundPath.replace("file://", ""))); + Uri uri = FileProvider.getUriForFile(ApplicationLoader.applicationContext, BuildConfig.APPLICATION_ID + ".provider", new File(soundPath.replace("file://", ""))); ApplicationLoader.applicationContext.grantUriPermission("com.android.systemui", uri, Intent.FLAG_GRANT_READ_URI_PERMISSION); mBuilder.setSound(uri, AudioManager.STREAM_NOTIFICATION); } catch (Exception e) { - mBuilder.setSound(Uri.parse(choosenSoundPath), AudioManager.STREAM_NOTIFICATION); + mBuilder.setSound(Uri.parse(soundPath), AudioManager.STREAM_NOTIFICATION); } } else { - mBuilder.setSound(Uri.parse(choosenSoundPath), AudioManager.STREAM_NOTIFICATION); + mBuilder.setSound(Uri.parse(soundPath), AudioManager.STREAM_NOTIFICATION); } } } @@ -3277,14 +3557,14 @@ public class 
NotificationsController extends BaseController { if (ledColor != 0) { mBuilder.setLights(ledColor, 1000, 1000); } - if (needVibrate == 2 || MediaController.getInstance().isRecordingAudio()) { + if (vibrate == 2 || MediaController.getInstance().isRecordingAudio()) { mBuilder.setVibrate(vibrationPattern = new long[]{0, 0}); - } else if (needVibrate == 1) { + } else if (vibrate == 1) { mBuilder.setVibrate(vibrationPattern = new long[]{0, 100, 0, 100}); - } else if (needVibrate == 0 || needVibrate == 4) { + } else if (vibrate == 0 || vibrate == 4) { mBuilder.setDefaults(NotificationCompat.DEFAULT_VIBRATE); vibrationPattern = new long[]{}; - } else if (needVibrate == 3) { + } else if (vibrate == 3) { mBuilder.setVibrate(vibrationPattern = new long[]{0, 1000}); } } else { @@ -3325,7 +3605,7 @@ public class NotificationsController extends BaseController { } } if (Build.VERSION.SDK_INT >= 26) { - mBuilder.setChannelId(validateChannelId(dialog_id, chatName, vibrationPattern, ledColor, sound, importance, configVibrationPattern, configSound, configImportance)); + mBuilder.setChannelId(validateChannelId(dialog_id, chatName, vibrationPattern, ledColor, sound, configImportance, isDefault, isInApp, notifyDisabled, chatType)); } showExtraNotifications(mBuilder, notifyAboutLast, detailText); scheduleNotificationRepeat(); @@ -3486,7 +3766,7 @@ public class NotificationsController extends BaseController { chat = getMessagesController().getChat(-lowerId); if (chat == null) { if (lastMessageObject.isFcmMessage()) { - isSupergroup = lastMessageObject.isMegagroup(); + isSupergroup = lastMessageObject.isSupergroup(); name = lastMessageObject.localName; isChannel = lastMessageObject.localChannel; } else { @@ -4254,9 +4534,10 @@ public class NotificationsController extends BaseController { getAccountInstance().getNotificationsSettings().edit().putInt(getGlobalNotificationsKey(type), time).commit(); updateServerNotificationsSettings(type); 
getMessagesStorage().updateMutedDialogsFiltersCounters(); + deleteNotificationChannelGlobal(type); } - public String getGlobalNotificationsKey(int type) { + public static String getGlobalNotificationsKey(int type) { if (type == TYPE_GROUP) { return "EnableGroup2"; } else if (type == TYPE_PRIVATE) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsDisabledReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsDisabledReceiver.java new file mode 100644 index 000000000..c8e31ec00 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsDisabledReceiver.java @@ -0,0 +1,58 @@ +/* + * This is the source code of Telegram for Android v. 5.x.x. + * It is licensed under GNU GPL v. 2 or later. + * You should have received a copy of the license in this archive (see LICENSE). + * + * Copyright Nikolai Kudashov, 2013-2018. + */ + +package org.telegram.messenger; + +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.SharedPreferences; +import android.text.TextUtils; + +import static android.app.NotificationManager.EXTRA_BLOCKED_STATE; +import static android.app.NotificationManager.EXTRA_NOTIFICATION_CHANNEL_ID; + +public class NotificationsDisabledReceiver extends BroadcastReceiver { + + @Override + public void onReceive(Context context, Intent intent) { + String channelId = intent.getStringExtra(EXTRA_NOTIFICATION_CHANNEL_ID); + boolean state = intent.getBooleanExtra(EXTRA_BLOCKED_STATE, false); + if (TextUtils.isEmpty(channelId)) { + return; + } + String[] args = channelId.split("_"); + if (args.length < 3) { + return; + } + ApplicationLoader.postInitApplication(); + int account = Utilities.parseInt(args[0]); + if (account < 0 || account >= UserConfig.MAX_ACCOUNT_COUNT) { + return; + } + SharedPreferences preferences = AccountInstance.getInstance(account).getNotificationsSettings(); + SharedPreferences.Editor editor = 
preferences.edit(); + if (args[1].startsWith("channel")) { + editor.putInt(NotificationsController.getGlobalNotificationsKey(NotificationsController.TYPE_CHANNEL), state ? Integer.MAX_VALUE : 0); + } else if (args[1].startsWith("groups")) { + editor.putInt(NotificationsController.getGlobalNotificationsKey(NotificationsController.TYPE_GROUP), state ? Integer.MAX_VALUE : 0); + } else if (args[1].startsWith("private")) { + editor.putInt(NotificationsController.getGlobalNotificationsKey(NotificationsController.TYPE_PRIVATE), state ? Integer.MAX_VALUE : 0); + } else { + long dialogId = Utilities.parseLong(args[1]); + if (dialogId == 0) { + return; + } + editor.putInt("notify2_" + dialogId, state ? 2 : 0); + if (!state) { + editor.remove("notifyuntil_" + dialogId); + } + } + editor.commit(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java index 7f2282227..3e20381e9 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java @@ -10,7 +10,6 @@ package org.telegram.messenger; import android.content.ClipDescription; import android.content.Context; -import android.content.SharedPreferences; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.PorterDuff; @@ -22,7 +21,6 @@ import android.location.LocationManager; import android.media.MediaCodecInfo; import android.media.MediaMetadataRetriever; import android.media.MediaPlayer; -import android.media.ThumbnailUtils; import android.net.Uri; import android.os.Build; import android.os.Bundle; @@ -32,7 +30,6 @@ import android.text.TextUtils; import android.util.Base64; import android.util.LongSparseArray; import android.util.SparseArray; -import android.util.SparseIntArray; import android.util.TypedValue; import android.view.Gravity; import 
android.webkit.MimeTypeMap; @@ -57,13 +54,11 @@ import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Cells.ChatMessageCell; import org.telegram.ui.ChatActivity; -import org.telegram.ui.ChatRightsEditActivity; import org.telegram.ui.Components.AlertsCreator; import org.telegram.ui.Components.AnimatedFileDrawable; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.Point; import org.telegram.ui.PaymentFormActivity; -import org.telegram.ui.TooManyCommunitiesActivity; import org.telegram.ui.TwoStepVerificationActivity; import org.telegram.ui.TwoStepVerificationSetupActivity; @@ -808,20 +803,30 @@ public class SendMessagesHelper extends BaseController implements NotificationCe private void revertEditingMessageObject(MessageObject object) { object.cancelEditing = true; object.messageOwner.media = object.previousMedia; - object.messageOwner.message = object.previousCaption; - object.messageOwner.entities = object.previousCaptionEntities; + object.messageOwner.message = object.previousMessage; + object.messageOwner.entities = object.previousMessageEntities; object.messageOwner.attachPath = object.previousAttachPath; object.messageOwner.send_state = MessageObject.MESSAGE_SEND_STATE_SENT; + if (object.messageOwner.entities != null) { + object.messageOwner.flags |= 128; + } else { + object.messageOwner.flags &=~ 128; + } + object.previousMedia = null; - object.previousCaption = null; - object.previousCaptionEntities = null; + object.previousMessage = null; + object.previousMessageEntities = null; object.previousAttachPath = null; object.videoEditedInfo = null; object.type = -1; object.setType(); object.caption = null; - object.generateCaption(); + if (object.type != 0) { + object.generateCaption(); + } else { + object.generateLayout(null); + } ArrayList arr = new ArrayList<>(); arr.add(object.messageOwner); @@ -952,7 +957,7 @@ public class SendMessagesHelper extends BaseController 
implements NotificationCe public boolean retrySendMessage(MessageObject messageObject, boolean unsent) { if (messageObject.getId() >= 0) { if (messageObject.isEditing()) { - editMessageMedia(messageObject, null, null, null, null, null, true, messageObject); + editMessage(messageObject, null, null, null, null, null, true, messageObject); } return false; } @@ -1122,7 +1127,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe performSendMessageRequest(req, newMsgObj, null, null, null, null, false); } - public void sendSticker(TLRPC.Document document, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, Object parentObject, boolean notify, int scheduleDate) { + public void sendSticker(TLRPC.Document document, String query, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, Object parentObject, boolean notify, int scheduleDate) { if (document == null) { return; } @@ -1216,7 +1221,14 @@ public class SendMessagesHelper extends BaseController implements NotificationCe }); }); } else { - sendMessage((TLRPC.TL_document) finalDocument, null, null, peer, replyToMsg, replyToTopMsg, null, null, null, null, notify, scheduleDate, 0, parentObject); + HashMap params; + if (!TextUtils.isEmpty(query)) { + params = new HashMap<>(); + params.put("query", query); + } else { + params = null; + } + sendMessage((TLRPC.TL_document) finalDocument, null, null, peer, replyToMsg, replyToTopMsg, null, null, null, params, notify, scheduleDate, 0, parentObject); } } @@ -1337,10 +1349,9 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } else { newMsg.fwd_from.from_id = new TLRPC.TL_peerChannel(); newMsg.fwd_from.from_id.channel_id = msgObj.messageOwner.peer_id.channel_id; - newMsg.fwd_from.flags |= 2; + newMsg.fwd_from.flags |= 1; if (msgObj.messageOwner.post && fromId > 0) { newMsg.fwd_from.from_id = msgObj.messageOwner.from_id != null ? 
msgObj.messageOwner.from_id : msgObj.messageOwner.peer_id; - newMsg.fwd_from.flags |= 1; } } if (msgObj.messageOwner.post_author != null) { @@ -1372,9 +1383,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (newMsg.media != null) { newMsg.flags |= TLRPC.MESSAGE_FLAG_HAS_MEDIA; } - if (isMegagroup) { - newMsg.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } if (msgObj.messageOwner.via_bot_id != 0) { newMsg.via_bot_id = msgObj.messageOwner.via_bot_id; newMsg.flags |= TLRPC.MESSAGE_FLAG_HAS_BOT_ID; @@ -1545,7 +1553,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe final ArrayList newMsgObjArr = arr; final ArrayList newMsgArr = new ArrayList<>(objArr); final LongSparseArray messagesByRandomIdsFinal = messagesByRandomIds; - final boolean isMegagroupFinal = isMegagroup; boolean scheduledOnline = scheduleDate == 0x7FFFFFFE; getConnectionsManager().sendRequest(req, (response, error) -> { if (error == null) { @@ -1587,9 +1594,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe TLRPC.TL_updateNewChannelMessage updateNewChannelMessage = (TLRPC.TL_updateNewChannelMessage) update; message = updateNewChannelMessage.message; getMessagesController().processNewChannelDifferenceParams(updateNewChannelMessage.pts, updateNewChannelMessage.pts_count, message.peer_id.channel_id); - if (isMegagroupFinal) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } if (scheduledOnline && message.date != 0x7FFFFFFE) { currentSchedule = false; @@ -1695,7 +1699,12 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } private void writePreviousMessageData(TLRPC.Message message, SerializedData data) { - message.media.serializeToStream(data); + if (message.media == null) { + TLRPC.TL_messageMediaEmpty media = new TLRPC.TL_messageMediaEmpty(); + media.serializeToStream(data); + } else { + message.media.serializeToStream(data); + } data.writeString(message.message != 
null ? message.message : ""); data.writeString(message.attachPath != null ? message.attachPath : ""); int count; @@ -1705,10 +1714,13 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } } - private void editMessageMedia(MessageObject messageObject, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.TL_document document, String path, HashMap params, boolean retry, Object parentObject) { + public void editMessage(MessageObject messageObject, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.TL_document document, String path, HashMap params, boolean retry, Object parentObject) { if (messageObject == null) { return; } + if (params == null) { + params = new HashMap<>(); + } TLRPC.Message newMsg = messageObject.messageOwner; messageObject.cancelEditing = false; @@ -1727,10 +1739,12 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } if (retry) { - if (messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto) { + if (messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaWebPage || messageObject.messageOwner.media == null || messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaEmpty) { + type = 1; + } else if (messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto) { photo = (TLRPC.TL_photo) messageObject.messageOwner.media.photo; type = 2; - } else { + } else if (messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaDocument) { document = (TLRPC.TL_document) messageObject.messageOwner.media.document; if (MessageObject.isVideoDocument(document) || videoEditedInfo != null) { type = 3; @@ -1748,10 +1762,14 @@ public class SendMessagesHelper extends BaseController implements NotificationCe path = newMsg.attachPath; } else { messageObject.previousMedia = newMsg.media; - messageObject.previousCaption = newMsg.message; - messageObject.previousCaptionEntities = newMsg.entities; + messageObject.previousMessage = newMsg.message; + 
messageObject.previousMessageEntities = newMsg.entities; messageObject.previousAttachPath = newMsg.attachPath; + TLRPC.MessageMedia media = newMsg.media; + if (media == null) { + media = new TLRPC.TL_messageMediaEmpty(); + } SerializedData serializedDataCalc = new SerializedData(true); writePreviousMessageData(newMsg, serializedDataCalc); SerializedData prevMessageData = new SerializedData(serializedDataCalc.length()); @@ -1787,6 +1805,8 @@ public class SendMessagesHelper extends BaseController implements NotificationCe params.put("ve", ve); } newMsg.attachPath = path; + } else { + type = 1; } newMsg.params = params; @@ -1806,17 +1826,30 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (!retry) { if (messageObject.editingMessage != null) { newMsg.message = messageObject.editingMessage.toString(); - if (messageObject.editingMessageEntities != null) { - newMsg.entities = messageObject.editingMessageEntities; - } else { - CharSequence[] message = new CharSequence[]{messageObject.editingMessage}; - ArrayList entities = getMediaDataController().getEntities(message, supportsSendingNewEntities); - if (entities != null && !entities.isEmpty()) { - newMsg.entities = entities; - } - } messageObject.caption = null; - messageObject.generateCaption(); + if (type == 1) { + if (messageObject.editingMessageEntities != null) { + newMsg.entities = messageObject.editingMessageEntities; + newMsg.flags |= 128; + } else { + newMsg.flags &=~ 128; + } + } else { + if (messageObject.editingMessageEntities != null) { + newMsg.entities = messageObject.editingMessageEntities; + newMsg.flags |= 128; + } else { + CharSequence[] message = new CharSequence[]{messageObject.editingMessage}; + ArrayList entities = getMediaDataController().getEntities(message, supportsSendingNewEntities); + if (entities != null && !entities.isEmpty()) { + newMsg.entities = entities; + newMsg.flags |= 128; + } else { + newMsg.flags &=~ 128; + } + } + 
messageObject.generateCaption(); + } } ArrayList arr = new ArrayList<>(); @@ -1825,6 +1858,13 @@ public class SendMessagesHelper extends BaseController implements NotificationCe messageObject.type = -1; messageObject.setType(); + if (type == 1) { + if (messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto || messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaDocument) { + messageObject.generateCaption(); + } else { + messageObject.generateLayout(null); + } + } messageObject.createMessageSendInfo(); ArrayList arrayList = new ArrayList<>(); arrayList.add(messageObject); @@ -1840,7 +1880,9 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (type >= 1 && type <= 3 || type >= 5 && type <= 8) { TLRPC.InputMedia inputMedia = null; - if (type == 2) { + if (type == 1) { + + } else if (type == 2) { TLRPC.TL_inputMediaUploadedPhoto uploadedPhoto = new TLRPC.TL_inputMediaUploadedPhoto(); if (params != null) { String masks = params.get("masks"); @@ -1972,16 +2014,18 @@ public class SendMessagesHelper extends BaseController implements NotificationCe TLRPC.TL_messages_editMessage request = new TLRPC.TL_messages_editMessage(); request.id = messageObject.getId(); request.peer = getMessagesController().getInputPeer((int) peer); - request.flags |= 16384; - request.media = inputMedia; + if (inputMedia != null) { + request.flags |= 16384; + request.media = inputMedia; + } if (messageObject.scheduled) { request.schedule_date = messageObject.messageOwner.date; request.flags |= 32768; } - if (messageObject.editingMessage != null) { request.message = messageObject.editingMessage.toString(); request.flags |= 2048; + request.no_webpage = !messageObject.editingMessageSearchWebPage; if (messageObject.editingMessageEntities != null) { request.entities = messageObject.editingMessageEntities; request.flags |= 8; @@ -2038,7 +2082,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } } - public int 
editMessage(MessageObject messageObject, String message, boolean searchLinks, final BaseFragment fragment, ArrayList entities, int scheduleDate, final Runnable callback) { + public int editMessage(MessageObject messageObject, String message, boolean searchLinks, final BaseFragment fragment, ArrayList entities, int scheduleDate) { if (fragment == null || fragment.getParentActivity() == null) { return 0; } @@ -2065,9 +2109,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } else { AndroidUtilities.runOnUIThread(() -> AlertsCreator.processError(currentAccount, error, fragment, req)); } - if (callback != null) { - AndroidUtilities.runOnUIThread(callback); - } }); } @@ -2936,7 +2977,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe TLRPC.Chat chat = getMessagesController().getChat(sendToPeer.channel_id); if (chat != null) { if (chat.megagroup) { - newMsg.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; newMsg.unread = true; } else { newMsg.post = true; @@ -3276,6 +3316,10 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (media.id.file_reference == null) { media.id.file_reference = new byte[0]; } + if (params != null && params.containsKey("query")) { + media.query = params.get("query"); + media.flags |= 2; + } inputMedia = media; } if (delayedMessage == null) { @@ -3334,6 +3378,10 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (media.id.file_reference == null) { media.id.file_reference = new byte[0]; } + if (params != null && params.containsKey("query")) { + media.query = params.get("query"); + media.flags |= 2; + } inputMedia = media; } if (!http && uploadedMedia != null) { @@ -3373,6 +3421,10 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (media.id.file_reference == null) { media.id.file_reference = new byte[0]; } + if (params != null && params.containsKey("query")) { + media.query = 
params.get("query"); + media.flags |= 2; + } inputMedia = media; } delayedMessage = new DelayedMessage(peer); @@ -3857,7 +3909,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } for (int a = 0, N = arrayList.size(); a < N; a++) { TLRPC.PhotoSize size = arrayList.get(a); - if (size == null || size instanceof TLRPC.TL_photoStrippedSize || size instanceof TLRPC.TL_photoSizeEmpty || size.location == null) { + if (size == null || size instanceof TLRPC.TL_photoStrippedSize || size instanceof TLRPC.TL_photoPathSize || size instanceof TLRPC.TL_photoSizeEmpty || size.location == null) { continue; } TLRPC.TL_photoSize photoSize = new TLRPC.TL_photoSize(); @@ -4540,9 +4592,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe updateMediaPaths(msgObj, message, message.id, originalPath, false); existFlags = msgObj.getMediaExistanceFlags(); newMsgObj.id = message.id; - if ((newMsgObj.flags & TLRPC.MESSAGE_FLAG_MEGAGROUP) != 0) { - message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } grouped_id = message.grouped_id; if (!scheduled) { @@ -4800,18 +4849,12 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } sentMessages.add(message = newMessage.message); - if ((newMsgObj.flags & TLRPC.MESSAGE_FLAG_MEGAGROUP) != 0) { - newMessage.message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } Utilities.stageQueue.postRunnable(() -> getMessagesController().processNewChannelDifferenceParams(newMessage.pts, newMessage.pts_count, newMessage.message.peer_id.channel_id)); updatesArr.remove(a); break; } else if (update instanceof TLRPC.TL_updateNewScheduledMessage) { final TLRPC.TL_updateNewScheduledMessage newMessage = (TLRPC.TL_updateNewScheduledMessage) update; sentMessages.add(message = newMessage.message); - if ((newMsgObj.flags & TLRPC.MESSAGE_FLAG_MEGAGROUP) != 0) { - newMessage.message.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } updatesArr.remove(a); break; } @@ -5023,6 +5066,7 @@ public class 
SendMessagesHelper extends BaseController implements NotificationCe sentMessage.message = newMsg.message; sentMessage.attachPath = newMsg.attachPath; newMsg.media.photo.id = sentMessage.media.photo.id; + newMsg.media.photo.dc_id = sentMessage.media.photo.dc_id; newMsg.media.photo.access_hash = sentMessage.media.photo.access_hash; } else if (sentMessage.media instanceof TLRPC.TL_messageMediaDocument && sentMessage.media.document != null && newMsg.media instanceof TLRPC.TL_messageMediaDocument && newMsg.media.document != null) { if (sentMessage.media.ttl_seconds == 0 && (newMsgObj.videoEditedInfo == null || newMsgObj.videoEditedInfo.mediaEntities == null && TextUtils.isEmpty(newMsgObj.videoEditedInfo.paintPath) && newMsgObj.videoEditedInfo.cropState == null)) { @@ -5052,7 +5096,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe size2.location = size.location; size2.size = size.size; } - } else if (size2 != null && MessageObject.isStickerMessage(sentMessage) && size2.location != null) { + } else if (size != null && size2 != null && MessageObject.isStickerMessage(sentMessage) && size2.location != null) { size.location = size2.location; } else if (size2 == null || size2 != null && size2.location instanceof TLRPC.TL_fileLocationUnavailable || size2 instanceof TLRPC.TL_photoSizeEmpty) { newMsg.media.document.thumbs = sentMessage.media.document.thumbs; @@ -5480,7 +5524,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe AndroidUtilities.runOnUIThread(() -> { if (editingMessageObject != null) { - accountInstance.getSendMessagesHelper().editMessageMedia(editingMessageObject, null, null, documentFinal, pathFinal, params, false, parentFinal); + accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, null, documentFinal, pathFinal, params, false, parentFinal); } else { accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, pathFinal, dialogId, replyToMsg, 
replyToTopMsg, captionFinal, entities, null, params, notify, scheduleDate, 0, parentFinal); } @@ -5575,7 +5619,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } AndroidUtilities.runOnUIThread(() -> { if (editingMessageObject != null) { - accountInstance.getSendMessagesHelper().editMessageMedia(editingMessageObject, null, null, documentFinal, messageObject.messageOwner.attachPath, params, false, parentFinal); + accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, null, documentFinal, messageObject.messageOwner.attachPath, params, false, parentFinal); } else { accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, messageObject.messageOwner.attachPath, dialogId, replyToMsg, replyToTopMsg, captionFinal, null, null, params, notify, scheduleDate, 0, parentFinal); } @@ -5638,7 +5682,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } mediaCount++; long prevGroupId = groupId[0]; - if (!prepareSendingDocumentInternal(accountInstance, paths.get(a), originalPaths.get(a), null, mime, dialogId, replyToMsg, replyToTopMsg, captionFinal, null, editingMessageObject, groupId, mediaCount == 10 || a == count - 1, true, notify, scheduleDate, docType)) { + if (!prepareSendingDocumentInternal(accountInstance, paths.get(a), originalPaths.get(a), null, mime, dialogId, replyToMsg, replyToTopMsg, captionFinal, null, editingMessageObject, groupId, mediaCount == 10 || a == count - 1, inputContent == null, notify, scheduleDate, docType)) { error = true; } if (prevGroupId != groupId[0]) { @@ -5661,7 +5705,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } mediaCount++; long prevGroupId = groupId[0]; - if (!prepareSendingDocumentInternal(accountInstance, null, null, uris.get(a), mime, dialogId, replyToMsg, replyToTopMsg, captionFinal, null, editingMessageObject, groupId, mediaCount == 10 || a == count - 1, true, notify, scheduleDate, 
docType)) { + if (!prepareSendingDocumentInternal(accountInstance, null, null, uris.get(a), mime, dialogId, replyToMsg, replyToTopMsg, captionFinal, null, editingMessageObject, groupId, mediaCount == 10 || a == count - 1, inputContent == null, notify, scheduleDate, docType)) { error = true; } if (prevGroupId != groupId[0]) { @@ -6066,7 +6110,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe TLRPC.TL_photo photo = (TLRPC.TL_photo) object; boolean smallExists; TLRPC.PhotoSize smallSize = FileLoader.getClosestPhotoSizeWithSize(photo.sizes, 90); - if (smallSize instanceof TLRPC.TL_photoStrippedSize) { + if (smallSize instanceof TLRPC.TL_photoStrippedSize || smallSize instanceof TLRPC.TL_photoPathSize) { smallExists = true; } else { File smallFile = FileLoader.getPathToAttach(smallSize, true); @@ -6100,7 +6144,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe TLRPC.TL_document document = (TLRPC.TL_document) object; if ((MessageObject.isVideoDocument(document) || MessageObject.isNewGifDocument(document)) && MessageObject.isDocumentHasThumb(document)) { TLRPC.PhotoSize photoSize = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 320); - if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + if (photoSize instanceof TLRPC.TL_photoStrippedSize || photoSize instanceof TLRPC.TL_photoPathSize) { return; } File smallFile = FileLoader.getPathToAttach(photoSize, true); @@ -6366,7 +6410,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } AndroidUtilities.runOnUIThread(() -> { if (editingMessageObject != null) { - accountInstance.getSendMessagesHelper().editMessageMedia(editingMessageObject, null, null, documentFinal, pathFinal, params, false, parentFinal); + accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, null, documentFinal, pathFinal, params, false, parentFinal); } else { 
accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, pathFinal, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, 0, parentFinal); } @@ -6437,7 +6481,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } AndroidUtilities.runOnUIThread(() -> { if (editingMessageObject != null) { - accountInstance.getSendMessagesHelper().editMessageMedia(editingMessageObject, photoFinal, null, null, needDownloadHttpFinal ? info.searchImage.imageUrl : null, params, false, parentFinal); + accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, photoFinal, null, null, needDownloadHttpFinal ? info.searchImage.imageUrl : null, params, false, parentFinal); } else { accountInstance.getSendMessagesHelper().sendMessage(photoFinal, needDownloadHttpFinal ? info.searchImage.imageUrl : null, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, info.ttl, parentFinal); } @@ -6613,7 +6657,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe ImageLoader.getInstance().putImageToCache(new BitmapDrawable(thumbFinal), thumbKeyFinal); } if (editingMessageObject != null) { - accountInstance.getSendMessagesHelper().editMessageMedia(editingMessageObject, null, videoEditedInfo, videoFinal, finalPath, params, false, parentFinal); + accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, videoEditedInfo, videoFinal, finalPath, params, false, parentFinal); } else { accountInstance.getSendMessagesHelper().sendMessage(videoFinal, videoEditedInfo, finalPath, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, info.ttl, parentFinal); } @@ -6806,7 +6850,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe ImageLoader.getInstance().putImageToCache(new BitmapDrawable(bitmapFinal[0]), keyFinal[0]); } if 
(editingMessageObject != null) { - accountInstance.getSendMessagesHelper().editMessageMedia(editingMessageObject, photoFinal, null, null, null, params, false, parentFinal); + accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, photoFinal, null, null, null, params, false, parentFinal); } else { accountInstance.getSendMessagesHelper().sendMessage(photoFinal, null, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, info.ttl, parentFinal); } @@ -7247,7 +7291,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe ImageLoader.getInstance().putImageToCache(new BitmapDrawable(thumbFinal), thumbKeyFinal); } if (editingMessageObject != null) { - accountInstance.getSendMessagesHelper().editMessageMedia(editingMessageObject, null, videoEditedInfo, videoFinal, finalPath, params, false, parentFinal); + accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, videoEditedInfo, videoFinal, finalPath, params, false, parentFinal); } else { accountInstance.getSendMessagesHelper().sendMessage(videoFinal, videoEditedInfo, finalPath, dialogId, replyToMsg, replyToTopMsg, captionFinal, entities, null, params, notify, scheduleDate, ttl, parentFinal); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java b/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java index f55ff7f54..10d66765b 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java @@ -64,8 +64,11 @@ public class SharedConfig { public static int lockRecordAudioVideoHint; public static boolean searchMessagesAsListUsed; public static boolean stickersReorderingHintUsed; + public static boolean disableVoiceAudioEffects; private static int lastLocalId = -210000; + public static String storageCacheDir; + private static String passportConfigJson = ""; private static 
HashMap passportConfigMap; public static int passportConfigHash; @@ -182,6 +185,8 @@ public class SharedConfig { editor.putInt("textSelectionHintShows", textSelectionHintShows); editor.putInt("scheduledOrNoSoundHintShows", scheduledOrNoSoundHintShows); editor.putInt("lockRecordAudioVideoHint", lockRecordAudioVideoHint); + editor.putBoolean("disableVoiceAudioEffects", disableVoiceAudioEffects); + editor.putString("storageCacheDir", !TextUtils.isEmpty(storageCacheDir) ? storageCacheDir : ""); editor.commit(); } catch (Exception e) { FileLog.e(e); @@ -220,6 +225,7 @@ public class SharedConfig { pushString = preferences.getString("pushString2", ""); passportConfigJson = preferences.getString("passportConfigJson", ""); passportConfigHash = preferences.getInt("passportConfigHash", 0); + storageCacheDir = preferences.getString("storageCacheDir", null); String authKeyString = preferences.getString("pushAuthKey", null); if (!TextUtils.isEmpty(authKeyString)) { pushAuthKey = Base64.decode(authKeyString, Base64.DEFAULT); @@ -239,7 +245,6 @@ public class SharedConfig { preferences = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE); saveToGallery = preferences.getBoolean("save_gallery", false); autoplayGifs = preferences.getBoolean("autoplay_gif", true); - chatBubbles = preferences.getBoolean("chatBubbles", Build.VERSION.SDK_INT >= 30); autoplayVideo = preferences.getBoolean("autoplay_video", true); mapPreviewType = preferences.getInt("mapPreviewType", 2); raiseToSpeak = preferences.getBoolean("raise_to_speak", true); @@ -280,6 +285,7 @@ public class SharedConfig { textSelectionHintShows = preferences.getInt("textSelectionHintShows", 0); scheduledOrNoSoundHintShows = preferences.getInt("scheduledOrNoSoundHintShows", 0); lockRecordAudioVideoHint = preferences.getInt("lockRecordAudioVideoHint", 0); + disableVoiceAudioEffects = preferences.getBoolean("disableVoiceAudioEffects", false); preferences = 
ApplicationLoader.applicationContext.getSharedPreferences("Notifications", Activity.MODE_PRIVATE); showNotificationsForAllAccounts = preferences.getBoolean("AllAccounts", true); @@ -527,6 +533,14 @@ public class SharedConfig { }); } + public static void toggleDisableVoiceAudioEffects() { + disableVoiceAudioEffects = !disableVoiceAudioEffects; + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + SharedPreferences.Editor editor = preferences.edit(); + editor.putBoolean("disableVoiceAudioEffects", disableVoiceAudioEffects); + editor.commit(); + } + public static void toggleLoopStickers() { loopStickers = !loopStickers; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); @@ -590,14 +604,6 @@ public class SharedConfig { editor.commit(); } - public static void toggleChatBubbles() { - chatBubbles = !chatBubbles; - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("chatBubbles", chatBubbles); - editor.commit(); - } - public static void setUseThreeLinesLayout(boolean value) { useThreeLinesLayout = value; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SvgHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java similarity index 87% rename from TMessagesProj/src/main/java/org/telegram/ui/Components/SvgHelper.java rename to TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java index 4f8a4964d..604239ef1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SvgHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java @@ -20,21 +20,26 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -package org.telegram.ui.Components; +package org.telegram.messenger; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ColorFilter; +import android.graphics.ComposeShader; +import android.graphics.LinearGradient; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Path; import android.graphics.PixelFormat; +import android.graphics.PorterDuff; import android.graphics.Rect; import android.graphics.RectF; +import android.graphics.Shader; import android.graphics.drawable.Drawable; +import android.os.SystemClock; -import org.telegram.messenger.FileLog; +import org.telegram.ui.ActionBar.Theme; import org.xml.sax.Attributes; import org.xml.sax.InputSource; import org.xml.sax.XMLReader; @@ -43,6 +48,7 @@ import org.xml.sax.helpers.DefaultHandler; import java.io.File; import java.io.FileInputStream; import java.io.StringReader; +import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.HashMap; @@ -80,19 +86,48 @@ public class SvgHelper { } } + private static class RoundRect { + RectF rect; + float rx; + public RoundRect(RectF rect, float rx) { + this.rect = rect; + this.rx = rx; + } + } + public static class SvgDrawable extends Drawable { private ArrayList commands = new ArrayList<>(); private HashMap paints = new HashMap<>(); private int width; private int height; + private static int[] parentPosition = new int[2]; + + private LinearGradient backgroundGradient; + private LinearGradient placeholderGradient; + private Matrix placeholderMatrix; + private static float totalTranslation; + private static float gradientWidth; + private static long lastUpdateTime; + private static Runnable shiftRunnable; + private static WeakReference shiftDrawable; + private ImageReceiver parentImageReceiver; + private int currentColor; + private String currentColorKey; + private float colorAlpha; + private float crossfadeAlpha; @Override public void draw(Canvas 
canvas) { + if (currentColorKey != null) { + setupGradient(currentColorKey, colorAlpha); + } Rect bounds = getBounds(); float scaleX = bounds.width() / (float) width; float scaleY = bounds.height() / (float) height; float scale = Math.max(scaleX, scaleY); + canvas.save(); + canvas.translate(bounds.left, bounds.top); canvas.scale(scale, scale); for (int a = 0, N = commands.size(); a < N; a++) { Object object = commands.get(a); @@ -103,6 +138,8 @@ public class SvgHelper { canvas.restore(); } else { Paint paint = paints.get(object); + int originalAlpha = paint.getAlpha(); + paint.setAlpha((int) (crossfadeAlpha * originalAlpha)); if (object instanceof Path) { canvas.drawPath((Path) object, paint); } else if (object instanceof Rect) { @@ -118,14 +155,48 @@ public class SvgHelper { } else if (object instanceof Oval) { Oval oval = (Oval) object; canvas.drawOval(oval.rect, paint); + } else if (object instanceof RoundRect) { + RoundRect rect = (RoundRect) object; + canvas.drawRoundRect(rect.rect, rect.rx, rect.rx, paint); } + paint.setAlpha(originalAlpha); + } + } + canvas.restore(); + if (placeholderGradient != null) { + if (shiftRunnable == null || shiftDrawable.get() == this) { + long newUpdateTime = SystemClock.elapsedRealtime(); + long dt = Math.abs(lastUpdateTime - newUpdateTime); + if (dt > 17) { + dt = 16; + } + lastUpdateTime = newUpdateTime; + totalTranslation += dt * gradientWidth / 1800.0f; + while (totalTranslation >= gradientWidth / 2) { + totalTranslation -= gradientWidth; + } + shiftDrawable = new WeakReference<>(this); + if (shiftRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(shiftRunnable); + } + AndroidUtilities.runOnUIThread(shiftRunnable = () -> shiftRunnable = null, (int) (1000 / AndroidUtilities.screenRefreshRate) - 1); + } + if (parentImageReceiver != null) { + parentImageReceiver.getParentPosition(parentPosition); + } + placeholderMatrix.reset(); + placeholderMatrix.postTranslate(-parentPosition[0] + totalTranslation - bounds.left, 0); + 
placeholderMatrix.postScale(1.0f / scale, 1.0f / scale); + placeholderGradient.setLocalMatrix(placeholderMatrix); + if (parentImageReceiver != null) { + parentImageReceiver.invalidate(); } } } @Override public void setAlpha(int alpha) { - + crossfadeAlpha = alpha / 255.0f; } @Override @@ -146,6 +217,30 @@ public class SvgHelper { private void addCommand(Object command) { commands.add(command); } + + public void setParent(ImageReceiver imageReceiver) { + parentImageReceiver = imageReceiver; + } + + public void setupGradient(String colorKey, float alpha) { + int color = Theme.getColor(colorKey); + if (currentColor != color) { + colorAlpha = alpha; + currentColorKey = colorKey; + currentColor = color; + gradientWidth = AndroidUtilities.displaySize.x * 2; + float w = AndroidUtilities.dp(180) / gradientWidth; + color = Color.argb((int) (Color.alpha(color) / 2 * colorAlpha), Color.red(color), Color.green(color), Color.blue(color)); + float centerX = (1.0f - w) / 2; + placeholderGradient = new LinearGradient(0, 0, gradientWidth, 0, new int[]{0x00000000, 0x00000000, color, 0x00000000, 0x00000000}, new float[]{0.0f, centerX - w / 2.0f, centerX, centerX + w / 2.0f, 1.0f}, Shader.TileMode.REPEAT); + backgroundGradient = new LinearGradient(0, 0, gradientWidth, 0, new int[]{color, color}, null, Shader.TileMode.REPEAT); + placeholderMatrix = new Matrix(); + placeholderGradient.setLocalMatrix(placeholderMatrix); + for (Paint paint : paints.values()) { + paint.setShader(new ComposeShader(placeholderGradient, backgroundGradient, PorterDuff.Mode.ADD)); + } + } + } } public static Bitmap getBitmap(File file, int width, int height, boolean white) { @@ -193,6 +288,21 @@ public class SvgHelper { } } + public static SvgDrawable getDrawableByPath(String pathString, int w, int h) { + try { + Path path = doPath(pathString); + SvgDrawable drawable = new SvgDrawable(); + drawable.commands.add(path); + drawable.paints.put(path, new Paint(Paint.ANTI_ALIAS_FLAG)); + drawable.width = w; + 
drawable.height = h; + return drawable; + } catch (Exception e) { + FileLog.e(e); + return null; + } + } + public static Bitmap getBitmapByPathOnly(String pathString, int svgWidth, int svgHeight, int width, int height) { try { Path path = doPath(pathString); @@ -395,6 +505,15 @@ public class SvgHelper { } else if (prevCmd == 'l' || prevCmd == 'L') { cmd = prevCmd; break; + } else if (prevCmd == 's' || prevCmd == 'S') { + cmd = prevCmd; + break; + } else if (prevCmd == 'h' || prevCmd == 'H') { + cmd = prevCmd; + break; + } else if (prevCmd == 'v' || prevCmd == 'V') { + cmd = prevCmd; + break; } default: { ph.advance(); @@ -754,6 +873,7 @@ public class SvgHelper { private float scale = 1.0f; private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); private RectF rect = new RectF(); + private RectF rectTmp = new RectF(); private boolean whiteOnly; boolean pushed = false; @@ -952,20 +1072,31 @@ public class SvgHelper { } Float width = getFloatAttr("width", atts); Float height = getFloatAttr("height", atts); + Float rx = getFloatAttr("rx", atts, null); pushTransform(atts); Properties props = new Properties(atts, globalStyles); if (doFill(props)) { if (drawable != null) { - drawable.addCommand(new RectF(x, y, x + width, y + height), paint); + drawable.addCommand(new RoundRect(new RectF(x, y, x + width, y + height), rx), paint); } else { - canvas.drawRect(x, y, x + width, y + height, paint); + if (rx != null) { + rectTmp.set(x, y, x + width, y + height); + canvas.drawRoundRect(rectTmp, rx, rx, paint); + } else { + canvas.drawRect(x, y, x + width, y + height, paint); + } } } if (doStroke(props)) { if (drawable != null) { - drawable.addCommand(new RectF(x, y, x + width, y + height), paint); + drawable.addCommand(new RoundRect(new RectF(x, y, x + width, y + height), rx), paint); } else { - canvas.drawRect(x, y, x + width, y + height, paint); + if (rx != null) { + rectTmp.set(x, y, x + width, y + height); + canvas.drawRoundRect(rectTmp, rx, rx, paint); + } else { + 
canvas.drawRect(x, y, x + width, y + height, paint); + } } } popTransform(); @@ -1513,4 +1644,30 @@ public class SvgHelper { return f; } } + + public static String decompress(byte[] encoded) { + try { + StringBuilder path = new StringBuilder(encoded.length * 2); + path.append('M'); + for (int i = 0; i < encoded.length; i++) { + int num = encoded[i] & 0xff; + if (num >= 128 + 64) { + int start = num - 128 - 64; + path.append("AACAAAAHAAALMAAAQASTAVAAAZaacaaaahaaalmaaaqastava.az0123456789-,".substring(start, start + 1)); + } else { + if (num >= 128) { + path.append(','); + } else if (num >= 64) { + path.append('-'); + } + path.append(num & 63); + } + } + path.append('z'); + return path.toString(); + } catch (Exception e) { + FileLog.e(e); + } + return ""; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java b/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java index ddad25359..e3990bcd3 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java @@ -744,7 +744,7 @@ public class MediaCodecVideoConvertor { readAndWriteTracks(extractor, mediaMuxer, info, startTime, endTime, duration, cacheFile, bitrate != -1); } } - } catch (Exception e) { + } catch (Throwable e) { error = true; FileLog.e("bitrate: " + bitrate + " framerate: " + framerate + " size: " + resultHeight + "x" + resultWidth); FileLog.e(e); @@ -756,7 +756,7 @@ public class MediaCodecVideoConvertor { try { mediaMuxer.finishMovie(); endPresentationTime = mediaMuxer.getLastFrameTimestamp(videoTrackIndex); - } catch (Exception e) { + } catch (Throwable e) { FileLog.e(e); } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java index 066da5a90..b1801d08b 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java @@ -330,6 +330,80 @@ public final class Instance { } } + public static final class Fingerprint { + + public final String hash; + public final String setup; + public final String fingerprint; + + public Fingerprint(String hash, String setup, String fingerprint) { + this.hash = hash; + this.setup = setup; + this.fingerprint = fingerprint; + } + + @Override + public String toString() { + return "Fingerprint{" + + "hash=" + hash + + ", setup=" + setup + + ", fingerprint=" + fingerprint + + '}'; + } + } + + public static final class Candidate { + + public final String port; + public final String protocol; + public final String network; + public final String generation; + public final String id; + public final String component; + public final String foundation; + public final String priority; + public final String ip; + public final String type; + public final String tcpType; + public final String relAddr; + public final String relPort; + + public Candidate(String port, String protocol, String network, String generation, String id, String component, String foundation, String priority, String ip, String type, String tcpType, String relAddr, String relPort) { + this.port = port; + this.protocol = protocol; + this.network = network; + this.generation = generation; + this.id = id; + this.component = component; + this.foundation = foundation; + this.priority = priority; + this.ip = ip; + this.type = type; + this.tcpType = tcpType; + this.relAddr = relAddr; + this.relPort = relPort; + } + + @Override + public String toString() { + return "Candidate{" + + "port=" + port + + ", protocol=" + protocol + + ", network=" + network + + ", generation=" + generation + + ", id=" + id + + ", component=" + component + + ", foundation=" + foundation + + ", priority=" + priority + + ", ip=" + ip + + ", type=" + type + + ", tcpType=" + tcpType + + 
", relAddr=" + relAddr + + ", relPort=" + relPort + + '}'; + } + } + public static final class ServerConfig { public final boolean useSystemNs; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java index fda526040..e15ac62ce 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java @@ -1,8 +1,13 @@ package org.telegram.messenger.voip; +import org.json.JSONArray; +import org.json.JSONObject; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildVars; import org.telegram.messenger.FileLog; +import org.telegram.messenger.SharedConfig; +import org.webrtc.ContextUtils; import org.webrtc.VideoSink; import java.util.concurrent.CountDownLatch; @@ -16,6 +21,20 @@ public class NativeInstance { private long nativePtr; private String persistentStateFilePath; + private PayloadCallback payloadCallback; + private AudioLevelsCallback audioLevelsCallback; + private float[] temp = new float[1]; + + private boolean isGroup; + + public interface PayloadCallback { + void run(int ssrc, String value); + } + + public interface AudioLevelsCallback { + void run(int[] uids, float[] levels, boolean[] voice); + } + public static NativeInstance make(String version, Instance.Config config, String path, Instance.Endpoint[] endpoints, Instance.Proxy proxy, int networkType, Instance.EncryptionKey encryptionKey, VideoSink remoteSink, long videoCapturer) { if (BuildVars.LOGS_ENABLED) { FileLog.d("create new tgvoip instance, version " + version); @@ -27,10 +46,24 @@ public class NativeInstance { return instance; } + public static NativeInstance makeGroup(PayloadCallback payloadCallback, AudioLevelsCallback audioLevelsCallback) { + ContextUtils.initialize(ApplicationLoader.applicationContext); + 
NativeInstance instance = new NativeInstance(); + instance.payloadCallback = payloadCallback; + instance.audioLevelsCallback = audioLevelsCallback; + instance.isGroup = true; + instance.nativePtr = makeGroupNativeInstance(instance, SharedConfig.disableVoiceAudioEffects); + return instance; + } + public int getPeerCapabilities() { return 0; } + public boolean isGroup() { + return isGroup; + } + public void setOnStateUpdatedListener(Instance.OnStateUpdatedListener listener) { onStateUpdatedListener = listener; } @@ -71,6 +104,43 @@ public class NativeInstance { } } + //group calls + private void onNetworkStateUpdated(boolean connected) { + if (onStateUpdatedListener != null) { + AndroidUtilities.runOnUIThread(() -> onStateUpdatedListener.onStateUpdated(connected ? 1 : 0)); + } + } + + private void onAudioLevelsUpdated(int[] uids, float[] levels, boolean[] voice) { + if (uids.length == 0) { + return; + } + AndroidUtilities.runOnUIThread(() -> audioLevelsCallback.run(uids, levels, voice)); + } + + private void onEmitJoinPayload(String ufrag, String pwd, Instance.Fingerprint[] fingerprints, int ssrc) { + try { + JSONObject json = new JSONObject(); + json.put("ufrag", ufrag); + json.put("pwd", pwd); + JSONArray array = new JSONArray(); + for (int a = 0; a < fingerprints.length; a++) { + JSONObject object = new JSONObject(); + object.put("hash", fingerprints[a].hash); + object.put("fingerprint", fingerprints[a].fingerprint); + object.put("setup", fingerprints[a].setup); + array.put(object); + } + json.put("fingerprints", array); + json.put("ssrc", ssrc); + AndroidUtilities.runOnUIThread(() -> payloadCallback.run(ssrc, json.toString())); + } catch (Exception e) { + FileLog.e(e); + } + } + + public native void removeSsrcs(int[] ssrcs); + public native void setJoinResponsePayload(String ufrag, String pwd, Instance.Fingerprint[] fingerprints, Instance.Candidate[] candidates); private Instance.FinalState finalState; private CountDownLatch stopBarrier; @@ -92,6 +162,11 @@ 
public class NativeInstance { return finalState; } + public void stopGroup() { + stopGroupNative(); + } + + private static native long makeGroupNativeInstance(NativeInstance instance, boolean highQuality); private static native long makeNativeInstance(String version, NativeInstance instance, Instance.Config config, String persistentStateFilePath, Instance.Endpoint[] endpoints, Instance.Proxy proxy, int networkType, Instance.EncryptionKey encryptionKey, VideoSink remoteSink, long videoCapturer, float aspectRatio); public static native long createVideoCapturer(VideoSink localSink, boolean front); public static native void setVideoStateCapturer(long videoCapturer, int videoState); @@ -111,6 +186,7 @@ public class NativeInstance { public native Instance.TrafficStats getTrafficStats(); public native byte[] getPersistentState(); private native void stopNative(); + private native void stopGroupNative(); public native void setupOutgoingVideo(VideoSink localSink, boolean front); public native void switchCamera(boolean front); public native void setVideoState(int videoState); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java index 961cee631..3d93c14e0 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java @@ -38,6 +38,7 @@ import android.media.AudioFormat; import android.media.AudioManager; import android.media.AudioTrack; import android.media.MediaPlayer; +import android.media.MediaRouter; import android.media.RingtoneManager; import android.media.SoundPool; import android.net.ConnectivityManager; @@ -59,12 +60,15 @@ import android.text.SpannableString; import android.text.TextUtils; import android.text.style.ForegroundColorSpan; import android.view.View; +import android.view.WindowManager; import android.widget.RemoteViews; +import 
org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildConfig; import org.telegram.messenger.BuildVars; +import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; @@ -76,6 +80,8 @@ import org.telegram.messenger.NotificationsController; import org.telegram.messenger.R; import org.telegram.messenger.StatsController; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; +import org.telegram.messenger.Utilities; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; @@ -103,6 +109,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList public static final int STATE_ESTABLISHED = Instance.STATE_ESTABLISHED; public static final int STATE_FAILED = Instance.STATE_FAILED; public static final int STATE_RECONNECTING = Instance.STATE_RECONNECTING; + public static final int STATE_CREATING = 6; public static final int STATE_ENDED = 11; public static final String ACTION_HEADSET_PLUG = "android.intent.action.HEADSET_PLUG"; @@ -122,10 +129,15 @@ public abstract class VoIPBaseService extends Service implements SensorEventList protected static final int PROXIMITY_SCREEN_OFF_WAKE_LOCK = 32; protected static VoIPBaseService sharedInstance; + protected static Runnable setModeRunnable; + protected static final Object sync = new Object(); protected NetworkInfo lastNetInfo; protected int currentState = 0; protected Notification ongoingCallNotification; protected NativeInstance tgVoip; + + protected TLRPC.Chat chat; + protected boolean isVideoAvailable; protected boolean notificationsDisabled; protected boolean switchingCamera; @@ -142,20 +154,34 @@ public abstract class VoIPBaseService extends Service implements SensorEventList protected int 
spRingbackID; protected int spFailedID; protected int spEndId; + protected int spVoiceChatEndId; + protected int spVoiceChatStartId; + protected int spVoiceChatConnecting; protected int spBusyId; protected int spConnectingId; protected int spPlayID; protected boolean needPlayEndSound; protected boolean haveAudioFocus; protected boolean micMute; + protected boolean unmutedByHold; protected BluetoothAdapter btAdapter; protected Instance.TrafficStats prevTrafficStats; protected boolean isBtHeadsetConnected; protected boolean screenOn; + private Runnable updateNotificationRunnable; + + protected Runnable onDestroyRunnable; + + protected boolean playedConnectedSound; + protected int videoState = Instance.VIDEO_STATE_INACTIVE; - public TLRPC.PhoneCall call; + public TLRPC.PhoneCall privateCall; + public ChatObject.Call groupCall; + + protected int mySource; + protected boolean createGroupCall; protected long callStartTime; protected boolean playingSound; @@ -186,17 +212,35 @@ public abstract class VoIPBaseService extends Service implements SensorEventList protected Runnable afterSoundRunnable = new Runnable() { @Override public void run() { - soundPool.release(); - if (USE_CONNECTION_SERVICE) { - return; - } + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (isBtHeadsetConnected) { - am.stopBluetoothSco(); - am.setBluetoothScoOn(false); - bluetoothScoActive = false; + am.abandonAudioFocus(VoIPBaseService.this); + am.unregisterMediaButtonEventReceiver(new ComponentName(VoIPBaseService.this, VoIPMediaButtonReceiver.class)); + if (!USE_CONNECTION_SERVICE && sharedInstance == null) { + if (isBtHeadsetConnected) { + am.stopBluetoothSco(); + am.setBluetoothScoOn(false); + bluetoothScoActive = false; + } + am.setSpeakerphoneOn(false); } - am.setSpeakerphoneOn(false); + + Utilities.globalQueue.postRunnable(() -> soundPool.release()); + Utilities.globalQueue.postRunnable(setModeRunnable = () -> { + synchronized (sync) { + if (setModeRunnable == null) { + 
return; + } + setModeRunnable = null; + } + try { + am.setMode(AudioManager.MODE_NORMAL); + } catch (SecurityException x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error setting audio more to normal", x); + } + } + }); } }; @@ -342,6 +386,9 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } public void registerStateListener(StateListener l) { + if (stateListeners.contains(l)) { + return; + } stateListeners.add(l); if (currentState != 0) { l.onStateChanged(currentState); @@ -355,20 +402,70 @@ public abstract class VoIPBaseService extends Service implements SensorEventList stateListeners.remove(l); } - public void setMicMute(boolean mute) { + public void setMicMute(boolean mute, boolean hold, boolean send) { + if (micMute == mute) { + return; + } micMute = mute; + if (groupCall != null) { + if (!send) { + TLRPC.TL_groupCallParticipant self = groupCall.participants.get(UserConfig.getInstance(currentAccount).getClientUserId()); + if (self != null && self.muted && !self.can_self_unmute) { + send = true; + } + } + if (send) { + editCallMember(UserConfig.getInstance(currentAccount).getCurrentUser(), mute); + Utilities.globalQueue.postRunnable(updateNotificationRunnable = () -> { + if (updateNotificationRunnable == null) { + return; + } + updateNotificationRunnable = null; + showNotification(chat.title, getRoundAvatarBitmap(chat)); + }); + } + } + unmutedByHold = !micMute && hold; if (tgVoip != null) { tgVoip.setMuteMicrophone(mute); } + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } + + public void editCallMember(TLObject object, boolean mute) { + if (groupCall == null) { + return; + } + TLRPC.TL_phone_editGroupCallMember req = new TLRPC.TL_phone_editGroupCallMember(); + req.call = groupCall.getInputGroupCall(); + if (object instanceof TLRPC.User) { + TLRPC.User user = (TLRPC.User) object; + if (UserObject.isUserSelf(user)) { + req.user_id = new TLRPC.TL_inputUserSelf(); + } else { + req.user_id = new 
TLRPC.TL_inputUser(); + req.user_id.user_id = user.id; + req.user_id.access_hash = user.access_hash; + } + } + req.muted = mute; + int account = currentAccount; + AccountInstance.getInstance(account).getConnectionsManager().sendRequest(req, (response, error) -> { + if (response != null) { + AccountInstance.getInstance(account).getMessagesController().processUpdates((TLRPC.Updates) response, false); + } + }); } public boolean isMicMute() { return micMute; } - public void toggleSpeakerphoneOrShowRouteSheet(Activity activity) { + public void toggleSpeakerphoneOrShowRouteSheet(Context context, boolean fromOverlayWindow) { if (isBluetoothHeadsetConnected() && hasEarpiece()) { - BottomSheet.Builder builder = new BottomSheet.Builder(activity) + BottomSheet.Builder builder = new BottomSheet.Builder(context) .setTitle(LocaleController.getString("VoipOutputDevices", R.string.VoipOutputDevices), true) .setItems(new CharSequence[]{ LocaleController.getString("VoipAudioRoutingSpeaker", R.string.VoipAudioRoutingSpeaker), @@ -382,6 +479,15 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } setAudioOutput(which); }); + + BottomSheet bottomSheet = builder.create(); + if (fromOverlayWindow) { + if (Build.VERSION.SDK_INT >= 26) { + bottomSheet.getWindow().setType(WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY); + } else { + bottomSheet.getWindow().setType(WindowManager.LayoutParams.TYPE_SYSTEM_ALERT); + } + } builder.show(); return; } @@ -542,16 +648,23 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } protected void showNotification(String name, Bitmap photo) { - Intent intent = new Intent(this, LaunchActivity.class).setAction("voip"); + Intent intent = new Intent(this, LaunchActivity.class).setAction(groupCall != null ? 
"voip_chat" : "voip"); + if (groupCall != null) { + intent.putExtra("currentAccount", currentAccount); + } Notification.Builder builder = new Notification.Builder(this) - .setContentTitle(LocaleController.getString("VoipOutgoingCall", R.string.VoipOutgoingCall)) + .setContentTitle(groupCall != null ? LocaleController.getString("VoipVoiceChat", R.string.VoipVoiceChat) : LocaleController.getString("VoipOutgoingCall", R.string.VoipOutgoingCall)) .setContentText(name) - .setSmallIcon(R.drawable.notification) - .setContentIntent(PendingIntent.getActivity(this, 0, intent, 0)); + .setContentIntent(PendingIntent.getActivity(this, 50, intent, 0)); + if (groupCall != null) { + builder.setSmallIcon(isMicMute() ? R.drawable.voicechat_muted : R.drawable.voicechat_active); + } else { + builder.setSmallIcon(R.drawable.notification); + } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { Intent endIntent = new Intent(this, VoIPActionsReceiver.class); endIntent.setAction(getPackageName() + ".END_CALL"); - builder.addAction(R.drawable.ic_call_end_white_24dp, LocaleController.getString("VoipEndCall", R.string.VoipEndCall), PendingIntent.getBroadcast(this, 0, endIntent, PendingIntent.FLAG_UPDATE_CURRENT)); + builder.addAction(R.drawable.ic_call_end_white_24dp, groupCall != null ? 
LocaleController.getString("VoipGroupLeaveAlertTitle", R.string.VoipGroupLeaveAlertTitle) : LocaleController.getString("VoipEndCall", R.string.VoipEndCall), PendingIntent.getBroadcast(this, 0, endIntent, PendingIntent.FLAG_UPDATE_CURRENT)); builder.setPriority(Notification.PRIORITY_MAX); } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { @@ -644,6 +757,10 @@ public abstract class VoIPBaseService extends Service implements SensorEventList if (proximityWakelock != null && proximityWakelock.isHeld()) { proximityWakelock.release(); } + if (updateNotificationRunnable != null) { + Utilities.globalQueue.cancelRunnable(updateNotificationRunnable); + updateNotificationRunnable = null; + } unregisterReceiver(receiver); if (timeoutRunnable != null) { AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); @@ -655,9 +772,14 @@ public abstract class VoIPBaseService extends Service implements SensorEventList if (tgVoip != null) { StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), (int) (getCallDuration() / 1000) % 5); onTgVoipPreStop(); - Instance.FinalState state = tgVoip.stop(); - updateTrafficStats(state.trafficStats); - onTgVoipStop(state); + if (tgVoip.isGroup()) { + NativeInstance instance = tgVoip; + Utilities.globalQueue.postRunnable(instance::stopGroup); + } else { + Instance.FinalState state = tgVoip.stop(); + updateTrafficStats(state.trafficStats); + onTgVoipStop(state); + } prevTrafficStats = null; callStartTime = 0; tgVoip = null; @@ -668,30 +790,39 @@ public abstract class VoIPBaseService extends Service implements SensorEventList videoCapturer = 0; } cpuWakelock.release(); - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (!USE_CONNECTION_SERVICE) { - if (isBtHeadsetConnected && !playingSound) { - am.stopBluetoothSco(); - am.setBluetoothScoOn(false); - am.setSpeakerphoneOn(false); - bluetoothScoActive = false; - } - try { - am.setMode(AudioManager.MODE_NORMAL); - } catch 
(SecurityException x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error setting audio more to normal", x); - } - } - am.abandonAudioFocus(this); - } - am.unregisterMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); - if (haveAudioFocus) { - am.abandonAudioFocus(this); - } - if (!playingSound) { - soundPool.release(); + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (!USE_CONNECTION_SERVICE) { + if (isBtHeadsetConnected) { + am.stopBluetoothSco(); + am.setBluetoothScoOn(false); + am.setSpeakerphoneOn(false); + bluetoothScoActive = false; + } + if (onDestroyRunnable == null) { + Utilities.globalQueue.postRunnable(setModeRunnable = () -> { + synchronized (sync) { + if (setModeRunnable == null) { + return; + } + setModeRunnable = null; + } + try { + am.setMode(AudioManager.MODE_NORMAL); + } catch (SecurityException x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error setting audio more to normal", x); + } + } + }); + } + am.abandonAudioFocus(this); + } + am.unregisterMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); + if (haveAudioFocus) { + am.abandonAudioFocus(this); + } + Utilities.globalQueue.postRunnable(() -> soundPool.release()); } if (USE_CONNECTION_SERVICE) { @@ -704,7 +835,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } ConnectionsManager.getInstance(currentAccount).setAppPaused(true, false); - VoIPHelper.lastCallTime = System.currentTimeMillis(); + VoIPHelper.lastCallTime = SystemClock.elapsedRealtime(); } public abstract long getCallID(); @@ -769,20 +900,43 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } registerReceiver(receiver, filter); fetchBluetoothDeviceName(); - soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0); - spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1); - spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1); - spFailedID = 
soundPool.load(this, R.raw.voip_failed, 1); - spEndId = soundPool.load(this, R.raw.voip_end, 1); - spBusyId = soundPool.load(this, R.raw.voip_busy, 1); + Utilities.globalQueue.postRunnable(() -> { + soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0); + spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1); + spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1); + spFailedID = soundPool.load(this, R.raw.voip_failed, 1); + spEndId = soundPool.load(this, R.raw.voip_end, 1); + spBusyId = soundPool.load(this, R.raw.voip_busy, 1); + spVoiceChatEndId = soundPool.load(this, R.raw.voicechat_leave, 1); + spVoiceChatStartId = soundPool.load(this, R.raw.voicechat_join, 1); + spVoiceChatConnecting = soundPool.load(this, R.raw.voicechat_connecting, 1); + }); am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) { - int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); - updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); + try { + MediaRouter mr = (MediaRouter) getSystemService(Context.MEDIA_ROUTER_SERVICE); + if (Build.VERSION.SDK_INT < 24) { + int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); + updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } else { + MediaRouter.RouteInfo ri = mr.getSelectedRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO); + if (ri.getDeviceType() == MediaRouter.RouteInfo.DEVICE_TYPE_BLUETOOTH) { + int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); + updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } else { + 
updateBluetoothHeadsetState(false); + } + } + } catch (Throwable e) { + FileLog.e(e); } } } catch (Exception x) { @@ -835,7 +989,13 @@ public abstract class VoIPBaseService extends Service implements SensorEventList needPlayEndSound = true; AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); if (!USE_CONNECTION_SERVICE) { - am.setMode(AudioManager.MODE_IN_COMMUNICATION); + Utilities.globalQueue.postRunnable(() -> { + try { + am.setMode(AudioManager.MODE_IN_COMMUNICATION); + } catch (Exception e) { + FileLog.e(e); + } + }); am.requestAudioFocus(this, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN); if (isBluetoothHeadsetConnected() && hasEarpiece()) { switch (audioRouteToSet) { @@ -900,7 +1060,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList @SuppressLint("NewApi") @Override public void onSensorChanged(SensorEvent event) { - if (currentVideoState == Instance.VIDEO_STATE_ACTIVE || videoState == Instance.VIDEO_STATE_ACTIVE) { + if (unmutedByHold || currentVideoState == Instance.VIDEO_STATE_ACTIVE || videoState == Instance.VIDEO_STATE_ACTIVE) { return; } if (event.sensor.getType() == Sensor.TYPE_PROXIMITY) { @@ -1005,7 +1165,11 @@ public abstract class VoIPBaseService extends Service implements SensorEventList protected void updateNetworkType() { if (tgVoip != null) { - tgVoip.setNetworkType(getNetworkType()); + if (tgVoip.isGroup()) { + + } else { + tgVoip.setNetworkType(getNetworkType()); + } } else { lastNetInfo = getActiveNetworkInfo(); } @@ -1162,6 +1326,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList chan.setSound(soundProviderUri, attrs); chan.enableVibration(false); chan.enableLights(false); + chan.setBypassDnd(true); try { nm.createNotificationChannel(chan); } catch (Exception e) { @@ -1252,10 +1417,10 @@ public abstract class VoIPBaseService extends Service implements SensorEventList FileLog.e(x); } lastError = error; - dispatchStateChanged(STATE_FAILED); + 
AndroidUtilities.runOnUIThread(() -> dispatchStateChanged(STATE_FAILED)); if (TextUtils.equals(error, Instance.ERROR_LOCALIZED) && soundPool != null) { playingSound = true; - soundPool.play(spFailedID, 1, 1, 0, 0, 1); + Utilities.globalQueue.postRunnable(() -> soundPool.play(spFailedID, 1, 1, 0, 0, 1)); AndroidUtilities.runOnUIThread(afterSoundRunnable, 1000); } if (USE_CONNECTION_SERVICE && systemCallConnection != null) { @@ -1285,13 +1450,15 @@ public abstract class VoIPBaseService extends Service implements SensorEventList AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); connectingSoundRunnable = null; } - if (spPlayID != 0) { - soundPool.stop(spPlayID); - spPlayID = 0; - } - if (!wasEstablished) { + Utilities.globalQueue.postRunnable(() -> { + if (spPlayID != 0) { + soundPool.stop(spPlayID); + spPlayID = 0; + } + }); + if (groupCall == null && !wasEstablished) { wasEstablished = true; - if (!isProximityNear && !call.video) { + if (!isProximityNear && !privateCall.video) { Vibrator vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE); if (vibrator.hasVibrator()) { vibrator.vibrate(100); @@ -1314,21 +1481,25 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } } if (newState == STATE_RECONNECTING) { - if (spPlayID != 0) { - soundPool.stop(spPlayID); - } - spPlayID = soundPool.play(spConnectingId, 1, 1, 0, -1, 1); + Utilities.globalQueue.postRunnable(() -> { + if (spPlayID != 0) { + soundPool.stop(spPlayID); + } + spPlayID = soundPool.play(groupCall != null ? 
spVoiceChatConnecting : spConnectingId, 1, 1, 0, -1, 1); + }); } dispatchStateChanged(newState); } @Override public void onSignalBarCountChanged(int newCount) { - signalBarCount = newCount; - for (int a = 0; a < stateListeners.size(); a++) { - StateListener l = stateListeners.get(a); - l.onSignalBarsCountChanged(newCount); - } + AndroidUtilities.runOnUIThread(() -> { + signalBarCount = newCount; + for (int a = 0; a < stateListeners.size(); a++) { + StateListener l = stateListeners.get(a); + l.onSignalBarsCountChanged(newCount); + } + }); } public boolean isBluetoothOn() { @@ -1336,6 +1507,10 @@ public abstract class VoIPBaseService extends Service implements SensorEventList return am.isBluetoothScoOn(); } + public boolean isBluetoothWillOn() { + return needSwitchToBluetoothAfterScoActivates; + } + public boolean isHeadsetPlugged() { return isHeadsetPlugged; } @@ -1357,17 +1532,37 @@ public abstract class VoIPBaseService extends Service implements SensorEventList if (BuildVars.LOGS_ENABLED) { FileLog.d("Call " + getCallID() + " ended"); } - dispatchStateChanged(STATE_ENDED); + if (groupCall != null && (!playedConnectedSound || onDestroyRunnable != null)) { + needPlayEndSound = false; + } + AndroidUtilities.runOnUIThread(() -> dispatchStateChanged(STATE_ENDED)); + int delay = 700; + Utilities.globalQueue.postRunnable(() -> { + if (spPlayID != 0) { + soundPool.stop(spPlayID); + spPlayID = 0; + } + }); + + if (connectingSoundRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); + connectingSoundRunnable = null; + } if (needPlayEndSound) { playingSound = true; - soundPool.play(spEndId, 1, 1, 0, 0, 1); - AndroidUtilities.runOnUIThread(afterSoundRunnable, 700); + if (groupCall == null) { + Utilities.globalQueue.postRunnable(() -> soundPool.play(spEndId, 1, 1, 0, 0, 1)); + } else { + Utilities.globalQueue.postRunnable(() -> soundPool.play(spVoiceChatEndId, 1.0f, 1.0f, 0, 0, 1)); + delay = 400; + } + 
AndroidUtilities.runOnUIThread(afterSoundRunnable, delay); } if (timeoutRunnable != null) { AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); timeoutRunnable = null; } - endConnectionServiceCall(needPlayEndSound ? 700 : 0); + endConnectionServiceCall(needPlayEndSound ? delay : 0); stopSelf(); } @@ -1422,7 +1617,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList private void acceptIncomingCallFromNotification() { showNotification(); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && (checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || call.video && checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && (checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || privateCall.video && checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)) { try { PendingIntent.getActivity(VoIPBaseService.this, 0, new Intent(VoIPBaseService.this, VoIPPermissionActivity.class).addFlags(Intent.FLAG_ACTIVITY_NEW_TASK), 0).send(); } catch (Exception x) { @@ -1523,13 +1718,33 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } public interface StateListener { - void onStateChanged(int state); - void onSignalBarsCountChanged(int count); - void onAudioSettingsChanged(); - void onMediaStateUpdated(int audioState, int videoState); - void onCameraSwitch(boolean isFrontFace); - void onVideoAvailableChange(boolean isAvailable); - void onScreenOnChange(boolean screenOn); + default void onStateChanged(int state) { + + } + + default void onSignalBarsCountChanged(int count) { + + } + + default void onAudioSettingsChanged() { + + } + + default void onMediaStateUpdated(int audioState, int videoState) { + + } + + default void onCameraSwitch(boolean isFrontFace) { + + } + + default void onVideoAvailableChange(boolean isAvailable) { + 
+ } + + default void onScreenOnChange(boolean screenOn) { + + } } public class CallConnection extends Connection { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java index 8ad82b13b..fc7d74c18 100755 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java @@ -33,9 +33,12 @@ import android.text.TextUtils; import android.view.KeyEvent; import android.widget.Toast; +import org.json.JSONArray; +import org.json.JSONObject; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildVars; +import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; @@ -44,6 +47,7 @@ import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.NotificationsController; import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; import org.telegram.messenger.Utilities; import org.telegram.messenger.XiaomiUtilities; import org.telegram.tgnet.ConnectionsManager; @@ -57,7 +61,6 @@ import org.webrtc.VideoSink; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; -import java.lang.reflect.Field; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; @@ -90,20 +93,34 @@ public class VoIPService extends VoIPBaseService { public static TLRPC.PhoneCall callIShouldHavePutIntoIntent; - private boolean needSendDebugLog = false; - private boolean needRateCall = false; + public static NativeInstance.AudioLevelsCallback audioLevelsCallback; - private boolean endCallAfterRequest = false; + private boolean needSendDebugLog; + private boolean needRateCall; + private long 
lastTypingTimeSend; + + private boolean endCallAfterRequest; private ArrayList pendingUpdates = new ArrayList<>(); private Runnable delayedStartOutgoingCall; - private boolean startedRinging = false; + private boolean startedRinging; public boolean isFrontFaceCamera() { return isFrontFaceCamera; } - private static class ProxyVideoSink implements VideoSink { + public boolean mutedByAdmin() { + ChatObject.Call call = VoIPService.getSharedInstance().groupCall; + if (call != null) { + TLRPC.TL_groupCallParticipant participant = call.participants.get(UserConfig.getInstance(currentAccount).getClientUserId()); + if (participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(VoIPService.getSharedInstance().getChat())) { + return true; + } + } + return false; + } + + private static class ProxyVideoSink implements VideoSink { private VideoSink target; private VideoSink background; @@ -159,11 +176,18 @@ public class VoIPService extends VoIPBaseService { throw new IllegalStateException("No account specified when starting VoIP service"); } int userID = intent.getIntExtra("user_id", 0); + int chatID = intent.getIntExtra("chat_id", 0); + createGroupCall = intent.getBooleanExtra("createGroupCall", false); isOutgoing = intent.getBooleanExtra("is_outgoing", false); videoCall = intent.getBooleanExtra("video_call", false); isVideoAvailable = intent.getBooleanExtra("can_video_call", false); notificationsDisabled = intent.getBooleanExtra("notifications_disabled", false); - user = MessagesController.getInstance(currentAccount).getUser(userID); + if (userID != 0) { + user = MessagesController.getInstance(currentAccount).getUser(userID); + } + if (chatID != 0) { + chat = MessagesController.getInstance(currentAccount).getChat(chatID); + } localSink = new ProxyVideoSink(); remoteSink = new ProxyVideoSink(); try { @@ -172,6 +196,14 @@ public class VoIPService extends VoIPBaseService { } catch (Exception e) { FileLog.e(e); } + if (chat != null && 
!createGroupCall) { + ChatObject.Call call = MessagesController.getInstance(currentAccount).getGroupCall(chat.id, false); + if (call == null) { + FileLog.w("VoIPService: trying to open group call without call " + chat.id); + stopSelf(); + return START_NOT_STICKY; + } + } if (videoCall) { videoCapturer = NativeInstance.createVideoCapturer(localSink, isFrontFaceCamera); @@ -181,40 +213,58 @@ public class VoIPService extends VoIPBaseService { } } - if (user == null) { + if (user == null && chat == null) { if (BuildVars.LOGS_ENABLED) { - FileLog.w("VoIPService: user==null"); + FileLog.w("VoIPService: user == null AND chat == null"); } stopSelf(); return START_NOT_STICKY; } sharedInstance = this; + synchronized (sync) { + if (setModeRunnable != null) { + Utilities.globalQueue.cancelRunnable(setModeRunnable); + setModeRunnable = null; + } + } if (isOutgoing) { - dispatchStateChanged(STATE_REQUESTING); - if (USE_CONNECTION_SERVICE) { - TelecomManager tm = (TelecomManager) getSystemService(TELECOM_SERVICE); - Bundle extras = new Bundle(); - Bundle myExtras = new Bundle(); - extras.putParcelable(TelecomManager.EXTRA_PHONE_ACCOUNT_HANDLE, addAccountToTelecomManager()); - myExtras.putInt("call_type", 1); - extras.putBundle(TelecomManager.EXTRA_OUTGOING_CALL_EXTRAS, myExtras); - ContactsController.getInstance(currentAccount).createOrUpdateConnectionServiceContact(user.id, user.first_name, user.last_name); - tm.placeCall(Uri.fromParts("tel", "+99084" + user.id, null), extras); + if (user != null) { + dispatchStateChanged(STATE_REQUESTING); + if (USE_CONNECTION_SERVICE) { + TelecomManager tm = (TelecomManager) getSystemService(TELECOM_SERVICE); + Bundle extras = new Bundle(); + Bundle myExtras = new Bundle(); + extras.putParcelable(TelecomManager.EXTRA_PHONE_ACCOUNT_HANDLE, addAccountToTelecomManager()); + myExtras.putInt("call_type", 1); + extras.putBundle(TelecomManager.EXTRA_OUTGOING_CALL_EXTRAS, myExtras); + 
ContactsController.getInstance(currentAccount).createOrUpdateConnectionServiceContact(user.id, user.first_name, user.last_name); + tm.placeCall(Uri.fromParts("tel", "+99084" + user.id, null), extras); + } else { + delayedStartOutgoingCall = () -> { + delayedStartOutgoingCall = null; + startOutgoingCall(); + }; + AndroidUtilities.runOnUIThread(delayedStartOutgoingCall, 2000); + } } else { - delayedStartOutgoingCall = () -> { - delayedStartOutgoingCall = null; - startOutgoingCall(); - }; - AndroidUtilities.runOnUIThread(delayedStartOutgoingCall, 2000); + micMute = true; + startGroupCall(0, null); + if (!isBtHeadsetConnected && !isHeadsetPlugged) { + setAudioOutput(0); + } } if (intent.getBooleanExtra("start_incall_activity", false)) { - startActivity(new Intent(this, LaunchActivity.class).setAction("voip").addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)); + Intent intent1 = new Intent(this, LaunchActivity.class).setAction(user != null ? "voip" : "voip_chat").addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); + if (chat != null) { + intent1.putExtra("currentAccount", currentAccount); + } + startActivity(intent1); } } else { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.closeInCallActivity); - call = callIShouldHavePutIntoIntent; - videoCall = call != null && call.video; + privateCall = callIShouldHavePutIntoIntent; + videoCall = privateCall != null && privateCall.video; if (videoCall) { isVideoAvailable = true; } @@ -246,9 +296,13 @@ public class VoIPService extends VoIPBaseService { if (callIShouldHavePutIntoIntent != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { NotificationsController.checkOtherNotificationsChannel(); Notification.Builder bldr = new Notification.Builder(this, NotificationsController.OTHER_NOTIFICATIONS_CHANNEL) - .setSmallIcon(R.drawable.notification) .setContentTitle(LocaleController.getString("VoipOutgoingCall", R.string.VoipOutgoingCall)) .setShowWhen(false); + if (groupCall != null) { + bldr.setSmallIcon(isMicMute() ? 
R.drawable.voicechat_muted : R.drawable.voicechat_active); + } else { + bldr.setSmallIcon(R.drawable.notification); + } startForeground(ID_ONGOING_CALL_NOTIFICATION, bldr.build()); } } @@ -289,6 +343,9 @@ public class VoIPService extends VoIPBaseService { @Override protected void onTgVoipStop(Instance.FinalState finalState) { + if (user == null) { + return; + } if (needRateCall || forceRating || finalState.isRatingSuggested) { startRatingActivity(); needRateCall = false; @@ -298,8 +355,8 @@ public class VoIPService extends VoIPBaseService { req.debug = new TLRPC.TL_dataJSON(); req.debug.data = finalState.debugLog; req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash = call.access_hash; - req.peer.id = call.id; + req.peer.access_hash = privateCall.access_hash; + req.peer.id = privateCall.id; ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { if (BuildVars.LOGS_ENABLED) { FileLog.d("Sent debug logs, response = " + response); @@ -317,12 +374,63 @@ public class VoIPService extends VoIPBaseService { return user; } + public TLRPC.Chat getChat() { + return chat; + } + + public int getCallerId() { + if (user != null) { + return user.id; + } else { + return -chat.id; + } + } + public void hangUp() { - declineIncomingCall(currentState == STATE_RINGING || (currentState == STATE_WAITING && isOutgoing) ? DISCARD_REASON_MISSED : DISCARD_REASON_HANGUP, null); + hangUp(0, null); + } + + public void hangUp(int discard) { + hangUp(discard, null); } public void hangUp(Runnable onDone) { + hangUp(0, onDone); + } + + public void hangUp(int discard, Runnable onDone) { declineIncomingCall(currentState == STATE_RINGING || (currentState == STATE_WAITING && isOutgoing) ? 
DISCARD_REASON_MISSED : DISCARD_REASON_HANGUP, onDone); + if (groupCall != null) { + if (discard == 2) { + return; + } + if (discard == 1) { + TLRPC.ChatFull chatFull = MessagesController.getInstance(currentAccount).getChatFull(chat.id); + if (chatFull != null) { + chatFull.flags &=~ 2097152; + chatFull.call = null; + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.groupCallUpdated, chat.id, groupCall.call.id, false); + } + TLRPC.TL_phone_discardGroupCall req = new TLRPC.TL_phone_discardGroupCall(); + req.call = groupCall.getInputGroupCall(); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (response instanceof TLRPC.TL_updates) { + TLRPC.TL_updates updates = (TLRPC.TL_updates) response; + MessagesController.getInstance(currentAccount).processUpdates(updates, false); + } + }); + } else { + TLRPC.TL_phone_leaveGroupCall req = new TLRPC.TL_phone_leaveGroupCall(); + req.call = groupCall.getInputGroupCall(); + req.source = mySource; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (response instanceof TLRPC.TL_updates) { + TLRPC.TL_updates updates = (TLRPC.TL_updates) response; + MessagesController.getInstance(currentAccount).processUpdates(updates, false); + } + }); + } + } } private void startOutgoingCall() { @@ -388,14 +496,14 @@ public class VoIPService extends VoIPBaseService { ConnectionsManager.getInstance(currentAccount).sendRequest(reqCall, (response12, error12) -> AndroidUtilities.runOnUIThread(() -> { if (error12 == null) { - call = ((TLRPC.TL_phone_phoneCall) response12).phone_call; + privateCall = ((TLRPC.TL_phone_phoneCall) response12).phone_call; a_or_b = salt1; dispatchStateChanged(STATE_WAITING); if (endCallAfterRequest) { hangUp(); return; } - if (pendingUpdates.size() > 0 && call != null) { + if (pendingUpdates.size() > 0 && privateCall != null) { for (TLRPC.PhoneCall call : pendingUpdates) { onCallUpdated(call); } @@ 
-405,8 +513,8 @@ public class VoIPService extends VoIPBaseService { timeoutRunnable = null; TLRPC.TL_phone_discardCall req1 = new TLRPC.TL_phone_discardCall(); req1.peer = new TLRPC.TL_inputPhoneCall(); - req1.peer.access_hash = call.access_hash; - req1.peer.id = call.id; + req1.peer.access_hash = privateCall.access_hash; + req1.peer.id = privateCall.id; req1.reason = new TLRPC.TL_phoneCallDiscardReasonMissed(); ConnectionsManager.getInstance(currentAccount).sendRequest(req1, (response1, error1) -> { if (BuildVars.LOGS_ENABLED) { @@ -445,9 +553,9 @@ public class VoIPService extends VoIPBaseService { } private void acknowledgeCall(final boolean startRinging) { - if (call instanceof TLRPC.TL_phoneCallDiscarded) { + if (privateCall instanceof TLRPC.TL_phoneCallDiscarded) { if (BuildVars.LOGS_ENABLED) { - FileLog.w("Call " + call.id + " was discarded before the service started, stopping"); + FileLog.w("Call " + privateCall.id + " was discarded before the service started, stopping"); } stopSelf(); return; @@ -463,8 +571,8 @@ public class VoIPService extends VoIPBaseService { } TLRPC.TL_phone_receivedCall req = new TLRPC.TL_phone_receivedCall(); req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.id = call.id; - req.peer.access_hash = call.access_hash; + req.peer.id = privateCall.id; + req.peer.access_hash = privateCall.access_hash; ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (sharedInstance == null) { return; @@ -500,11 +608,11 @@ public class VoIPService extends VoIPBaseService { systemCallConnection.setRinging(); } if (BuildVars.LOGS_ENABLED) { - FileLog.d("starting ringing for call " + call.id); + FileLog.d("starting ringing for call " + privateCall.id); } dispatchStateChanged(STATE_WAITING_INCOMING); if (!notificationsDisabled && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - showIncomingNotification(ContactsController.formatName(user.first_name, user.last_name), null, 
user, call.video, 0); + showIncomingNotification(ContactsController.formatName(user.first_name, user.last_name), null, user, privateCall.video, 0); if (BuildVars.LOGS_ENABLED) { FileLog.d("Showing incoming call notification"); } @@ -536,6 +644,10 @@ public class VoIPService extends VoIPBaseService { return currentState == STATE_WAITING_INCOMING; } + public boolean isJoined() { + return currentState != STATE_WAIT_INIT && currentState != STATE_CREATING; + } + public void acceptIncomingCall() { MessagesController.getInstance(currentAccount).ignoreSetOnline = false; stopRinging(); @@ -569,7 +681,7 @@ public class VoIPService extends VoIPBaseService { for (int a = 0; a < 256; a++) { salt[a] = (byte) ((byte) (Utilities.random.nextDouble() * 256) ^ res.random[a]); } - if (call == null) { + if (privateCall == null) { if (BuildVars.LOGS_ENABLED) { FileLog.e("call is null"); } @@ -580,7 +692,7 @@ public class VoIPService extends VoIPBaseService { BigInteger g_b = BigInteger.valueOf(messagesStorage.getSecretG()); BigInteger p = new BigInteger(1, messagesStorage.getSecretPBytes()); g_b = g_b.modPow(new BigInteger(1, salt), p); - g_a_hash = call.g_a_hash; + g_a_hash = privateCall.g_a_hash; byte[] g_b_bytes = g_b.toByteArray(); if (g_b_bytes.length > 256) { @@ -592,8 +704,8 @@ public class VoIPService extends VoIPBaseService { TLRPC.TL_phone_acceptCall req1 = new TLRPC.TL_phone_acceptCall(); req1.g_b = g_b_bytes; req1.peer = new TLRPC.TL_inputPhoneCall(); - req1.peer.id = call.id; - req1.peer.access_hash = call.access_hash; + req1.peer.id = privateCall.id; + req1.peer.access_hash = privateCall.access_hash; req1.protocol = new TLRPC.TL_phoneCallProtocol(); req1.protocol.udp_p2p = req1.protocol.udp_reflector = true; req1.protocol.min_layer = CALL_MIN_LAYER; @@ -604,9 +716,9 @@ public class VoIPService extends VoIPBaseService { if (BuildVars.LOGS_ENABLED) { FileLog.w("accept call ok! 
" + response1); } - call = ((TLRPC.TL_phone_phoneCall) response1).phone_call; - if (call instanceof TLRPC.TL_phoneCallDiscarded) { - onCallUpdated(call); + privateCall = ((TLRPC.TL_phone_phoneCall) response1).phone_call; + if (privateCall instanceof TLRPC.TL_phoneCallDiscarded) { + onCallUpdated(privateCall); } } else { if (BuildVars.LOGS_ENABLED) { @@ -682,6 +794,9 @@ public class VoIPService extends VoIPBaseService { public void onDestroy() { super.onDestroy(); setSinks(null, null); + if (onDestroyRunnable != null) { + onDestroyRunnable.run(); + } } @Override @@ -689,6 +804,10 @@ public class VoIPService extends VoIPBaseService { return LaunchActivity.class; } + public boolean isHangingUp() { + return currentState == STATE_HANGING_UP; + } + public void declineIncomingCall(int reason, final Runnable onDone) { stopRinging(); callDiscardReason = reason; @@ -711,10 +830,8 @@ public class VoIPService extends VoIPBaseService { return; } dispatchStateChanged(STATE_HANGING_UP); - if (call == null) { - if (onDone != null) { - onDone.run(); - } + if (privateCall == null) { + onDestroyRunnable = onDone; callEnded(); if (callReqId != 0) { ConnectionsManager.getInstance(currentAccount).cancelRequest(callReqId, false); @@ -724,8 +841,8 @@ public class VoIPService extends VoIPBaseService { } TLRPC.TL_phone_discardCall req = new TLRPC.TL_phone_discardCall(); req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash = call.access_hash; - req.peer.id = call.id; + req.peer.access_hash = privateCall.access_hash; + req.peer.id = privateCall.id; req.duration = (int) (getCallDuration() / 1000); req.connection_id = tgVoip != null ? 
tgVoip.getPreferredRelayId() : 0; switch (reason) { @@ -785,56 +902,127 @@ public class VoIPService extends VoIPBaseService { } if (!wasNotConnected) { AndroidUtilities.cancelRunOnUIThread(stopper); - if (onDone != null) + if (onDone != null) { onDone.run(); + } } }, ConnectionsManager.RequestFlagFailOnServerErrors); } - private void dumpCallObject() { - try { - if (BuildVars.LOGS_ENABLED) { - Field[] flds = TLRPC.PhoneCall.class.getFields(); - for (Field f : flds) { - FileLog.d(f.getName() + " = " + f.get(call)); - } - } - } catch (Exception x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e(x); - } - } - } - public void onSignalingData(TLRPC.TL_updatePhoneCallSignalingData data) { - if (tgVoip == null || getCallID() != data.phone_call_id) { + if (user == null || tgVoip == null || tgVoip.isGroup() || getCallID() != data.phone_call_id) { return; } tgVoip.onSignalingDataReceive(data.data); } + public void onGroupCallParticipantsUpdate(TLRPC.TL_updateGroupCallParticipants update) { + if (chat == null || groupCall == null || groupCall.call.id != update.call.id) { + return; + } + ArrayList toRemove = null; + int selfId = UserConfig.getInstance(currentAccount).clientUserId; + for (int a = 0, N = update.participants.size(); a < N; a++) { + TLRPC.TL_groupCallParticipant participant = update.participants.get(a); + if (participant.left) { + if (toRemove == null) { + toRemove = new ArrayList<>(); + } + toRemove.add(participant.source); + } else if (participant.user_id == selfId && participant.source != mySource) { + hangUp(2); + return; + } + } + if (toRemove != null) { + int[] ssrcs = new int[toRemove.size()]; + for (int a = 0, N = toRemove.size(); a < N; a++) { + ssrcs[a] = toRemove.get(a); + } + tgVoip.removeSsrcs(ssrcs); + } + } + + public void onGroupCallUpdated(TLRPC.GroupCall call) { + if (chat == null) { + return; + } + if (groupCall == null || groupCall.call.id != call.id) { + return; + } + if (groupCall.call instanceof TLRPC.TL_groupCallDiscarded) { + hangUp(2); + 
return; + } + if (currentState == STATE_WAIT_INIT && call.params != null) { + TLRPC.TL_dataJSON json = call.params; + try { + JSONObject object = new JSONObject(json.data); + object = object.getJSONObject("transport"); + String ufrag = object.getString("ufrag"); + String pwd = object.getString("pwd"); + JSONArray array = object.getJSONArray("fingerprints"); + Instance.Fingerprint[] fingerprints = new Instance.Fingerprint[array.length()]; + for (int a = 0; a < fingerprints.length; a++) { + JSONObject item = array.getJSONObject(a); + fingerprints[a] = new Instance.Fingerprint( + item.getString("hash"), + item.getString("setup"), + item.getString("fingerprint") + ); + } + array = object.getJSONArray("candidates"); + Instance.Candidate[] candidates = new Instance.Candidate[array.length()]; + for (int a = 0; a < candidates.length; a++) { + JSONObject item = array.getJSONObject(a); + candidates[a] = new Instance.Candidate( + item.optString("port", ""), + item.optString("protocol", ""), + item.optString("network", ""), + item.optString("generation", ""), + item.optString("id", ""), + item.optString("component", ""), + item.optString("foundation", ""), + item.optString("priority", ""), + item.optString("ip", ""), + item.optString("type", ""), + item.optString("tcpType", ""), + item.optString("relAddr", ""), + item.optString("relPort", "") + ); + } + tgVoip.setJoinResponsePayload(ufrag, pwd, fingerprints, candidates); + dispatchStateChanged(STATE_WAIT_INIT_ACK); + } catch (Exception e) { + FileLog.e(e); + } + } + } + public void onCallUpdated(TLRPC.PhoneCall phoneCall) { - if (call == null) { + if (user == null) { + return; + } + if (privateCall == null) { pendingUpdates.add(phoneCall); return; } if (phoneCall == null) { return; } - if (phoneCall.id != call.id) { + if (phoneCall.id != privateCall.id) { if (BuildVars.LOGS_ENABLED) { - FileLog.w("onCallUpdated called with wrong call id (got " + phoneCall.id + ", expected " + this.call.id + ")"); + FileLog.w("onCallUpdated 
called with wrong call id (got " + phoneCall.id + ", expected " + this.privateCall.id + ")"); } return; } if (phoneCall.access_hash == 0) { - phoneCall.access_hash = this.call.access_hash; + phoneCall.access_hash = this.privateCall.access_hash; } if (BuildVars.LOGS_ENABLED) { FileLog.d("Call updated: " + phoneCall); - dumpCallObject(); } - call = phoneCall; + privateCall = phoneCall; if (phoneCall instanceof TLRPC.TL_phoneCallDiscarded) { needSendDebugLog = phoneCall.need_debug; needRateCall = phoneCall.need_rating; @@ -844,7 +1032,7 @@ public class VoIPService extends VoIPBaseService { if (phoneCall.reason instanceof TLRPC.TL_phoneCallDiscardReasonBusy) { dispatchStateChanged(STATE_BUSY); playingSound = true; - soundPool.play(spBusyId, 1, 1, 0, -1, 1); + Utilities.globalQueue.postRunnable(() -> soundPool.play(spBusyId, 1, 1, 0, -1, 1)); AndroidUtilities.runOnUIThread(afterSoundRunnable, 1500); endConnectionServiceCall(1500); stopSelf(); @@ -919,10 +1107,12 @@ public class VoIPService extends VoIPBaseService { AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); connectingSoundRunnable = null; } - if (spPlayID != 0) { - soundPool.stop(spPlayID); - } - spPlayID = soundPool.play(spRingbackID, 1, 1, 0, -1, 1); + Utilities.globalQueue.postRunnable(() -> { + if (spPlayID != 0) { + soundPool.stop(spPlayID); + } + spPlayID = soundPool.play(spRingbackID, 1, 1, 0, -1, 1); + }); if (timeoutRunnable != null) { AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); timeoutRunnable = null; @@ -939,9 +1129,9 @@ public class VoIPService extends VoIPBaseService { private void startRatingActivity() { try { PendingIntent.getActivity(VoIPService.this, 0, new Intent(VoIPService.this, VoIPFeedbackActivity.class) - .putExtra("call_id", call.id) - .putExtra("call_access_hash", call.access_hash) - .putExtra("call_video", call.video) + .putExtra("call_id", privateCall.id) + .putExtra("call_access_hash", privateCall.access_hash) + .putExtra("call_video", privateCall.video) 
.putExtra("account", currentAccount) .addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_SINGLE_TOP), 0).send(); } catch (Exception x) { @@ -956,10 +1146,9 @@ public class VoIPService extends VoIPBaseService { } private void processAcceptedCall() { - dispatchStateChanged(STATE_EXCHANGING_KEYS); BigInteger p = new BigInteger(1, MessagesStorage.getInstance(currentAccount).getSecretPBytes()); - BigInteger i_authKey = new BigInteger(1, call.g_b); + BigInteger i_authKey = new BigInteger(1, privateCall.g_b); if (!Utilities.isGoodGaAndGb(i_authKey, p)) { if (BuildVars.LOGS_ENABLED) { @@ -994,8 +1183,8 @@ public class VoIPService extends VoIPBaseService { req.g_a = g_a; req.key_fingerprint = fingerprint; req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.id = call.id; - req.peer.access_hash = call.access_hash; + req.peer.id = privateCall.id; + req.peer.access_hash = privateCall.access_hash; req.protocol = new TLRPC.TL_phoneCallProtocol(); req.protocol.max_layer = Instance.getConnectionMaxLayer(); req.protocol.min_layer = CALL_MIN_LAYER; @@ -1005,7 +1194,7 @@ public class VoIPService extends VoIPBaseService { if (error != null) { callFailed(); } else { - call = ((TLRPC.TL_phone_phoneCall) response).phone_call; + privateCall = ((TLRPC.TL_phone_phoneCall) response).phone_call; initiateActualEncryptedCall(); } })); @@ -1018,6 +1207,203 @@ public class VoIPService extends VoIPBaseService { return ApplicationLoader.isRoaming() ? 
Instance.DATA_SAVING_MOBILE : Instance.DATA_SAVING_NEVER; } + public void migrateToChat(TLRPC.Chat newChat) { + chat = newChat; + } + + private void startGroupCall(int ssrc, String json) { + if (sharedInstance != this) { + return; + } + if (createGroupCall) { + groupCall = new ChatObject.Call(); + groupCall.call = new TLRPC.TL_groupCall(); + groupCall.call.participants_count = 0; + groupCall.call.version = 1; + groupCall.call.can_change_join_muted = true; + groupCall.chatId = chat.id; + groupCall.currentAccount = currentAccount; + + dispatchStateChanged(STATE_CREATING); + TLRPC.TL_phone_createGroupCall req = new TLRPC.TL_phone_createGroupCall(); + req.peer = MessagesController.getInputPeer(chat); + req.random_id = Utilities.random.nextInt(); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (response != null) { + TLRPC.Updates updates = (TLRPC.Updates) response; + for (int a = 0; a < updates.updates.size(); a++) { + TLRPC.Update update = updates.updates.get(a); + if (update instanceof TLRPC.TL_updateGroupCall) { + TLRPC.TL_updateGroupCall updateGroupCall = (TLRPC.TL_updateGroupCall) update; + AndroidUtilities.runOnUIThread(() -> { + if (sharedInstance == null) { + return; + } + groupCall.call.access_hash = updateGroupCall.call.access_hash; + groupCall.call.id = updateGroupCall.call.id; + MessagesController.getInstance(currentAccount).putGroupCall(groupCall.chatId, groupCall); + startGroupCall(0, null); + }); + break; + } + } + MessagesController.getInstance(currentAccount).processUpdates(updates, false); + } else { + AndroidUtilities.runOnUIThread(() -> { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.needShowAlert, 6, error.text); + hangUp(0); + }); + } + }, ConnectionsManager.RequestFlagFailOnServerErrors); + createGroupCall = false; + return; + } + + if (json == null) { + if (groupCall == null) { + groupCall = MessagesController.getInstance(currentAccount).getGroupCall(chat.id, 
false); + } + configureDeviceForCall(); + showNotification(); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didStartedCall)); + createGroupInstance(); + } else { + if (getSharedInstance() == null || groupCall == null) { + return; + } + dispatchStateChanged(STATE_WAIT_INIT); + mySource = ssrc; + TLRPC.TL_phone_joinGroupCall req = new TLRPC.TL_phone_joinGroupCall(); + req.muted = true; + req.call = groupCall.getInputGroupCall(); + req.params = new TLRPC.TL_dataJSON(); + req.params.data = json; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (response != null) { + MessagesController.getInstance(currentAccount).processUpdates((TLRPC.Updates) response, false); + AndroidUtilities.runOnUIThread(() -> groupCall.loadMembers(false)); + } else { + AndroidUtilities.runOnUIThread(() -> { + if ("GROUPCALL_SSRC_DUPLICATE_MUCH".equals(error.text)) { + createGroupInstance(); + } else { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.needShowAlert, 6, error.text); + hangUp(0); + } + }); + } + }, BuildVars.DEBUG_PRIVATE_VERSION ? 
ConnectionsManager.RequestFlagFailOnServerErrors : 0); + } + } + + private Runnable shortPollRunnable; + private int checkRequestId; + + private void startGroupCheckShortpoll() { + if (shortPollRunnable != null || sharedInstance == null || groupCall == null) { + return; + } + AndroidUtilities.runOnUIThread(shortPollRunnable = () -> { + if (shortPollRunnable == null || sharedInstance == null || groupCall == null) { + return; + } + TLRPC.TL_phone_checkGroupCall req = new TLRPC.TL_phone_checkGroupCall(); + req.call = groupCall.getInputGroupCall(); + req.source = mySource; + checkRequestId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (shortPollRunnable == null || sharedInstance == null || groupCall == null) { + return; + } + shortPollRunnable = null; + checkRequestId = 0; + if (response instanceof TLRPC.TL_boolFalse || error != null && error.code == 400) { + createGroupInstance(); + } else { + startGroupCheckShortpoll(); + } + })); + }, 4000); + } + + private void cancelGroupCheckShortPoll() { + if (checkRequestId != 0) { + ConnectionsManager.getInstance(currentAccount).cancelRequest(checkRequestId, false); + checkRequestId = 0; + } + if (shortPollRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(shortPollRunnable); + shortPollRunnable = null; + } + } + + private void createGroupInstance() { + if (tgVoip != null) { + tgVoip.stopGroup(); + } + cancelGroupCheckShortPoll(); + tgVoip = NativeInstance.makeGroup(this::startGroupCall, (uids, levels, voice) -> { + if (sharedInstance == null || groupCall == null) { + return; + } + groupCall.processVoiceLevelsUpdate(uids, levels, voice); + float maxAmplitude = 0; + boolean hasOther = false; + for (int a = 0; a < uids.length; a++) { + if (uids[a] == 0) { + if (lastTypingTimeSend < SystemClock.uptimeMillis() - 5000 && levels[a] > 0.1f && voice[a]) { + lastTypingTimeSend = SystemClock.uptimeMillis(); + TLRPC.TL_messages_setTyping req = 
new TLRPC.TL_messages_setTyping(); + req.action = new TLRPC.TL_speakingInGroupCallAction(); + req.peer = MessagesController.getInputPeer(chat); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + + }); + } + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.webRtcMicAmplitudeEvent, levels[a]); + continue; + } + hasOther = true; + maxAmplitude = Math.max(maxAmplitude, levels[a]); + } + if (hasOther) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.webRtcSpeakerAmplitudeEvent, maxAmplitude); + if (audioLevelsCallback != null) { + audioLevelsCallback.run(uids, levels, voice); + } + } + }); + tgVoip.setOnStateUpdatedListener(state -> { + dispatchStateChanged(state == 1 ? STATE_ESTABLISHED : STATE_RECONNECTING); + if (state == 0) { + startGroupCheckShortpoll(); + if (playedConnectedSound && spPlayID == 0) { + if (spPlayID != 0) { + soundPool.stop(spPlayID); + } + spPlayID = soundPool.play(spVoiceChatConnecting, 1.0f, 1.0f, 0, -1, 1); + } + } else { + cancelGroupCheckShortPoll(); + if (playedConnectedSound) { + Utilities.globalQueue.postRunnable(() -> { + if (spPlayID != 0) { + soundPool.stop(spPlayID); + spPlayID = 0; + } + }); + if (connectingSoundRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); + connectingSoundRunnable = null; + } + } else { + Utilities.globalQueue.postRunnable(() -> soundPool.play(spVoiceChatStartId, 1.0f, 1.0f, 0, 0, 1)); + playedConnectedSound = true; + } + } + }); + dispatchStateChanged(STATE_WAIT_INIT); + } + private void initiateActualEncryptedCall() { if (timeoutRunnable != null) { AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); @@ -1035,7 +1421,7 @@ public class VoIPService extends VoIPBaseService { } else { hashes = new HashSet<>(); } - hashes.add(call.id + " " + call.access_hash + " " + System.currentTimeMillis()); + hashes.add(privateCall.id + " " + privateCall.access_hash + " " + 
System.currentTimeMillis()); while (hashes.size() > 20) { String oldest = null; long oldestTime = Long.MAX_VALUE; @@ -1085,9 +1471,9 @@ public class VoIPService extends VoIPBaseService { final Instance.ServerConfig serverConfig = Instance.getGlobalServerConfig(); final boolean enableAec = !(sysAecAvailable && serverConfig.useSystemAec); final boolean enableNs = !(sysNsAvailable && serverConfig.useSystemNs); - final String logFilePath = BuildVars.DEBUG_VERSION ? VoIPHelper.getLogFilePath("voip" + call.id) : VoIPHelper.getLogFilePath(call.id, false); + final String logFilePath = BuildVars.DEBUG_VERSION ? VoIPHelper.getLogFilePath("voip" + privateCall.id) : VoIPHelper.getLogFilePath(privateCall.id, false); final String statisLogFilePath = ""; - final Instance.Config config = new Instance.Config(initializationTimeout, receiveTimeout, voipDataSaving, call.p2p_allowed, enableAec, enableNs, true, false, serverConfig.enableStunMarking, logFilePath, statisLogFilePath, call.protocol.max_layer); + final Instance.Config config = new Instance.Config(initializationTimeout, receiveTimeout, voipDataSaving, privateCall.p2p_allowed, enableAec, enableNs, true, false, serverConfig.enableStunMarking, logFilePath, statisLogFilePath, privateCall.protocol.max_layer); // persistent state final String persistentStateFilePath = new File(ApplicationLoader.applicationContext.getFilesDir(), "voip_persistent_state.json").getAbsolutePath(); @@ -1095,9 +1481,9 @@ public class VoIPService extends VoIPBaseService { // endpoints final boolean forceTcp = preferences.getBoolean("dbg_force_tcp_in_calls", false); final int endpointType = forceTcp ? 
Instance.ENDPOINT_TYPE_TCP_RELAY : Instance.ENDPOINT_TYPE_UDP_RELAY; - final Instance.Endpoint[] endpoints = new Instance.Endpoint[call.connections.size()]; + final Instance.Endpoint[] endpoints = new Instance.Endpoint[privateCall.connections.size()]; for (int i = 0; i < endpoints.length; i++) { - final TLRPC.PhoneConnection connection = call.connections.get(i); + final TLRPC.PhoneConnection connection = privateCall.connections.get(i); endpoints[i] = new Instance.Endpoint(connection instanceof TLRPC.TL_phoneConnectionWebrtc, connection.id, connection.ip, connection.ipv6, connection.port, endpointType, connection.peer_tag, connection.turn, connection.stun, connection.username, connection.password); } if (forceTcp) { @@ -1117,14 +1503,14 @@ public class VoIPService extends VoIPBaseService { // encryption key final Instance.EncryptionKey encryptionKey = new Instance.EncryptionKey(authKey, isOutgoing); - boolean newAvailable = "2.7.7".compareTo(call.protocol.library_versions.get(0)) <= 0; + boolean newAvailable = "2.7.7".compareTo(privateCall.protocol.library_versions.get(0)) <= 0; if (videoCapturer != 0 && !newAvailable) { NativeInstance.destroyVideoCapturer(videoCapturer); videoCapturer = 0; videoState = Instance.VIDEO_STATE_INACTIVE; } // init - tgVoip = Instance.makeInstance(call.protocol.library_versions.get(0), config, persistentStateFilePath, endpoints, proxy, getNetworkType(), encryptionKey, remoteSink, videoCapturer); + tgVoip = Instance.makeInstance(privateCall.protocol.library_versions.get(0), config, persistentStateFilePath, endpoints, proxy, getNetworkType(), encryptionKey, remoteSink, videoCapturer); tgVoip.setOnStateUpdatedListener(this::onConnectionStateChanged); tgVoip.setOnSignalBarsUpdatedListener(this::onSignalBarCountChanged); tgVoip.setOnSignalDataListener(this::onSignalingData); @@ -1158,42 +1544,50 @@ public class VoIPService extends VoIPBaseService { } protected void showNotification() { - 
showNotification(ContactsController.formatName(user.first_name, user.last_name), getRoundAvatarBitmap(user)); + if (user != null) { + showNotification(ContactsController.formatName(user.first_name, user.last_name), getRoundAvatarBitmap(user)); + } else { + showNotification(chat.title, getRoundAvatarBitmap(chat)); + } } private void startConnectingSound() { - if (spPlayID != 0) { - soundPool.stop(spPlayID); - } - spPlayID = soundPool.play(spConnectingId, 1, 1, 0, -1, 1); - if (spPlayID == 0) { - AndroidUtilities.runOnUIThread(connectingSoundRunnable = new Runnable() { - @Override - public void run() { - if (sharedInstance == null) { - return; + Utilities.globalQueue.postRunnable(() -> { + if (spPlayID != 0) { + soundPool.stop(spPlayID); + } + spPlayID = soundPool.play(spConnectingId, 1, 1, 0, -1, 1); + if (spPlayID == 0) { + AndroidUtilities.runOnUIThread(connectingSoundRunnable = new Runnable() { + @Override + public void run() { + if (sharedInstance == null) { + return; + } + Utilities.globalQueue.postRunnable(() -> { + if (spPlayID == 0) { + spPlayID = soundPool.play(spConnectingId, 1, 1, 0, -1, 1); + } + if (spPlayID == 0) { + AndroidUtilities.runOnUIThread(this, 100); + } else { + connectingSoundRunnable = null; + } + }); } - if (spPlayID == 0) { - spPlayID = soundPool.play(spConnectingId, 1, 1, 0, -1, 1); - } - if (spPlayID == 0) { - AndroidUtilities.runOnUIThread(this, 100); - } else { - connectingSoundRunnable = null; - } - } - }, 100); - } + }, 100); + } + }); } public void onSignalingData(byte[] data) { - if (call == null) { + if (privateCall == null) { return; } TLRPC.TL_phone_sendSignalingData req = new TLRPC.TL_phone_sendSignalingData(); req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash = call.access_hash; - req.peer.id = call.id; + req.peer.access_hash = privateCall.access_hash; + req.peer.id = privateCall.id; req.data = data; ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { @@ -1201,14 +1595,14 @@ 
public class VoIPService extends VoIPBaseService { } protected void callFailed(String error) { - if (call != null) { + if (privateCall != null) { if (BuildVars.LOGS_ENABLED) { FileLog.d("Discarding failed call"); } TLRPC.TL_phone_discardCall req = new TLRPC.TL_phone_discardCall(); req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash = call.access_hash; - req.peer.id = call.id; + req.peer.access_hash = privateCall.access_hash; + req.peer.id = privateCall.id; req.duration = (int) (getCallDuration() / 1000); req.connection_id = tgVoip != null ? tgVoip.getPreferredRelayId() : 0; req.reason = new TLRPC.TL_phoneCallDiscardReasonDisconnect(); @@ -1229,7 +1623,7 @@ public class VoIPService extends VoIPBaseService { @Override public long getCallID() { - return call != null ? call.id : 0; + return privateCall != null ? privateCall.id : 0; } public boolean isVideoAvailable() { @@ -1242,10 +1636,7 @@ public class VoIPService extends VoIPBaseService { if (currentState == STATE_WAITING_INCOMING) { acceptIncomingCall(); } else { - setMicMute(!isMicMute()); - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } + setMicMute(!isMicMute(), false, true); } } } @@ -1278,13 +1669,15 @@ public class VoIPService extends VoIPBaseService { @Override public void onConnectionStateChanged(int newState) { - if (newState == STATE_ESTABLISHED) { - if (callStartTime == 0) { - callStartTime = SystemClock.elapsedRealtime(); + AndroidUtilities.runOnUIThread(() -> { + if (newState == STATE_ESTABLISHED) { + if (callStartTime == 0) { + callStartTime = SystemClock.elapsedRealtime(); + } + //peerCapabilities = tgVoip.getPeerCapabilities(); } - //peerCapabilities = tgVoip.getPeerCapabilities(); - } - super.onConnectionStateChanged(newState); + super.onConnectionStateChanged(newState); + }); } @TargetApi(Build.VERSION_CODES.O) diff --git a/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java 
b/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java index b45b6297e..6ae4716bd 100644 --- a/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java +++ b/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java @@ -11,14 +11,12 @@ import android.text.TextUtils; import android.util.Base64; import com.google.firebase.remoteconfig.FirebaseRemoteConfig; -import com.google.firebase.remoteconfig.FirebaseRemoteConfigSettings; import org.json.JSONArray; import org.json.JSONObject; import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.BaseController; -import org.telegram.messenger.BuildConfig; import org.telegram.messenger.BuildVars; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.EmuDetector; @@ -1258,8 +1256,6 @@ public class ConnectionsManager extends BaseController { throw new Exception("test backend"); } firebaseRemoteConfig = FirebaseRemoteConfig.getInstance(); - FirebaseRemoteConfigSettings configSettings = new FirebaseRemoteConfigSettings.Builder().setDeveloperModeEnabled(BuildConfig.DEBUG).build(); - firebaseRemoteConfig.setConfigSettings(configSettings); String currentValue = firebaseRemoteConfig.getString("ipconfigv3"); if (BuildVars.LOGS_ENABLED) { FileLog.d("current firebase value = " + currentValue); @@ -1268,30 +1264,30 @@ public class ConnectionsManager extends BaseController { firebaseRemoteConfig.fetch(0).addOnCompleteListener(finishedTask -> { final boolean success = finishedTask.isSuccessful(); Utilities.stageQueue.postRunnable(() -> { - currentTask = null; - String config = null; if (success) { - firebaseRemoteConfig.activateFetched(); - config = firebaseRemoteConfig.getString("ipconfigv3"); - } - if (!TextUtils.isEmpty(config)) { - byte[] bytes = Base64.decode(config, Base64.DEFAULT); - try { - NativeByteBuffer buffer = new NativeByteBuffer(bytes.length); - buffer.writeBytes(bytes); - int date = (int) 
(firebaseRemoteConfig.getInfo().getFetchTimeMillis() / 1000); - native_applyDnsConfig(currentAccount, buffer.address, AccountInstance.getInstance(currentAccount).getUserConfig().getClientPhone(), date); - } catch (Exception e) { - FileLog.e(e); - } - } else { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("failed to get firebase result"); - FileLog.d("start dns txt task"); - } - DnsTxtLoadTask task = new DnsTxtLoadTask(currentAccount); - task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, null, null, null); - currentTask = task; + firebaseRemoteConfig.activate().addOnCompleteListener(finishedTask2 -> { + currentTask = null; + String config = firebaseRemoteConfig.getString("ipconfigv3"); + if (!TextUtils.isEmpty(config)) { + byte[] bytes = Base64.decode(config, Base64.DEFAULT); + try { + NativeByteBuffer buffer = new NativeByteBuffer(bytes.length); + buffer.writeBytes(bytes); + int date = (int) (firebaseRemoteConfig.getInfo().getFetchTimeMillis() / 1000); + native_applyDnsConfig(currentAccount, buffer.address, AccountInstance.getInstance(currentAccount).getUserConfig().getClientPhone(), date); + } catch (Exception e) { + FileLog.e(e); + } + } else { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("failed to get firebase result"); + FileLog.d("start dns txt task"); + } + DnsTxtLoadTask task = new DnsTxtLoadTask(currentAccount); + task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, null, null, null); + currentTask = task; + } + }); } }); }); diff --git a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java index 731f6d246..33bb0ceea 100644 --- a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java +++ b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java @@ -10,6 +10,8 @@ package org.telegram.tgnet; import android.text.TextUtils; +import org.telegram.messenger.FileLoader; +import org.telegram.messenger.FileLog; import org.telegram.messenger.Utilities; import java.util.ArrayList; @@ -59,9 +61,8 
@@ public class TLRPC { public static final int MESSAGE_FLAG_HAS_VIEWS = 0x00000400; public static final int MESSAGE_FLAG_HAS_BOT_ID = 0x00000800; public static final int MESSAGE_FLAG_EDITED = 0x00008000; - public static final int MESSAGE_FLAG_MEGAGROUP = 0x80000000; - public static final int LAYER = 120; + public static final int LAYER = 122; public static class TL_stats_megagroupStats extends TLObject { public static int constructor = 0xef7ff916; @@ -1778,134 +1779,87 @@ public class TLRPC { } } - public static abstract class GroupCall extends TLObject { - public long id; - public long access_hash; - public int duration; - public int flags; - public int channel_id; - public int admin_id; - public byte[] encryption_key; - public long key_fingerprint; - public PhoneCallProtocol protocol; - public TL_groupCallConnection connection; - public byte[] reflector_group_tag; - public byte[] reflector_self_tag; - public byte[] reflector_self_secret; - public int participants_count; + public static abstract class GroupCall extends TLObject { - public static GroupCall TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - GroupCall result = null; - switch (constructor) { - case 0x7780bcb4: - result = new TL_groupCallDiscarded(); - break; - case 0xa8f1624: - result = new TL_groupCall(); - break; - case 0x6d0b1604: - result = new TL_groupCallPrivate(); - break; - } - if (result == null && exception) { - throw new RuntimeException(String.format("can't parse magic %x in GroupCall", constructor)); - } - if (result != null) { - result.readParams(stream, exception); - } - return result; - } - } + public int flags; + public boolean join_muted; + public boolean can_change_join_muted; + public long id; + public long access_hash; + public int participants_count; + public TL_dataJSON params; + public int version; + public int duration; - public static class TL_groupCallDiscarded extends GroupCall { - public static int constructor = 0x7780bcb4; + public 
static GroupCall TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + GroupCall result = null; + switch (constructor) { + case 0x7780bcb4: + result = new TL_groupCallDiscarded(); + break; + case 0x55903081: + result = new TL_groupCall(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in GroupCall", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } + + public static class TL_groupCallDiscarded extends GroupCall { + public static int constructor = 0x7780bcb4; - public void readParams(AbstractSerializedData stream, boolean exception) { - id = stream.readInt64(exception); - access_hash = stream.readInt64(exception); - duration = stream.readInt32(exception); - } + public void readParams(AbstractSerializedData stream, boolean exception) { + id = stream.readInt64(exception); + access_hash = stream.readInt64(exception); + duration = stream.readInt32(exception); + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt64(id); - stream.writeInt64(access_hash); - stream.writeInt32(duration); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(id); + stream.writeInt64(access_hash); + stream.writeInt32(duration); + } + } - public static class TL_groupCall extends GroupCall { - public static int constructor = 0xa8f1624; + public static class TL_groupCall extends GroupCall { + public static int constructor = 0x55903081; + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + join_muted = (flags & 2) != 0; + can_change_join_muted = (flags & 4) != 0; + id = stream.readInt64(exception); + access_hash = stream.readInt64(exception); + participants_count = stream.readInt32(exception); + if ((flags & 1) != 0) { + params = 
TL_dataJSON.TLdeserialize(stream, stream.readInt32(exception), exception); + } + version = stream.readInt32(exception); + } - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - id = stream.readInt64(exception); - access_hash = stream.readInt64(exception); - if ((flags & 1) != 0) { - channel_id = stream.readInt32(exception); - } - admin_id = stream.readInt32(exception); - if ((flags & 2) != 0) { - encryption_key = stream.readByteArray(exception); - } - key_fingerprint = stream.readInt64(exception); - protocol = PhoneCallProtocol.TLdeserialize(stream, stream.readInt32(exception), exception); - connection = TL_groupCallConnection.TLdeserialize(stream, stream.readInt32(exception), exception); - reflector_group_tag = stream.readByteArray(exception); - reflector_self_tag = stream.readByteArray(exception); - reflector_self_secret = stream.readByteArray(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(flags); - stream.writeInt64(id); - stream.writeInt64(access_hash); - if ((flags & 1) != 0) { - stream.writeInt32(channel_id); - } - stream.writeInt32(admin_id); - if ((flags & 2) != 0) { - stream.writeByteArray(encryption_key); - } - stream.writeInt64(key_fingerprint); - protocol.serializeToStream(stream); - connection.serializeToStream(stream); - stream.writeByteArray(reflector_group_tag); - stream.writeByteArray(reflector_self_tag); - stream.writeByteArray(reflector_self_secret); - } - } - - public static class TL_groupCallPrivate extends GroupCall { - public static int constructor = 0x6d0b1604; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - id = stream.readInt64(exception); - access_hash = stream.readInt64(exception); - if ((flags & 1) != 0) { - channel_id = stream.readInt32(exception); - } - participants_count = stream.readInt32(exception); - 
admin_id = stream.readInt32(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(flags); - stream.writeInt64(id); - stream.writeInt64(access_hash); - if ((flags & 1) != 0) { - stream.writeInt32(channel_id); - } - stream.writeInt32(participants_count); - stream.writeInt32(admin_id); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = join_muted ? (flags | 2) : (flags &~ 2); + flags = can_change_join_muted ? (flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + stream.writeInt64(id); + stream.writeInt64(access_hash); + stream.writeInt32(participants_count); + if ((flags & 1) != 0) { + params.serializeToStream(stream); + } + stream.writeInt32(version); + } + } public static class TL_channelBannedRights_layer92 extends TLObject { public static int constructor = 0x58cf4249; @@ -3252,6 +3206,7 @@ public class TLRPC { public boolean pin_messages; public boolean add_admins; public boolean anonymous; + public boolean manage_call; public static TL_chatAdminRights TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { if (TL_chatAdminRights.constructor != constructor) { @@ -3277,6 +3232,7 @@ public class TLRPC { pin_messages = (flags & 128) != 0; add_admins = (flags & 512) != 0; anonymous = (flags & 1024) != 0; + manage_call = (flags & 2048) != 0; } public void serializeToStream(AbstractSerializedData stream) { @@ -3290,6 +3246,7 @@ public class TLRPC { flags = pin_messages ? (flags | 128) : (flags &~ 128); flags = add_admins ? (flags | 512) : (flags &~ 512); flags = anonymous ? (flags | 1024) : (flags &~ 1024); + flags = manage_call ? 
(flags | 2048) : (flags &~ 2048); stream.writeInt32(flags); } } @@ -5043,6 +5000,9 @@ public class TLRPC { case 0xaa0cd9e4: result = new TL_sendMessageUploadDocumentAction(); break; + case 0xd92c2285: + result = new TL_speakingInGroupCallAction(); + break; case 0xa187d66f: result = new TL_sendMessageRecordVideoAction(); break; @@ -5238,6 +5198,15 @@ public class TLRPC { } } + public static class TL_speakingInGroupCallAction extends SendMessageAction { + public static int constructor = 0xd92c2285; + + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } + public static class TL_sendMessageRecordVideoAction extends SendMessageAction { public static int constructor = 0xa187d66f; @@ -9050,6 +9019,8 @@ public class TLRPC { public int slowmode_next_send_date; public int stats_dc; public int pts; + public TL_inputGroupCall call; + public int inviterId; public static ChatFull TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { ChatFull result = null; @@ -9063,9 +9034,12 @@ public class TLRPC { case 0x2e02a614: result = new TL_chatFull_layer87(); break; - case 0xf0e6672a: + case 0xef3a6acd: result = new TL_channelFull(); break; + case 0xf0e6672a: + result = new TL_channelFull_layer121(); + break; case 0x2d895c74: result = new TL_channelFull_layer110(); break; @@ -9090,9 +9064,12 @@ public class TLRPC { case 0x9e341ddf: result = new TL_channelFull_layer48(); break; - case 0x1b7c9db3: + case 0xdc8c181: result = new TL_chatFull(); break; + case 0x1b7c9db3: + result = new TL_chatFull_layer121(); + break; case 0x22a235da: result = new TL_chatFull_layer98(); break; @@ -9277,6 +9254,177 @@ public class TLRPC { } public static class TL_channelFull extends ChatFull { + public static int constructor = 0xef3a6acd; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + can_view_participants = (flags & 8) != 0; + can_set_username = 
(flags & 64) != 0; + can_set_stickers = (flags & 128) != 0; + hidden_prehistory = (flags & 1024) != 0; + can_set_location = (flags & 65536) != 0; + has_scheduled = (flags & 524288) != 0; + can_view_stats = (flags & 1048576) != 0; + blocked = (flags & 4194304) != 0; + id = stream.readInt32(exception); + about = stream.readString(exception); + if ((flags & 1) != 0) { + participants_count = stream.readInt32(exception); + } + if ((flags & 2) != 0) { + admins_count = stream.readInt32(exception); + } + if ((flags & 4) != 0) { + kicked_count = stream.readInt32(exception); + } + if ((flags & 4) != 0) { + banned_count = stream.readInt32(exception); + } + if ((flags & 8192) != 0) { + online_count = stream.readInt32(exception); + } + read_inbox_max_id = stream.readInt32(exception); + read_outbox_max_id = stream.readInt32(exception); + unread_count = stream.readInt32(exception); + chat_photo = Photo.TLdeserialize(stream, stream.readInt32(exception), exception); + notify_settings = PeerNotifySettings.TLdeserialize(stream, stream.readInt32(exception), exception); + exported_invite = ExportedChatInvite.TLdeserialize(stream, stream.readInt32(exception), exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + BotInfo object = BotInfo.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + bot_info.add(object); + } + if ((flags & 16) != 0) { + migrated_from_chat_id = stream.readInt32(exception); + } + if ((flags & 16) != 0) { + migrated_from_max_id = stream.readInt32(exception); + } + if ((flags & 32) != 0) { + pinned_msg_id = stream.readInt32(exception); + } + if ((flags & 256) != 0) { + stickerset = StickerSet.TLdeserialize(stream, stream.readInt32(exception), exception); + } + if ((flags & 512) != 0) { + 
available_min_id = stream.readInt32(exception); + } + if ((flags & 2048) != 0) { + folder_id = stream.readInt32(exception); + } + if ((flags & 16384) != 0) { + linked_chat_id = stream.readInt32(exception); + } + if ((flags & 32768) != 0) { + location = ChannelLocation.TLdeserialize(stream, stream.readInt32(exception), exception); + } + if ((flags & 131072) != 0) { + slowmode_seconds = stream.readInt32(exception); + } + if ((flags & 262144) != 0) { + slowmode_next_send_date = stream.readInt32(exception); + } + if ((flags & 4096) != 0) { + stats_dc = stream.readInt32(exception); + } + pts = stream.readInt32(exception); + if ((flags & 2097152) != 0) { + call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = can_view_participants ? (flags | 8) : (flags &~ 8); + flags = can_set_username ? (flags | 64) : (flags &~ 64); + flags = can_set_stickers ? (flags | 128) : (flags &~ 128); + flags = hidden_prehistory ? (flags | 1024) : (flags &~ 1024); + flags = can_set_location ? (flags | 65536) : (flags &~ 65536); + flags = has_scheduled ? (flags | 524288) : (flags &~ 524288); + flags = can_view_stats ? (flags | 1048576) : (flags &~ 1048576); + flags = blocked ? 
(flags | 4194304) : (flags &~ 4194304); + stream.writeInt32(flags); + stream.writeInt32(id); + stream.writeString(about); + if ((flags & 1) != 0) { + stream.writeInt32(participants_count); + } + if ((flags & 2) != 0) { + stream.writeInt32(admins_count); + } + if ((flags & 4) != 0) { + stream.writeInt32(kicked_count); + } + if ((flags & 4) != 0) { + stream.writeInt32(banned_count); + } + if ((flags & 8192) != 0) { + stream.writeInt32(online_count); + } + stream.writeInt32(read_inbox_max_id); + stream.writeInt32(read_outbox_max_id); + stream.writeInt32(unread_count); + chat_photo.serializeToStream(stream); + notify_settings.serializeToStream(stream); + exported_invite.serializeToStream(stream); + stream.writeInt32(0x1cb5c415); + int count = bot_info.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + bot_info.get(a).serializeToStream(stream); + } + if ((flags & 16) != 0) { + stream.writeInt32(migrated_from_chat_id); + } + if ((flags & 16) != 0) { + stream.writeInt32(migrated_from_max_id); + } + if ((flags & 32) != 0) { + stream.writeInt32(pinned_msg_id); + } + if ((flags & 256) != 0) { + stickerset.serializeToStream(stream); + } + if ((flags & 512) != 0) { + stream.writeInt32(available_min_id); + } + if ((flags & 2048) != 0) { + stream.writeInt32(folder_id); + } + if ((flags & 16384) != 0) { + stream.writeInt32(linked_chat_id); + } + if ((flags & 32768) != 0) { + location.serializeToStream(stream); + } + if ((flags & 131072) != 0) { + stream.writeInt32(slowmode_seconds); + } + if ((flags & 262144) != 0) { + stream.writeInt32(slowmode_next_send_date); + } + if ((flags & 4096) != 0) { + stream.writeInt32(stats_dc); + } + stream.writeInt32(pts); + if ((flags & 2097152) != 0) { + call.serializeToStream(stream); + } + } + } + + public static class TL_channelFull_layer121 extends TL_channelFull { public static int constructor = 0xf0e6672a; @@ -10350,6 +10498,83 @@ public class TLRPC { } public static class TL_chatFull extends ChatFull { + public 
static int constructor = 0xdc8c181; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + can_set_username = (flags & 128) != 0; + has_scheduled = (flags & 256) != 0; + id = stream.readInt32(exception); + about = stream.readString(exception); + participants = ChatParticipants.TLdeserialize(stream, stream.readInt32(exception), exception); + if ((flags & 4) != 0) { + chat_photo = Photo.TLdeserialize(stream, stream.readInt32(exception), exception); + } + notify_settings = PeerNotifySettings.TLdeserialize(stream, stream.readInt32(exception), exception); + exported_invite = ExportedChatInvite.TLdeserialize(stream, stream.readInt32(exception), exception); + if ((flags & 8) != 0) { + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + BotInfo object = BotInfo.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + bot_info.add(object); + } + } + if ((flags & 64) != 0) { + pinned_msg_id = stream.readInt32(exception); + } + if ((flags & 2048) != 0) { + folder_id = stream.readInt32(exception); + } + if ((flags & 4096) != 0) { + call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = can_set_username ? (flags | 128) : (flags &~ 128); + flags = has_scheduled ? 
(flags | 256) : (flags &~ 256); + stream.writeInt32(flags); + stream.writeInt32(id); + stream.writeString(about); + participants.serializeToStream(stream); + if ((flags & 4) != 0) { + chat_photo.serializeToStream(stream); + } + notify_settings.serializeToStream(stream); + exported_invite.serializeToStream(stream); + if ((flags & 8) != 0) { + stream.writeInt32(0x1cb5c415); + int count = bot_info.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + bot_info.get(a).serializeToStream(stream); + } + } + if ((flags & 64) != 0) { + stream.writeInt32(pinned_msg_id); + } + if ((flags & 2048) != 0) { + stream.writeInt32(folder_id); + } + if ((flags & 4096) != 0) { + call.serializeToStream(stream); + } + } + } + + public static class TL_chatFull_layer121 extends TL_chatFull { public static int constructor = 0x1b7c9db3; @@ -14105,100 +14330,6 @@ public class TLRPC { } } - public static class TL_phone_groupCall extends TLObject { - public static int constructor = 0x6737ffb7; - - public GroupCall call; - public ArrayList participants = new ArrayList<>(); - public ArrayList chats = new ArrayList<>(); - public ArrayList users = new ArrayList<>(); - - public static TL_phone_groupCall TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - if (TL_phone_groupCall.constructor != constructor) { - if (exception) { - throw new RuntimeException(String.format("can't parse magic %x in TL_phone_groupCall", constructor)); - } else { - return null; - } - } - TL_phone_groupCall result = new TL_phone_groupCall(); - result.readParams(stream, exception); - return result; - } - - public void readParams(AbstractSerializedData stream, boolean exception) { - call = GroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); - int magic = stream.readInt32(exception); - if (magic != 0x1cb5c415) { - if (exception) { - throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); - } - return; - } - int count = 
stream.readInt32(exception); - for (int a = 0; a < count; a++) { - GroupCallParticipant object = GroupCallParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); - if (object == null) { - return; - } - participants.add(object); - } - magic = stream.readInt32(exception); - if (magic != 0x1cb5c415) { - if (exception) { - throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); - } - return; - } - count = stream.readInt32(exception); - for (int a = 0; a < count; a++) { - Chat object = Chat.TLdeserialize(stream, stream.readInt32(exception), exception); - if (object == null) { - return; - } - chats.add(object); - } - magic = stream.readInt32(exception); - if (magic != 0x1cb5c415) { - if (exception) { - throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); - } - return; - } - count = stream.readInt32(exception); - for (int a = 0; a < count; a++) { - User object = User.TLdeserialize(stream, stream.readInt32(exception), exception); - if (object == null) { - return; - } - users.add(object); - } - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - call.serializeToStream(stream); - stream.writeInt32(0x1cb5c415); - int count = participants.size(); - stream.writeInt32(count); - for (int a = 0; a < count; a++) { - participants.get(a).serializeToStream(stream); - } - stream.writeInt32(0x1cb5c415); - count = chats.size(); - stream.writeInt32(count); - for (int a = 0; a < count; a++) { - chats.get(a).serializeToStream(stream); - } - stream.writeInt32(0x1cb5c415); - count = users.size(); - stream.writeInt32(count); - for (int a = 0; a < count; a++) { - users.get(a).serializeToStream(stream); - } - } - } - public static abstract class PhoneCallDiscardReason extends TLObject { public byte[] encrypted_key; @@ -14211,9 +14342,6 @@ public class TLRPC { case 0xfaf7e8c9: result = new TL_phoneCallDiscardReasonBusy(); break; - case 0xafe2b839: - result = new 
TL_phoneCallDiscardReasonAllowGroupCall(); - break; case 0x85e42301: result = new TL_phoneCallDiscardReasonMissed(); break; @@ -14249,20 +14377,6 @@ public class TLRPC { } } - public static class TL_phoneCallDiscardReasonAllowGroupCall extends PhoneCallDiscardReason { - public static int constructor = 0xafe2b839; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - encrypted_key = stream.readByteArray(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeByteArray(encrypted_key); - } - } - public static class TL_phoneCallDiscardReasonMissed extends PhoneCallDiscardReason { public static int constructor = 0x85e42301; @@ -14827,6 +14941,81 @@ public class TLRPC { } } + public static class TL_phone_groupCall extends TLObject { + public static int constructor = 0x66ab0bfc; + + public GroupCall call; + public ArrayList participants = new ArrayList<>(); + public String participants_next_offset; + public ArrayList users = new ArrayList<>(); + + public static TL_phone_groupCall TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_phone_groupCall.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_phone_groupCall", constructor)); + } else { + return null; + } + } + TL_phone_groupCall result = new TL_phone_groupCall(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + call = GroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_groupCallParticipant object = 
TL_groupCallParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + participants.add(object); + } + participants_next_offset = stream.readString(exception); + magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + User object = User.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + users.add(object); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + stream.writeInt32(0x1cb5c415); + int count = participants.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + participants.get(a).serializeToStream(stream); + } + stream.writeString(participants_next_offset); + stream.writeInt32(0x1cb5c415); + count = users.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + users.get(a).serializeToStream(stream); + } + } + } + public static abstract class PhoneCall extends TLObject { public int flags; @@ -16548,6 +16737,9 @@ public class TLRPC { case 0x95e3fbef: result = new TL_messageActionChatDeletePhoto(); break; + case 0x76b9f11a: + result = new TL_messageActionInviteToGroupCall(); + break; case 0x80e11a7f: result = new TL_messageActionPhoneCall(); break; @@ -16652,27 +16844,27 @@ public class TLRPC { } } - public static class TL_messageActionGroupCall extends MessageAction { - public static int constructor = 0x7a0d7f42; + public static class TL_messageActionGroupCall extends MessageAction { + public static int constructor = 0x7a0d7f42; - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), 
exception); - if ((flags & 1) != 0) { - duration = stream.readInt32(exception); - } - } + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); + if ((flags & 1) != 0) { + duration = stream.readInt32(exception); + } + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(flags); - call.serializeToStream(stream); - if ((flags & 1) != 0) { - stream.writeInt32(duration); - } - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + call.serializeToStream(stream); + if ((flags & 1) != 0) { + stream.writeInt32(duration); + } + } + } public static class TL_messageActionChatMigrateTo extends MessageAction { public static int constructor = 0x51bdb021; @@ -16788,6 +16980,37 @@ public class TLRPC { } } + public static class TL_messageActionInviteToGroupCall extends MessageAction { + public static int constructor = 0x76b9f11a; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + users.add(stream.readInt32(exception)); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + stream.writeInt32(0x1cb5c415); + int count = users.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(users.get(a)); + } + } + } + public static class TL_messageActionUserJoined extends MessageAction { 
public static int constructor = 0x55555550; @@ -20732,9 +20955,9 @@ public class TLRPC { case 0xf8ab7dfb: result = new TL_inputMediaContact(); break; - case 0x23ab23d2: - result = new TL_inputMediaDocument(); - break; + case 0x33473058: + result = new TL_inputMediaDocument(); + break; case 0xd33f43f3: result = new TL_inputMediaGame(); break; @@ -20801,28 +21024,35 @@ public class TLRPC { } } - public static class TL_inputMediaDocument extends InputMedia { - public static int constructor = 0x23ab23d2; + public static class TL_inputMediaDocument extends InputMedia { + public static int constructor = 0x33473058; - public InputDocument id; + public InputDocument id; + public String query; - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - id = InputDocument.TLdeserialize(stream, stream.readInt32(exception), exception); - if ((flags & 1) != 0) { - ttl_seconds = stream.readInt32(exception); - } - } + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + id = InputDocument.TLdeserialize(stream, stream.readInt32(exception), exception); + if ((flags & 1) != 0) { + ttl_seconds = stream.readInt32(exception); + } + if ((flags & 2) != 0) { + query = stream.readString(exception); + } + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(flags); - id.serializeToStream(stream); - if ((flags & 1) != 0) { - stream.writeInt32(ttl_seconds); - } - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + id.serializeToStream(stream); + if ((flags & 1) != 0) { + stream.writeInt32(ttl_seconds); + } + if ((flags & 2) != 0) { + stream.writeString(query); + } + } + } public static class TL_inputMediaGame extends InputMedia { public static int constructor = 0xd33f43f3; @@ -21448,75 +21678,78 @@ public class TLRPC { } } - 
public static class TL_channelAdminLogEventsFilter extends TLObject { - public static int constructor = 0xea107ae4; + public static class TL_channelAdminLogEventsFilter extends TLObject { + public static int constructor = 0xea107ae4; - public int flags; - public boolean join; - public boolean leave; - public boolean invite; - public boolean ban; - public boolean unban; - public boolean kick; - public boolean unkick; - public boolean promote; - public boolean demote; - public boolean info; - public boolean settings; - public boolean pinned; - public boolean edit; - public boolean delete; + public int flags; + public boolean join; + public boolean leave; + public boolean invite; + public boolean ban; + public boolean unban; + public boolean kick; + public boolean unkick; + public boolean promote; + public boolean demote; + public boolean info; + public boolean settings; + public boolean pinned; + public boolean edit; + public boolean delete; + public boolean group_call; - public static TL_channelAdminLogEventsFilter TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - if (TL_channelAdminLogEventsFilter.constructor != constructor) { - if (exception) { - throw new RuntimeException(String.format("can't parse magic %x in TL_channelAdminLogEventsFilter", constructor)); - } else { - return null; - } - } - TL_channelAdminLogEventsFilter result = new TL_channelAdminLogEventsFilter(); - result.readParams(stream, exception); - return result; - } + public static TL_channelAdminLogEventsFilter TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_channelAdminLogEventsFilter.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_channelAdminLogEventsFilter", constructor)); + } else { + return null; + } + } + TL_channelAdminLogEventsFilter result = new TL_channelAdminLogEventsFilter(); + result.readParams(stream, exception); + return result; + } - 
public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - join = (flags & 1) != 0; - leave = (flags & 2) != 0; - invite = (flags & 4) != 0; - ban = (flags & 8) != 0; - unban = (flags & 16) != 0; - kick = (flags & 32) != 0; - unkick = (flags & 64) != 0; - promote = (flags & 128) != 0; - demote = (flags & 256) != 0; - info = (flags & 512) != 0; - settings = (flags & 1024) != 0; - pinned = (flags & 2048) != 0; - edit = (flags & 4096) != 0; - delete = (flags & 8192) != 0; - } + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + join = (flags & 1) != 0; + leave = (flags & 2) != 0; + invite = (flags & 4) != 0; + ban = (flags & 8) != 0; + unban = (flags & 16) != 0; + kick = (flags & 32) != 0; + unkick = (flags & 64) != 0; + promote = (flags & 128) != 0; + demote = (flags & 256) != 0; + info = (flags & 512) != 0; + settings = (flags & 1024) != 0; + pinned = (flags & 2048) != 0; + edit = (flags & 4096) != 0; + delete = (flags & 8192) != 0; + group_call = (flags & 16384) != 0; + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = join ? (flags | 1) : (flags &~ 1); - flags = leave ? (flags | 2) : (flags &~ 2); - flags = invite ? (flags | 4) : (flags &~ 4); - flags = ban ? (flags | 8) : (flags &~ 8); - flags = unban ? (flags | 16) : (flags &~ 16); - flags = kick ? (flags | 32) : (flags &~ 32); - flags = unkick ? (flags | 64) : (flags &~ 64); - flags = promote ? (flags | 128) : (flags &~ 128); - flags = demote ? (flags | 256) : (flags &~ 256); - flags = info ? (flags | 512) : (flags &~ 512); - flags = settings ? (flags | 1024) : (flags &~ 1024); - flags = pinned ? (flags | 2048) : (flags &~ 2048); - flags = edit ? (flags | 4096) : (flags &~ 4096); - flags = delete ? 
(flags | 8192) : (flags &~ 8192); - stream.writeInt32(flags); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = join ? (flags | 1) : (flags &~ 1); + flags = leave ? (flags | 2) : (flags &~ 2); + flags = invite ? (flags | 4) : (flags &~ 4); + flags = ban ? (flags | 8) : (flags &~ 8); + flags = unban ? (flags | 16) : (flags &~ 16); + flags = kick ? (flags | 32) : (flags &~ 32); + flags = unkick ? (flags | 64) : (flags &~ 64); + flags = promote ? (flags | 128) : (flags &~ 128); + flags = demote ? (flags | 256) : (flags &~ 256); + flags = info ? (flags | 512) : (flags &~ 512); + flags = settings ? (flags | 1024) : (flags &~ 1024); + flags = pinned ? (flags | 2048) : (flags &~ 2048); + flags = edit ? (flags | 4096) : (flags &~ 4096); + flags = delete ? (flags | 8192) : (flags &~ 8192); + flags = group_call ? (flags | 16384) : (flags &~ 16384); + stream.writeInt32(flags); + } + } public static abstract class UserStatus extends TLObject { public int expires; @@ -21924,12 +22157,18 @@ public class TLRPC { case 0xaca1657b: result = new TL_updateMessagePoll(); break; + case 0x1330a196: + result = new TL_updateChat(); + break; case 0xa20db0e5: result = new TL_updateDeleteMessages(); break; case 0x8588878b: result = new TL_updatePinnedChannelMessages(); break; + case 0xf2ebdb4e: + result = new TL_updateGroupCallParticipants(); + break; case 0x571d2742: result = new TL_updateReadFeaturedStickers(); break; @@ -21954,9 +22193,6 @@ public class TLRPC { case 0x4e90bfd6: result = new TL_updateMessageID(); break; - case 0x57eaec8: - result = new TL_updateGroupCallParticipant(); - break; case 0x25d6c9c7: result = new TL_updateReadChannelOutbox(); break; @@ -22017,7 +22253,7 @@ public class TLRPC { case 0xb4a2e88d: result = new TL_updateEncryption(); break; - case 0x85fe86ed: + case 0xa45eb99b: result = new TL_updateGroupCall(); break; case 0xeb0467fb: @@ -22247,6 +22483,46 @@ public class TLRPC { } } + public static 
class TL_updateGroupCallParticipants extends Update { + public static int constructor = 0xf2ebdb4e; + + public TL_inputGroupCall call; + public ArrayList participants = new ArrayList<>(); + public int version; + + public void readParams(AbstractSerializedData stream, boolean exception) { + call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_groupCallParticipant object = TL_groupCallParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + participants.add(object); + } + version = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + stream.writeInt32(0x1cb5c415); + int count = participants.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + participants.get(a).serializeToStream(stream); + } + stream.writeInt32(version); + } + } + public static class TL_updateReadFeaturedStickers extends Update { public static int constructor = 0x571d2742; @@ -22398,24 +22674,6 @@ public class TLRPC { } } - public static class TL_updateGroupCallParticipant extends Update { - public static int constructor = 0x57eaec8; - - public TL_inputGroupCall call; - public GroupCallParticipant participant; - - public void readParams(AbstractSerializedData stream, boolean exception) { - call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); - participant = GroupCallParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - 
call.serializeToStream(stream); - participant.serializeToStream(stream); - } - } - public static class TL_updateReadChannelOutbox extends Update { public static int constructor = 0x25d6c9c7; @@ -22814,6 +23072,21 @@ public class TLRPC { } } + public static class TL_updateChat extends Update { + public static int constructor = 0x1330a196; + + public int chat_id; + + public void readParams(AbstractSerializedData stream, boolean exception) { + chat_id = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(chat_id); + } + } + public static class TL_updateChatUserTyping extends Update { public static int constructor = 0x9a65ea1f; @@ -22863,16 +23136,19 @@ public class TLRPC { } public static class TL_updateGroupCall extends Update { - public static int constructor = 0x85fe86ed; + public static int constructor = 0xa45eb99b; + public int chat_id; public GroupCall call; public void readParams(AbstractSerializedData stream, boolean exception) { + chat_id = stream.readInt32(exception); call = GroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); + stream.writeInt32(chat_id); call.serializeToStream(stream); } } @@ -25940,43 +26216,6 @@ public class TLRPC { } } - public static class TL_groupCallConnection extends TLObject { - public static int constructor = 0x40732163; - - public long id; - public String ip; - public String ipv6; - public int port; - - public static TL_groupCallConnection TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - if (TL_groupCallConnection.constructor != constructor) { - if (exception) { - throw new RuntimeException(String.format("can't parse magic %x in TL_groupCallConnection", constructor)); - } else { - return null; - } - } - TL_groupCallConnection result = new TL_groupCallConnection(); - 
result.readParams(stream, exception); - return result; - } - - public void readParams(AbstractSerializedData stream, boolean exception) { - id = stream.readInt64(exception); - ip = stream.readString(exception); - ipv6 = stream.readString(exception); - port = stream.readInt32(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt64(id); - stream.writeString(ip); - stream.writeString(ipv6); - stream.writeInt32(port); - } - } - public static class TL_help_countryCode extends TLObject { public static int constructor = 0x4203c5ef; @@ -30218,84 +30457,88 @@ public class TLRPC { } public static abstract class ChannelAdminLogEventAction extends TLObject { - public Message message; - public String prev_value; - public Message prev_message; - public Message new_message; - public ChannelParticipant prev_participant; - public ChannelParticipant new_participant; - public InputStickerSet prev_stickerset; - public InputStickerSet new_stickerset; - public ChannelParticipant participant; - public Photo prev_photo; - public Photo new_photo; public static ChannelAdminLogEventAction TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { ChannelAdminLogEventAction result = null; switch (constructor) { - case 0x1b7907ae: - result = new TL_channelAdminLogEventActionToggleInvites(); - break; - case 0xe9e82c18: - result = new TL_channelAdminLogEventActionUpdatePinned(); - break; - case 0x26ae0971: - result = new TL_channelAdminLogEventActionToggleSignatures(); - break; - case 0x55188a2e: - result = new TL_channelAdminLogEventActionChangeAbout(); - break; - case 0x709b2405: - result = new TL_channelAdminLogEventActionEditMessage(); - break; - case 0xe6b76ae: - result = new TL_channelAdminLogEventActionChangeLocation(); + case 0xdb9f9140: + result = new TL_channelAdminLogEventActionDiscardGroupCall(); + break; + case 0xb1c3caa7: + result = new 
TL_channelAdminLogEventActionChangeStickerSet(); + break; + case 0xf92424d2: + result = new TL_channelAdminLogEventActionParticipantMute(); break; - case 0xd5676710: - result = new TL_channelAdminLogEventActionParticipantToggleAdmin(); - break; - case 0xb1c3caa7: - result = new TL_channelAdminLogEventActionChangeStickerSet(); - break; - case 0xe6dfb825: - result = new TL_channelAdminLogEventActionChangeTitle(); - break; case 0x2df5fc0a: result = new TL_channelAdminLogEventActionDefaultBannedRights(); break; - case 0xa26f881b: - result = new TL_channelAdminLogEventActionChangeLinkedChat(); + case 0xf89777f2: + result = new TL_channelAdminLogEventActionParticipantLeave(); break; - case 0x5f5c95f1: - result = new TL_channelAdminLogEventActionTogglePreHistoryHidden(); - break; - case 0x8f079643: - result = new TL_channelAdminLogEventActionStopPoll(); + case 0x709b2405: + result = new TL_channelAdminLogEventActionEditMessage(); + break; + case 0x6a4afc38: + result = new TL_channelAdminLogEventActionChangeUsername(); + break; + case 0xe6b76ae: + result = new TL_channelAdminLogEventActionChangeLocation(); + break; + case 0x434bd2af: + result = new TL_channelAdminLogEventActionChangePhoto(); + break; + case 0xd5676710: + result = new TL_channelAdminLogEventActionParticipantToggleAdmin(); break; case 0x53909779: result = new TL_channelAdminLogEventActionToggleSlowMode(); break; - case 0x42e047bb: - result = new TL_channelAdminLogEventActionDeleteMessage(); - break; - case 0xe31c34d8: - result = new TL_channelAdminLogEventActionParticipantInvite(); - break; - case 0xf89777f2: - result = new TL_channelAdminLogEventActionParticipantLeave(); - break; - case 0x6a4afc38: - result = new TL_channelAdminLogEventActionChangeUsername(); - break; - case 0x434bd2af: - result = new TL_channelAdminLogEventActionChangePhoto(); + case 0x8f079643: + result = new TL_channelAdminLogEventActionStopPoll(); + break; + case 0x26ae0971: + result = new 
TL_channelAdminLogEventActionToggleSignatures(); + break; + case 0xe6d83d7e: + result = new TL_channelAdminLogEventActionParticipantToggleBan(); + break; + case 0x183040d3: + result = new TL_channelAdminLogEventActionParticipantJoin(); + break; + case 0x5f5c95f1: + result = new TL_channelAdminLogEventActionTogglePreHistoryHidden(); + break; + case 0xe9e82c18: + result = new TL_channelAdminLogEventActionUpdatePinned(); + break; + case 0x56d6a247: + result = new TL_channelAdminLogEventActionToggleGroupCallSetting(); + break; + case 0xe31c34d8: + result = new TL_channelAdminLogEventActionParticipantInvite(); + break; + case 0x55188a2e: + result = new TL_channelAdminLogEventActionChangeAbout(); + break; + case 0x23209745: + result = new TL_channelAdminLogEventActionStartGroupCall(); + break; + case 0xa26f881b: + result = new TL_channelAdminLogEventActionChangeLinkedChat(); + break; + case 0x1b7907ae: + result = new TL_channelAdminLogEventActionToggleInvites(); + break; + case 0xe64429c0: + result = new TL_channelAdminLogEventActionParticipantUnmute(); + break; + case 0x42e047bb: + result = new TL_channelAdminLogEventActionDeleteMessage(); + break; + case 0xe6dfb825: + result = new TL_channelAdminLogEventActionChangeTitle(); break; - case 0xe6d83d7e: - result = new TL_channelAdminLogEventActionParticipantToggleBan(); - break; - case 0x183040d3: - result = new TL_channelAdminLogEventActionParticipantJoin(); - break; } if (result == null && exception) { throw new RuntimeException(String.format("can't parse magic %x in ChannelAdminLogEventAction", constructor)); @@ -30307,149 +30550,53 @@ public class TLRPC { } } - public static class TL_channelAdminLogEventActionToggleInvites extends ChannelAdminLogEventAction { - public static int constructor = 0x1b7907ae; + public static class TL_channelAdminLogEventActionDiscardGroupCall extends ChannelAdminLogEventAction { + public static int constructor = 0xdb9f9140; - public boolean new_value; - - public void 
readParams(AbstractSerializedData stream, boolean exception) { - new_value = stream.readBool(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeBool(new_value); - } - } - - public static class TL_channelAdminLogEventActionUpdatePinned extends ChannelAdminLogEventAction { - public static int constructor = 0xe9e82c18; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - message.serializeToStream(stream); - } - } - - public static class TL_channelAdminLogEventActionToggleSignatures extends ChannelAdminLogEventAction { - public static int constructor = 0x26ae0971; - - public boolean new_value; - - public void readParams(AbstractSerializedData stream, boolean exception) { - new_value = stream.readBool(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeBool(new_value); - } - } - - public static class TL_channelAdminLogEventActionChangeAbout extends ChannelAdminLogEventAction { - public static int constructor = 0x55188a2e; - - public String new_value; - - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_value = stream.readString(exception); - new_value = stream.readString(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeString(prev_value); - stream.writeString(new_value); - } - } - - public static class TL_channelAdminLogEventActionEditMessage extends ChannelAdminLogEventAction { - public static int constructor = 0x709b2405; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_message = Message.TLdeserialize(stream, stream.readInt32(exception), 
exception); - new_message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - prev_message.serializeToStream(stream); - new_message.serializeToStream(stream); - } - } - - public static class TL_channelAdminLogEventActionChangeLocation extends ChannelAdminLogEventAction { - public static int constructor = 0xe6b76ae; - - public ChannelLocation prev_value; - public ChannelLocation new_value; + public TL_inputGroupCall call; public void readParams(AbstractSerializedData stream, boolean exception) { - prev_value = ChannelLocation.TLdeserialize(stream, stream.readInt32(exception), exception); - new_value = ChannelLocation.TLdeserialize(stream, stream.readInt32(exception), exception); + call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - prev_value.serializeToStream(stream); - new_value.serializeToStream(stream); + call.serializeToStream(stream); } } - public static class TL_channelAdminLogEventActionParticipantToggleAdmin extends ChannelAdminLogEventAction { - public static int constructor = 0xd5676710; + public static class TL_channelAdminLogEventActionChangeStickerSet extends ChannelAdminLogEventAction { + public static int constructor = 0xb1c3caa7; + public InputStickerSet prev_stickerset; + public InputStickerSet new_stickerset; - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); - new_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); - } + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_stickerset = InputStickerSet.TLdeserialize(stream, stream.readInt32(exception), exception); + new_stickerset = 
InputStickerSet.TLdeserialize(stream, stream.readInt32(exception), exception); + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - prev_participant.serializeToStream(stream); - new_participant.serializeToStream(stream); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + prev_stickerset.serializeToStream(stream); + new_stickerset.serializeToStream(stream); + } + } - public static class TL_channelAdminLogEventActionChangeStickerSet extends ChannelAdminLogEventAction { - public static int constructor = 0xb1c3caa7; + public static class TL_channelAdminLogEventActionParticipantMute extends ChannelAdminLogEventAction { + public static int constructor = 0xf92424d2; + public TL_groupCallParticipant participant; - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_stickerset = InputStickerSet.TLdeserialize(stream, stream.readInt32(exception), exception); - new_stickerset = InputStickerSet.TLdeserialize(stream, stream.readInt32(exception), exception); - } + public void readParams(AbstractSerializedData stream, boolean exception) { + participant = TL_groupCallParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - prev_stickerset.serializeToStream(stream); - new_stickerset.serializeToStream(stream); - } - } - - public static class TL_channelAdminLogEventActionChangeTitle extends ChannelAdminLogEventAction { - public static int constructor = 0xe6dfb825; - - public String new_value; - - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_value = stream.readString(exception); - new_value = stream.readString(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeString(prev_value); - stream.writeString(new_value); 
- } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + participant.serializeToStream(stream); + } + } public static class TL_channelAdminLogEventActionDefaultBannedRights extends ChannelAdminLogEventAction { public static int constructor = 0x2df5fc0a; @@ -30469,50 +30616,102 @@ public class TLRPC { } } - public static class TL_channelAdminLogEventActionChangeLinkedChat extends ChannelAdminLogEventAction { - public static int constructor = 0xa26f881b; + public static class TL_channelAdminLogEventActionParticipantLeave extends ChannelAdminLogEventAction { + public static int constructor = 0xf89777f2; - public int prev_value; - public int new_value; - - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_value = stream.readInt32(exception); - new_value = stream.readInt32(exception); - } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - stream.writeInt32(prev_value); - stream.writeInt32(new_value); } } - public static class TL_channelAdminLogEventActionTogglePreHistoryHidden extends ChannelAdminLogEventAction { - public static int constructor = 0x5f5c95f1; - - public boolean new_value; - - public void readParams(AbstractSerializedData stream, boolean exception) { - new_value = stream.readBool(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeBool(new_value); - } - } - - public static class TL_channelAdminLogEventActionStopPoll extends ChannelAdminLogEventAction { - public static int constructor = 0x8f079643; + public static class TL_channelAdminLogEventActionEditMessage extends ChannelAdminLogEventAction { + public static int constructor = 0x709b2405; + public Message prev_message; + public Message new_message; public void readParams(AbstractSerializedData stream, boolean exception) { - message = Message.TLdeserialize(stream, stream.readInt32(exception), 
exception); + prev_message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); + new_message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - message.serializeToStream(stream); + prev_message.serializeToStream(stream); + new_message.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionChangeUsername extends ChannelAdminLogEventAction { + public static int constructor = 0x6a4afc38; + + public String prev_value; + public String new_value; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_value = stream.readString(exception); + new_value = stream.readString(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(prev_value); + stream.writeString(new_value); + } + } + + public static class TL_channelAdminLogEventActionChangeLocation extends ChannelAdminLogEventAction { + public static int constructor = 0xe6b76ae; + + public ChannelLocation prev_value; + public ChannelLocation new_value; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_value = ChannelLocation.TLdeserialize(stream, stream.readInt32(exception), exception); + new_value = ChannelLocation.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + prev_value.serializeToStream(stream); + new_value.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionChangePhoto extends ChannelAdminLogEventAction { + public static int constructor = 0x434bd2af; + + public Photo prev_photo; + public Photo new_photo; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_photo = Photo.TLdeserialize(stream, 
stream.readInt32(exception), exception); + new_photo = Photo.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + prev_photo.serializeToStream(stream); + new_photo.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionParticipantToggleAdmin extends ChannelAdminLogEventAction { + public static int constructor = 0xd5676710; + + public ChannelParticipant prev_participant; + public ChannelParticipant new_participant; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + new_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + prev_participant.serializeToStream(stream); + new_participant.serializeToStream(stream); } } @@ -30534,100 +30733,236 @@ public class TLRPC { } } - public static class TL_channelAdminLogEventActionDeleteMessage extends ChannelAdminLogEventAction { - public static int constructor = 0x42e047bb; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - message.serializeToStream(stream); - } - } - - public static class TL_channelAdminLogEventActionParticipantInvite extends ChannelAdminLogEventAction { - public static int constructor = 0xe31c34d8; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - 
stream.writeInt32(constructor); - participant.serializeToStream(stream); - } - } - - public static class TL_channelAdminLogEventActionParticipantLeave extends ChannelAdminLogEventAction { - public static int constructor = 0xf89777f2; - - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - } - } - - public static class TL_channelAdminLogEventActionChangeUsername extends ChannelAdminLogEventAction { - public static int constructor = 0x6a4afc38; - - public String new_value; - - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_value = stream.readString(exception); - new_value = stream.readString(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeString(prev_value); - stream.writeString(new_value); - } - } - - public static class TL_channelAdminLogEventActionChangePhoto extends ChannelAdminLogEventAction { - public static int constructor = 0x434bd2af; + public static class TL_channelAdminLogEventActionStopPoll extends ChannelAdminLogEventAction { + public static int constructor = 0x8f079643; + public Message message; public void readParams(AbstractSerializedData stream, boolean exception) { - prev_photo = Photo.TLdeserialize(stream, stream.readInt32(exception), exception); - new_photo = Photo.TLdeserialize(stream, stream.readInt32(exception), exception); + message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - prev_photo.serializeToStream(stream); - new_photo.serializeToStream(stream); + message.serializeToStream(stream); } } - public static class TL_channelAdminLogEventActionParticipantToggleBan extends ChannelAdminLogEventAction { - public static int constructor = 0xe6d83d7e; + public static class TL_channelAdminLogEventActionToggleSignatures extends ChannelAdminLogEventAction { + 
public static int constructor = 0x26ae0971; + + public boolean new_value; + + public void readParams(AbstractSerializedData stream, boolean exception) { + new_value = stream.readBool(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeBool(new_value); + } + } + + public static class TL_channelAdminLogEventActionParticipantToggleBan extends ChannelAdminLogEventAction { + public static int constructor = 0xe6d83d7e; + + public ChannelParticipant prev_participant; + public ChannelParticipant new_participant; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + new_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + prev_participant.serializeToStream(stream); + new_participant.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionParticipantJoin extends ChannelAdminLogEventAction { + public static int constructor = 0x183040d3; - public void readParams(AbstractSerializedData stream, boolean exception) { - prev_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); - new_participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - prev_participant.serializeToStream(stream); - new_participant.serializeToStream(stream); - } - } + public static class TL_channelAdminLogEventActionTogglePreHistoryHidden extends ChannelAdminLogEventAction { + public static int constructor = 0x5f5c95f1; - public static class 
TL_channelAdminLogEventActionParticipantJoin extends ChannelAdminLogEventAction { - public static int constructor = 0x183040d3; + public boolean new_value; + public void readParams(AbstractSerializedData stream, boolean exception) { + new_value = stream.readBool(exception); + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeBool(new_value); + } + } + + public static class TL_channelAdminLogEventActionUpdatePinned extends ChannelAdminLogEventAction { + public static int constructor = 0xe9e82c18; + + public Message message; + + public void readParams(AbstractSerializedData stream, boolean exception) { + message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + message.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionToggleGroupCallSetting extends ChannelAdminLogEventAction { + public static int constructor = 0x56d6a247; + + public boolean join_muted; + + public void readParams(AbstractSerializedData stream, boolean exception) { + join_muted = stream.readBool(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeBool(join_muted); + } + } + + public static class TL_channelAdminLogEventActionParticipantInvite extends ChannelAdminLogEventAction { + public static int constructor = 0xe31c34d8; + + public ChannelParticipant participant; + + public void readParams(AbstractSerializedData stream, boolean exception) { + participant = ChannelParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + participant.serializeToStream(stream); + } + } + 
+ public static class TL_channelAdminLogEventActionChangeAbout extends ChannelAdminLogEventAction { + public static int constructor = 0x55188a2e; + + public String prev_value; + public String new_value; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_value = stream.readString(exception); + new_value = stream.readString(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(prev_value); + stream.writeString(new_value); + } + } + + public static class TL_channelAdminLogEventActionStartGroupCall extends ChannelAdminLogEventAction { + public static int constructor = 0x23209745; + + public TL_inputGroupCall call; + + public void readParams(AbstractSerializedData stream, boolean exception) { + call = TL_inputGroupCall.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionChangeLinkedChat extends ChannelAdminLogEventAction { + public static int constructor = 0xa26f881b; + + public int prev_value; + public int new_value; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_value = stream.readInt32(exception); + new_value = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(prev_value); + stream.writeInt32(new_value); + } + } + + public static class TL_channelAdminLogEventActionToggleInvites extends ChannelAdminLogEventAction { + public static int constructor = 0x1b7907ae; + + public boolean new_value; + + public void readParams(AbstractSerializedData stream, boolean exception) { + new_value = stream.readBool(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + 
stream.writeInt32(constructor); + stream.writeBool(new_value); + } + } + + public static class TL_channelAdminLogEventActionParticipantUnmute extends ChannelAdminLogEventAction { + public static int constructor = 0xe64429c0; + + public TL_groupCallParticipant participant; + + public void readParams(AbstractSerializedData stream, boolean exception) { + participant = TL_groupCallParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + participant.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionDeleteMessage extends ChannelAdminLogEventAction { + public static int constructor = 0x42e047bb; + + public Message message; + + public void readParams(AbstractSerializedData stream, boolean exception) { + message = Message.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + message.serializeToStream(stream); + } + } + + public static class TL_channelAdminLogEventActionChangeTitle extends ChannelAdminLogEventAction { + public static int constructor = 0xe6dfb825; + + public String prev_value; + public String new_value; + + public void readParams(AbstractSerializedData stream, boolean exception) { + prev_value = stream.readString(exception); + new_value = stream.readString(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(prev_value); + stream.writeString(new_value); + } + } public static abstract class InputWebFileLocation extends TLObject { @@ -31040,6 +31375,8 @@ public class TLRPC { public boolean scam; public boolean has_link; public boolean explicit_content; + public boolean call_active; + public boolean call_not_empty; public ArrayList restriction_reason = new ArrayList<>(); public 
TL_channelAdminRights_layer92 admin_rights_layer92; public TL_channelBannedRights_layer92 banned_rights_layer92; @@ -31214,6 +31551,8 @@ public class TLRPC { kicked = (flags & 2) != 0; left = (flags & 4) != 0; deactivated = (flags & 32) != 0; + call_active = (flags & 8388608) != 0; + call_not_empty = (flags & 16777216) != 0; id = stream.readInt32(exception); title = stream.readString(exception); photo = ChatPhoto.TLdeserialize(stream, stream.readInt32(exception), exception); @@ -31237,6 +31576,8 @@ public class TLRPC { flags = kicked ? (flags | 2) : (flags &~ 2); flags = left ? (flags | 4) : (flags &~ 4); flags = deactivated ? (flags | 32) : (flags &~ 32); + flags = call_active ? (flags | 8388608) : (flags &~ 8388608); + flags = call_not_empty ? (flags | 16777216) : (flags &~ 16777216); stream.writeInt32(flags); stream.writeInt32(id); stream.writeString(title); @@ -31550,6 +31891,8 @@ public class TLRPC { has_link = (flags & 1048576) != 0; has_geo = (flags & 2097152) != 0; slowmode_enabled = (flags & 4194304) != 0; + call_active = (flags & 8388608) != 0; + call_not_empty = (flags & 16777216) != 0; id = stream.readInt32(exception); if ((flags & 8192) != 0) { access_hash = stream.readInt64(exception); @@ -31606,6 +31949,8 @@ public class TLRPC { flags = has_link ? (flags | 1048576) : (flags &~ 1048576); flags = has_geo ? (flags | 2097152) : (flags &~ 2097152); flags = slowmode_enabled ? (flags | 4194304) : (flags &~ 4194304); + flags = call_active ? (flags | 8388608) : (flags &~ 8388608); + flags = call_not_empty ? 
(flags | 16777216) : (flags &~ 16777216); stream.writeInt32(flags); stream.writeInt32(id); if ((flags & 8192) != 0) { @@ -32070,7 +32415,7 @@ public class TLRPC { public int count; public int hash; public int installed_date; - public PhotoSize thumb; + public ArrayList thumbs = new ArrayList<>(); public int thumb_dc_id; public static StickerSet TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { @@ -32086,6 +32431,9 @@ public class TLRPC { result = new TL_stickerSet_layer97(); break; case 0xeeb46f27: + result = new TL_stickerSet_layer121(); + break; + case 0x40e237a8: result = new TL_stickerSet(); break; case 0xcd303b41: @@ -32177,7 +32525,10 @@ public class TLRPC { title = stream.readString(exception); short_name = stream.readString(exception); if ((flags & 16) != 0) { - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + if (thumb != null) { + thumbs.add(thumb); + } } count = stream.readInt32(exception); hash = stream.readInt32(exception); @@ -32197,7 +32548,7 @@ public class TLRPC { stream.writeString(title); stream.writeString(short_name); if ((flags & 16) != 0) { - thumb.serializeToStream(stream); + thumbs.get(0).serializeToStream(stream); } stream.writeInt32(count); stream.writeInt32(hash); @@ -32205,7 +32556,7 @@ public class TLRPC { } public static class TL_stickerSet extends StickerSet { - public static int constructor = 0xeeb46f27; + public static int constructor = 0x40e237a8; public void readParams(AbstractSerializedData stream, boolean exception) { @@ -32222,7 +32573,21 @@ public class TLRPC { title = stream.readString(exception); short_name = stream.readString(exception); if ((flags & 16) != 0) { - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new 
RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + thumbs.add(object); + } } if ((flags & 16) != 0) { thumb_dc_id = stream.readInt32(exception); @@ -32246,7 +32611,67 @@ public class TLRPC { stream.writeString(title); stream.writeString(short_name); if ((flags & 16) != 0) { - thumb.serializeToStream(stream); + stream.writeInt32(0x1cb5c415); + int count = thumbs.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + thumbs.get(a).serializeToStream(stream); + } + } + if ((flags & 16) != 0) { + stream.writeInt32(thumb_dc_id); + } + stream.writeInt32(count); + stream.writeInt32(hash); + } + } + + public static class TL_stickerSet_layer121 extends TL_stickerSet { + public static int constructor = 0xeeb46f27; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + archived = (flags & 2) != 0; + official = (flags & 4) != 0; + masks = (flags & 8) != 0; + animated = (flags & 32) != 0; + if ((flags & 1) != 0) { + installed_date = stream.readInt32(exception); + } + id = stream.readInt64(exception); + access_hash = stream.readInt64(exception); + title = stream.readString(exception); + short_name = stream.readString(exception); + if ((flags & 16) != 0) { + PhotoSize thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + if (thumb != null) { + thumbs.add(thumb); + } + } + if ((flags & 16) != 0) { + thumb_dc_id = stream.readInt32(exception); + } + count = stream.readInt32(exception); + hash = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = archived ? (flags | 2) : (flags &~ 2); + flags = official ? 
(flags | 4) : (flags &~ 4); + flags = masks ? (flags | 8) : (flags &~ 8); + flags = animated ? (flags | 32) : (flags &~ 32); + stream.writeInt32(flags); + if ((flags & 1) != 0) { + stream.writeInt32(installed_date); + } + stream.writeInt64(id); + stream.writeInt64(access_hash); + stream.writeString(title); + stream.writeString(short_name); + if ((flags & 16) != 0) { + thumbs.get(0).serializeToStream(stream); } if ((flags & 16) != 0) { stream.writeInt32(thumb_dc_id); @@ -32719,6 +33144,84 @@ public class TLRPC { } } + public static class TL_phone_groupParticipants extends TLObject { + public static int constructor = 0x9cfeb92d; + + public int count; + public ArrayList participants = new ArrayList<>(); + public String next_offset; + public ArrayList users = new ArrayList<>(); + public int version; + + public static TL_phone_groupParticipants TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_phone_groupParticipants.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_phone_groupParticipants", constructor)); + } else { + return null; + } + } + TL_phone_groupParticipants result = new TL_phone_groupParticipants(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + count = stream.readInt32(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_groupCallParticipant object = TL_groupCallParticipant.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + participants.add(object); + } + next_offset = stream.readString(exception); + magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if 
(exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + User object = User.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + users.add(object); + } + version = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(count); + stream.writeInt32(0x1cb5c415); + int count = participants.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + participants.get(a).serializeToStream(stream); + } + stream.writeString(next_offset); + stream.writeInt32(0x1cb5c415); + count = users.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + users.get(a).serializeToStream(stream); + } + stream.writeInt32(version); + } + } + public static abstract class PageListOrderedItem extends TLObject { public static PageListOrderedItem TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { @@ -33431,6 +33934,9 @@ public class TLRPC { public static PhotoSize TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { PhotoSize result = null; switch (constructor) { + case 0xd8214d41: + result = new TL_photoPathSize(); + break; case 0x77bfb61b: result = new TL_photoSize(); break; @@ -33647,34 +34153,6 @@ public class TLRPC { } } - public static class TL_wallet_secretSalt extends TLObject { - public static int constructor = 0xdd484d64; - - public byte[] salt; - - public static TL_wallet_secretSalt TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - if (TL_wallet_secretSalt.constructor != constructor) { - if (exception) { - throw new RuntimeException(String.format("can't parse magic %x in TL_wallet_secretSalt", constructor)); - } else { - return null; - } - } - TL_wallet_secretSalt result = new 
TL_wallet_secretSalt(); - result.readParams(stream, exception); - return result; - } - - public void readParams(AbstractSerializedData stream, boolean exception) { - salt = stream.readByteArray(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeByteArray(salt); - } - } - public static abstract class InputFile extends TLObject { public long id; public int parts; @@ -35690,152 +36168,68 @@ public class TLRPC { } } - public static class TL_wallet_liteResponse extends TLObject { - public static int constructor = 0x764386d7; + public static class TL_groupCallParticipant extends TLObject { + public static int constructor = 0x56b087c9; - public byte[] response; + public int flags; + public boolean muted; + public boolean left; + public boolean can_self_unmute; + public boolean just_joined; + public boolean versioned; + public int user_id; + public int date; + public int active_date; + public int source; + public long lastSpeakTime; //custom; + public float amplitude; //custom; + public boolean hasVoice; //custom; - public static TL_wallet_liteResponse TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - if (TL_wallet_liteResponse.constructor != constructor) { + public static TL_groupCallParticipant TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_groupCallParticipant.constructor != constructor) { if (exception) { - throw new RuntimeException(String.format("can't parse magic %x in TL_wallet_liteResponse", constructor)); + throw new RuntimeException(String.format("can't parse magic %x in TL_groupCallParticipant", constructor)); } else { return null; } } - TL_wallet_liteResponse result = new TL_wallet_liteResponse(); + TL_groupCallParticipant result = new TL_groupCallParticipant(); result.readParams(stream, exception); return result; } public void readParams(AbstractSerializedData stream, boolean exception) { - response = 
stream.readByteArray(exception); + flags = stream.readInt32(exception); + muted = (flags & 1) != 0; + left = (flags & 2) != 0; + can_self_unmute = (flags & 4) != 0; + just_joined = (flags & 16) != 0; + versioned = (flags & 32) != 0; + user_id = stream.readInt32(exception); + date = stream.readInt32(exception); + if ((flags & 8) != 0) { + active_date = stream.readInt32(exception); + } + source = stream.readInt32(exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - stream.writeByteArray(response); + flags = muted ? (flags | 1) : (flags &~ 1); + flags = left ? (flags | 2) : (flags &~ 2); + flags = can_self_unmute ? (flags | 4) : (flags &~ 4); + flags = just_joined ? (flags | 16) : (flags &~ 16); + flags = versioned ? (flags | 32) : (flags &~ 32); + stream.writeInt32(flags); + stream.writeInt32(user_id); + stream.writeInt32(date); + if ((flags & 8) != 0) { + stream.writeInt32(active_date); + } + stream.writeInt32(source); } } - public static abstract class GroupCallParticipant extends TLObject { - public int user_id; - public byte[] member_tag_hash; - public byte[] streams; - public int flags; - public boolean readonly; - public int date; - public int inviter_id; - public TL_inputPhoneCall phone_call; - - public static GroupCallParticipant TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - GroupCallParticipant result = null; - switch (constructor) { - case 0x419b0df2: - result = new TL_groupCallParticipantLeft(); - break; - case 0x4f0b39b8: - result = new TL_groupCallParticipantAdmin(); - break; - case 0x589db397: - result = new TL_groupCallParticipant(); - break; - case 0x377496f0: - result = new TL_groupCallParticipantInvited(); - break; - } - if (result == null && exception) { - throw new RuntimeException(String.format("can't parse magic %x in GroupCallParticipant", constructor)); - } - if (result != null) { - result.readParams(stream, exception); - } - return result; - } - } 
- - public static class TL_groupCallParticipantLeft extends GroupCallParticipant { - public static int constructor = 0x419b0df2; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - user_id = stream.readInt32(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(user_id); - } - } - - public static class TL_groupCallParticipantAdmin extends GroupCallParticipant { - public static int constructor = 0x4f0b39b8; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - user_id = stream.readInt32(exception); - member_tag_hash = stream.readByteArray(exception); - streams = stream.readByteArray(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(user_id); - stream.writeByteArray(member_tag_hash); - stream.writeByteArray(streams); - } - } - - public static class TL_groupCallParticipant extends GroupCallParticipant { - public static int constructor = 0x589db397; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - readonly = (flags & 1) != 0; - user_id = stream.readInt32(exception); - date = stream.readInt32(exception); - member_tag_hash = stream.readByteArray(exception); - streams = stream.readByteArray(exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = readonly ? 
(flags | 1) : (flags &~ 1); - stream.writeInt32(flags); - stream.writeInt32(user_id); - stream.writeInt32(date); - stream.writeByteArray(member_tag_hash); - stream.writeByteArray(streams); - } - } - - public static class TL_groupCallParticipantInvited extends GroupCallParticipant { - public static int constructor = 0x377496f0; - - - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - user_id = stream.readInt32(exception); - inviter_id = stream.readInt32(exception); - date = stream.readInt32(exception); - if ((flags & 1) != 0) { - phone_call = TL_inputPhoneCall.TLdeserialize(stream, stream.readInt32(exception), exception); - } - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(flags); - stream.writeInt32(user_id); - stream.writeInt32(inviter_id); - stream.writeInt32(date); - if ((flags & 1) != 0) { - phone_call.serializeToStream(stream); - } - } - } - public static class TL_fileHash extends TLObject { public static int constructor = 0x6242c773; @@ -42202,192 +42596,6 @@ public class TLRPC { } } - public static class TL_phone_createGroupCall extends TLObject { - public static int constructor = 0x8504e5b6; - - public int flags; - public InputChannel channel; - public int random_id; - public TL_phoneCallProtocol protocol; - public byte[] encryption_key; - public long key_fingerprint; - public byte[] streams; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return Updates.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(flags); - channel.serializeToStream(stream); - stream.writeInt32(random_id); - protocol.serializeToStream(stream); - if ((flags & 1) != 0) { - stream.writeByteArray(encryption_key); - } - stream.writeInt64(key_fingerprint); - 
stream.writeByteArray(streams); - } - } - - public static class TL_phone_joinGroupCall extends TLObject { - public static int constructor = 0x9db32d7; - - public TL_inputGroupCall call; - public byte[] streams; - public long key_fingerprint; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return Updates.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - call.serializeToStream(stream); - stream.writeByteArray(streams); - stream.writeInt64(key_fingerprint); - } - } - - public static class TL_phone_leaveGroupCall extends TLObject { - public static int constructor = 0x60e98e5f; - - public TL_inputGroupCall call; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return Updates.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - call.serializeToStream(stream); - } - } - - public static class TL_phone_editGroupCallMember extends TLObject { - public static int constructor = 0x46659be4; - - public int flags; - public boolean readonly; - public boolean kicked; - public TL_inputGroupCall call; - public InputUser user_id; - public byte[] streams; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return Updates.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = readonly ? (flags | 1) : (flags &~ 1); - flags = kicked ? 
(flags | 2) : (flags &~ 2); - stream.writeInt32(flags); - call.serializeToStream(stream); - user_id.serializeToStream(stream); - if ((flags & 4) != 0) { - stream.writeByteArray(streams); - } - } - } - - public static class TL_phone_inviteGroupCallMembers extends TLObject { - public static int constructor = 0xcc92a6dc; - - public int flags; - public boolean uninvite; - public TL_inputGroupCall call; - public ArrayList users = new ArrayList<>(); - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return Bool.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = uninvite ? (flags | 1) : (flags &~ 1); - stream.writeInt32(flags); - call.serializeToStream(stream); - stream.writeInt32(0x1cb5c415); - int count = users.size(); - stream.writeInt32(count); - for (int a = 0; a < count; a++) { - users.get(a).serializeToStream(stream); - } - } - } - - public static class TL_phone_discardGroupCall extends TLObject { - public static int constructor = 0x7a777135; - - public TL_inputGroupCall call; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return Updates.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - call.serializeToStream(stream); - } - } - - public static class TL_phone_getGroupCall extends TLObject { - public static int constructor = 0xc7cb017; - - public TL_inputGroupCall call; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return TL_phone_groupCall.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - call.serializeToStream(stream); - } - } - - public static class 
TL_phone_upgradePhoneCall extends TLObject { - public static int constructor = 0x98e3cdba; - - public int flags; - public TL_inputPhoneCall peer; - public byte[] encryption_key; - public long key_fingerprint; - public byte[] streams; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return TL_phone_groupCall.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(flags); - peer.serializeToStream(stream); - if ((flags & 1) != 0) { - stream.writeByteArray(encryption_key); - } - stream.writeInt64(key_fingerprint); - stream.writeByteArray(streams); - } - } - - public static class TL_phone_getCall extends TLObject { - public static int constructor = 0x8adb4f79; - - public TL_inputPhoneCall peer; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return TL_phone_phoneCall.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - peer.serializeToStream(stream); - } - } - public static class TL_phone_sendSignalingData extends TLObject { public static int constructor = 0xff7a9383; @@ -42405,6 +42613,205 @@ public class TLRPC { } } + public static class TL_phone_createGroupCall extends TLObject { + public static int constructor = 0xbd3dabe0; + + public InputPeer peer; + public int random_id; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + peer.serializeToStream(stream); + stream.writeInt32(random_id); + } + } + + public static class TL_phone_joinGroupCall extends TLObject { + public static int constructor = 0x5f9c8e62; + 
+ public int flags; + public boolean muted; + public TL_inputGroupCall call; + public TL_dataJSON params; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = muted ? (flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + call.serializeToStream(stream); + params.serializeToStream(stream); + } + } + + public static class TL_phone_leaveGroupCall extends TLObject { + public static int constructor = 0x500377f9; + + public TL_inputGroupCall call; + public int source; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + stream.writeInt32(source); + } + } + + public static class TL_phone_editGroupCallMember extends TLObject { + public static int constructor = 0x63146ae4; + + public int flags; + public boolean muted; + public TL_inputGroupCall call; + public InputUser user_id; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = muted ? 
(flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + call.serializeToStream(stream); + user_id.serializeToStream(stream); + } + } + + public static class TL_phone_inviteToGroupCall extends TLObject { + public static int constructor = 0x7b393160; + + public TL_inputGroupCall call; + public ArrayList users = new ArrayList<>(); + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + stream.writeInt32(0x1cb5c415); + int count = users.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + users.get(a).serializeToStream(stream); + } + } + } + + public static class TL_phone_discardGroupCall extends TLObject { + public static int constructor = 0x7a777135; + + public TL_inputGroupCall call; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + } + } + + public static class TL_phone_toggleGroupCallSettings extends TLObject { + public static int constructor = 0x74bbb43d; + + public int flags; + public TL_inputGroupCall call; + public boolean join_muted; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + call.serializeToStream(stream); + if ((flags & 1) != 0) { + stream.writeBool(join_muted); + } + } + } + + public static class TL_phone_getGroupCall extends TLObject { + public static int 
constructor = 0xc7cb017; + + public TL_inputGroupCall call; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return TL_phone_groupCall.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + } + } + + public static class TL_phone_getGroupParticipants extends TLObject { + public static int constructor = 0xc9f1d285; + + public TL_inputGroupCall call; + public ArrayList ids = new ArrayList<>(); + public ArrayList sources = new ArrayList<>(); + public String offset; + public int limit; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return TL_phone_groupParticipants.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + stream.writeInt32(0x1cb5c415); + int count = ids.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(ids.get(a)); + } + stream.writeInt32(0x1cb5c415); + count = sources.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(sources.get(a)); + } + stream.writeString(offset); + stream.writeInt32(limit); + } + } + + public static class TL_phone_checkGroupCall extends TLObject { + public static int constructor = 0xb74a7bea; + + public TL_inputGroupCall call; + public int source; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Bool.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + stream.writeInt32(source); + } + } + public static class TL_payments_getPaymentForm extends TLObject { public static 
int constructor = 0x99f09745; @@ -42759,36 +43166,6 @@ public class TLRPC { } } - public static class TL_wallet_sendLiteRequest extends TLObject { - public static int constructor = 0xe2c9d33e; - - public byte[] body; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return TL_wallet_liteResponse.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeByteArray(body); - } - } - - public static class TL_wallet_getKeySecretSalt extends TLObject { - public static int constructor = 0xb57f346; - - public boolean revoke; - - public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return TL_wallet_secretSalt.TLdeserialize(stream, constructor, exception); - } - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeBool(revoke); - } - } - public static class TL_stats_getBroadcastStats extends TLObject { public static int constructor = 0xab42441a; @@ -42896,6 +43273,23 @@ public class TLRPC { //manually created + public static class TL_photoPathSize extends PhotoSize { + public static int constructor = 0xd8214d41; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + type = stream.readString(exception); + bytes = stream.readByteArray(exception); + w = h = 50; + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(type); + stream.writeByteArray(bytes); + } + } + //RichText start public static abstract class RichText extends TLObject { public String url; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java index 6c8c57749..776a32d5b 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java @@ -44,6 +44,7 @@ import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.GroupCallPip; import org.telegram.ui.Components.LayoutHelper; import java.util.ArrayList; @@ -277,6 +278,7 @@ public class ActionBarLayout extends FrameLayout { public ArrayList fragmentsStack; private Rect rect = new Rect(); + private boolean delayedAnimationResumed; public ActionBarLayout(Context context) { super(context); @@ -732,7 +734,10 @@ public class ActionBarLayout extends FrameLayout { if (transitionAnimationPreviewMode || startedTracking || checkTransitionAnimation() || fragmentsStack.isEmpty()) { return; } - if (!currentActionBar.isActionModeShowed() && currentActionBar != null && currentActionBar.isSearchFieldVisible) { + if (GroupCallPip.onBackPressed()) { + return; + } + if (currentActionBar != null && !currentActionBar.isActionModeShowed() && currentActionBar.isSearchFieldVisible) { currentActionBar.closeSearchField(); return; } @@ -908,7 +913,8 @@ public class ActionBarLayout extends FrameLayout { } public void resumeDelayedFragmentAnimation() { - if (delayedOpenAnimationRunnable == null) { + delayedAnimationResumed = true; + if (delayedOpenAnimationRunnable == null || waitingForKeyboardCloseRunnable != null) { return; } AndroidUtilities.cancelRunOnUIThread(delayedOpenAnimationRunnable); @@ -1081,6 +1087,8 @@ public class ActionBarLayout extends FrameLayout { } fragment.onTransitionAnimationStart(true, false); } + + delayedAnimationResumed = false; oldFragment = currentFragment; newFragment = fragment; AnimatorSet animation = null; @@ -1099,6 +1107,9 @@ public class ActionBarLayout extends FrameLayout { containerView.setScaleY(1.0f); } if 
(containerView.isKeyboardVisible || containerViewBack.isKeyboardVisible) { + if (currentFragment != null) { + currentFragment.saveKeyboardPositionBeforeTransition(); + } waitingForKeyboardCloseRunnable = new Runnable() { @Override public void run() { @@ -1106,15 +1117,38 @@ public class ActionBarLayout extends FrameLayout { return; } waitingForKeyboardCloseRunnable = null; - if (!noDelay) { + if (noDelay) { if (currentFragment != null) { currentFragment.onTransitionAnimationStart(false, false); } fragment.onTransitionAnimationStart(true, false); + startLayoutAnimation(true, true, preview); + } else if (delayedOpenAnimationRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(delayedOpenAnimationRunnable); + if (delayedAnimationResumed) { + delayedOpenAnimationRunnable.run(); + } else { + AndroidUtilities.runOnUIThread(delayedOpenAnimationRunnable, 200); + } } - startLayoutAnimation(true, true, preview); } }; + if (fragment.needDelayOpenAnimation()) { + delayedOpenAnimationRunnable = new Runnable() { + @Override + public void run() { + if (delayedOpenAnimationRunnable != this) { + return; + } + delayedOpenAnimationRunnable = null; + if (currentFragment != null) { + currentFragment.onTransitionAnimationStart(false, false); + } + fragment.onTransitionAnimationStart(true, false); + startLayoutAnimation(true, true, preview); + } + }; + } AndroidUtilities.runOnUIThread(waitingForKeyboardCloseRunnable, SharedConfig.smoothKeyboard ? 
250 : 200); } else if (fragment.needDelayOpenAnimation()) { delayedOpenAnimationRunnable = new Runnable() { @@ -1124,9 +1158,6 @@ public class ActionBarLayout extends FrameLayout { return; } delayedOpenAnimationRunnable = null; - if (currentFragment != null) { - currentFragment.onTransitionAnimationStart(false, false); - } fragment.onTransitionAnimationStart(true, false); startLayoutAnimation(true, true, preview); } @@ -1136,8 +1167,9 @@ public class ActionBarLayout extends FrameLayout { startLayoutAnimation(true, true, preview); } } else { - containerView.setAlpha(1.0f); - containerView.setTranslationX(0.0f); + if (containerView.isKeyboardVisible || containerViewBack.isKeyboardVisible && currentFragment != null) { + currentFragment.saveKeyboardPositionBeforeTransition(); + } currentAnimation = animation; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java index e08db0e63..27769ed1e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java @@ -57,7 +57,6 @@ import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; -import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLRPC; import org.telegram.ui.Adapters.FiltersView; @@ -205,7 +204,7 @@ public class ActionBarMenuItem extends FrameLayout { @Override public boolean onTouchEvent(MotionEvent event) { if (event.getActionMasked() == MotionEvent.ACTION_DOWN) { - if (longClickEnabled && hasSubMenu() && (popupWindow == null || popupWindow != null && !popupWindow.isShowing())) { + if (longClickEnabled && hasSubMenu() && (popupWindow == null || !popupWindow.isShowing())) { showMenuRunnable = () -> { if (getParent() != 
null) { getParent().requestDisallowInterceptTouchEvent(true); @@ -215,7 +214,7 @@ public class ActionBarMenuItem extends FrameLayout { AndroidUtilities.runOnUIThread(showMenuRunnable, 200); } } else if (event.getActionMasked() == MotionEvent.ACTION_MOVE) { - if (showSubmenuByMove && hasSubMenu() && (popupWindow == null || popupWindow != null && !popupWindow.isShowing())) { + if (showSubmenuByMove && hasSubMenu() && (popupWindow == null || !popupWindow.isShowing())) { if (event.getY() > getHeight()) { if (getParent() != null) { getParent().requestDisallowInterceptTouchEvent(true); @@ -234,7 +233,8 @@ public class ActionBarMenuItem extends FrameLayout { for (int a = 0; a < popupLayout.getItemsCount(); a++) { View child = popupLayout.getItemAt(a); child.getHitRect(rect); - if ((Integer) child.getTag() < 100) { + Object tag = child.getTag(); + if (tag instanceof Integer && (Integer) tag < 100) { if (!rect.contains((int) x, (int) y)) { child.setPressed(false); child.setSelected(false); @@ -431,7 +431,7 @@ public class ActionBarMenuItem extends FrameLayout { public ActionBarMenuSubItem addSubItem(int id, int icon, CharSequence text, boolean needCheck) { createPopupLayout(); - ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getContext(), needCheck); + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getContext(), needCheck, false, false); cell.setTextAndIcon(text, icon); cell.setMinimumWidth(AndroidUtilities.dp(196)); cell.setTag(id); @@ -460,6 +460,22 @@ public class ActionBarMenuItem extends FrameLayout { return cell; } + public View addDivider(int color) { + createPopupLayout(); + + TextView cell = new TextView(getContext()); + cell.setBackgroundColor(color); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + popupLayout.addView(cell); + LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) cell.getLayoutParams(); + layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.height = 1; + layoutParams.topMargin = layoutParams.bottomMargin = 
AndroidUtilities.dp(3); + cell.setLayoutParams(layoutParams); + + return cell; + } + public void redrawPopup(int color) { if (popupLayout != null && popupLayout.getBackgroundColor() != color) { popupLayout.setBackgroundColor(color); @@ -501,6 +517,12 @@ public class ActionBarMenuItem extends FrameLayout { } } + public void setupPopupRadialSelectors(int color) { + if (popupLayout != null) { + popupLayout.setupRadialSelectors(color); + } + } + public boolean hasSubMenu() { return popupLayout != null; } @@ -567,6 +589,7 @@ public class ActionBarMenuItem extends FrameLayout { } else { updateOrShowPopup(true, false); } + popupLayout.updateRadialSelectors(); popupWindow.startAnimation(); } @@ -592,7 +615,6 @@ public class ActionBarMenuItem extends FrameLayout { if (listener != null) { Animator animator = listener.getCustomToggleTransition(); if (animator != null) { - searchField.setText(""); animator.start(); return true; } @@ -1043,6 +1065,18 @@ public class ActionBarMenuItem extends FrameLayout { } return super.onKeyDown(keyCode, event); } + + @Override + public boolean onTouchEvent(MotionEvent event) { + boolean result = super.onTouchEvent(event); + if (event.getAction() == MotionEvent.ACTION_UP) { //hack to fix android bug with not opening keyboard + if (!AndroidUtilities.showKeyboard(this)) { + clearFocus(); + requestFocus(); + } + } + return result; + } }; searchField.setScrollContainer(false); searchField.setCursorWidth(1.5f); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java index e93b078ab..d95710108 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java @@ -26,14 +26,20 @@ public class ActionBarMenuSubItem extends FrameLayout { private int iconColor = Theme.getColor(Theme.key_actionBarDefaultSubmenuItemIcon); private 
int selectorColor = Theme.getColor(Theme.key_dialogButtonSelector); - public ActionBarMenuSubItem(Context context) { - this(context, false); + boolean top; + boolean bottom; + + public ActionBarMenuSubItem(Context context, boolean top, boolean bottom) { + this(context, false, top, bottom); } - public ActionBarMenuSubItem(Context context, boolean needCheck) { + public ActionBarMenuSubItem(Context context, boolean needCheck, boolean top, boolean bottom) { super(context); - setBackground(Theme.createSelectorDrawable(selectorColor, 2)); + this.top = top; + this.bottom = bottom; + + updateBackground(); setPadding(AndroidUtilities.dp(18), 0, AndroidUtilities.dp(18), 0); imageView = new ImageView(context); @@ -55,7 +61,7 @@ public class ActionBarMenuSubItem extends FrameLayout { checkView.setImageResource(R.drawable.msg_text_check); checkView.setScaleType(ImageView.ScaleType.CENTER); checkView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_radioBackgroundChecked), PorterDuff.Mode.MULTIPLY)); - addView(checkView, LayoutHelper.createFrame(26, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); + addView(checkView, LayoutHelper.createFrame(26, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT))); } } @@ -64,19 +70,6 @@ public class ActionBarMenuSubItem extends FrameLayout { super.onMeasure(widthMeasureSpec, View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(48), View.MeasureSpec.EXACTLY)); } - @Override - protected void onLayout(boolean changed, int left, int top, int right, int bottom) { - super.onLayout(changed, left, top, right, bottom); - if (checkView != null) { - if (LocaleController.isRTL) { - left = getPaddingRight(); - } else { - left = getMeasuredWidth() - checkView.getMeasuredWidth() - getPaddingLeft(); - } - checkView.layout(left, checkView.getTop(), left + checkView.getMeasuredWidth(), checkView.getBottom()); - } - } - public void setChecked(boolean checked) { if (checkView == null) { return; @@ -84,9 +77,13 @@ public class ActionBarMenuSubItem extends FrameLayout { checkView.setVisibility(checked ? VISIBLE : INVISIBLE); } + public void setCheckColor(int color) { + checkView.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + } + public void setTextAndIcon(CharSequence text, int icon) { textView.setText(text); - if (icon != 0) { + if (icon != 0 || checkView != null) { imageView.setImageResource(icon); imageView.setVisibility(VISIBLE); textView.setPadding(LocaleController.isRTL ? 0 : AndroidUtilities.dp(43), 0, LocaleController.isRTL ? AndroidUtilities.dp(43) : 0, 0); @@ -127,7 +124,20 @@ public class ActionBarMenuSubItem extends FrameLayout { public void setSelectorColor(int selectorColor) { if (this.selectorColor != selectorColor) { - setBackground(Theme.createSelectorDrawable(this.selectorColor = selectorColor, 2)); + this.selectorColor = selectorColor; + updateBackground(); } } + + public void updateSelectorBackground(boolean top, boolean bottom) { + this.top = top; + this.bottom = bottom; + updateBackground(); + } + + private void updateBackground() { + int topBackgroundRadius = top ? 6 : 0; + int bottomBackgroundRadius = bottom ? 
6 : 0; + setBackground(Theme.createRadSelectorDrawable(selectorColor, topBackgroundRadius, bottomBackgroundRadius)); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java index 207e0b021..1963f3102 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java @@ -178,13 +178,18 @@ public class ActionBarPopupWindow extends PopupWindow { } } else { int count = getItemsCount(); - for (int a = lastStartedChild; a < count; a++) { + int h = 0; + for (int a = 0; a < count; a++) { View child = getItemAt(a); if (child.getVisibility() != VISIBLE) { continue; } + h += child.getMeasuredHeight(); + if (a < lastStartedChild) { + continue; + } Integer position = positions.get(child); - if (position != null && (position + 1) * AndroidUtilities.dp(48) - AndroidUtilities.dp(24) > value * height) { + if (position != null && h - AndroidUtilities.dp(24) > value * height) { break; } lastStartedChild = a + 1; @@ -294,6 +299,32 @@ public class ActionBarPopupWindow extends PopupWindow { child.setBackground(Theme.createRadSelectorDrawable(color, a == 0 ? 6 : 0, a == count - 1 ? 
6 : 0)); } } + + public void updateRadialSelectors() { + int count = linearLayout.getChildCount(); + View firstVisible = null; + View lastVisible = null; + for (int a = 0; a < count; a++) { + View child = linearLayout.getChildAt(a); + if (child.getVisibility() != View.VISIBLE) { + continue; + } + if (firstVisible == null) { + firstVisible = child; + } + lastVisible = child; + } + + for (int a = 0; a < count; a++) { + View child = linearLayout.getChildAt(a); + if (child.getVisibility() != View.VISIBLE) { + continue; + } + if (child instanceof ActionBarMenuSubItem) { + ((ActionBarMenuSubItem) child).updateSelectorBackground(child == firstVisible, child == lastVisible); + } + } + } } public ActionBarPopupWindow() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java index 6079f4543..d11e1e79e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java @@ -23,8 +23,7 @@ import java.util.ArrayList; public class AdjustPanLayoutHelper { - private final static float[] interpolatorValues = new float[]{0f, 0.11162791f, 0.40232557f, 0.6534884f, 0.7930232f, 0.8802326f, 0.9302326f, 0.96046513f, 0.9767442f, 0.9860465f, 0.99186045f, 0.9953488f, 0.9976744f, 0.99883723f, 1}; - public final static Interpolator keyboardInterpolator = CubicBezierInterpolator.DEFAULT;//new LookupTableInterpolator(interpolatorValues); + public final static Interpolator keyboardInterpolator = CubicBezierInterpolator.DEFAULT; public final static long keyboardDuration = 250; private final View parent; @@ -250,7 +249,6 @@ public class AdjustPanLayoutHelper { } - public void setResizableView(FrameLayout windowView) { resizableViewToSet = windowView; } @@ -258,40 +256,4 @@ public class AdjustPanLayoutHelper { public boolean animationInProgress() { return 
animationInProgress; } - - /** - * copy from androidx.interpolator.view.animation.LookupTableInterpolator - */ - static class LookupTableInterpolator implements Interpolator { - - private final float[] mValues; - private final float mStepSize; - - protected LookupTableInterpolator(float[] values) { - mValues = values; - mStepSize = 1f / (mValues.length - 1); - } - - @Override - public float getInterpolation(float input) { - if (input >= 1.0f) { - return 1.0f; - } - if (input <= 0f) { - return 0f; - } - - // Calculate index - We use min with length - 2 to avoid IndexOutOfBoundsException when - // we lerp (linearly interpolate) in the return statement - int position = Math.min((int) (input * (mValues.length - 1)), mValues.length - 2); - - // Calculate values to account for small offsets as the lookup table has discrete values - float quantized = position * mStepSize; - float diff = input - quantized; - float weight = diff / mStepSize; - - // Linearly interpolate between the table values - return mValues[position] + weight * (mValues[position + 1] - mValues[position]); - } - } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java index 26b9d59c6..206b8838a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java @@ -96,7 +96,8 @@ public class AlertDialog extends Dialog implements Drawable.Callback { private boolean canCacnel = true; private boolean dismissDialogByButtons = true; - + private boolean drawBackground; + private boolean notDrawBackgroundOnTopView; private RLottieImageView topImageView; private CharSequence positiveButtonText; private OnClickListener positiveButtonListener; @@ -125,6 +126,8 @@ public class AlertDialog extends Dialog implements Drawable.Callback { }; private ArrayList itemViews = new ArrayList<>(); + private float aspectRatio; + private 
boolean dimEnabled = true; public static class AlertDialogCell extends FrameLayout { @@ -273,8 +276,7 @@ public class AlertDialog extends Dialog implements Drawable.Callback { } if (topView != null) { int w = width - AndroidUtilities.dp(16); - float scale = w / 936.0f; - int h = (int) (354 * scale); + int h = (int) (w * aspectRatio); topView.measure(MeasureSpec.makeMeasureSpec(w, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(h, MeasureSpec.EXACTLY)); topView.getLayoutParams().height = h; availableHeight -= topView.getMeasuredHeight(); @@ -379,12 +381,41 @@ public class AlertDialog extends Dialog implements Drawable.Callback { public boolean hasOverlappingRendering() { return false; } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (drawBackground) { + shadowDrawable.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight()); + if (topView != null && notDrawBackgroundOnTopView) { + int clipTop = topView.getBottom(); + canvas.save(); + canvas.clipRect(0, clipTop, getMeasuredWidth(), getMeasuredHeight()); + shadowDrawable.draw(canvas); + canvas.restore(); + } else { + shadowDrawable.draw(canvas); + } + } + super.dispatchDraw(canvas); + } }; containerView.setOrientation(LinearLayout.VERTICAL); if (progressViewStyle == 3) { containerView.setBackgroundDrawable(null); + containerView.setPadding(0, 0, 0, 0); + drawBackground = false; } else { - containerView.setBackgroundDrawable(shadowDrawable); + if (notDrawBackgroundOnTopView) { + Rect rect = new Rect(); + shadowDrawable.getPadding(rect); + containerView.setPadding(rect.left, rect.top, rect.right, rect.bottom); + drawBackground = true; + } else { + containerView.setBackgroundDrawable(null); + containerView.setPadding(0, 0, 0, 0); + containerView.setBackgroundDrawable(shadowDrawable); + drawBackground = false; + } } containerView.setFitsSystemWindows(Build.VERSION.SDK_INT >= 21); setContentView(containerView); @@ -759,8 +790,10 @@ public class AlertDialog extends Dialog implements Drawable.Callback { 
if (progressViewStyle == 3) { params.width = WindowManager.LayoutParams.MATCH_PARENT; } else { - params.dimAmount = 0.6f; - params.flags |= WindowManager.LayoutParams.FLAG_DIM_BEHIND; + if (dimEnabled) { + params.dimAmount = 0.6f; + params.flags |= WindowManager.LayoutParams.FLAG_DIM_BEHIND; + } lastScreenWidth = AndroidUtilities.displaySize.x; final int calculatedWidth = AndroidUtilities.displaySize.x - AndroidUtilities.dp(48); @@ -796,6 +829,19 @@ public class AlertDialog extends Dialog implements Drawable.Callback { } } + public void setBackgroundColor(int color) { + shadowDrawable.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + } + + public void setTextColor(int color) { + if (titleTextView != null) { + titleTextView.setTextColor(color); + } + if (messageTextView != null) { + messageTextView.setTextColor(color); + } + } + private void showCancelAlert() { if (!canCacnel || cancelDialog != null) { return; @@ -1191,5 +1237,17 @@ public class AlertDialog extends Dialog implements Drawable.Callback { alertDialog.setOnDismissListener(onDismissListener); return this; } + + public void setTopViewAspectRatio(float aspectRatio) { + alertDialog.aspectRatio = aspectRatio; + } + + public void setDimEnabled(boolean dimEnabled) { + alertDialog.dimEnabled = dimEnabled; + } + + public void notDrawBackgroundOnTopView(boolean b) { + alertDialog.notDrawBackgroundOnTopView = b; + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java index 7156a9b5f..54dea6608 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java @@ -30,10 +30,10 @@ import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DownloadController; import org.telegram.messenger.FileLoader; -import 
org.telegram.messenger.MediaController; -import org.telegram.messenger.MediaDataController; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocationController; +import org.telegram.messenger.MediaController; +import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; @@ -593,4 +593,8 @@ public class BaseFragment { parentLayout.setFragmentPanTranslationOffset(offset); } } + + public void saveKeyboardPositionBeforeTransition() { + + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java index ba57c7b1a..465077462 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java @@ -67,6 +67,7 @@ public class BottomSheet extends Dialog { protected boolean keyboardVisible; private WindowInsets lastInsets; protected boolean drawNavigationBar; + protected boolean scrollNavBar; protected boolean useSmoothKeyboard; @@ -94,6 +95,7 @@ public class BottomSheet extends Dialog { protected ColorDrawable backDrawable = new ColorDrawable(0xff000000); protected boolean useLightStatusBar = true; + protected boolean useLightNavBar; protected String behindKeyboardColorKey = Theme.key_dialogBackground; protected int behindKeyboardColor; @@ -114,6 +116,7 @@ public class BottomSheet extends Dialog { private boolean focusable; private boolean dimBehind = true; + private int dimBehindAlpha = 51; protected boolean allowNestedScroll = true; @@ -137,6 +140,9 @@ public class BottomSheet extends Dialog { private boolean disableScroll; private float currentPanTranslationY; + protected String navBarColorKey = Theme.key_windowBackgroundGray; + protected int navBarColor; + public void setDisableScroll(boolean b) { disableScroll = b; } @@ 
-513,12 +519,16 @@ public class BottomSheet extends Dialog { super.dispatchDraw(canvas); if ((drawNavigationBar && bottomInset != 0) || currentPanTranslationY != 0) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - backgroundPaint.setColor(Theme.getColor(Theme.key_windowBackgroundGray)); + if (navBarColorKey != null) { + backgroundPaint.setColor(Theme.getColor(navBarColorKey)); + } else { + backgroundPaint.setColor(navBarColor); + } } else { backgroundPaint.setColor(0xff000000); } float translation = 0; - if (Build.VERSION.SDK_INT >= 29 && getAdditionalMandatoryOffsets() > 0) { + if (scrollNavBar || Build.VERSION.SDK_INT >= 29 && getAdditionalMandatoryOffsets() > 0) { float dist = containerView.getMeasuredHeight() - containerView.getTranslationY(); translation = Math.max(0, bottomInset - dist); } @@ -739,6 +749,9 @@ public class BottomSheet extends Dialog { container.setSystemUiVisibility(flags); } } + if (useLightNavBar && Build.VERSION.SDK_INT >= 26) { + AndroidUtilities.setLightNavigationBar(getWindow(), false); + } if (containerView == null) { containerView = new FrameLayout(getContext()) { @@ -871,14 +884,14 @@ public class BottomSheet extends Dialog { cancelSheetAnimation(); containerView.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.x + backgroundPaddingLeft * 2, View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.y, View.MeasureSpec.AT_MOST)); if (showWithoutAnimation) { - backDrawable.setAlpha(dimBehind ? 51 : 0); + backDrawable.setAlpha(dimBehind ? dimBehindAlpha : 0); containerView.setTranslationY(0); return; } backDrawable.setAlpha(0); if (Build.VERSION.SDK_INT >= 18) { layoutCount = 2; - containerView.setTranslationY(containerView.getMeasuredHeight()); + containerView.setTranslationY((Build.VERSION.SDK_INT >= 21 ? 
AndroidUtilities.statusBarHeight : 0) + containerView.getMeasuredHeight()); AndroidUtilities.runOnUIThread(startAnimationRunnable = new Runnable() { @Override public void run() { @@ -986,7 +999,7 @@ public class BottomSheet extends Dialog { currentSheetAnimation = new AnimatorSet(); currentSheetAnimation.playTogether( ObjectAnimator.ofFloat(containerView, View.TRANSLATION_Y, 0), - ObjectAnimator.ofInt(backDrawable, AnimationProperties.COLOR_DRAWABLE_ALPHA, dimBehind ? 51 : 0)); + ObjectAnimator.ofInt(backDrawable, AnimationProperties.COLOR_DRAWABLE_ALPHA, dimBehind ? dimBehindAlpha : 0)); currentSheetAnimation.setDuration(400); currentSheetAnimation.setStartDelay(20); currentSheetAnimation.setInterpolator(openInterpolator); @@ -1045,6 +1058,10 @@ public class BottomSheet extends Dialog { dimBehind = value; } + public void setDimBehindAlpha(int value) { + dimBehindAlpha = value; + } + public void setItemText(int item, CharSequence text) { if (item < 0 || item >= itemViews.size()) { return; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java index 3c6350688..7ac2e24f4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java @@ -83,6 +83,7 @@ import org.telegram.ui.Cells.ThemesHorizontalListCell; import org.telegram.ui.Components.AudioVisualizerDrawable; import org.telegram.ui.Components.BackgroundGradientDrawable; import org.telegram.ui.Components.CombinedDrawable; +import org.telegram.ui.Components.FragmentContextViewWavesDrawable; import org.telegram.ui.Components.MsgClockDrawable; import org.telegram.ui.Components.PathAnimator; import org.telegram.ui.Components.PlayingGameDrawable; @@ -92,7 +93,7 @@ import org.telegram.ui.Components.RoundStatusDrawable; import org.telegram.ui.Components.ScamDrawable; import org.telegram.ui.Components.SendingFileDrawable; import 
org.telegram.ui.Components.StatusDrawable; -import org.telegram.ui.Components.SvgHelper; +import org.telegram.messenger.SvgHelper; import org.telegram.ui.Components.ThemeEditorView; import org.telegram.ui.Components.TypingDotsDrawable; @@ -1973,6 +1974,7 @@ public class Theme { private static boolean isPatternWallpaper; public static Paint dividerPaint; + public static Paint dividerExtraPaint; public static Paint linkSelectionPaint; public static Paint checkboxSquare_eraserPaint; public static Paint checkboxSquare_checkPaint; @@ -2330,6 +2332,7 @@ public class Theme { public static final String key_inappPlayerClose = "inappPlayerClose"; public static final String key_returnToCallBackground = "returnToCallBackground"; + public static final String key_returnToCallMutedBackground = "returnToCallMutedBackground"; public static final String key_returnToCallText = "returnToCallText"; public static final String key_contextProgressInner1 = "contextProgressInner1"; @@ -2765,6 +2768,70 @@ public class Theme { public static final String key_chat_recordedVoiceHighlight = "key_chat_recordedVoiceHighlight"; public static final String key_chat_TextSelectionCursor = "chat_TextSelectionCursor"; + public static final String key_voipgroup_listSelector = "voipgroup_listSelector"; + public static final String key_voipgroup_inviteMembersBackground = "voipgroup_inviteMembersBackground"; + public static final String key_voipgroup_actionBar = "voipgroup_actionBar"; + public static final String key_voipgroup_emptyView = "voipgroup_emptyView"; + public static final String key_voipgroup_actionBarItems = "voipgroup_actionBarItems"; + public static final String key_voipgroup_actionBarSubtitle = "voipgroup_actionBarSubtitle"; + public static final String key_voipgroup_actionBarItemsSelector = "voipgroup_actionBarItemsSelector"; + public static final String key_voipgroup_actionBarUnscrolled = "voipgroup_actionBarUnscrolled"; + public static final String key_voipgroup_listViewBackgroundUnscrolled 
= "voipgroup_listViewBackgroundUnscrolled"; + public static final String key_voipgroup_lastSeenTextUnscrolled = "voipgroup_lastSeenTextUnscrolled"; + public static final String key_voipgroup_mutedIconUnscrolled = "voipgroup_mutedIconUnscrolled"; + public static final String key_voipgroup_nameText = "voipgroup_nameText"; + public static final String key_voipgroup_lastSeenText = "voipgroup_lastSeenText"; + public static final String key_voipgroup_listeningText = "voipgroup_listeningText"; + public static final String key_voipgroup_speakingText = "voipgroup_speakingText"; + public static final String key_voipgroup_mutedIcon = "voipgroup_mutedIcon"; + public static final String key_voipgroup_mutedByAdminIcon = "voipgroup_mutedByAdminIcon"; + public static final String key_voipgroup_listViewBackground = "voipgroup_listViewBackground"; + public static final String key_voipgroup_dialogBackground = "voipgroup_dialogBackground"; + public static final String key_voipgroup_leaveCallMenu = "voipgroup_leaveCallMenu"; + public static final String key_voipgroup_checkMenu = "voipgroup_checkMenu"; + public static final String key_voipgroup_soundButton = "voipgroup_soundButton"; + public static final String key_voipgroup_soundButtonActive = "voipgroup_soundButtonActive"; + public static final String key_voipgroup_soundButtonActiveScrolled = "voipgroup_soundButtonActiveScrolled"; + public static final String key_voipgroup_soundButton2 = "voipgroup_soundButton2"; + public static final String key_voipgroup_soundButtonActive2 = "voipgroup_soundButtonActive2"; + public static final String key_voipgroup_soundButtonActive2Scrolled = "voipgroup_soundButtonActive2Scrolled"; + public static final String key_voipgroup_leaveButton = "voipgroup_leaveButton"; + public static final String key_voipgroup_leaveButtonScrolled = "voipgroup_leaveButtonScrolled"; + public static final String key_voipgroup_muteButton = "voipgroup_muteButton"; + public static final String key_voipgroup_muteButton2 = 
"voipgroup_muteButton2"; + public static final String key_voipgroup_muteButton3 = "voipgroup_muteButton3"; + public static final String key_voipgroup_unmuteButton = "voipgroup_unmuteButton"; + public static final String key_voipgroup_unmuteButton2 = "voipgroup_unmuteButton2"; + public static final String key_voipgroup_disabledButton = "voipgroup_disabledButton"; + public static final String key_voipgroup_disabledButtonActive = "voipgroup_disabledButtonActive"; + public static final String key_voipgroup_disabledButtonActiveScrolled = "voipgroup_disabledButtonActiveScrolled"; + public static final String key_voipgroup_connectingProgress = "voipgroup_connectingProgress"; + public static final String key_voipgroup_blueText = "voipgroup_blueText"; + public static final String key_voipgroup_scrollUp = "voipgroup_scrollUp"; + public static final String key_voipgroup_searchPlaceholder = "voipgroup_searchPlaceholder"; + public static final String key_voipgroup_searchBackground = "voipgroup_searchBackground"; + public static final String key_voipgroup_searchText = "voipgroup_searchText"; + public static final String key_voipgroup_overlayGreen1 = "voipgroup_overlayGreen1"; + public static final String key_voipgroup_overlayGreen2 = "voipgroup_overlayGreen2"; + public static final String key_voipgroup_overlayBlue1 = "voipgroup_overlayBlue1"; + public static final String key_voipgroup_overlayBlue2 = "voipgroup_overlayBlue2"; + public static final String key_voipgroup_topPanelGreen1 = "voipgroup_topPanelGreen1"; + public static final String key_voipgroup_topPanelGreen2 = "voipgroup_topPanelGreen2"; + public static final String key_voipgroup_topPanelBlue1 = "voipgroup_topPanelBlue1"; + public static final String key_voipgroup_topPanelBlue2 = "voipgroup_topPanelBlue2"; + public static final String key_voipgroup_topPanelGray = "voipgroup_topPanelGray"; + public static final String key_voipgroup_overlayAlertGradientMuted = "voipgroup_overlayAlertGradientMuted"; + public static final 
String key_voipgroup_overlayAlertGradientMuted2 = "voipgroup_overlayAlertGradientMuted2"; + public static final String key_voipgroup_overlayAlertGradientUnmuted = "voipgroup_overlayAlertGradientUnmuted"; + public static final String key_voipgroup_overlayAlertGradientUnmuted2 = "voipgroup_overlayAlertGradientUnmuted2"; + public static final String key_voipgroup_overlayAlertMutedByAdmin = "voipgroup_overlayAlertMutedByAdmin"; + public static final String key_voipgroup_overlayAlertMutedByAdmin2 = "kvoipgroup_overlayAlertMutedByAdmin2"; + public static final String key_voipgroup_mutedByAdminGradient = "voipgroup_mutedByAdminGradient"; + public static final String key_voipgroup_mutedByAdminGradient2 = "voipgroup_mutedByAdminGradient2"; + public static final String key_voipgroup_mutedByAdminGradient3 = "voipgroup_mutedByAdminGradient3"; + public static final String key_voipgroup_mutedByAdminMuteButton = "voipgroup_mutedByAdminMuteButton"; + public static final String key_voipgroup_mutedByAdminMuteButtonDisabled = "voipgroup_mutedByAdminMuteButtonDisabled"; + public static final String key_passport_authorizeBackground = "passport_authorizeBackground"; public static final String key_passport_authorizeBackgroundSelected = "passport_authorizeBackgroundSelected"; public static final String key_passport_authorizeText = "passport_authorizeText"; @@ -3547,6 +3614,7 @@ public class Theme { defaultColors.put(key_inappPlayerClose, 0xff8b969b); defaultColors.put(key_returnToCallBackground, 0xff44a1e3); + defaultColors.put(key_returnToCallMutedBackground, 0xff9DA7B1); defaultColors.put(key_returnToCallText, 0xffffffff); defaultColors.put(key_sharedMedia_startStopLoadIcon, 0xff36a2ee); @@ -3644,6 +3712,72 @@ public class Theme { defaultColors.put(key_statisticChartLineEmpty, 0xFFEEEEEE); defaultColors.put(key_actionBarTipBackground, 0xFF446F94); + defaultColors.put(key_voipgroup_checkMenu, 0xff6BB6F9); + defaultColors.put(key_voipgroup_muteButton, 0xff77E55C); + 
defaultColors.put(key_voipgroup_muteButton2, 0xff7DDCAA); + defaultColors.put(key_voipgroup_muteButton3, 0xff56C7FE); + defaultColors.put(key_voipgroup_searchText, 0xffffffff); + defaultColors.put(key_voipgroup_searchPlaceholder, 0xff858D94); + defaultColors.put(key_voipgroup_searchBackground, 0xff303B47); + defaultColors.put(key_voipgroup_leaveCallMenu, 0xffFF7575); + defaultColors.put(key_voipgroup_scrollUp, 0xff394654); + defaultColors.put(key_voipgroup_soundButton, 0x7d2C414D); + defaultColors.put(key_voipgroup_soundButtonActive, 0x7d22A4EB); + defaultColors.put(key_voipgroup_soundButtonActiveScrolled, 0x8233B4FF); + defaultColors.put(key_voipgroup_soundButton2, 0x7d28593A); + defaultColors.put(key_voipgroup_soundButtonActive2, 0x7d18B751); + defaultColors.put(key_voipgroup_soundButtonActive2Scrolled, 0x8224BF46); + defaultColors.put(key_voipgroup_leaveButton, 0x7dF75C5C); + defaultColors.put(key_voipgroup_leaveButtonScrolled, 0x82D14D54); + defaultColors.put(key_voipgroup_connectingProgress, 0xff28BAFF); + defaultColors.put(key_voipgroup_disabledButton, 0xff1C2229); + defaultColors.put(key_voipgroup_disabledButtonActive, 0xff2C3A45); + defaultColors.put(key_voipgroup_disabledButtonActiveScrolled, 0x8277A1FC); + defaultColors.put(key_voipgroup_unmuteButton, 0xff539EF8); + defaultColors.put(key_voipgroup_unmuteButton2, 0xff66D4FB); + defaultColors.put(key_voipgroup_actionBarUnscrolled, 0xff191F26); + defaultColors.put(key_voipgroup_listViewBackgroundUnscrolled, 0xff222A33); + defaultColors.put(key_voipgroup_lastSeenTextUnscrolled, 0xff858D94); + defaultColors.put(key_voipgroup_mutedIconUnscrolled, 0xff7E868C); + defaultColors.put(key_voipgroup_actionBar, 0xff0F1317); + defaultColors.put(key_voipgroup_emptyView, 0xff1A1D21); + defaultColors.put(key_voipgroup_actionBarItems, 0xffffffff); + defaultColors.put(key_voipgroup_actionBarSubtitle, 0xff8A8A8A); + defaultColors.put(key_voipgroup_actionBarItemsSelector, 0x1eBADBFF); + 
defaultColors.put(key_voipgroup_mutedByAdminIcon, 0xffFF7070); + defaultColors.put(key_voipgroup_mutedIcon, 0xff6F7980); + defaultColors.put(key_voipgroup_lastSeenText, 0xff79838A); + defaultColors.put(key_voipgroup_nameText, 0xffffffff); + defaultColors.put(key_voipgroup_listViewBackground, 0xff1C2229); + defaultColors.put(key_voipgroup_dialogBackground, 0xff1C2229); + defaultColors.put(key_voipgroup_listeningText, 0xff4DB8FF); + defaultColors.put(key_voipgroup_speakingText, 0xff77EE7D); + defaultColors.put(key_voipgroup_listSelector, 0x0effffff); + defaultColors.put(key_voipgroup_inviteMembersBackground, 0xff222A33); + defaultColors.put(key_voipgroup_overlayBlue1, 0xff2BCEFF); + defaultColors.put(key_voipgroup_overlayBlue2, 0xff0976E3); + defaultColors.put(key_voipgroup_overlayGreen1, 0xff12B522); + defaultColors.put(key_voipgroup_overlayGreen2, 0xff00D6C1); + defaultColors.put(key_voipgroup_topPanelBlue1, 0xff60C7FB); + defaultColors.put(key_voipgroup_topPanelBlue2, 0xff519FF9); + defaultColors.put(key_voipgroup_topPanelGreen1, 0xff52CE5D); + defaultColors.put(key_voipgroup_topPanelGreen2, 0xff00B1C0); + defaultColors.put(key_voipgroup_topPanelGray, 0xff8599aa); + + defaultColors.put(key_voipgroup_overlayAlertGradientMuted, 0xff236D92); + defaultColors.put(key_voipgroup_overlayAlertGradientMuted2, 0xff2C4D6B); + defaultColors.put(key_voipgroup_overlayAlertGradientUnmuted, 0xff0C8A8C); + defaultColors.put(key_voipgroup_overlayAlertGradientUnmuted2, 0xff284C75); + defaultColors.put(key_voipgroup_mutedByAdminGradient, 0xff57A4FE); + defaultColors.put(key_voipgroup_mutedByAdminGradient2, 0xffF05459); + defaultColors.put(key_voipgroup_mutedByAdminGradient3, 0xff766EE9); + defaultColors.put(key_voipgroup_overlayAlertMutedByAdmin, 0xff67709E); + defaultColors.put(key_voipgroup_overlayAlertMutedByAdmin2, 0xff2F5078); + defaultColors.put(key_voipgroup_mutedByAdminMuteButton, 0x7F78A3FF); + defaultColors.put(key_voipgroup_mutedByAdminMuteButtonDisabled, 0x3378A3FF); + + + 
fallbackKeys.put(key_chat_inAdminText, key_chat_inTimeText); fallbackKeys.put(key_chat_inAdminSelectedText, key_chat_inTimeSelectedText); fallbackKeys.put(key_player_progressCachedBackground, key_player_progressBackground); @@ -3776,6 +3910,8 @@ public class Theme { fallbackKeys.put(key_chat_outAdminText, key_chat_outTimeText); fallbackKeys.put(key_chat_outAdminSelectedText, key_chat_outTimeSelectedText); + fallbackKeys.put(key_returnToCallMutedBackground, key_windowBackgroundWhite); + themeAccentExclusionKeys.addAll(Arrays.asList(keys_avatar_background)); themeAccentExclusionKeys.addAll(Arrays.asList(keys_avatar_nameInMessage)); themeAccentExclusionKeys.add(key_chat_attachFileBackground); @@ -3793,6 +3929,45 @@ public class Theme { themeAccentExclusionKeys.add(key_statisticChartLine_orange); themeAccentExclusionKeys.add(key_statisticChartLine_indigo); + themeAccentExclusionKeys.add(key_voipgroup_checkMenu); + themeAccentExclusionKeys.add(key_voipgroup_muteButton); + themeAccentExclusionKeys.add(key_voipgroup_muteButton2); + themeAccentExclusionKeys.add(key_voipgroup_muteButton3); + themeAccentExclusionKeys.add(key_voipgroup_searchText); + themeAccentExclusionKeys.add(key_voipgroup_searchPlaceholder); + themeAccentExclusionKeys.add(key_voipgroup_searchBackground); + themeAccentExclusionKeys.add(key_voipgroup_leaveCallMenu); + themeAccentExclusionKeys.add(key_voipgroup_scrollUp); + themeAccentExclusionKeys.add(key_voipgroup_blueText); + themeAccentExclusionKeys.add(key_voipgroup_soundButton); + themeAccentExclusionKeys.add(key_voipgroup_soundButtonActive); + themeAccentExclusionKeys.add(key_voipgroup_soundButton2); + themeAccentExclusionKeys.add(key_voipgroup_soundButtonActive2); + themeAccentExclusionKeys.add(key_voipgroup_leaveButton); + themeAccentExclusionKeys.add(key_voipgroup_connectingProgress); + themeAccentExclusionKeys.add(key_voipgroup_disabledButton); + themeAccentExclusionKeys.add(key_voipgroup_unmuteButton); + 
themeAccentExclusionKeys.add(key_voipgroup_unmuteButton2); + themeAccentExclusionKeys.add(key_voipgroup_actionBarUnscrolled); + themeAccentExclusionKeys.add(key_voipgroup_listViewBackgroundUnscrolled); + themeAccentExclusionKeys.add(key_voipgroup_lastSeenTextUnscrolled); + themeAccentExclusionKeys.add(key_voipgroup_mutedIconUnscrolled); + themeAccentExclusionKeys.add(key_voipgroup_actionBar); + themeAccentExclusionKeys.add(key_voipgroup_emptyView); + themeAccentExclusionKeys.add(key_voipgroup_actionBarItems); + themeAccentExclusionKeys.add(key_voipgroup_actionBarSubtitle); + themeAccentExclusionKeys.add(key_voipgroup_actionBarItemsSelector); + themeAccentExclusionKeys.add(key_voipgroup_mutedByAdminIcon); + themeAccentExclusionKeys.add(key_voipgroup_mutedIcon); + themeAccentExclusionKeys.add(key_voipgroup_lastSeenText); + themeAccentExclusionKeys.add(key_voipgroup_nameText); + themeAccentExclusionKeys.add(key_voipgroup_listViewBackground); + themeAccentExclusionKeys.add(key_voipgroup_listeningText); + themeAccentExclusionKeys.add(key_voipgroup_speakingText); + themeAccentExclusionKeys.add(key_voipgroup_listSelector); + themeAccentExclusionKeys.add(key_voipgroup_inviteMembersBackground); + themeAccentExclusionKeys.add(key_voipgroup_dialogBackground); + myMessagesColorKeys.add(key_chat_outGreenCall); myMessagesColorKeys.add(key_chat_outBubble); myMessagesColorKeys.add(key_chat_outBubbleSelected); @@ -4745,33 +4920,24 @@ public class Theme { } } - public static Drawable createRadSelectorDrawable(int color, int topRad, int bottomRad) { - Drawable drawable; + public static Drawable createCircleSelectorDrawable(int color, int leftInset, int rightInset) { if (Build.VERSION.SDK_INT >= 21) { maskPaint.setColor(0xffffffff); Drawable maskDrawable = new Drawable() { - private Path path = new Path(); - private RectF rect = new RectF(); - private float[] radii = new float[8]; - @Override public void draw(Canvas canvas) { - radii[0] = radii[1] = radii[2] = radii[3] = 
AndroidUtilities.dp(topRad); - radii[4] = radii[5] = radii[6] = radii[7] = AndroidUtilities.dp(bottomRad); - rect.set(getBounds()); - path.addRoundRect(rect, radii, Path.Direction.CW); - canvas.drawPath(path, maskPaint); + android.graphics.Rect bounds = getBounds(); + final int rad = (Math.max(bounds.width(), bounds.height()) / 2) + leftInset + rightInset; + canvas.drawCircle(bounds.centerX() - leftInset + rightInset, bounds.centerY(), rad, maskPaint); } @Override public void setAlpha(int alpha) { - } @Override public void setColorFilter(ColorFilter colorFilter) { - } @Override @@ -4793,6 +4959,85 @@ public class Theme { } } + public static class RippleRadMaskDrawable extends Drawable { + private Path path = new Path(); + private RectF rect = new RectF(); + private float[] radii = new float[8]; + private int topRad; + private int bottomRad; + + public RippleRadMaskDrawable(int top, int bottom) { + topRad = top; + bottomRad = bottom; + } + + public void setRadius(int top, int bottom) { + topRad = top; + bottomRad = bottom; + invalidateSelf(); + } + + @Override + public void draw(Canvas canvas) { + radii[0] = radii[1] = radii[2] = radii[3] = AndroidUtilities.dp(topRad); + radii[4] = radii[5] = radii[6] = radii[7] = AndroidUtilities.dp(bottomRad); + rect.set(getBounds()); + path.addRoundRect(rect, radii, Path.Direction.CW); + canvas.drawPath(path, maskPaint); + } + + @Override + public void setAlpha(int alpha) { + + } + + @Override + public void setColorFilter(ColorFilter colorFilter) { + + } + + @Override + public int getOpacity() { + return PixelFormat.UNKNOWN; + } + } + + public static void setMaskDrawableRad(Drawable rippleDrawable, int top, int bottom) { + if (Build.VERSION.SDK_INT < 21) { + return; + } + if (rippleDrawable instanceof RippleDrawable) { + RippleDrawable drawable = (RippleDrawable) rippleDrawable; + int count = drawable.getNumberOfLayers(); + for (int a = 0; a < count; a++) { + Drawable layer = drawable.getDrawable(a); + if (layer instanceof 
RippleRadMaskDrawable) { + drawable.setDrawableByLayerId(android.R.id.mask, new RippleRadMaskDrawable(top, bottom)); + break; + } + } + } + } + + public static Drawable createRadSelectorDrawable(int color, int topRad, int bottomRad) { + Drawable drawable; + if (Build.VERSION.SDK_INT >= 21) { + maskPaint.setColor(0xffffffff); + Drawable maskDrawable = new RippleRadMaskDrawable(topRad, bottomRad); + ColorStateList colorStateList = new ColorStateList( + new int[][]{StateSet.WILD_CARD}, + new int[]{color} + ); + return new RippleDrawable(colorStateList, null, maskDrawable); + } else { + StateListDrawable stateListDrawable = new StateListDrawable(); + stateListDrawable.addState(new int[]{android.R.attr.state_pressed}, new ColorDrawable(color)); + stateListDrawable.addState(new int[]{android.R.attr.state_selected}, new ColorDrawable(color)); + stateListDrawable.addState(StateSet.WILD_CARD, new ColorDrawable(0x00000000)); + return stateListDrawable; + } + } + public static void applyPreviousTheme() { if (previousTheme == null) { return; @@ -6495,6 +6740,9 @@ public class Theme { dividerPaint = new Paint(); dividerPaint.setStrokeWidth(1); + dividerExtraPaint = new Paint(); + dividerExtraPaint.setStrokeWidth(1); + avatar_backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); checkboxSquare_checkPaint = new Paint(Paint.ANTI_ALIAS_FLAG); @@ -6549,7 +6797,7 @@ public class Theme { dialogs_pinArchiveDrawable = new RLottieDrawable(R.raw.chats_hide, "chats_hide", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); dialogs_unpinArchiveDrawable = new RLottieDrawable(R.raw.chats_unhide, "chats_unhide", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); dialogs_hidePsaDrawable = new RLottieDrawable(R.raw.chat_audio_record_delete, "chats_psahide", AndroidUtilities.dp(30), AndroidUtilities.dp(30)); - + applyCommonTheme(); } } @@ -8177,4 +8425,13 @@ public class Theme { statusDrawable.setColor(getColor(key_chats_actionMessage)); return statusDrawable; } + + private static 
FragmentContextViewWavesDrawable fragmentContextViewWavesDrawable; + + public static FragmentContextViewWavesDrawable getFragmentContextViewWavesDrawable() { + if (fragmentContextViewWavesDrawable == null) { + fragmentContextViewWavesDrawable = new FragmentContextViewWavesDrawable(); + } + return fragmentContextViewWavesDrawable; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java index 7686a661d..310c42025 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java @@ -21,9 +21,13 @@ import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.TextView; +import androidx.recyclerview.widget.RecyclerView; +import androidx.viewpager.widget.ViewPager; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DialogObject; +import org.telegram.messenger.DownloadController; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; @@ -43,17 +47,15 @@ import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.LoadingCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.UserCell; -import org.telegram.ui.Components.PullForegroundDrawable; import org.telegram.ui.Components.CombinedDrawable; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.PullForegroundDrawable; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.DialogsActivity; import java.util.ArrayList; import java.util.Collections; - -import androidx.recyclerview.widget.RecyclerView; -import androidx.viewpager.widget.ViewPager; +import java.util.HashSet; public class DialogsAdapter extends RecyclerListView.SelectionAdapter { @@ -75,6 
+77,8 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { private long lastSortTime; private PullForegroundDrawable pullForegroundDrawable; + private DialogsPreloader preloader; + public DialogsAdapter(Context context, int type, int folder, boolean onlySelect, ArrayList selected, int account) { mContext = context; dialogsType = type; @@ -91,6 +95,9 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { archiveHintCell = new ArchiveHintCell(context); } } + if (folder == 0) { + this.preloader = new DialogsPreloader(); + } } public void setOpenedDialogId(long id) { @@ -302,6 +309,7 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { case 0: DialogCell dialogCell = new DialogCell(mContext, true, false, currentAccount); dialogCell.setArchivedPullAnimation(pullForegroundDrawable); + dialogCell.setPreloader(preloader); view = dialogCell; break; case 1: @@ -441,6 +449,9 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { } cell.setChecked(selectedDialogs.contains(dialog.id), false); cell.setDialog(dialog, dialogsType, folderId); + if (preloader != null && i < 10) { + preloader.add(dialog.id); + } break; } case 5: { @@ -543,4 +554,132 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { public void setArchivedPullDrawable(PullForegroundDrawable drawable) { pullForegroundDrawable = drawable; } + + public void didDatabaseCleared() { + if (preloader != null) { + preloader.clear(); + } + } + + public void resume() { + if (preloader != null) { + preloader.resume(); + } + } + + public void pause() { + if (preloader != null) { + preloader.pause(); + } + } + + public static class DialogsPreloader { + + private final int MAX_REQUEST_COUNT = 4; + private final int MAX_NETWORK_REQUEST_COUNT = 10 - MAX_REQUEST_COUNT; + private final int NETWORK_REQUESTS_RESET_TIME = 60_000; + + HashSet dialogsReadyMap = new HashSet<>(); + HashSet preloadedErrorMap = new HashSet<>(); + + 
HashSet loadingDialogs = new HashSet<>(); + ArrayList preloadDialogsPool = new ArrayList<>(); + int currentRequestCount; + int networkRequestCount; + + boolean resumed; + + Runnable clearNetworkRequestCount = () -> { + networkRequestCount = 0; + start(); + }; + + public void add(long dialog_id) { + if (isReady(dialog_id) || preloadedErrorMap.contains(dialog_id) || loadingDialogs.contains(dialog_id) || preloadDialogsPool.contains(dialog_id)) { + return; + } + preloadDialogsPool.add(dialog_id); + start(); + } + + private void start() { + if (!preloadIsAvilable() || !resumed || preloadDialogsPool.isEmpty() || currentRequestCount >= MAX_REQUEST_COUNT || networkRequestCount > MAX_NETWORK_REQUEST_COUNT) { + return; + } + long dialog_id = preloadDialogsPool.remove(0); + currentRequestCount++; + loadingDialogs.add(dialog_id); + MessagesController.getInstance(UserConfig.selectedAccount).ensureMessagesLoaded(dialog_id, 0, new MessagesController.MessagesLoadedCallback() { + @Override + public void onMessagesLoaded(boolean fromCache) { + AndroidUtilities.runOnUIThread(() -> { + if (!fromCache) { + networkRequestCount++; + if (networkRequestCount >= MAX_NETWORK_REQUEST_COUNT) { + AndroidUtilities.cancelRunOnUIThread(clearNetworkRequestCount); + AndroidUtilities.runOnUIThread(clearNetworkRequestCount, NETWORK_REQUESTS_RESET_TIME); + } + } + if (loadingDialogs.remove(dialog_id)) { + dialogsReadyMap.add(dialog_id); + updateList(); + currentRequestCount--; + start(); + } + }); + } + + @Override + public void onError() { + AndroidUtilities.runOnUIThread(() -> { + if (loadingDialogs.remove(dialog_id)) { + preloadedErrorMap.add(dialog_id); + currentRequestCount--; + start(); + } + }); + } + }); + } + + private boolean preloadIsAvilable() { + return false; + // return DownloadController.getInstance(UserConfig.selectedAccount).getCurrentDownloadMask() != 0; + } + + public void updateList() { + } + + public boolean isReady(long currentDialogId) { + return 
dialogsReadyMap.contains(currentDialogId); + } + + public boolean preloadedError(long currendDialogId) { + return preloadedErrorMap.contains(currendDialogId); + } + + public void remove(long currentDialogId) { + preloadDialogsPool.remove(currentDialogId); + } + + public void clear() { + dialogsReadyMap.clear(); + preloadedErrorMap.clear(); + loadingDialogs.clear(); + preloadDialogsPool.clear(); + currentRequestCount = 0; + networkRequestCount = 0; + AndroidUtilities.cancelRunOnUIThread(clearNetworkRequestCount); + updateList(); + } + + public void resume() { + resumed = true; + start(); + } + + public void pause() { + resumed = false; + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java index 12130568f..4787bdf93 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java @@ -12,8 +12,8 @@ import android.content.Context; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.TextUtils; -import android.text.style.ForegroundColorSpan; import android.util.LongSparseArray; +import android.util.SparseArray; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; @@ -42,7 +42,6 @@ import org.telegram.ui.Cells.DialogCell; import org.telegram.ui.Cells.GraySectionCell; import org.telegram.ui.Cells.HashtagSearchCell; import org.telegram.ui.Cells.HintDialogCell; -import org.telegram.ui.Cells.LoadingCell; import org.telegram.ui.Cells.ProfileSearchCell; import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Components.FlickerLoadingView; @@ -202,7 +201,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { searchResultHashtags.add(arrayList.get(a).hashtag); } if (delegate != null) { - delegate.searchStateChanged(false, false); + 
delegate.searchStateChanged(waitingResponseCount > 0, false); } notifyDataSetChanged(); } @@ -264,8 +263,6 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { req.limit = 20; req.q = query; req.filter = new TLRPC.TL_inputMessagesFilterEmpty(); - req.flags |= 1; - req.folder_id = folderId; if (query.equals(lastMessagesSearchString) && !searchResultMessages.isEmpty()) { MessageObject lastMessage = searchResultMessages.get(searchResultMessages.size() - 1); req.offset_id = lastMessage.getId(); @@ -290,9 +287,19 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { final ArrayList messageObjects = new ArrayList<>(); if (error == null) { TLRPC.messages_Messages res = (TLRPC.messages_Messages) response; + SparseArray chatsMap = new SparseArray<>(); + SparseArray usersMap = new SparseArray<>(); + for (int a = 0; a < res.chats.size(); a++) { + TLRPC.Chat chat = res.chats.get(a); + chatsMap.put(chat.id, chat); + } + for (int a = 0; a < res.users.size(); a++) { + TLRPC.User user = res.users.get(a); + usersMap.put(user.id, user); + } for (int a = 0; a < res.messages.size(); a++) { TLRPC.Message message = res.messages.get(a); - MessageObject messageObject = new MessageObject(currentAccount, message, false, true); + MessageObject messageObject = new MessageObject(currentAccount, message, usersMap, chatsMap, false, true); messageObjects.add(messageObject); messageObject.setQuery(query); } @@ -639,9 +646,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { searchResultNames.clear(); searchResultHashtags.clear(); searchAdapterHelper.mergeResults(null); - if (needMessagesSearch != 2) { - searchAdapterHelper.queryServerSearch(null, true, true, true, true, dialogsType == 2, 0, dialogsType == 0, 0, 0); - } + searchAdapterHelper.queryServerSearch(null, true, true, true, true, dialogsType == 2, 0, dialogsType == 0, 0, 0); searchWas = false; lastSearchId = 0; waitingResponseCount = 0; @@ -677,10 +682,8 
@@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { final int searchId = ++lastSearchId; waitingResponseCount = 3; notifyDataSetChanged(); - if (needMessagesSearch != 2 && delegate != null) { + if (delegate != null) { delegate.searchStateChanged(true, false); - } else { - waitingResponseCount--; } Utilities.searchQueue.postRunnable(searchRunnable = () -> { @@ -694,7 +697,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { if (needMessagesSearch != 2) { searchAdapterHelper.queryServerSearch(query, true, dialogsType != 4, true, dialogsType != 4, dialogsType == 2, 0, dialogsType == 0, 0, searchId); } else { - waitingResponseCount--; + waitingResponseCount -= 2; } if (needMessagesSearch == 0) { waitingResponseCount--; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DrawerLayoutAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DrawerLayoutAdapter.java index 421be5915..79ecc9a16 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DrawerLayoutAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DrawerLayoutAdapter.java @@ -9,10 +9,12 @@ package org.telegram.ui.Adapters; import android.content.Context; +import android.content.pm.PackageManager; import android.view.View; import android.view.ViewGroup; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; @@ -40,6 +42,7 @@ public class DrawerLayoutAdapter extends RecyclerListView.SelectionAdapter { private boolean accountsShown; private DrawerProfileCell profileCell; private SideMenultItemAnimator itemAnimator; + private boolean hasGps; public DrawerLayoutAdapter(Context context, SideMenultItemAnimator animator) { mContext = context; @@ -47,6 +50,11 @@ public class DrawerLayoutAdapter extends RecyclerListView.SelectionAdapter 
{ accountsShown = UserConfig.getActivatedAccountsCount() > 1 && MessagesController.getGlobalMainSettings().getBoolean("accountsShown", true); Theme.createDialogsResources(context); resetItems(); + try { + hasGps = ApplicationLoader.applicationContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_LOCATION_GPS); + } catch (Throwable e) { + hasGps = false; + } } private int getAccountRowsCount() { @@ -238,52 +246,60 @@ public class DrawerLayoutAdapter extends RecyclerListView.SelectionAdapter { int settingsIcon; int inviteIcon; int helpIcon; + int peopleNearbyIcon; if (eventType == 0) { newGroupIcon = R.drawable.menu_groups_ny; - newSecretIcon = R.drawable.menu_secret_ny; - newChannelIcon = R.drawable.menu_channel_ny; + //newSecretIcon = R.drawable.menu_secret_ny; + //newChannelIcon = R.drawable.menu_channel_ny; contactsIcon = R.drawable.menu_contacts_ny; callsIcon = R.drawable.menu_calls_ny; savedIcon = R.drawable.menu_bookmarks_ny; settingsIcon = R.drawable.menu_settings_ny; inviteIcon = R.drawable.menu_invite_ny; helpIcon = R.drawable.menu_help_ny; + peopleNearbyIcon = R.drawable.menu_nearby_ny; } else if (eventType == 1) { newGroupIcon = R.drawable.menu_groups_14; - newSecretIcon = R.drawable.menu_secret_14; - newChannelIcon = R.drawable.menu_broadcast_14; + //newSecretIcon = R.drawable.menu_secret_14; + //newChannelIcon = R.drawable.menu_broadcast_14; contactsIcon = R.drawable.menu_contacts_14; callsIcon = R.drawable.menu_calls_14; savedIcon = R.drawable.menu_bookmarks_14; settingsIcon = R.drawable.menu_settings_14; inviteIcon = R.drawable.menu_secret_ny; helpIcon = R.drawable.menu_help; + peopleNearbyIcon = R.drawable.menu_secret_14; } else if (eventType == 2) { newGroupIcon = R.drawable.menu_groups_hw; - newSecretIcon = R.drawable.menu_secret_hw; - newChannelIcon = R.drawable.menu_broadcast_hw; + //newSecretIcon = R.drawable.menu_secret_hw; + //newChannelIcon = R.drawable.menu_broadcast_hw; contactsIcon = R.drawable.menu_contacts_hw; callsIcon = 
R.drawable.menu_calls_hw; savedIcon = R.drawable.menu_bookmarks_hw; settingsIcon = R.drawable.menu_settings_hw; inviteIcon = R.drawable.menu_invite_hw; helpIcon = R.drawable.menu_help_hw; + peopleNearbyIcon = R.drawable.menu_secret_hw; } else { newGroupIcon = R.drawable.menu_groups; - newSecretIcon = R.drawable.menu_secret; - newChannelIcon = R.drawable.menu_broadcast; + //newSecretIcon = R.drawable.menu_secret; + //newChannelIcon = R.drawable.menu_broadcast; contactsIcon = R.drawable.menu_contacts; callsIcon = R.drawable.menu_calls; savedIcon = R.drawable.menu_saved; settingsIcon = R.drawable.menu_settings; inviteIcon = R.drawable.menu_invite; helpIcon = R.drawable.menu_help; + peopleNearbyIcon = R.drawable.menu_nearby; } items.add(new Item(2, LocaleController.getString("NewGroup", R.string.NewGroup), newGroupIcon)); - items.add(new Item(3, LocaleController.getString("NewSecretChat", R.string.NewSecretChat), newSecretIcon)); - items.add(new Item(4, LocaleController.getString("NewChannel", R.string.NewChannel), newChannelIcon)); + //items.add(new Item(3, LocaleController.getString("NewSecretChat", R.string.NewSecretChat), newSecretIcon)); + //items.add(new Item(4, LocaleController.getString("NewChannel", R.string.NewChannel), newChannelIcon)); items.add(new Item(6, LocaleController.getString("Contacts", R.string.Contacts), contactsIcon)); items.add(new Item(10, LocaleController.getString("Calls", R.string.Calls), callsIcon)); + if (hasGps) { + items.add(new Item(12, LocaleController.getString("PeopleNearby", R.string.PeopleNearby), peopleNearbyIcon)); + } items.add(new Item(11, LocaleController.getString("SavedMessages", R.string.SavedMessages), savedIcon)); items.add(new Item(8, LocaleController.getString("Settings", R.string.Settings), settingsIcon)); items.add(null); // divider diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java index 0b01b32d5..732484fa4 
100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java @@ -688,7 +688,7 @@ public class FiltersView extends RecyclerListView { } }; - public class FilterView extends FrameLayout { + public static class FilterView extends FrameLayout { BackupImageView avatarImageView; TextView titleView; @@ -832,7 +832,7 @@ public class FiltersView extends RecyclerListView { return super.onTouchEvent(e); } - private class UpdateCallback implements ListUpdateCallback { + private static class UpdateCallback implements ListUpdateCallback { final RecyclerView.Adapter adapter; boolean changed; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java index 3975d05b1..f207942c2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java @@ -563,7 +563,7 @@ public class MentionsAdapter extends RecyclerListView.SelectionAdapter { AndroidUtilities.cancelRunOnUIThread(searchGlobalRunnable); searchGlobalRunnable = null; } - if (TextUtils.isEmpty(text)) { + if (TextUtils.isEmpty(text) || text.length() > MessagesController.getInstance(currentAccount).maxMessageLength) { searchForContextBot(null, null); delegate.needChangePanelVisibility(false); lastText = null; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapter.java index 84123386f..39ee4ae6f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapter.java @@ -11,6 +11,7 @@ package org.telegram.ui.Adapters; import android.content.Context; import android.text.SpannableStringBuilder; import android.text.Spanned; +import 
android.text.TextUtils; import android.util.SparseArray; import android.view.View; import android.view.ViewGroup; @@ -58,6 +59,10 @@ public class SearchAdapter extends RecyclerListView.SelectionAdapter { private boolean allowSelf; private boolean allowPhoneNumbers; private int channelId; + private boolean searchInProgress; + private int searchReqId; + private int searchPointer; + public SearchAdapter(Context context, SparseArray arg1, boolean usernameSearch, boolean mutual, boolean chats, boolean bots, boolean self, boolean phones, int searchChannelId) { mContext = context; @@ -74,6 +79,9 @@ public class SearchAdapter extends RecyclerListView.SelectionAdapter { @Override public void onDataSetChanged(int searchId) { notifyDataSetChanged(); + if (searchId != 0) { + onSearchProgressChanged(); + } } @Override @@ -99,14 +107,13 @@ public class SearchAdapter extends RecyclerListView.SelectionAdapter { } catch (Exception e) { FileLog.e(e); } - if (query == null) { - searchResult.clear(); - searchResultNames.clear(); - if (allowUsernameSearch) { - searchAdapterHelper.queryServerSearch(null, true, allowChats, allowBots, allowSelf, false, channelId, allowPhoneNumbers, 0, 0); - } - notifyDataSetChanged(); - } else { + searchResult.clear(); + searchResultNames.clear(); + if (allowUsernameSearch) { + searchAdapterHelper.queryServerSearch(null, true, allowChats, allowBots, allowSelf, false, channelId, allowPhoneNumbers, 0, 0); + } + notifyDataSetChanged(); + if (!TextUtils.isEmpty(query)) { searchTimer = new Timer(); searchTimer.schedule(new TimerTask() { @Override @@ -126,14 +133,17 @@ public class SearchAdapter extends RecyclerListView.SelectionAdapter { private void processSearch(final String query) { AndroidUtilities.runOnUIThread(() -> { if (allowUsernameSearch) { - searchAdapterHelper.queryServerSearch(query, true, allowChats, allowBots, allowSelf, false, channelId, allowPhoneNumbers, -1, 0); + searchAdapterHelper.queryServerSearch(query, true, allowChats, allowBots, 
allowSelf, false, channelId, allowPhoneNumbers, -1, 1); } final int currentAccount = UserConfig.selectedAccount; final ArrayList contactsCopy = new ArrayList<>(ContactsController.getInstance(currentAccount).contacts); + searchInProgress = true; + searchReqId = searchPointer++; + int searchReqIdFinal = searchReqId; Utilities.searchQueue.postRunnable(() -> { String search1 = query.trim().toLowerCase(); if (search1.length() == 0) { - updateSearchResults(new ArrayList<>(), new ArrayList<>()); + updateSearchResults(searchReqIdFinal, new ArrayList<>(), new ArrayList<>()); return; } String search2 = LocaleController.getInstance().getTranslitString(search1); @@ -191,21 +201,32 @@ public class SearchAdapter extends RecyclerListView.SelectionAdapter { } } } - - updateSearchResults(resultArray, resultArrayNames); + updateSearchResults(searchReqIdFinal, resultArray, resultArrayNames); }); }); } - private void updateSearchResults(final ArrayList users, final ArrayList names) { + private void updateSearchResults(int searchReqIdFinal, final ArrayList users, final ArrayList names) { AndroidUtilities.runOnUIThread(() -> { - searchResult = users; - searchResultNames = names; - searchAdapterHelper.mergeResults(users); - notifyDataSetChanged(); + if (searchReqIdFinal == searchReqId) { + searchResult = users; + searchResultNames = names; + searchAdapterHelper.mergeResults(users); + searchInProgress = false; + notifyDataSetChanged(); + onSearchProgressChanged(); + } }); } + protected void onSearchProgressChanged() { + + } + + public boolean searchInProgress() { + return searchInProgress || searchAdapterHelper.isSearchInProgress(); + } + @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { int type = holder.getItemViewType(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapterHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapterHelper.java index 89470453e..49608be8f 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapterHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/SearchAdapterHelper.java @@ -49,6 +49,10 @@ public class SearchAdapterHelper { return null; } + default SparseArray getExcludeCallParticipants() { + return null; + } + default boolean canApplySearchResults(int searchId) { return true; } @@ -140,6 +144,7 @@ public class SearchAdapterHelper { final int currentReqId = ++channelLastReqId; channelReqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (currentReqId == channelLastReqId) { + channelReqId = 0; if (error == null) { TLRPC.TL_channels_channelParticipants res = (TLRPC.TL_channels_channelParticipants) response; lastFoundChannel = query.toLowerCase(); @@ -156,13 +161,13 @@ public class SearchAdapterHelper { } groupSearchMap.put(participant.user_id, participant); } + removeGroupSearchFromGlobal(); if (localSearchResults != null) { mergeResults(localSearchResults); } delegate.onDataSetChanged(searchId); } } - channelReqId = 0; }), ConnectionsManager.RequestFlagFailOnServerErrors); } else { lastFoundChannel = query.toLowerCase(); @@ -180,8 +185,10 @@ public class SearchAdapterHelper { req.limit = 50; final int currentReqId = ++lastReqId; reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { - if (currentReqId == lastReqId && delegate.canApplySearchResults(searchId)) { + if (currentReqId == lastReqId) { reqId = 0; + } + if (currentReqId == lastReqId && delegate.canApplySearchResults(searchId)) { if (error == null) { TLRPC.TL_contacts_found res = (TLRPC.TL_contacts_found) response; globalSearch.clear(); @@ -263,6 +270,7 @@ public class SearchAdapterHelper { } } } + removeGroupSearchFromGlobal(); lastFoundUsername = query.toLowerCase(); if (localSearchResults != null) { mergeResults(localSearchResults); @@ -308,6 +316,21 @@ 
public class SearchAdapterHelper { } } + private void removeGroupSearchFromGlobal() { + if (globalSearchMap.size() == 0) { + return; + } + for (int a = 0, N = groupSearchMap.size(); a < N; a++) { + int uid = groupSearchMap.keyAt(a); + TLRPC.User u = (TLRPC.User) globalSearchMap.get(uid); + if (u != null) { + globalSearch.remove(u); + localServerSearch.remove(u); + globalSearchMap.remove(u.id); + } + } + } + public void clear() { globalSearch.clear(); globalSearchMap.clear(); @@ -352,6 +375,20 @@ public class SearchAdapterHelper { return false; } + public void addGroupMembers(ArrayList participants) { + groupSearch.clear(); + groupSearch.addAll(participants); + for (int a = 0, N = participants.size(); a < N; a++) { + TLObject object = participants.get(a); + if (object instanceof TLRPC.ChatParticipant) { + groupSearchMap.put(((TLRPC.ChatParticipant) object).user_id, object); + } else if (object instanceof TLRPC.ChannelParticipant) { + groupSearchMap.put(((TLRPC.ChannelParticipant) object).user_id, object); + } + } + removeGroupSearchFromGlobal(); + } + public void mergeResults(ArrayList localResults) { localSearchResults = localResults; if (globalSearchMap.size() == 0 || localResults == null) { @@ -395,15 +432,25 @@ public class SearchAdapterHelper { return; } SparseArray ignoreUsers = delegate.getExcludeUsers(); - if (ignoreUsers == null) { - return; + if (ignoreUsers != null) { + for (int a = 0, size = ignoreUsers.size(); a < size; a++) { + TLRPC.User u = (TLRPC.User) globalSearchMap.get(ignoreUsers.keyAt(a)); + if (u != null) { + globalSearch.remove(u); + localServerSearch.remove(u); + globalSearchMap.remove(u.id); + } + } } - for (int a = 0, size = ignoreUsers.size(); a < size; a++) { - TLRPC.User u = (TLRPC.User) globalSearchMap.get(ignoreUsers.keyAt(a)); - if (u != null) { - globalSearch.remove(u); - localServerSearch.remove(u); - globalSearchMap.remove(u.id); + SparseArray ignoreParticipants = delegate.getExcludeCallParticipants(); + if (ignoreParticipants != 
null) { + for (int a = 0, size = ignoreParticipants.size(); a < size; a++) { + TLRPC.User u = (TLRPC.User) globalSearchMap.get(ignoreParticipants.keyAt(a)); + if (u != null) { + globalSearch.remove(u); + localServerSearch.remove(u); + globalSearchMap.remove(u.id); + } } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java index a9291717f..10522610e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java @@ -220,19 +220,22 @@ public class StickersAdapter extends RecyclerListView.SelectionAdapter implement public void loadStikersForEmoji(CharSequence emoji, boolean emojiOnly) { boolean searchEmoji = emoji != null && emoji.length() > 0 && emoji.length() <= 14; - String originalEmoji = emoji.toString(); - int length = emoji.length(); - for (int a = 0; a < length; a++) { - char ch = emoji.charAt(a); - char nch = a < length - 1 ? emoji.charAt(a + 1) : 0; - if (a < length - 1 && ch == 0xD83C && nch >= 0xDFFB && nch <= 0xDFFF) { - emoji = TextUtils.concat(emoji.subSequence(0, a), emoji.subSequence(a + 2, emoji.length())); - length -= 2; - a--; - } else if (ch == 0xfe0f) { - emoji = TextUtils.concat(emoji.subSequence(0, a), emoji.subSequence(a + 1, emoji.length())); - length--; - a--; + String originalEmoji = ""; + if (searchEmoji) { + originalEmoji = emoji.toString(); + int length = emoji.length(); + for (int a = 0; a < length; a++) { + char ch = emoji.charAt(a); + char nch = a < length - 1 ? 
emoji.charAt(a + 1) : 0; + if (a < length - 1 && ch == 0xD83C && nch >= 0xDFFB && nch <= 0xDFFF) { + emoji = TextUtils.concat(emoji.subSequence(0, a), emoji.subSequence(a + 2, emoji.length())); + length -= 2; + a--; + } else if (ch == 0xfe0f) { + emoji = TextUtils.concat(emoji.subSequence(0, a), emoji.subSequence(a + 1, emoji.length())); + length--; + a--; + } } } lastSticker = emoji.toString().trim(); @@ -403,6 +406,10 @@ public class StickersAdapter extends RecyclerListView.SelectionAdapter implement } } + public String getQuery() { + return lastSticker; + } + public boolean isShowingKeywords() { return keywordResults != null && !keywordResults.isEmpty(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java index 40cb3b839..71b620a3a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java @@ -1,7 +1,6 @@ package org.telegram.ui.Adapters; import android.content.Context; -import android.graphics.Color; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.text.TextUtils; @@ -340,7 +339,7 @@ public class StickersSearchAdapter extends RecyclerListView.SelectionAdapter { View view = null; switch (viewType) { case 0: - StickerEmojiCell stickerEmojiCell = new StickerEmojiCell(context) { + StickerEmojiCell stickerEmojiCell = new StickerEmojiCell(context, false) { public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(82), MeasureSpec.EXACTLY)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java index 1d886d596..9f5f4725f 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java @@ -14,12 +14,12 @@ import android.content.SharedPreferences; import android.os.Build; import android.os.Environment; import android.os.StatFs; +import android.text.TextUtils; import android.transition.ChangeBounds; import android.transition.Fade; import android.transition.TransitionManager; import android.transition.TransitionSet; import android.view.Gravity; -import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; @@ -41,6 +41,7 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.MessagesStorage; +import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserConfig; @@ -154,31 +155,49 @@ public class CacheControlActivity extends BaseFragment { audioSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_AUDIO), 0); totalSize = cacheSize + videoSize + audioSize + photoSize + documentsSize + musicSize + stickersSize; - File path = Environment.getDataDirectory(); - StatFs stat = new StatFs(path.getPath()); + File path; + if (Build.VERSION.SDK_INT >= 19) { + ArrayList storageDirs = AndroidUtilities.getRootDirs(); + String dir = (path = storageDirs.get(0)).getAbsolutePath(); + if (!TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + for (int a = 0, N = storageDirs.size(); a < N; a++) { + File file = storageDirs.get(a); + if (file.getAbsolutePath().startsWith(SharedConfig.storageCacheDir)) { + path = file; + break; + } + } + } + } else { + path = new File(SharedConfig.storageCacheDir); + } + try { + StatFs stat = new StatFs(path.getPath()); + long blockSize; + long blockSizeExternal; + if 
(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR2) { + blockSize = stat.getBlockSizeLong(); + } else { + blockSize = stat.getBlockSize(); + } + long availableBlocks; + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR2) { + availableBlocks = stat.getAvailableBlocksLong(); + } else { + availableBlocks = stat.getAvailableBlocks(); + } + long blocksTotal; + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR2) { + blocksTotal = stat.getBlockCountLong(); + } else { + blocksTotal = stat.getBlockCount(); + } - long blockSize; - long blockSizeExternal; - if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR2) { - blockSize = stat.getBlockSizeLong(); - } else { - blockSize = stat.getBlockSize(); + totalDeviceSize = blocksTotal * blockSize; + totalDeviceFreeSize = availableBlocks * blockSize; + } catch (Exception e) { + FileLog.e(e); } - long availableBlocks; - if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR2) { - availableBlocks = stat.getAvailableBlocksLong(); - } else { - availableBlocks = stat.getAvailableBlocks(); - } - long blocksTotal; - if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR2) { - blocksTotal = stat.getBlockCountLong(); - } else { - blocksTotal = stat.getBlockCount(); - } - - totalDeviceSize = blocksTotal * blockSize; - totalDeviceFreeSize = availableBlocks * blockSize; AndroidUtilities.runOnUIThread(() -> { calculating = false; @@ -515,6 +534,7 @@ public class CacheControlActivity extends BaseFragment { final AlertDialog progressDialog = new AlertDialog(getParentActivity(), 3); progressDialog.setCanCacnel(false); progressDialog.showDelayed(500); + MessagesController.getInstance(currentAccount).clearQueryTime(); MessagesStorage.getInstance(currentAccount).getStorageQueue().postRunnable(() -> { try { SQLiteDatabase database = 
MessagesStorage.getInstance(currentAccount).getDatabase(); @@ -604,6 +624,7 @@ public class CacheControlActivity extends BaseFragment { databaseSize = MessagesStorage.getInstance(currentAccount).getDatabaseSize(); listAdapter.notifyDataSetChanged(); } + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.didDatabaseCleared); }); } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java index 1cc01996d..7806486e9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java @@ -27,11 +27,13 @@ import android.widget.FrameLayout; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; @@ -163,12 +165,11 @@ public class ArchivedStickerSetCell extends FrameLayout implements Checkable { sticker = null; } if (sticker != null) { - TLObject object; - if (set.set.thumb instanceof TLRPC.TL_photoSize || set.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { - object = set.set.thumb; - } else { + TLObject object = FileLoader.getClosestPhotoSizeWithSize(set.set.thumbs, 90); + if (object == null) { object = sticker; } + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(set.set.thumbs, Theme.key_windowBackgroundGray, 1.0f); ImageLocation imageLocation; if (object instanceof TLRPC.Document) { @@ -180,11 +181,15 @@ public class ArchivedStickerSetCell extends FrameLayout implements 
Checkable { } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { - imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + if (svgThumb != null) { + imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", svgThumb, 0, set); + } else { + imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + } } else if (imageLocation != null && imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE) { - imageView.setImage(imageLocation, "50_50", "tgs", null, set); + imageView.setImage(imageLocation, "50_50", "tgs", svgThumb, set); } else { - imageView.setImage(imageLocation, "50_50", "webp", null, set); + imageView.setImage(imageLocation, "50_50", "webp", svgThumb, set); } } else { imageView.setImage(null, null, "webp", null, set); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java index a81850bfa..a3be227b3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java @@ -145,7 +145,7 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD } } - public void setCustomText(String text) { + public void setCustomText(CharSequence text) { customText = text; if (customText != null) { updateTextInternal(false); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java index 264d36633..8419305aa 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java @@ -1639,7 +1639,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } private boolean checkPhotoImageMotionEvent(MotionEvent event) { - if 
(!drawPhotoImage && documentAttachType != DOCUMENT_ATTACH_TYPE_DOCUMENT) { + if (!drawPhotoImage && documentAttachType != DOCUMENT_ATTACH_TYPE_DOCUMENT || currentMessageObject.isSending() && buttonState != 1) { return false; } @@ -2617,7 +2617,9 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } if (replyTextLayout == null && currentMessageObject.replyMessageObject != null) { - return true; + if (!isThreadChat || currentMessageObject.replyMessageObject.messageOwner.fwd_from == null || currentMessageObject.replyMessageObject.messageOwner.fwd_from.channel_post == 0) { + return true; + } } if (currentPhoto == null && newPhoto != null || currentPhoto != null && newPhoto == null || currentPhoto != null && (currentPhoto.local_id != newPhoto.local_id || currentPhoto.volume_id != newPhoto.volume_id)) { @@ -3149,7 +3151,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } else { maxWidth = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) - AndroidUtilities.dp(80); } - drawName = isPinnedChat || messageObject.messageOwner.peer_id.channel_id != 0 && (!messageObject.isOutOwner() || messageObject.isMegagroup()); + drawName = isPinnedChat || messageObject.messageOwner.peer_id.channel_id != 0 && (!messageObject.isOutOwner() || messageObject.isSupergroup()); } availableTimeWidth = maxWidth; @@ -3361,7 +3363,8 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (site_name != null && photo != null && site_name.toLowerCase().equals("instagram")) { linkPreviewMaxWidth = Math.max(AndroidUtilities.displaySize.y / 3, currentMessageObject.textWidth); } - boolean isSmallImageType = "app".equals(type) || "profile".equals(type) || "article".equals(type); + boolean isSmallImageType = "app".equals(type) || "profile".equals(type) || "article".equals(type) || + "telegram_bot".equals(type) || "telegram_user".equals(type) || "telegram_channel".equals(type) || 
"telegram_megagroup".equals(type); smallImage = !slideshow && !drawInstantView && document == null && isSmallImageType; isSmallImage = !slideshow && !drawInstantView && document == null && description != null && type != null && isSmallImageType && currentMessageObject.photoThumbs != null; } else if (hasInvoicePreview) { @@ -4095,7 +4098,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate namesOffset -= AndroidUtilities.dp(1); } } else if (messageObject.type == 12) { - drawName = messageObject.isFromGroup() && messageObject.isMegagroup(); + drawName = messageObject.isFromGroup() && messageObject.isSupergroup(); drawForwardedName = !isRepliesChat; drawPhotoImage = true; photoImage.setRoundRadius(AndroidUtilities.dp(22)); @@ -4178,7 +4181,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } } else if (messageObject.type == 2) { drawForwardedName = !isRepliesChat; - drawName = messageObject.isFromGroup() && messageObject.isMegagroup(); + drawName = messageObject.isFromGroup() && messageObject.isSupergroup(); if (AndroidUtilities.isTablet()) { backgroundWidth = Math.min(AndroidUtilities.getMinTabletSide() - AndroidUtilities.dp(drawAvatar ? 102 : 50), AndroidUtilities.dp(270)); } else { @@ -4193,7 +4196,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate namesOffset -= AndroidUtilities.dp(1); } } else if (messageObject.type == 14) { - drawName = messageObject.isFromGroup() && messageObject.isMegagroup() && (currentPosition == null || (currentPosition.flags & MessageObject.POSITION_FLAG_TOP) != 0); + drawName = messageObject.isFromGroup() && messageObject.isSupergroup() && (currentPosition == null || (currentPosition.flags & MessageObject.POSITION_FLAG_TOP) != 0); if (AndroidUtilities.isTablet()) { backgroundWidth = Math.min(AndroidUtilities.getMinTabletSide() - AndroidUtilities.dp(drawAvatar ? 
102 : 50), AndroidUtilities.dp(270)); } else { @@ -4482,7 +4485,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } else { drawForwardedName = messageObject.messageOwner.fwd_from != null && !messageObject.isAnyKindOfSticker(); if (!messageObject.isAnyKindOfSticker() && messageObject.type != MessageObject.TYPE_ROUND_VIDEO) { - drawName = messageObject.isFromGroup() && messageObject.isMegagroup() && (currentPosition == null || (currentPosition.flags & MessageObject.POSITION_FLAG_TOP) != 0); + drawName = messageObject.isFromGroup() && messageObject.isSupergroup() && (currentPosition == null || (currentPosition.flags & MessageObject.POSITION_FLAG_TOP) != 0); } mediaBackground = isMedia = messageObject.type != 9; drawImageButton = true; @@ -4815,7 +4818,11 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate photoImage.setRoundRadius(0); canChangeRadius = false; - if (messageObject.attachPathExists) { + if (messageObject.pathThumb != null) { + photoImage.setImage(ImageLocation.getForDocument(messageObject.getDocument()), filter, + messageObject.pathThumb, + messageObject.getDocument().size, isWebpSticker ? "webp" : null, parentObject, 1); + } else if (messageObject.attachPathExists) { photoImage.setImage(ImageLocation.getForPath(messageObject.messageOwner.attachPath), filter, ImageLocation.getForObject(currentPhotoObjectThumb, photoParentObject), "b1", messageObject.getDocument().size, isWebpSticker ? 
"webp" : null, parentObject, 1); @@ -5206,10 +5213,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } } else { String filterNew = String.format(Locale.US, "%d_%d", (int) (w / AndroidUtilities.density), (int) (h / AndroidUtilities.density)); - if (messageObject.needDrawBluredPreview()) { - filterNew += "_b2"; - } - if (filterNew != currentPhotoFilter) { + if (!messageObject.needDrawBluredPreview() && !filterNew.equals(currentPhotoFilter)) { ImageLocation location = ImageLocation.getForObject(currentPhotoObject, photoParentObject); if (location != null) { String key = location.getKey(photoParentObject, null, false) + "@" + currentPhotoFilter; @@ -6576,7 +6580,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate viewsLayout = null; } - if (currentRepliesString != null) { + if (currentRepliesString != null && !currentMessageObject.scheduled) { repliesLayout = new StaticLayout(currentRepliesString, Theme.chat_timePaint, repliesTextWidth, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); } else { repliesLayout = null; @@ -7954,7 +7958,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } else { autoDownload = DownloadController.getInstance(currentAccount).canDownloadMedia(currentMessageObject); } - canStreamVideo = currentMessageObject.isSent() && (documentAttachType == DOCUMENT_ATTACH_TYPE_VIDEO || documentAttachType == DOCUMENT_ATTACH_TYPE_ROUND || documentAttachType == DOCUMENT_ATTACH_TYPE_GIF && autoDownload) && currentMessageObject.canStreamVideo() && !currentMessageObject.needDrawBluredPreview(); + canStreamVideo = (currentMessageObject.isSent() || currentMessageObject.isForwarded()) && (documentAttachType == DOCUMENT_ATTACH_TYPE_VIDEO || documentAttachType == DOCUMENT_ATTACH_TYPE_ROUND || documentAttachType == DOCUMENT_ATTACH_TYPE_GIF && autoDownload) && currentMessageObject.canStreamVideo() && !currentMessageObject.needDrawBluredPreview(); if 
(SharedConfig.streamMedia && (int) currentMessageObject.getDialogId() != 0 && !currentMessageObject.isSecretMedia() && (documentAttachType == DOCUMENT_ATTACH_TYPE_MUSIC || canStreamVideo && currentPosition != null && ((currentPosition.flags & MessageObject.POSITION_FLAG_LEFT) == 0 || (currentPosition.flags & MessageObject.POSITION_FLAG_RIGHT) == 0))) { @@ -7971,7 +7975,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate boolean fromBot = currentMessageObject.messageOwner.params != null && currentMessageObject.messageOwner.params.containsKey("query_id"); if (documentAttachType == DOCUMENT_ATTACH_TYPE_AUDIO || documentAttachType == DOCUMENT_ATTACH_TYPE_MUSIC) { - if (currentMessageObject.isOut() && (currentMessageObject.isSending() || currentMessageObject.isEditing()) || currentMessageObject.isSendError() && fromBot) { + if (currentMessageObject.isOut() && (currentMessageObject.isSending() && !currentMessageObject.isForwarded() || currentMessageObject.isEditing() && currentMessageObject.isEditingMedia()) || currentMessageObject.isSendError() && fromBot) { if (!TextUtils.isEmpty(currentMessageObject.messageOwner.attachPath)) { DownloadController.getInstance(currentAccount).addLoadingFileObserver(currentMessageObject.messageOwner.attachPath, currentMessageObject, this); wasSending = true; @@ -8106,7 +8110,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate radialProgress.setIcon(getIconForCurrentState(), ifSame, animated); invalidate(); } else { - if (currentMessageObject.isOut() && (currentMessageObject.isSending() || currentMessageObject.isEditing())) { + if (currentMessageObject.isOut() && (currentMessageObject.isSending() && !currentMessageObject.isForwarded() || currentMessageObject.isEditing() && currentMessageObject.isEditingMedia())) { if (!TextUtils.isEmpty(currentMessageObject.messageOwner.attachPath)) { 
DownloadController.getInstance(currentAccount).addLoadingFileObserver(currentMessageObject.messageOwner.attachPath, currentMessageObject, this); wasSending = true; @@ -8142,7 +8146,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate invalidate(); } else { getIconForCurrentState(); - if (currentMessageObject.isSticker() || currentMessageObject.isAnimatedSticker() || currentMessageObject.isLocation()) { + if (currentMessageObject.isSticker() || currentMessageObject.isAnimatedSticker() || currentMessageObject.isLocation() || currentMessageObject.isGif()) { buttonState = -1; radialProgress.setIcon(MediaActionDrawable.ICON_NONE, ifSame, false); } else { @@ -8872,7 +8876,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (messageObject.scheduled) { if (messageObject.isSendError()) { timeWidth += AndroidUtilities.dp(18); - } else if (messageObject.isSending() && messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isMegagroup()) { + } else if (messageObject.isSending() && messageObject.messageOwner.peer_id.channel_id != 0 && !messageObject.isSupergroup()) { timeWidth += AndroidUtilities.dp(18); } } @@ -9404,7 +9408,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } private boolean isNeedAuthorName() { - return isPinnedChat && currentMessageObject.type == 0 || (!pinnedTop || ChatObject.isChannel(currentChat) && !currentChat.megagroup) && drawName && isChat && (!currentMessageObject.isOutOwner() || currentMessageObject.isMegagroup() && currentMessageObject.isFromGroup()); + return isPinnedChat && currentMessageObject.type == 0 || (!pinnedTop || ChatObject.isChannel(currentChat) && !currentChat.megagroup) && drawName && isChat && (!currentMessageObject.isOutOwner() || currentMessageObject.isSupergroup() && currentMessageObject.isFromGroup()); } private String getAuthorName() { @@ -10249,7 +10253,7 @@ public class ChatMessageCell extends BaseCell 
implements SeekBar.SeekBarDelegate nameX = backgroundDrawableLeft + transitionParams.deltaLeft + AndroidUtilities.dp(!mediaBackground && drawPinnedBottom ? 11 : 17) - nameOffsetX + getExtraTextX(); } if (currentUser != null) { - if (currentBackgroundDrawable.hasGradient()) { + if (currentBackgroundDrawable != null && currentBackgroundDrawable.hasGradient()) { Theme.chat_namePaint.setColor(Theme.getColor(Theme.key_chat_messageTextOut)); } else { Theme.chat_namePaint.setColor(AvatarDrawable.getNameColorForId(currentUser.id)); @@ -10977,7 +10981,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } else { r = AndroidUtilities.dp(4); } - float x1 = timeX - AndroidUtilities.dp(bigRadius ? 6 : 4); + float x1 = timeX - AndroidUtilities.dp(bigRadius ? 6 : 4) + transitionParams.deltaRight; float timeY = photoImage.getImageY2() + additionalTimeOffsetY; float y1 = timeY - AndroidUtilities.dp(23); rect.set(x1, y1, x1 + timeWidth + AndroidUtilities.dp((bigRadius ? 12 : 8) + (currentMessageObject.isOutOwner() ? 
20 : 0)), y1 + AndroidUtilities.dp(17)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java index 13ccf5417..97b784455 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java @@ -195,7 +195,7 @@ public class CheckBoxCell extends FrameLayout { if (checkBoxRound != null) { return checkBoxRound.isChecked(); } else { - return checkBoxSquare.isChecked(); + return checkBoxSquare.isChecked(); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java index 339992364..784e1dff1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java @@ -326,18 +326,18 @@ public class ContextLinkCell extends FrameLayout implements DownloadController.F if (documentAttach != null) { TLRPC.VideoSize thumb = MessageObject.getDocumentVideoThumb(documentAttach); if (thumb != null) { - linkImageView.setImage(ImageLocation.getForDocument(thumb, documentAttach), null, ImageLocation.getForDocument(currentPhotoObject, documentAttach), currentPhotoFilter, -1, ext, parentObject, 1); + linkImageView.setImage(ImageLocation.getForDocument(thumb, documentAttach), "100_100", ImageLocation.getForDocument(currentPhotoObject, documentAttach), currentPhotoFilter, -1, ext, parentObject, 1); } else { ImageLocation location = ImageLocation.getForDocument(documentAttach); if (isForceGif) { location.imageType = FileLoader.IMAGE_TYPE_ANIMATION; } - linkImageView.setImage(location, null, ImageLocation.getForDocument(currentPhotoObject, documentAttach), currentPhotoFilter, documentAttach.size, ext, parentObject, 0); + linkImageView.setImage(location, "100_100", ImageLocation.getForDocument(currentPhotoObject, 
documentAttach), currentPhotoFilter, documentAttach.size, ext, parentObject, 0); } } else if (webFile != null) { - linkImageView.setImage(ImageLocation.getForWebFile(webFile), null, ImageLocation.getForPhoto(currentPhotoObject, photoAttach), currentPhotoFilter, -1, ext, parentObject, 1); + linkImageView.setImage(ImageLocation.getForWebFile(webFile), "100_100", ImageLocation.getForPhoto(currentPhotoObject, photoAttach), currentPhotoFilter, -1, ext, parentObject, 1); } else { - linkImageView.setImage(ImageLocation.getForPath(urlLocation), null, ImageLocation.getForPhoto(currentPhotoObject, photoAttach), currentPhotoFilter, -1, ext, parentObject, 1); + linkImageView.setImage(ImageLocation.getForPath(urlLocation), "100_100", ImageLocation.getForPhoto(currentPhotoObject, photoAttach), currentPhotoFilter, -1, ext, parentObject, 1); } } else { if (currentPhotoObject != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java index e39d1a0b0..5e6fc8947 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java @@ -31,6 +31,7 @@ import android.view.HapticFeedbackConstants; import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityNodeInfo; import android.view.animation.Interpolator; +import android.view.animation.OvershootInterpolator; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; @@ -56,6 +57,7 @@ import org.telegram.messenger.ImageReceiver; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.EmptyStubSpan; import org.telegram.ui.Components.ForegroundColorSpanThemable; +import org.telegram.ui.Adapters.DialogsAdapter; import org.telegram.ui.Components.PullForegroundDrawable; import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.CheckBox2; @@ -64,7 
+66,6 @@ import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.StaticLayoutEx; import org.telegram.ui.Components.StatusDrawable; import org.telegram.ui.Components.TypefaceSpan; -import org.telegram.ui.Components.TypingDotsDrawable; import org.telegram.ui.DialogsActivity; import java.util.ArrayList; @@ -140,6 +141,9 @@ public class DialogCell extends BaseCell { private float cornerProgress; private long lastUpdateTime; private float onlineProgress; + private float chatCallProgress; + private float innerProgress; + private int progressStage; private float clipProgress; private int topClip; @@ -180,6 +184,8 @@ public class DialogCell extends BaseCell { private boolean useMeForMyMessages; + private boolean hasCall; + private int nameLeft; private StaticLayout nameLayout; private boolean drawNameLock; @@ -230,7 +236,15 @@ public class DialogCell extends BaseCell { private int countTop; private int countLeft; private int countWidth; + private int countWidthOld; + private int countLeftOld; + private boolean countAnimationIncrement; + private ValueAnimator countAnimator; + private float countChangeProgress = 1f; private StaticLayout countLayout; + private StaticLayout countOldLayout; + private StaticLayout countAnimationStableLayout; + private StaticLayout countAnimationInLayout; private boolean drawMention; private int mentionLeft; @@ -244,6 +258,7 @@ public class DialogCell extends BaseCell { private boolean isSelected; private RectF rect = new RectF(); + private DialogsAdapter.DialogsPreloader preloader; private int animateToStatusDrawableParams; private int animateFromStatusDrawableParams; @@ -251,6 +266,7 @@ public class DialogCell extends BaseCell { private float statusDrawableProgress; private boolean statusDrawableAnimationInProgress; private ValueAnimator statusDrawableAnimator; + long lastDialogChangedTime; public static class BounceInterpolator implements Interpolator { @@ -301,6 +317,7 @@ public class DialogCell extends BaseCell { 
lastStatusDrawableParams = -1; } currentDialogId = dialog.id; + lastDialogChangedTime = System.currentTimeMillis(); isDialogCell = true; if (dialog instanceof TLRPC.TL_dialogFolder) { TLRPC.TL_dialogFolder dialogFolder = (TLRPC.TL_dialogFolder) dialog; @@ -314,8 +331,9 @@ public class DialogCell extends BaseCell { dialogsType = type; folderId = folder; messageId = 0; - update(0); + update(0, false); checkOnline(); + checkGroupCall(); } public void setDialogIndex(int i) { @@ -327,6 +345,7 @@ public class DialogCell extends BaseCell { messageId = 0; update(0); checkOnline(); + checkGroupCall(); } private void checkOnline() { @@ -334,11 +353,17 @@ public class DialogCell extends BaseCell { onlineProgress = isOnline ? 1.0f : 0.0f; } + private void checkGroupCall() { + hasCall = chat != null && chat.call_active && chat.call_not_empty; + chatCallProgress = hasCall ? 1.0f : 0.0f; + } + public void setDialog(long dialog_id, MessageObject messageObject, int date, boolean useMe) { if (currentDialogId != dialog_id) { lastStatusDrawableParams = -1; } currentDialogId = dialog_id; + lastDialogChangedTime = System.currentTimeMillis(); message = messageObject; useMeForMyMessages = useMe; isDialogCell = false; @@ -367,6 +392,10 @@ public class DialogCell extends BaseCell { return messageId; } + public void setPreloader(DialogsAdapter.DialogsPreloader preloader) { + this.preloader = preloader; + } + @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); @@ -384,6 +413,9 @@ public class DialogCell extends BaseCell { translationDrawable = null; translationAnimationStarted = false; } + if (preloader != null) { + preloader.remove(currentDialogId); + } } @Override @@ -423,8 +455,8 @@ public class DialogCell extends BaseCell { return; } if (checkBox != null) { - int x = LocaleController.isRTL ? (right - left) - AndroidUtilities.dp(45) : AndroidUtilities.dp(45); - int y = AndroidUtilities.dp(46); + int x = LocaleController.isRTL ? 
(right - left) - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 43 : 45) : AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 43 : 45); + int y = AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 48 : 42); checkBox.layout(x, y, x + checkBox.getMeasuredWidth(), y + checkBox.getMeasuredHeight()); } if (changed) { @@ -895,7 +927,7 @@ public class DialogCell extends BaseCell { boolean needEmoji = true; if (currentDialogFolderId == 0 && encryptedChat == null && !message.needDrawBluredPreview() && (message.isPhoto() || message.isNewGif() || message.isVideo())) { String type = message.isWebpage() ? message.messageOwner.media.webpage.type : null; - if (!("app".equals(type) || "profile".equals(type) || "article".equals(type))) { + if (!("app".equals(type) || "profile".equals(type) || "article".equals(type) || type != null && type.startsWith("telegram_"))) { TLRPC.PhotoSize smallThumb = FileLoader.getClosestPhotoSizeWithSize(message.photoThumbs, 40); TLRPC.PhotoSize bigThumb = FileLoader.getClosestPhotoSizeWithSize(message.photoThumbs, AndroidUtilities.getPhotoSize()); if (smallThumb == bigThumb) { @@ -1504,7 +1536,7 @@ public class DialogCell extends BaseCell { messageWidth = Math.max(AndroidUtilities.dp(12), messageWidth); if ((useForceThreeLines || SharedConfig.useThreeLinesLayout) && messageNameString != null && (currentDialogFolderId == 0 || currentDialogFolderDialogsCount == 1)) { try { - if (message.hasHighlightedWords()) { + if (message != null && message.hasHighlightedWords()) { CharSequence s = AndroidUtilities.highlightText(messageNameString, message.highlightedWords); if (s != null) { messageNameString = s; @@ -1649,6 +1681,9 @@ public class DialogCell extends BaseCell { } private void drawCheckStatus(Canvas canvas, boolean drawClock, boolean drawCheck1, boolean drawCheck2, boolean moveCheck, float alpha) { + if (alpha == 0 && !moveCheck) { + return; + } float scale = 0.5f + 0.5f * 
alpha; if (drawClock) { setDrawableBounds(Theme.dialogs_clockDrawable, clockDrawLeft, checkDrawTop); @@ -1749,7 +1784,17 @@ public class DialogCell extends BaseCell { message != newMessageObject || message == null && newMessageObject != null || newDraftMessage != draftMessage || drawPin != dialog.pinned) { boolean dialogChanged = currentDialogId != dialog.id; + currentDialogId = dialog.id; + if (dialogChanged) { + lastDialogChangedTime = System.currentTimeMillis(); + if (statusDrawableAnimator != null) { + statusDrawableAnimator.removeAllListeners(); + statusDrawableAnimator.cancel(); + } + statusDrawableAnimationInProgress = false; + lastStatusDrawableParams = -1; + } if (dialog instanceof TLRPC.TL_dialogFolder) { TLRPC.TL_dialogFolder dialogFolder = (TLRPC.TL_dialogFolder) dialog; currentDialogFolderId = dialogFolder.folder.id; @@ -1764,11 +1809,12 @@ public class DialogCell extends BaseCell { fullSeparator = dialog instanceof TLRPC.TL_dialog && dialog.pinned && nextDialog != null && !nextDialog.pinned; fullSeparator2 = dialog instanceof TLRPC.TL_dialogFolder && nextDialog != null && !nextDialog.pinned; } - update(0); + update(0, !dialogChanged); if (dialogChanged) { reorderIconProgress = drawPin && drawReorder ? 
1.0f : 0.0f; } checkOnline(); + checkGroupCall(); } } } @@ -1814,6 +1860,10 @@ public class DialogCell extends BaseCell { } public void update(int mask) { + update(mask, true); + } + + public void update(int mask, boolean animated) { if (customDialog != null) { lastMessageDate = customDialog.date; lastUnreadState = customDialog.unread_count != 0; @@ -1824,6 +1874,8 @@ public class DialogCell extends BaseCell { avatarImage.setImage(null, "50_50", avatarDrawable, null, 0); thumbImage.setImageBitmap((BitmapDrawable) null); } else { + int oldUnreadCount = unreadCount; + boolean oldMarkUnread = markUnread; if (isDialogCell) { TLRPC.Dialog dialog = MessagesController.getInstance(currentAccount).dialogs_dict.get(currentDialogId); if (dialog != null) { @@ -1881,6 +1933,12 @@ public class DialogCell extends BaseCell { continueUpdate = true; } } + if (!continueUpdate && (mask & MessagesController.UPDATE_MASK_CHAT) != 0 && chat != null) { + TLRPC.Chat newChat = MessagesController.getInstance(currentAccount).getChat(chat.id); + if ((newChat.call_active && newChat.call_not_empty) != hasCall) { + continueUpdate = true; + } + } if (!continueUpdate && (mask & MessagesController.UPDATE_MASK_AVATAR) != 0) { if (chat == null) { continueUpdate = true; @@ -2006,6 +2064,66 @@ public class DialogCell extends BaseCell { avatarImage.setImage(ImageLocation.getForChat(chat, false), "50_50", avatarDrawable, null, chat, 0); } } + + if (animated && (oldUnreadCount != unreadCount || oldMarkUnread != markUnread) && (System.currentTimeMillis() - lastDialogChangedTime) > 100) { + if (countAnimator != null) { + countAnimator.cancel(); + } + countAnimator = ValueAnimator.ofFloat(0, 1f); + countAnimator.addUpdateListener(valueAnimator -> { + countChangeProgress = (float) valueAnimator.getAnimatedValue(); + invalidate(); + }); + countAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + countChangeProgress = 1f; + countOldLayout = null; + 
countAnimationStableLayout = null; + countAnimationInLayout = null; + invalidate(); + } + }); + if (oldUnreadCount == 0 || markUnread) { + countAnimator.setDuration(220); + countAnimator.setInterpolator(new OvershootInterpolator()); + } else if (unreadCount == 0) { + countAnimator.setDuration(150); + countAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + } else { + countAnimator.setDuration(430); + countAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + } + if (drawCount && countLayout != null) { + String oldStr = String.valueOf(oldUnreadCount); + String newStr = String.valueOf(unreadCount); + + if (oldStr.length() == newStr.length()) { + SpannableStringBuilder oldSpannableStr = new SpannableStringBuilder(oldStr); + SpannableStringBuilder newSpannableStr = new SpannableStringBuilder(newStr); + SpannableStringBuilder stableStr = new SpannableStringBuilder(newStr); + for (int i = 0; i < oldStr.length(); i++) { + if (oldStr.charAt(i) == newStr.charAt(i)) { + oldSpannableStr.setSpan(new EmptyStubSpan(), i, i + 1, 0); + newSpannableStr.setSpan(new EmptyStubSpan(), i, i + 1, 0); + } else { + stableStr.setSpan(new EmptyStubSpan(), i, i + 1, 0); + } + } + + int countOldWidth = Math.max(AndroidUtilities.dp(12), (int) Math.ceil(Theme.dialogs_countTextPaint.measureText(oldStr))); + countOldLayout = new StaticLayout(oldSpannableStr, Theme.dialogs_countTextPaint, countOldWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + countAnimationStableLayout = new StaticLayout(stableStr, Theme.dialogs_countTextPaint, countOldWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + countAnimationInLayout = new StaticLayout(newSpannableStr, Theme.dialogs_countTextPaint, countOldWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + } else { + countOldLayout = countLayout; + } + } + countWidthOld = countWidth; + countLeftOld = countLeft; + countAnimationIncrement = unreadCount > oldUnreadCount; + countAnimator.start(); + } } if (getMeasuredWidth() != 0 
|| getMeasuredHeight() != 0) { buildLayout(); @@ -2379,20 +2497,99 @@ public class DialogCell extends BaseCell { canvas.drawRoundRect(rect, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, Theme.dialogs_errorPaint); setDrawableBounds(Theme.dialogs_errorDrawable, errorLeft + AndroidUtilities.dp(5.5f), errorTop + AndroidUtilities.dp(5)); Theme.dialogs_errorDrawable.draw(canvas); - } else if (drawCount || drawMention) { - if (drawCount) { - Paint paint = dialogMuted || currentDialogFolderId != 0 ? Theme.dialogs_countGrayPaint : Theme.dialogs_countPaint; - paint.setAlpha((int) ((1.0f - reorderIconProgress) * 255)); - Theme.dialogs_countTextPaint.setAlpha((int) ((1.0f - reorderIconProgress) * 255)); + } else if (drawCount || drawMention || countChangeProgress != 1f) { + if (drawCount || countChangeProgress != 1f) { + final float progressFinal = (unreadCount == 0 && !markUnread) ? 1f - countChangeProgress : countChangeProgress; + if (countOldLayout == null || unreadCount == 0) { + StaticLayout drawLayout = unreadCount == 0 ? countOldLayout : countLayout; + Paint paint = dialogMuted || currentDialogFolderId != 0 ? 
Theme.dialogs_countGrayPaint : Theme.dialogs_countPaint; + paint.setAlpha((int) ((1.0f - reorderIconProgress) * 255)); + Theme.dialogs_countTextPaint.setAlpha((int) ((1.0f - reorderIconProgress) * 255)); + + int x = countLeft - AndroidUtilities.dp(5.5f); + rect.set(x, countTop, x + countWidth + AndroidUtilities.dp(11), countTop + AndroidUtilities.dp(23)); + + if (progressFinal != 1f) { + if (drawPin) { + Theme.dialogs_pinnedDrawable.setAlpha((int) ((1.0f - reorderIconProgress) * 255)); + setDrawableBounds(Theme.dialogs_pinnedDrawable, pinLeft, pinTop); + canvas.save(); + canvas.scale(1f - progressFinal, 1f - progressFinal, Theme.dialogs_pinnedDrawable.getBounds().centerX(), Theme.dialogs_pinnedDrawable.getBounds().centerY()); + Theme.dialogs_pinnedDrawable.draw(canvas); + canvas.restore(); + } + canvas.save(); + canvas.scale(progressFinal, progressFinal, rect.centerX(), rect.centerY()); + } + + canvas.drawRoundRect(rect, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, paint); + if (drawLayout != null) { + canvas.save(); + canvas.translate(countLeft, countTop + AndroidUtilities.dp(4)); + drawLayout.draw(canvas); + canvas.restore(); + } + + if (progressFinal != 1f) { + canvas.restore(); + } + } else { + Paint paint = dialogMuted || currentDialogFolderId != 0 ? 
Theme.dialogs_countGrayPaint : Theme.dialogs_countPaint; + paint.setAlpha((int) ((1.0f - reorderIconProgress) * 255)); + Theme.dialogs_countTextPaint.setAlpha((int) ((1.0f - reorderIconProgress) * 255)); + + float progressHalf = progressFinal * 2; + if (progressHalf > 1f) { + progressHalf = 1f; + } + + float countLeft = this.countLeft * progressHalf + countLeftOld * (1f - progressHalf); + float x = countLeft - AndroidUtilities.dp(5.5f); + rect.set(x, countTop, x + (countWidth * progressHalf) + (countWidthOld * (1f - progressHalf)) + AndroidUtilities.dp(11), countTop + AndroidUtilities.dp(23)); + + float scale = 1f; + if (progressFinal <= 0.5f) { + scale += 0.1f * CubicBezierInterpolator.EASE_OUT.getInterpolation(progressFinal * 2); + } else { + scale += 0.1f * CubicBezierInterpolator.EASE_IN.getInterpolation((1f - (progressFinal - 0.5f) * 2)); + } + - int x = countLeft - AndroidUtilities.dp(5.5f); - rect.set(x, countTop, x + countWidth + AndroidUtilities.dp(11), countTop + AndroidUtilities.dp(23)); - canvas.drawRoundRect(rect, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, paint); - if (countLayout != null) { canvas.save(); - canvas.translate(countLeft, countTop + AndroidUtilities.dp(4)); - countLayout.draw(canvas); + canvas.scale(scale, scale, rect.centerX(), rect.centerY()); + canvas.drawRoundRect(rect, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, paint); + + if (countAnimationStableLayout != null) { + canvas.save(); + canvas.translate(countLeft, countTop + AndroidUtilities.dp(4)); + countAnimationStableLayout.draw(canvas); + canvas.restore(); + } + + int textAlpha = Theme.dialogs_countTextPaint.getAlpha(); + Theme.dialogs_countTextPaint.setAlpha((int) (textAlpha * progressHalf)); + if (countAnimationInLayout != null) { + canvas.save(); + canvas.translate(countLeft, (countAnimationIncrement ? 
AndroidUtilities.dp(13) : -AndroidUtilities.dp(13)) * (1f - progressHalf) + countTop + AndroidUtilities.dp(4)); + countAnimationInLayout.draw(canvas); + canvas.restore(); + } else if (countLayout != null) { + canvas.save(); + canvas.translate(countLeft, (countAnimationIncrement ? AndroidUtilities.dp(13) : -AndroidUtilities.dp(13)) * (1f - progressHalf) + countTop + AndroidUtilities.dp(4)); + countLayout.draw(canvas); + canvas.restore(); + } + + if (countOldLayout != null) { + Theme.dialogs_countTextPaint.setAlpha((int) (textAlpha * (1f - progressHalf))); + canvas.save(); + canvas.translate(countLeft, (countAnimationIncrement ? -AndroidUtilities.dp(13) : AndroidUtilities.dp(13)) * progressHalf + countTop + AndroidUtilities.dp(4)); + countOldLayout.draw(canvas); + canvas.restore(); + } + Theme.dialogs_countTextPaint.setAlpha(textAlpha); canvas.restore(); + } } if (drawMention) { @@ -2446,36 +2643,126 @@ public class DialogCell extends BaseCell { canvas.restore(); } - if (user != null && isDialogCell && currentDialogFolderId == 0 && !MessagesController.isSupportUser(user) && !user.bot) { - boolean isOnline = !user.self && (user.status != null && user.status.expires > ConnectionsManager.getInstance(currentAccount).getCurrentTime() || MessagesController.getInstance(currentAccount).onlinePrivacy.containsKey(user.id)); - if (isOnline || onlineProgress != 0) { - int top = (int) (avatarImage.getImageY2() - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 6 : 8)); - int left; - if (LocaleController.isRTL) { - left = (int) (avatarImage.getImageX() + AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 10 : 6)); - } else { - left = (int) (avatarImage.getImageX2() - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 
10 : 6)); - } - - Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - canvas.drawCircle(left, top, AndroidUtilities.dp(7) * onlineProgress, Theme.dialogs_onlineCirclePaint); - Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_chats_onlineCircle)); - canvas.drawCircle(left, top, AndroidUtilities.dp(5) * onlineProgress, Theme.dialogs_onlineCirclePaint); - if (isOnline) { - if (onlineProgress < 1.0f) { - onlineProgress += dt / 150.0f; - if (onlineProgress > 1.0f) { - onlineProgress = 1.0f; - } - needInvalidate = true; + if (isDialogCell && currentDialogFolderId == 0) { + if (user != null && !MessagesController.isSupportUser(user) && !user.bot) { + boolean isOnline = !user.self && (user.status != null && user.status.expires > ConnectionsManager.getInstance(currentAccount).getCurrentTime() || MessagesController.getInstance(currentAccount).onlinePrivacy.containsKey(user.id)); + if (isOnline || onlineProgress != 0) { + int top = (int) (avatarImage.getImageY2() - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 6 : 8)); + int left; + if (LocaleController.isRTL) { + left = (int) (avatarImage.getImageX() + AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 10 : 6)); + } else { + left = (int) (avatarImage.getImageX2() - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 
10 : 6)); } - } else { - if (onlineProgress > 0.0f) { - onlineProgress -= dt / 150.0f; - if (onlineProgress < 0.0f) { - onlineProgress = 0.0f; + + Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + canvas.drawCircle(left, top, AndroidUtilities.dp(7) * onlineProgress, Theme.dialogs_onlineCirclePaint); + Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_chats_onlineCircle)); + canvas.drawCircle(left, top, AndroidUtilities.dp(5) * onlineProgress, Theme.dialogs_onlineCirclePaint); + if (isOnline) { + if (onlineProgress < 1.0f) { + onlineProgress += dt / 150.0f; + if (onlineProgress > 1.0f) { + onlineProgress = 1.0f; + } + needInvalidate = true; + } + } else { + if (onlineProgress > 0.0f) { + onlineProgress -= dt / 150.0f; + if (onlineProgress < 0.0f) { + onlineProgress = 0.0f; + } + needInvalidate = true; + } + } + } + } else if (chat != null) { + hasCall = chat.call_active && chat.call_not_empty; + if (hasCall || chatCallProgress != 0) { + float checkProgress = checkBox.isChecked() ? 1.0f - checkBox.getProgress() : 1.0f; + int top = (int) (avatarImage.getImageY2() - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 6 : 8)); + int left; + if (LocaleController.isRTL) { + left = (int) (avatarImage.getImageX() + AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 10 : 6)); + } else { + left = (int) (avatarImage.getImageX2() - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 
10 : 6)); + } + + Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + canvas.drawCircle(left, top, AndroidUtilities.dp(11) * chatCallProgress * checkProgress, Theme.dialogs_onlineCirclePaint); + Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_chats_onlineCircle)); + canvas.drawCircle(left, top, AndroidUtilities.dp(9) * chatCallProgress * checkProgress, Theme.dialogs_onlineCirclePaint); + Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + + float size1; + float size2; + if (progressStage == 0) { + size1 = AndroidUtilities.dp(1) + AndroidUtilities.dp(4) * innerProgress; + size2 = AndroidUtilities.dp(3) - AndroidUtilities.dp(2) * innerProgress; + } else if (progressStage == 1) { + size1 = AndroidUtilities.dp(5) - AndroidUtilities.dp(4) * innerProgress; + size2 = AndroidUtilities.dp(1) + AndroidUtilities.dp(4) * innerProgress; + } else if (progressStage == 2) { + size1 = AndroidUtilities.dp(1) + AndroidUtilities.dp(2) * innerProgress; + size2 = AndroidUtilities.dp(5) - AndroidUtilities.dp(4) * innerProgress; + } else if (progressStage == 3) { + size1 = AndroidUtilities.dp(3) - AndroidUtilities.dp(2) * innerProgress; + size2 = AndroidUtilities.dp(1) + AndroidUtilities.dp(2) * innerProgress; + } else if (progressStage == 4) { + size1 = AndroidUtilities.dp(1) + AndroidUtilities.dp(4) * innerProgress; + size2 = AndroidUtilities.dp(3) - AndroidUtilities.dp(2) * innerProgress; + } else if (progressStage == 5) { + size1 = AndroidUtilities.dp(5) - AndroidUtilities.dp(4) * innerProgress; + size2 = AndroidUtilities.dp(1) + AndroidUtilities.dp(4) * innerProgress; + } else if (progressStage == 6) { + size1 = AndroidUtilities.dp(1) + AndroidUtilities.dp(4) * innerProgress; + size2 = AndroidUtilities.dp(5) - AndroidUtilities.dp(4) * innerProgress; + } else { + size1 = AndroidUtilities.dp(5) - AndroidUtilities.dp(4) * innerProgress; + size2 = AndroidUtilities.dp(1) + 
AndroidUtilities.dp(2) * innerProgress; + } + + if (chatCallProgress < 1.0f || checkProgress < 1.0f) { + canvas.save(); + canvas.scale(chatCallProgress * checkProgress, chatCallProgress * checkProgress, left, top); + } + rect.set(left - AndroidUtilities.dp(1), top - size1, left + AndroidUtilities.dp(1), top + size1); + canvas.drawRoundRect(rect, AndroidUtilities.dp(1), AndroidUtilities.dp(1), Theme.dialogs_onlineCirclePaint); + + rect.set(left - AndroidUtilities.dp(5), top - size2, left - AndroidUtilities.dp(3), top + size2); + canvas.drawRoundRect(rect, AndroidUtilities.dp(1), AndroidUtilities.dp(1), Theme.dialogs_onlineCirclePaint); + + rect.set(left + AndroidUtilities.dp(3), top - size2, left + AndroidUtilities.dp(5), top + size2); + canvas.drawRoundRect(rect, AndroidUtilities.dp(1), AndroidUtilities.dp(1), Theme.dialogs_onlineCirclePaint); + if (chatCallProgress < 1.0f || checkProgress < 1.0f) { + canvas.restore(); + } + + innerProgress += dt / 400.0f; + if (innerProgress >= 1.0f) { + innerProgress = 0.0f; + progressStage++; + if (progressStage >= 8) { + progressStage = 0; + } + } + needInvalidate = true; + + if (hasCall) { + if (chatCallProgress < 1.0f) { + chatCallProgress += dt / 150.0f; + if (chatCallProgress > 1.0f) { + chatCallProgress = 1.0f; + } + } + } else { + if (chatCallProgress > 0.0f) { + chatCallProgress -= dt / 150.0f; + if (chatCallProgress < 0.0f) { + chatCallProgress = 0.0f; + } } - needInvalidate = true; } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java index 5a4040b14..7080b3237 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java @@ -28,12 +28,14 @@ import android.widget.ImageView; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import 
org.telegram.messenger.DocumentObject; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; @@ -180,12 +182,11 @@ public class FeaturedStickerSetCell extends FrameLayout { sticker = null; } if (sticker != null) { - TLObject object; - if (set.set.thumb instanceof TLRPC.TL_photoSize || set.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { - object = set.set.thumb; - } else { + TLObject object = FileLoader.getClosestPhotoSizeWithSize(set.set.thumbs, 90); + if (object == null) { object = sticker; } + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(set.set.thumbs, Theme.key_windowBackgroundGray, 1.0f); ImageLocation imageLocation; if (object instanceof TLRPC.Document) { @@ -197,11 +198,15 @@ public class FeaturedStickerSetCell extends FrameLayout { } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { - imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + if (svgThumb != null) { + imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", svgThumb, 0, set); + } else { + imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + } } else if (imageLocation != null && imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE) { - imageView.setImage(imageLocation, "50_50", "tgs", null, set); + imageView.setImage(imageLocation, "50_50", "tgs", svgThumb, set); } else { - imageView.setImage(imageLocation, "50_50", "webp", null, set); + imageView.setImage(imageLocation, "50_50", "webp", svgThumb, set); } } else { imageView.setImage(null, null, "webp", null, set); diff 
--git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java index a08deff5f..0c3013158 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java @@ -27,12 +27,14 @@ import android.widget.FrameLayout; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; @@ -184,12 +186,11 @@ public class FeaturedStickerSetCell2 extends FrameLayout { } if (sticker != null) { if (MessageObject.canAutoplayAnimatedSticker(sticker)) { - TLObject object; - if (set.set.thumb instanceof TLRPC.TL_photoSize || set.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { - object = set.set.thumb; - } else { + TLObject object = FileLoader.getClosestPhotoSizeWithSize(set.set.thumbs, 90); + if (object == null) { object = sticker; } + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(set.set.thumbs, Theme.key_windowBackgroundGray, 1.0f); ImageLocation imageLocation; if (object instanceof TLRPC.Document) { // first sticker in set as a thumb @@ -201,11 +202,15 @@ public class FeaturedStickerSetCell2 extends FrameLayout { } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { - imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + if (svgThumb != null) { + 
imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", svgThumb, 0, set); + } else { + imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + } } else if (imageLocation != null && imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE) { - imageView.setImage(imageLocation, "50_50", "tgs", null, set); + imageView.setImage(imageLocation, "50_50", "tgs", svgThumb, set); } else { - imageView.setImage(imageLocation, "50_50", "webp", null, set); + imageView.setImage(imageLocation, "50_50", "webp", svgThumb, set); } } else { final TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(sticker.thumbs, 90); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GraySectionCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GraySectionCell.java index 5e8ab13c4..7439f2d2a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GraySectionCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GraySectionCell.java @@ -62,6 +62,12 @@ public class GraySectionCell extends FrameLayout { super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(32), MeasureSpec.EXACTLY)); } + public void setTextColor(String key) { + int color = Theme.getColor(key); + textView.setTextColor(color); + rightTextView.setTextColor(color); + } + public void setText(String text) { textView.setText(text); rightTextView.setVisibility(GONE); @@ -79,4 +85,8 @@ public class GraySectionCell extends FrameLayout { descriptions.add(new ThemeDescription(listView, 0, new Class[]{GraySectionCell.class}, new String[]{"rightTextView"}, null, null, null, Theme.key_graySectionText)); descriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_CELLBACKGROUNDCOLOR, new Class[]{GraySectionCell.class}, null, null, null, Theme.key_graySection)); } + + public TextView getTextView() { + return textView; + } } diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallInvitedCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallInvitedCell.java new file mode 100644 index 000000000..ef7f25aa5 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallInvitedCell.java @@ -0,0 +1,132 @@ +package org.telegram.ui.Cells; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.view.Gravity; +import android.widget.FrameLayout; +import android.widget.ImageView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.R; +import org.telegram.messenger.UserObject; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.SimpleTextView; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AvatarDrawable; +import org.telegram.ui.Components.BackupImageView; +import org.telegram.ui.Components.LayoutHelper; + +@SuppressWarnings("FieldCanBeLocal") +public class GroupCallInvitedCell extends FrameLayout { + + private BackupImageView avatarImageView; + private SimpleTextView nameTextView; + private SimpleTextView statusTextView; + private ImageView muteButton; + + private AvatarDrawable avatarDrawable; + + private TLRPC.User currentUser; + + private Paint dividerPaint; + + private String grayIconColor = Theme.key_voipgroup_mutedIcon; + + private boolean needDivider; + + public GroupCallInvitedCell(Context context) { + super(context); + + dividerPaint = new Paint(); + dividerPaint.setColor(Theme.getColor(Theme.key_voipgroup_actionBar)); + + avatarDrawable = new AvatarDrawable(); + + avatarImageView = new BackupImageView(context); + avatarImageView.setRoundRadius(AndroidUtilities.dp(24)); + 
addView(avatarImageView, LayoutHelper.createFrame(46, 46, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 0 : 11, 6, LocaleController.isRTL ? 11 : 0, 0)); + + nameTextView = new SimpleTextView(context); + nameTextView.setTextColor(Theme.getColor(Theme.key_voipgroup_nameText)); + nameTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + nameTextView.setTextSize(16); + nameTextView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP); + addView(nameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 54 : 67, 10, LocaleController.isRTL ? 67 : 54, 0)); + + statusTextView = new SimpleTextView(context); + statusTextView.setTextSize(15); + statusTextView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP); + statusTextView.setTextColor(Theme.getColor(grayIconColor)); + statusTextView.setText(LocaleController.getString("Invited", R.string.Invited)); + addView(statusTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 54 : 67, 32, LocaleController.isRTL ? 67 : 54, 0)); + + muteButton = new ImageView(context); + muteButton.setScaleType(ImageView.ScaleType.CENTER); + muteButton.setImageResource(R.drawable.msg_invited); + muteButton.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + muteButton.setPadding(0, 0, AndroidUtilities.dp(4), 0); + muteButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(grayIconColor), PorterDuff.Mode.MULTIPLY)); + addView(muteButton, LayoutHelper.createFrame(48, LayoutHelper.MATCH_PARENT, (LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT) | Gravity.CENTER_VERTICAL, 6, 0, 6, 0)); + + setWillNotDraw(false); + + setFocusable(true); + } + + public CharSequence getName() { + return nameTextView.getText(); + } + + public void setData(int account, Integer uid) { + currentUser = MessagesController.getInstance(account).getUser(uid); + avatarDrawable.setInfo(currentUser); + + String lastName = UserObject.getUserName(currentUser); + nameTextView.setText(lastName); + + avatarImageView.getImageReceiver().setCurrentAccount(account); + avatarImageView.setImage(ImageLocation.getForUser(currentUser, false), "50_50", avatarDrawable, currentUser); + } + + public void setDrawDivider(boolean draw) { + needDivider = draw; + invalidate(); + } + + public void setGrayIconColor(String key, int value) { + if (!grayIconColor.equals(key)) { + grayIconColor = key; + } + muteButton.setColorFilter(new PorterDuffColorFilter(value, PorterDuff.Mode.MULTIPLY)); + statusTextView.setTextColor(value); + Theme.setSelectorDrawableColor(muteButton.getDrawable(), value & 0x24ffffff, true); + } + + public TLRPC.User getUser() { + return currentUser; + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(58), MeasureSpec.EXACTLY)); + } + + @Override + public boolean hasOverlappingRendering() { + return false; + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (needDivider) { + canvas.drawLine(LocaleController.isRTL ? 0 : AndroidUtilities.dp(68), getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? 
AndroidUtilities.dp(68) : 0), getMeasuredHeight() - 1, dividerPaint); + } + super.dispatchDraw(canvas); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallTextCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallTextCell.java new file mode 100644 index 000000000..4f4413c59 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallTextCell.java @@ -0,0 +1,252 @@ +/* + * This is the source code of Telegram for Android v. 5.x.x. + * It is licensed under GNU GPL v. 2 or later. + * You should have received a copy of the license in this archive (see LICENSE). + * + * Copyright Nikolai Kudashov, 2013-2018. + */ + +package org.telegram.ui.Cells; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; +import android.text.TextUtils; +import android.view.Gravity; +import android.view.accessibility.AccessibilityNodeInfo; +import android.widget.FrameLayout; +import android.widget.ImageView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.ui.ActionBar.SimpleTextView; +import org.telegram.ui.ActionBar.Theme; + +public class GroupCallTextCell extends FrameLayout { + + private SimpleTextView textView; + private SimpleTextView valueTextView; + private ImageView imageView; + private ImageView valueImageView; + private int leftPadding; + private boolean needDivider; + private int offsetFromImage = 67; + private int imageLeft = 18; + + private Paint dividerPaint; + + public GroupCallTextCell(Context context) { + this(context, 23, false); + } + + public GroupCallTextCell(Context context, int left, boolean dialog) { + super(context); + + dividerPaint = new Paint(); + dividerPaint.setColor(Theme.getColor(Theme.key_voipgroup_actionBar)); + + leftPadding = left; + + textView = new 
SimpleTextView(context); + textView.setTextColor(Theme.getColor(dialog ? Theme.key_dialogTextBlack : Theme.key_windowBackgroundWhiteBlackText)); + textView.setTextSize(16); + textView.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); + textView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + addView(textView); + + valueTextView = new SimpleTextView(context); + valueTextView.setTextColor(Theme.getColor(dialog ? Theme.key_dialogTextBlue2 : Theme.key_windowBackgroundWhiteValueText)); + valueTextView.setTextSize(16); + valueTextView.setGravity(LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT); + valueTextView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + addView(valueTextView); + + imageView = new ImageView(context); + imageView.setScaleType(ImageView.ScaleType.CENTER); + imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(dialog ? Theme.key_dialogIcon : Theme.key_windowBackgroundWhiteGrayIcon), PorterDuff.Mode.MULTIPLY)); + addView(imageView); + + valueImageView = new ImageView(context); + valueImageView.setScaleType(ImageView.ScaleType.CENTER); + addView(valueImageView); + + setFocusable(true); + } + + public SimpleTextView getTextView() { + return textView; + } + + public SimpleTextView getValueTextView() { + return valueTextView; + } + + public ImageView getValueImageView() { + return valueImageView; + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int width = MeasureSpec.getSize(widthMeasureSpec); + int height = AndroidUtilities.dp(48); + + valueTextView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(leftPadding), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); + textView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(71 + leftPadding) - valueTextView.getTextWidth(), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); + if 
(imageView.getVisibility() == VISIBLE) { + imageView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST)); + } + if (valueImageView.getVisibility() == VISIBLE) { + valueImageView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST)); + } + setMeasuredDimension(width, AndroidUtilities.dp(50) + (needDivider ? 1 : 0)); + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + int height = bottom - top; + int width = right - left; + + int viewTop = (height - valueTextView.getTextHeight()) / 2; + int viewLeft = LocaleController.isRTL ? AndroidUtilities.dp(leftPadding) : 0; + valueTextView.layout(viewLeft, viewTop, viewLeft + valueTextView.getMeasuredWidth(), viewTop + valueTextView.getMeasuredHeight()); + + viewTop = (height - textView.getTextHeight()) / 2; + if (LocaleController.isRTL) { + viewLeft = getMeasuredWidth() - textView.getMeasuredWidth() - AndroidUtilities.dp(imageView.getVisibility() == VISIBLE ? offsetFromImage : leftPadding); + } else { + viewLeft = AndroidUtilities.dp(imageView.getVisibility() == VISIBLE ? offsetFromImage : leftPadding); + } + textView.layout(viewLeft, viewTop, viewLeft + textView.getMeasuredWidth(), viewTop + textView.getMeasuredHeight()); + + if (imageView.getVisibility() == VISIBLE) { + viewTop = AndroidUtilities.dp(5); + viewLeft = !LocaleController.isRTL ? AndroidUtilities.dp(imageLeft) : width - imageView.getMeasuredWidth() - AndroidUtilities.dp(imageLeft); + imageView.layout(viewLeft, viewTop, viewLeft + imageView.getMeasuredWidth(), viewTop + imageView.getMeasuredHeight()); + } + + if (valueImageView.getVisibility() == VISIBLE) { + viewTop = (height - valueImageView.getMeasuredHeight()) / 2; + viewLeft = LocaleController.isRTL ? 
AndroidUtilities.dp(23) : width - valueImageView.getMeasuredWidth() - AndroidUtilities.dp(23); + valueImageView.layout(viewLeft, viewTop, viewLeft + valueImageView.getMeasuredWidth(), viewTop + valueImageView.getMeasuredHeight()); + } + } + + public void setTextColor(int color) { + textView.setTextColor(color); + } + + public void setColors(String icon, String text) { + textView.setTextColor(Theme.getColor(text)); + textView.setTag(text); + if (icon != null) { + imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(icon), PorterDuff.Mode.MULTIPLY)); + imageView.setTag(icon); + } + } + + public void setColors(int icon, int text) { + textView.setTextColor(text); + textView.setTag(null); + imageView.setColorFilter(new PorterDuffColorFilter(icon, PorterDuff.Mode.MULTIPLY)); + imageView.setTag(null); + } + + public void setText(String text, boolean divider) { + textView.setText(text); + valueTextView.setText(null); + imageView.setVisibility(GONE); + valueTextView.setVisibility(GONE); + valueImageView.setVisibility(GONE); + needDivider = divider; + setWillNotDraw(!needDivider); + } + + public void setTextAndIcon(String text, int resId, boolean divider) { + textView.setText(text); + valueTextView.setText(null); + imageView.setImageResource(resId); + imageView.setVisibility(VISIBLE); + valueTextView.setVisibility(GONE); + valueImageView.setVisibility(GONE); + imageView.setPadding(0, AndroidUtilities.dp(7), 0, 0); + needDivider = divider; + setWillNotDraw(!needDivider); + } + + public void setTextAndIcon(String text, Drawable drawable, boolean divider) { + offsetFromImage = 68; + imageLeft = 18; + textView.setText(text); + valueTextView.setText(null); + imageView.setColorFilter(null); + imageView.setImageDrawable(drawable); + imageView.setVisibility(VISIBLE); + valueTextView.setVisibility(GONE); + valueImageView.setVisibility(GONE); + imageView.setPadding(0, AndroidUtilities.dp(6), 0, 0); + needDivider = divider; + setWillNotDraw(!needDivider); + } + + public 
void setOffsetFromImage(int value) { + offsetFromImage = value; + } + + public void setTextAndValue(String text, String value, boolean divider) { + textView.setText(text); + valueTextView.setText(value); + valueTextView.setVisibility(VISIBLE); + imageView.setVisibility(GONE); + valueImageView.setVisibility(GONE); + needDivider = divider; + setWillNotDraw(!needDivider); + } + + public void setTextAndValueAndIcon(String text, String value, int resId, boolean divider) { + textView.setText(text); + valueTextView.setText(value); + valueTextView.setVisibility(VISIBLE); + valueImageView.setVisibility(GONE); + imageView.setVisibility(VISIBLE); + imageView.setPadding(0, AndroidUtilities.dp(7), 0, 0); + imageView.setImageResource(resId); + needDivider = divider; + setWillNotDraw(!needDivider); + } + + public void setTextAndValueDrawable(String text, Drawable drawable, boolean divider) { + textView.setText(text); + valueTextView.setText(null); + valueImageView.setVisibility(VISIBLE); + valueImageView.setImageDrawable(drawable); + valueTextView.setVisibility(GONE); + imageView.setVisibility(GONE); + imageView.setPadding(0, AndroidUtilities.dp(7), 0, 0); + needDivider = divider; + setWillNotDraw(!needDivider); + } + + @Override + protected void onDraw(Canvas canvas) { + if (needDivider) { + canvas.drawLine(LocaleController.isRTL ? 0 : AndroidUtilities.dp(imageView.getVisibility() == VISIBLE ? 68 : 20), getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? AndroidUtilities.dp(imageView.getVisibility() == VISIBLE ? 
68 : 20) : 0), getMeasuredHeight() - 1, dividerPaint); + } + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + final CharSequence text = textView.getText(); + if (!TextUtils.isEmpty(text)) { + final CharSequence valueText = valueTextView.getText(); + if (!TextUtils.isEmpty(valueText)) { + info.setText(text + ": " + valueText); + } else { + info.setText(text); + } + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java new file mode 100644 index 000000000..fbdd14e8e --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java @@ -0,0 +1,620 @@ +package org.telegram.ui.Cells; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.AnimatorSet; +import android.animation.ObjectAnimator; +import android.animation.ValueAnimator; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.RippleDrawable; +import android.os.Build; +import android.os.SystemClock; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.accessibility.AccessibilityNodeInfo; +import android.widget.FrameLayout; +import android.widget.ImageView; + +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; +import org.telegram.messenger.UserObject; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; +import 
org.telegram.ui.ActionBar.SimpleTextView; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AvatarDrawable; +import org.telegram.ui.Components.BackupImageView; +import org.telegram.ui.Components.BlobDrawable; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.RLottieDrawable; +import org.telegram.ui.Components.RLottieImageView; +import org.telegram.ui.Components.WaveDrawable; + +import java.util.ArrayList; + +public class GroupCallUserCell extends FrameLayout { + + private AvatarWavesDrawable avatarWavesDrawable; + + private BackupImageView avatarImageView; + private SimpleTextView nameTextView; + private SimpleTextView[] statusTextView = new SimpleTextView[2]; + private RLottieImageView muteButton; + private RLottieDrawable muteDrawable; + + private AvatarDrawable avatarDrawable; + + private ChatObject.Call currentCall; + private TLRPC.TL_groupCallParticipant participant; + private TLRPC.User currentUser; + + private Paint dividerPaint; + + private boolean lastMuted; + private int lastMuteColor; + + private AccountInstance accountInstance; + + private boolean needDivider; + private boolean currentIconGray; + + private String grayIconColor = Theme.key_voipgroup_mutedIcon; + + private Runnable updateRunnable = () -> { + isSpeaking = false; + applyParticipantChanges(true, true); + avatarWavesDrawable.setAmplitude(0); + updateRunnableScheduled = false; + }; + private boolean updateRunnableScheduled; + private boolean isSpeaking; + + private AnimatorSet animatorSet; + + public GroupCallUserCell(Context context) { + super(context); + + dividerPaint = new Paint(); + dividerPaint.setColor(Theme.getColor(Theme.key_voipgroup_actionBar)); + + avatarDrawable = new AvatarDrawable(); + + avatarImageView = new BackupImageView(context); + avatarImageView.setRoundRadius(AndroidUtilities.dp(24)); + addView(avatarImageView, LayoutHelper.createFrame(46, 46, 
(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 0 : 11, 6, LocaleController.isRTL ? 11 : 0, 0)); + + nameTextView = new SimpleTextView(context); + nameTextView.setTextColor(Theme.getColor(Theme.key_voipgroup_nameText)); + nameTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + nameTextView.setTextSize(16); + nameTextView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP); + addView(nameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 54 : 67, 10, LocaleController.isRTL ? 67 : 54, 0)); + + for (int a = 0; a < 2; a++) { + statusTextView[a] = new SimpleTextView(context); + statusTextView[a].setTextSize(15); + statusTextView[a].setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP); + if (a == 0) { + statusTextView[a].setTextColor(Theme.getColor(Theme.key_voipgroup_listeningText)); + statusTextView[a].setText(LocaleController.getString("Listening", R.string.Listening)); + } else { + statusTextView[a].setTextColor(Theme.getColor(Theme.key_voipgroup_speakingText)); + statusTextView[a].setText(LocaleController.getString("Speaking", R.string.Speaking)); + } + addView(statusTextView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 54 : 67, 32, LocaleController.isRTL ? 
67 : 54, 0)); + } + + muteDrawable = new RLottieDrawable(R.raw.voice_outlined, "" + R.raw.voice_outlined, AndroidUtilities.dp(19), AndroidUtilities.dp(24), true, null); + + muteButton = new RLottieImageView(context) { + @Override + public boolean dispatchTouchEvent(MotionEvent event) { + TLRPC.Chat chat = accountInstance.getMessagesController().getChat(currentCall.chatId); + if (!ChatObject.canManageCalls(chat)) { + return false; + } + return super.dispatchTouchEvent(event); + } + }; + muteButton.setScaleType(ImageView.ScaleType.CENTER); + muteButton.setAnimation(muteDrawable); + if (Build.VERSION.SDK_INT >= 21) { + RippleDrawable rippleDrawable = (RippleDrawable) Theme.createSelectorDrawable(Theme.getColor(grayIconColor) & 0x24ffffff); + Theme.setRippleDrawableForceSoftware(rippleDrawable); + muteButton.setBackground(rippleDrawable); + } + muteButton.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + addView(muteButton, LayoutHelper.createFrame(48, LayoutHelper.MATCH_PARENT, (LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT) | Gravity.CENTER_VERTICAL, 6, 0, 6, 0)); + muteButton.setOnClickListener(v -> onMuteClick(GroupCallUserCell.this)); + + avatarWavesDrawable = new AvatarWavesDrawable(AndroidUtilities.dp(26), AndroidUtilities.dp(29)); + + setWillNotDraw(false); + + setFocusable(true); + } + + protected void onMuteClick(GroupCallUserCell cell) { + + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + if (updateRunnableScheduled) { + AndroidUtilities.cancelRunOnUIThread(updateRunnable); + updateRunnableScheduled = false; + } + if (animatorSet != null) { + animatorSet.cancel(); + } + } + + public boolean isSelfUser() { + return UserObject.isUserSelf(currentUser); + } + + public CharSequence getName() { + return nameTextView.getText(); + } + + public void setData(AccountInstance account, TLRPC.TL_groupCallParticipant groupCallParticipant, ChatObject.Call call) { + currentCall = call; + accountInstance = account; + + participant = groupCallParticipant; + + currentUser = accountInstance.getMessagesController().getUser(participant.user_id); + avatarDrawable.setInfo(currentUser); + + String lastName = UserObject.getUserName(currentUser); + nameTextView.setText(lastName); + + avatarImageView.getImageReceiver().setCurrentAccount(account.getCurrentAccount()); + avatarImageView.setImage(ImageLocation.getForUser(currentUser, false), "50_50", avatarDrawable, currentUser); + } + + public void setDrawDivider(boolean draw) { + needDivider = draw; + invalidate(); + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + applyParticipantChanges(false); + } + + public TLRPC.TL_groupCallParticipant getParticipant() { + return participant; + } + + public void setAmplitude(double value) { + if (value > 1.5f) { + if (updateRunnableScheduled) { + AndroidUtilities.cancelRunOnUIThread(updateRunnable); + } + if (!isSpeaking) { + isSpeaking = true; + applyParticipantChanges(true); + } + 
avatarWavesDrawable.setAmplitude(value); + + AndroidUtilities.runOnUIThread(updateRunnable, 500); + updateRunnableScheduled = true; + } else { + avatarWavesDrawable.setAmplitude(0); + } + } + + public boolean clickMuteButton() { + if (muteButton.isEnabled()) { + muteButton.callOnClick(); + return true; + } + return false; + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(58), MeasureSpec.EXACTLY)); + } + + public void applyParticipantChanges(boolean animated) { + applyParticipantChanges(animated, false); + } + + public void setGrayIconColor(String key, int value) { + if (!grayIconColor.equals(key)) { + if (currentIconGray) { + lastMuteColor = Theme.getColor(key); + } + grayIconColor = key; + } + if (currentIconGray) { + muteButton.setColorFilter(new PorterDuffColorFilter(value, PorterDuff.Mode.MULTIPLY)); + Theme.setSelectorDrawableColor(muteButton.getDrawable(), value & 0x24ffffff, true); + } + } + + private void applyParticipantChanges(boolean animated, boolean internal) { + TLRPC.Chat chat = accountInstance.getMessagesController().getChat(currentCall.chatId); + boolean canMute = ChatObject.canManageCalls(chat) && !isSelfUser(); + if (canMute) { + boolean isAdmin = false; + if (chat.megagroup) { + isAdmin = accountInstance.getMessagesController().getAdminRank(currentCall.chatId, participant.user_id) != null; + } else { + TLRPC.ChatFull chatFull = accountInstance.getMessagesController().getChatFull(currentCall.chatId); + if (chatFull != null) { + for (int a = 0, N = chatFull.participants.participants.size(); a < N; a++) { + TLRPC.ChatParticipant chatParticipant = chatFull.participants.participants.get(a); + if (chatParticipant.user_id == participant.user_id) { + isAdmin = chatParticipant instanceof TLRPC.TL_chatParticipantAdmin || chatParticipant instanceof 
TLRPC.TL_chatParticipantCreator; + break; + } + } + } + } + if (isAdmin && participant.muted) { + canMute = false; + } + } + muteButton.setEnabled(canMute); + + if (!internal) { + long diff = SystemClock.uptimeMillis() - participant.lastSpeakTime; + boolean newSpeaking = diff < 500; + + if (!isSpeaking || !newSpeaking) { + isSpeaking = newSpeaking; + if (updateRunnableScheduled) { + AndroidUtilities.cancelRunOnUIThread(updateRunnable); + updateRunnableScheduled = false; + } + if (isSpeaking) { + AndroidUtilities.runOnUIThread(updateRunnable, 500 - diff); + updateRunnableScheduled = true; + } + } + } + + TLRPC.TL_groupCallParticipant newParticipant = currentCall.participants.get(participant.user_id); + if (newParticipant != null) { + participant = newParticipant; + } + + ArrayList animators = null; + + boolean newMuted; + if (isSelfUser()) { + newMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute() && (!isSpeaking || !participant.hasVoice); + } else { + newMuted = participant.muted && (!isSpeaking || !participant.hasVoice); + } + boolean newMutedByAdmin = newMuted && !participant.can_self_unmute; + int newMuteColor; + Object newTag; + Object oldTag = statusTextView[0].getTag(); + currentIconGray = false; + if (participant.muted && !isSpeaking) { + if (!participant.can_self_unmute) { + newMuteColor = Theme.getColor(Theme.key_voipgroup_mutedByAdminIcon); + } else { + newMuteColor = Theme.getColor(grayIconColor); + currentIconGray = true; + } + newTag = null; + } else { + if (isSpeaking && participant.hasVoice) { + newMuteColor = Theme.getColor(Theme.key_voipgroup_speakingText); + newTag = 1; + } else { + newMuteColor = Theme.getColor(grayIconColor); + newTag = null; + currentIconGray = true; + } + } + boolean somethingChanged = false; + if (animatorSet != null) { + if (newTag == null && oldTag != null || newTag != null && oldTag == null || + lastMuteColor != newMuteColor) { + somethingChanged = true; + } + } + if (!animated || 
somethingChanged) { + if (animatorSet != null) { + animatorSet.cancel(); + } + } + if (!animated || lastMuteColor != newMuteColor || somethingChanged) { + if (animated) { + if (animators == null) { + animators = new ArrayList<>(); + } + int oldColor = lastMuteColor; + lastMuteColor = newMuteColor; + ValueAnimator animator = ValueAnimator.ofFloat(0.0f, 1.0f); + animator.addUpdateListener(animation -> { + float value = animation.getAnimatedFraction(); + int color = AndroidUtilities.getOffsetColor(oldColor, newMuteColor, value, 1.0f); + muteButton.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + Theme.setSelectorDrawableColor(muteButton.getDrawable(), color & 0x24ffffff, true); + }); + animators.add(animator); + } else { + muteButton.setColorFilter(new PorterDuffColorFilter(lastMuteColor = newMuteColor, PorterDuff.Mode.MULTIPLY)); + Theme.setSelectorDrawableColor(muteButton.getDrawable(), newMuteColor & 0x24ffffff, true); + } + } + if (!animated || newTag == null && oldTag != null || newTag != null && oldTag == null || somethingChanged) { + if (animated) { + if (animators == null) { + animators = new ArrayList<>(); + } + statusTextView[0].setVisibility(VISIBLE); + statusTextView[1].setVisibility(VISIBLE); + if (newTag == null) { + animators.add(ObjectAnimator.ofFloat(statusTextView[0], View.TRANSLATION_Y, 0)); + animators.add(ObjectAnimator.ofFloat(statusTextView[0], View.ALPHA, 1.0f)); + animators.add(ObjectAnimator.ofFloat(statusTextView[1], View.TRANSLATION_Y, -AndroidUtilities.dp(2))); + animators.add(ObjectAnimator.ofFloat(statusTextView[1], View.ALPHA, 0.0f)); + } else { + animators.add(ObjectAnimator.ofFloat(statusTextView[0], View.TRANSLATION_Y, AndroidUtilities.dp(2))); + animators.add(ObjectAnimator.ofFloat(statusTextView[0], View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(statusTextView[1], View.TRANSLATION_Y, 0)); + animators.add(ObjectAnimator.ofFloat(statusTextView[1], View.ALPHA, 1.0f)); + } + } else { + if 
(newTag == null) { + statusTextView[0].setVisibility(VISIBLE); + statusTextView[1].setVisibility(INVISIBLE); + statusTextView[0].setTranslationY(0); + statusTextView[0].setAlpha(1.0f); + statusTextView[1].setTranslationY(-AndroidUtilities.dp(2)); + statusTextView[1].setAlpha(0.0f); + } else { + statusTextView[0].setVisibility(INVISIBLE); + statusTextView[1].setVisibility(VISIBLE); + statusTextView[0].setTranslationY(AndroidUtilities.dp(2)); + statusTextView[0].setAlpha(0.0f); + statusTextView[1].setTranslationY(0); + statusTextView[1].setAlpha(1.0f); + } + } + statusTextView[0].setTag(newTag); + } + avatarWavesDrawable.setMuted(newTag == null, animated); + if (animators != null) { + animatorSet = new AnimatorSet(); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (newTag == null) { + statusTextView[0].setVisibility(VISIBLE); + statusTextView[1].setVisibility(INVISIBLE); + statusTextView[0].setTranslationY(0); + statusTextView[0].setAlpha(1.0f); + statusTextView[1].setTranslationY(-AndroidUtilities.dp(2)); + statusTextView[1].setAlpha(0.0f); + } else { + statusTextView[0].setVisibility(INVISIBLE); + statusTextView[1].setVisibility(VISIBLE); + statusTextView[0].setTranslationY(AndroidUtilities.dp(2)); + statusTextView[0].setAlpha(0.0f); + statusTextView[1].setTranslationY(0); + statusTextView[1].setAlpha(1.0f); + } + animatorSet = null; + } + }); + animatorSet.playTogether(animators); + animatorSet.setDuration(180); + animatorSet.start(); + } + + if (!animated || lastMuted != newMuted) { + boolean changed = muteDrawable.setCustomEndFrame(newMuted ? 
13 : 24); + if (animated) { + if (changed) { + if (newMuted) { + muteDrawable.setCurrentFrame(0); + } else { + muteDrawable.setCurrentFrame(12); + } + } + muteButton.playAnimation(); + } else { + muteDrawable.setCurrentFrame(muteDrawable.getCustomEndFrame() - 1, false, true); + muteButton.invalidate(); + } + lastMuted = newMuted; + } + if (!isSpeaking) { + avatarWavesDrawable.setAmplitude(0); + } + avatarWavesDrawable.setShowWaves(isSpeaking, this); + } + + @Override + public boolean hasOverlappingRendering() { + return false; + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (needDivider) { + canvas.drawLine(LocaleController.isRTL ? 0 : AndroidUtilities.dp(68), getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? AndroidUtilities.dp(68) : 0), getMeasuredHeight() - 1, dividerPaint); + } + int cx = avatarImageView.getLeft() + avatarImageView.getMeasuredWidth() / 2; + int cy = avatarImageView.getTop() + avatarImageView.getMeasuredHeight() / 2; + + avatarWavesDrawable.update(); + avatarWavesDrawable.draw(canvas, cx, cy, this); + + avatarImageView.setScaleX(avatarWavesDrawable.getAvatarScale()); + avatarImageView.setScaleY(avatarWavesDrawable.getAvatarScale()); + super.dispatchDraw(canvas); + } + + public static class AvatarWavesDrawable { + + float amplitude; + float animateToAmplitude; + float animateAmplitudeDiff; + float wavesEnter = 0f; + boolean showWaves; + + private BlobDrawable blobDrawable; + private BlobDrawable blobDrawable2; + + private boolean hasCustomColor; + private boolean isMuted; + private float progressToMuted = 0; + + boolean invalidateColor = true; + + public AvatarWavesDrawable(int minRadius, int maxRadius) { + blobDrawable = new BlobDrawable(6); + blobDrawable2 = new BlobDrawable(8); + blobDrawable.minRadius = minRadius; + blobDrawable.maxRadius = maxRadius; + blobDrawable2.minRadius = minRadius; + blobDrawable2.maxRadius = maxRadius; + blobDrawable.generateBlob(); + blobDrawable2.generateBlob(); + 
blobDrawable.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_speakingText), (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + blobDrawable2.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_speakingText), (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + } + + public void update() { + if (animateToAmplitude != amplitude) { + amplitude += animateAmplitudeDiff * 16; + if (animateAmplitudeDiff > 0) { + if (amplitude > animateToAmplitude) { + amplitude = animateToAmplitude; + } + } else { + if (amplitude < animateToAmplitude) { + amplitude = animateToAmplitude; + } + } + } + + if (showWaves && wavesEnter != 1f) { + wavesEnter += 16 / 350f; + if (wavesEnter > 1f) { + wavesEnter = 1f; + } + } else if (!showWaves && wavesEnter != 0) { + wavesEnter -= 16 / 350f; + if (wavesEnter < 0f) { + wavesEnter = 0f; + } + } + } + + public void draw(Canvas canvas, float cx, float cy, View parentView) { + float scaleBlob = 0.8f + 0.4f * amplitude; + if (showWaves || wavesEnter != 0) { + canvas.save(); + float wavesEnter = CubicBezierInterpolator.DEFAULT.getInterpolation(this.wavesEnter); + + canvas.scale(scaleBlob * wavesEnter, scaleBlob * wavesEnter, cx, cy); + + if (!hasCustomColor) { + if (isMuted && progressToMuted != 1f) { + progressToMuted += 16 / 150f; + if (progressToMuted > 1f) { + progressToMuted = 1f; + } + invalidateColor = true; + } else if (!isMuted && progressToMuted != 0f) { + progressToMuted -= 16 / 150f; + if (progressToMuted < 0f) { + progressToMuted = 0f; + } + invalidateColor = true; + } + + if (invalidateColor) { + int color = ColorUtils.blendARGB(Theme.getColor(Theme.key_voipgroup_speakingText), Theme.getColor(Theme.key_voipgroup_listeningText), progressToMuted); + blobDrawable.paint.setColor(ColorUtils.setAlphaComponent(color, (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + } + } + + blobDrawable.update(amplitude, 1f); + blobDrawable.draw(cx, cy, canvas, blobDrawable.paint); + + blobDrawable2.update(amplitude, 
1f); + blobDrawable2.draw(cx, cy, canvas, blobDrawable.paint); + canvas.restore(); + } + + if (wavesEnter != 0) { + parentView.invalidate(); + } + + } + + public float getAvatarScale() { + float scaleAvatar = 0.9f + 0.2f * amplitude; + float wavesEnter = CubicBezierInterpolator.EASE_OUT.getInterpolation(this.wavesEnter); + return scaleAvatar * wavesEnter + 1f * (1f - wavesEnter); + } + + public void setShowWaves(boolean show, View parenView) { + if (showWaves != show) { + parenView.invalidate(); + } + showWaves = show; + } + + public void setAmplitude(double value) { + float amplitude = (float) value / 80f; + if (!showWaves) { + amplitude = 0; + } + if (amplitude > 1f) { + amplitude = 1f; + } else if (amplitude < 0) { + amplitude = 0; + } + animateToAmplitude = amplitude; + animateAmplitudeDiff = (animateToAmplitude - this.amplitude) / 200; + } + + public void setColor(int color) { + hasCustomColor = true; + blobDrawable.paint.setColor(color); + } + + public void setMuted(boolean isMuted, boolean animated) { + this.isMuted = isMuted; + if (!animated) { + progressToMuted = isMuted ? 1f : 0f; + } + invalidateColor = true; + } + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + if (info.isEnabled() && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, participant.muted && !participant.can_self_unmute ? 
LocaleController.getString("VoipUnmute", R.string.VoipUnmute) : LocaleController.getString("VoipMute", R.string.VoipMute))); + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatTextCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatTextCell.java index e8409f2b3..bd1dbefbe 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatTextCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatTextCell.java @@ -27,6 +27,7 @@ public class ManageChatTextCell extends FrameLayout { private SimpleTextView valueTextView; private ImageView imageView; private boolean divider; + private String dividerColor; public ManageChatTextCell(Context context) { super(context); @@ -57,6 +58,10 @@ public class ManageChatTextCell extends FrameLayout { return valueTextView; } + public void setDividerColor(String key) { + dividerColor = key; + } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int width = MeasureSpec.getSize(widthMeasureSpec); @@ -99,6 +104,10 @@ public class ManageChatTextCell extends FrameLayout { } public void setText(String text, String value, int resId, boolean needDivider) { + setText(text, value, resId, 5, needDivider); + } + + public void setText(String text, String value, int resId, int paddingTop, boolean needDivider) { textView.setText(text); if (value != null) { valueTextView.setText(value); @@ -106,7 +115,7 @@ public class ManageChatTextCell extends FrameLayout { } else { valueTextView.setVisibility(INVISIBLE); } - imageView.setPadding(0, AndroidUtilities.dp(5), 0, 0); + imageView.setPadding(0, AndroidUtilities.dp(paddingTop), 0, 0); imageView.setImageResource(resId); divider = needDivider; setWillNotDraw(!divider); @@ -115,7 +124,10 @@ public class ManageChatTextCell extends FrameLayout { @Override protected void onDraw(Canvas canvas) { if (divider) { - canvas.drawLine(AndroidUtilities.dp(71), getMeasuredHeight() - 1, getMeasuredWidth(), 
getMeasuredHeight() - 1, Theme.dividerPaint); + if (dividerColor != null) { + Theme.dividerExtraPaint.setColor(Theme.getColor(dividerColor)); + } + canvas.drawLine(AndroidUtilities.dp(71), getMeasuredHeight() - 1, getMeasuredWidth(), getMeasuredHeight() - 1, dividerColor != null ? Theme.dividerExtraPaint : Theme.dividerPaint); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatUserCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatUserCell.java index ad135e216..c08857730 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatUserCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ManageChatUserCell.java @@ -39,6 +39,7 @@ public class ManageChatUserCell extends FrameLayout { private SimpleTextView nameTextView; private SimpleTextView statusTextView; private ImageView optionsButton; + private ImageView customImageView; private AvatarDrawable avatarDrawable; private Object currentObject; @@ -60,6 +61,8 @@ public class ManageChatUserCell extends FrameLayout { private int currentAccount = UserConfig.selectedAccount; + private String dividerColor; + private ManageChatUserCellDelegate delegate; public interface ManageChatUserCellDelegate { @@ -99,12 +102,27 @@ public class ManageChatUserCell extends FrameLayout { optionsButton.setImageResource(R.drawable.ic_ab_other); optionsButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_stickers_menu), PorterDuff.Mode.MULTIPLY)); optionsButton.setScaleType(ImageView.ScaleType.CENTER); - addView(optionsButton, LayoutHelper.createFrame(52, 64, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.TOP)); + addView(optionsButton, LayoutHelper.createFrame(60, 64, (LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT) | Gravity.TOP)); optionsButton.setOnClickListener(v -> delegate.onOptionsButtonCheck(ManageChatUserCell.this, true)); optionsButton.setContentDescription(LocaleController.getString("AccDescrUserOptions", R.string.AccDescrUserOptions)); } } + public void setCustomRightImage(int resId) { + customImageView = new ImageView(getContext()); + customImageView.setImageResource(resId); + customImageView.setScaleType(ImageView.ScaleType.CENTER); + customImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_voipgroup_mutedIconUnscrolled), PorterDuff.Mode.MULTIPLY)); + addView(customImageView, LayoutHelper.createFrame(52, 64, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.TOP)); + } + + public void setCustomImageVisible(boolean visible) { + if (customImageView == null) { + return; + } + customImageView.setVisibility(visible ? VISIBLE : GONE); + } + public void setData(Object object, CharSequence name, CharSequence status, boolean divider) { if (object == null) { currrntStatus = null; @@ -123,6 +141,10 @@ public class ManageChatUserCell extends FrameLayout { optionsButton.setVisibility(visible ? VISIBLE : INVISIBLE); nameTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? (visible ? 46 : 28) : (68 + namePadding), status == null || status.length() > 0 ? 11.5f : 20.5f, LocaleController.isRTL ? (68 + namePadding) : (visible ? 46 : 28), 0)); statusTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? (visible ? 46 : 28) : (68 + namePadding), 34.5f, LocaleController.isRTL ? (68 + namePadding) : (visible ? 
46 : 28), 0)); + } else if (customImageView != null) { + boolean visible = customImageView.getVisibility() == VISIBLE; + nameTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? (visible ? 54 : 28) : (68 + namePadding), status == null || status.length() > 0 ? 11.5f : 20.5f, LocaleController.isRTL ? (68 + namePadding) : (visible ? 54 : 28), 0)); + statusTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? (visible ? 54 : 28) : (68 + namePadding), 34.5f, LocaleController.isRTL ? (68 + namePadding) : (visible ? 54 : 28), 0)); } needDivider = divider; setWillNotDraw(!needDivider); @@ -154,6 +176,14 @@ public class ManageChatUserCell extends FrameLayout { return avatarImageView.getImageReceiver().hasNotThumb(); } + public void setNameColor(int color) { + nameTextView.setTextColor(color); + } + + public void setDividerColor(String key) { + dividerColor = key; + } + public void update(int mask) { if (currentObject == null) { return; @@ -317,7 +347,10 @@ public class ManageChatUserCell extends FrameLayout { @Override protected void onDraw(Canvas canvas) { if (needDivider) { - canvas.drawLine(LocaleController.isRTL ? 0 : AndroidUtilities.dp(68), getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? AndroidUtilities.dp(68) : 0), getMeasuredHeight() - 1, Theme.dividerPaint); + if (dividerColor != null) { + Theme.dividerExtraPaint.setColor(Theme.getColor(dividerColor)); + } + canvas.drawLine(LocaleController.isRTL ? 0 : AndroidUtilities.dp(68), getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? AndroidUtilities.dp(68) : 0), getMeasuredHeight() - 1, dividerColor != null ? 
Theme.dividerExtraPaint : Theme.dividerPaint); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/NotificationsCheckCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/NotificationsCheckCell.java index 0b93add3b..290a578dd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/NotificationsCheckCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/NotificationsCheckCell.java @@ -64,7 +64,7 @@ public class NotificationsCheckCell extends FrameLayout { textView.setSingleLine(true); textView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL); textView.setEllipsize(TextUtils.TruncateAt.END); - addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 80 : (reorder ? 64 : 23), 13 + (currentHeight - 70) / 2, LocaleController.isRTL ? (reorder ? 64 : 23) : 80, 0)); + addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 80 : (reorder ? 64 : padding), 13 + (currentHeight - 70) / 2, LocaleController.isRTL ? (reorder ? 64 : padding) : 80, 0)); valueTextView = new TextView(context); valueTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText2)); @@ -75,7 +75,7 @@ public class NotificationsCheckCell extends FrameLayout { valueTextView.setSingleLine(true); valueTextView.setPadding(0, 0, 0, 0); valueTextView.setEllipsize(TextUtils.TruncateAt.END); - addView(valueTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 80 : (reorder ? 64 : 23), 38 + (currentHeight - 70) / 2, LocaleController.isRTL ? (reorder ? 
64 : 23) : 80, 0)); + addView(valueTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 80 : (reorder ? 64 : padding), 38 + (currentHeight - 70) / 2, LocaleController.isRTL ? (reorder ? 64 : padding) : 80, 0)); checkBox = new Switch(context); checkBox.setColors(Theme.key_switchTrack, Theme.key_switchTrackChecked, Theme.key_windowBackgroundWhite, Theme.key_windowBackgroundWhite); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StatisticPostInfoCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StatisticPostInfoCell.java index 5ad6cfc56..e66fd1887 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StatisticPostInfoCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StatisticPostInfoCell.java @@ -3,6 +3,7 @@ package org.telegram.ui.Cells; import android.content.Context; import android.graphics.Color; import android.text.TextUtils; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.widget.FrameLayout; @@ -45,13 +46,13 @@ public class StatisticPostInfoCell extends FrameLayout { message = new TextView(context); message.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - message.setTextSize(15); + message.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); message.setTextColor(Color.BLACK); message.setLines(1); message.setEllipsize(TextUtils.TruncateAt.END); views = new TextView(context); - views.setTextSize(15); + views.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); views.setTextColor(Color.BLACK); linearLayout.addView(message, LayoutHelper.createLinear(0, LayoutHelper.WRAP_CONTENT, 1f, Gravity.NO_GRAVITY, 0, 0, 16, 0)); @@ -59,13 +60,13 @@ public class StatisticPostInfoCell extends FrameLayout { contentLayout.addView(linearLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.START | 
Gravity.TOP, 0, 8, 0, 0)); date = new TextView(context); - date.setTextSize(13); + date.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); date.setTextColor(Color.BLACK); date.setLines(1); date.setEllipsize(TextUtils.TruncateAt.END); shares = new TextView(context); - shares.setTextSize(13); + shares.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); shares.setTextColor(Color.BLACK); linearLayout = new LinearLayout(context); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java index 92f0e70b5..d37ca70ce 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java @@ -20,6 +20,7 @@ import android.view.animation.AccelerateInterpolator; import android.widget.FrameLayout; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; @@ -29,6 +30,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.messenger.SvgHelper; public class StickerCell extends FrameLayout { @@ -78,14 +80,25 @@ public class StickerCell extends FrameLayout { parentObject = parent; if (document != null) { TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document, Theme.key_windowBackgroundGray, 1.0f); if (MessageObject.canAutoplayAnimatedSticker(document)) { - if (thumb != null) { + if (svgThumb != null) { + imageView.setImage(ImageLocation.getForDocument(document), "80_80", null, svgThumb, parentObject); + } else if (thumb != null) { imageView.setImage(ImageLocation.getForDocument(document), "80_80", 
ImageLocation.getForDocument(thumb, document), null, 0, parentObject); } else { imageView.setImage(ImageLocation.getForDocument(document), "80_80", null, null, parentObject); } } else { - imageView.setImage(ImageLocation.getForDocument(thumb, document), null, "webp", null, parentObject); + if (svgThumb != null) { + if (thumb != null) { + imageView.setImage(ImageLocation.getForDocument(thumb, document), null, "webp", svgThumb, parentObject); + } else { + imageView.setImage(ImageLocation.getForDocument(document), null, "webp", svgThumb, parentObject); + } + } else { + imageView.setImage(ImageLocation.getForDocument(thumb, document), null, "webp", null, parentObject); + } } } sticker = document; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java index cae26b0a4..9b791f26a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java @@ -19,6 +19,7 @@ import android.widget.FrameLayout; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLoader; @@ -28,8 +29,10 @@ import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.messenger.SvgHelper; public class StickerEmojiCell extends FrameLayout { @@ -46,10 +49,13 @@ public class StickerEmojiCell extends FrameLayout { private boolean recent; private static AccelerateInterpolator interpolator = new AccelerateInterpolator(0.5f); private int currentAccount = 
UserConfig.selectedAccount; + private boolean fromEmojiPanel; - public StickerEmojiCell(Context context) { + public StickerEmojiCell(Context context, boolean isEmojiPanel) { super(context); + fromEmojiPanel = isEmojiPanel; + imageView = new BackupImageView(context); imageView.setAspectFit(true); imageView.setLayerNum(1); @@ -86,14 +92,23 @@ public class StickerEmojiCell extends FrameLayout { sticker = document; parentObject = parent; TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document, fromEmojiPanel ? Theme.key_emptyListPlaceholder : Theme.key_windowBackgroundGray, fromEmojiPanel ? 0.2f : 1.0f); if (MessageObject.canAutoplayAnimatedSticker(document)) { - if (thumb != null) { + if (svgThumb != null) { + imageView.setImage(ImageLocation.getForDocument(document), "80_80", null, svgThumb, parentObject); + } else if (thumb != null) { imageView.setImage(ImageLocation.getForDocument(document), "80_80", ImageLocation.getForDocument(thumb, document), null, 0, parentObject); } else { imageView.setImage(ImageLocation.getForDocument(document), "80_80", null, null, parentObject); } } else { - if (thumb != null) { + if (svgThumb != null) { + if (thumb != null) { + imageView.setImage(ImageLocation.getForDocument(thumb, document), null, "webp", svgThumb, parentObject); + } else { + imageView.setImage(ImageLocation.getForDocument(document), null, "webp", svgThumb, parentObject); + } + } else if (thumb != null) { imageView.setImage(ImageLocation.getForDocument(thumb, document), null, "webp", null, parentObject); } else { imageView.setImage(ImageLocation.getForDocument(document), null, "webp", null, parentObject); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java index c435e2f52..85e70feb7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java @@ -24,11 +24,13 @@ import android.widget.ImageView; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; @@ -179,12 +181,11 @@ public class StickerSetCell extends FrameLayout { valueTextView.setText(LocaleController.formatPluralString("Stickers", documents.size())); TLRPC.Document sticker = documents.get(0); - TLObject object; - if (set.set.thumb instanceof TLRPC.TL_photoSize || set.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { - object = set.set.thumb; - } else { + TLObject object = FileLoader.getClosestPhotoSizeWithSize(set.set.thumbs, 90); + if (object == null) { object = sticker; } + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(set.set.thumbs, Theme.key_windowBackgroundGray, 1.0f); ImageLocation imageLocation; if (object instanceof TLRPC.Document) { @@ -196,11 +197,15 @@ public class StickerSetCell extends FrameLayout { } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { - imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + if (svgThumb != null) { + imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", svgThumb, 0, set); + } else { + imageView.setImage(ImageLocation.getForDocument(sticker), "50_50", imageLocation, null, 0, set); + } } else if (imageLocation != null && imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE) { - imageView.setImage(imageLocation, "50_50", "tgs", null, set); + imageView.setImage(imageLocation, 
"50_50", "tgs", svgThumb, set); } else { - imageView.setImage(imageLocation, "50_50", "webp", null, set); + imageView.setImage(imageLocation, "50_50", "webp", svgThumb, set); } } else { valueTextView.setText(LocaleController.formatPluralString("Stickers", 0)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChangeBioActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChangeBioActivity.java index 35750699c..6e25aac7b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChangeBioActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChangeBioActivity.java @@ -41,8 +41,10 @@ import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.CodepointsLengthInputFilter; import org.telegram.ui.Components.EditTextBoldCursor; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.NumberTextView; import java.util.ArrayList; @@ -50,7 +52,7 @@ public class ChangeBioActivity extends BaseFragment { private EditTextBoldCursor firstNameField; private View doneButton; - private TextView checkTextView; + private NumberTextView checkTextView; private TextView helpTextView; private final static int done_button = 1; @@ -95,7 +97,7 @@ public class ChangeBioActivity extends BaseFragment { firstNameField.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_CAP_SENTENCES | InputType.TYPE_TEXT_FLAG_MULTI_LINE); firstNameField.setImeOptions(EditorInfo.IME_ACTION_DONE); InputFilter[] inputFilters = new InputFilter[1]; - inputFilters[0] = new InputFilter.LengthFilter(70) { + inputFilters[0] = new CodepointsLengthInputFilter(70) { @Override public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) { if (source != null && TextUtils.indexOf(source, '\n') != -1) { @@ -139,18 +141,19 @@ public class ChangeBioActivity extends 
BaseFragment { @Override public void afterTextChanged(Editable s) { - checkTextView.setText(String.format("%d", (70 - firstNameField.length()))); + checkTextView.setNumber(70 - Character.codePointCount(s, 0, s.length()), true); } }); fieldContainer.addView(firstNameField, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 0, 4, 0)); - checkTextView = new TextView(context); - checkTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); - checkTextView.setText(String.format("%d", 70)); + checkTextView = new NumberTextView(context); + checkTextView.setCenterAlign(true); + checkTextView.setTextSize(15); + checkTextView.setNumber(70, false); checkTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText4)); checkTextView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); - fieldContainer.addView(checkTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT, 0, 4, 4, 0)); + fieldContainer.addView(checkTextView, LayoutHelper.createFrame(20, 20, LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT, 0, 4, 4, 0)); helpTextView = new TextView(context); helpTextView.setFocusable(true); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChangeUsernameActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChangeUsernameActivity.java index 2c2b52989..855fffc1b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChangeUsernameActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChangeUsernameActivity.java @@ -28,7 +28,6 @@ import android.view.View; import android.view.inputmethod.EditorInfo; import android.widget.LinearLayout; import android.widget.TextView; -import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; @@ -48,6 +47,7 @@ import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.EditTextBoldCursor; import org.telegram.ui.Components.LayoutHelper; @@ -89,7 +89,9 @@ public class ChangeUsernameActivity extends BaseFragment { android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", url); clipboard.setPrimaryClip(clip); - Toast.makeText(getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + if (BulletinFactory.canShowBulletin(ChangeUsernameActivity.this)) { + BulletinFactory.createCopyLinkBulletin(ChangeUsernameActivity.this).show(); + } } catch (Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java index 4f9105bfb..d0d43f4ca 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java @@ -340,7 +340,8 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio continue; } if (event.action instanceof TLRPC.TL_channelAdminLogEventActionParticipantToggleAdmin) { - if (event.action.prev_participant instanceof TLRPC.TL_channelParticipantCreator && !(event.action.new_participant instanceof TLRPC.TL_channelParticipantCreator)) { + TLRPC.TL_channelAdminLogEventActionParticipantToggleAdmin action = (TLRPC.TL_channelAdminLogEventActionParticipantToggleAdmin) event.action; + if (action.prev_participant instanceof TLRPC.TL_channelParticipantCreator && !(action.new_participant instanceof TLRPC.TL_channelParticipantCreator)) { continue; } } @@ -1016,9 +1017,10 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio } if (type == 1) { if (selectedObject.currentEvent != null && selectedObject.currentEvent.action instanceof TLRPC.TL_channelAdminLogEventActionChangeStickerSet) { - TLRPC.InputStickerSet stickerSet = selectedObject.currentEvent.action.new_stickerset; + TLRPC.TL_channelAdminLogEventActionChangeStickerSet action = (TLRPC.TL_channelAdminLogEventActionChangeStickerSet) selectedObject.currentEvent.action; + TLRPC.InputStickerSet stickerSet = action.new_stickerset; if (stickerSet == null || stickerSet instanceof TLRPC.TL_inputStickerSetEmpty) { - stickerSet = selectedObject.currentEvent.action.prev_stickerset; + stickerSet = action.prev_stickerset; } if (stickerSet != null) { showDialog(new StickersAlert(getParentActivity(), ChannelAdminLogActivity.this, stickerSet, null, null)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java index cc6818976..c4d32f869 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java @@ -33,7 +33,6 @@ import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ScrollView; import android.widget.TextView; -import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; @@ -44,7 +43,6 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; -import org.telegram.messenger.SharedConfig; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBar; @@ -61,6 +59,7 @@ import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextBlockCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Components.AvatarDrawable; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.EditTextEmoji; import org.telegram.ui.Components.ImageUpdater; import org.telegram.ui.Components.BackupImageView; @@ -699,7 +698,9 @@ public class ChannelCreateActivity extends BaseFragment implements NotificationC android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", invite.link); clipboard.setPrimaryClip(clip); - Toast.makeText(getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + if (BulletinFactory.canShowBulletin(ChannelCreateActivity.this)) { + BulletinFactory.createCopyLinkBulletin(ChannelCreateActivity.this).show(); + } } catch (Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java 
b/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java index c4854bad5..a264c6a6b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java @@ -6,6 +6,7 @@ import android.graphics.Typeface; import android.graphics.drawable.Drawable; import android.text.TextPaint; import android.text.TextUtils; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.widget.FrameLayout; @@ -46,24 +47,24 @@ public class ChartHeaderView extends FrameLayout { textMargin = (int) textPaint.measureText("00 MMM 0000 - 00 MMM 000"); title = new TextView(context); - title.setTextSize(15); + title.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); title.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); addView(title, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 16, 0, textMargin, 0)); back = new TextView(context); - back.setTextSize(15); + back.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); back.setTypeface(Typeface.DEFAULT_BOLD); back.setGravity(Gravity.START | Gravity.CENTER_VERTICAL); addView(back, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 8, 0, 8, 0)); dates = new TextView(context); - dates.setTextSize(13); + dates.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); dates.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); dates.setGravity(Gravity.END | Gravity.CENTER_VERTICAL); addView(dates, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.END | Gravity.CENTER_VERTICAL, 16, 0, 16, 0)); datesTmp = new TextView(context); - datesTmp.setTextSize(13); + datesTmp.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); datesTmp.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); 
datesTmp.setGravity(Gravity.END | Gravity.CENTER_VERTICAL); addView(datesTmp, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.END | Gravity.CENTER_VERTICAL, 16, 0, 16, 0)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/LegendSignatureView.java b/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/LegendSignatureView.java index 4a6af0b36..c82918bfb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/LegendSignatureView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/LegendSignatureView.java @@ -9,6 +9,7 @@ import android.transition.ChangeBounds; import android.transition.Fade; import android.transition.TransitionManager; import android.transition.TransitionSet; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.widget.FrameLayout; @@ -77,10 +78,10 @@ public class LegendSignatureView extends FrameLayout { content.setOrientation(LinearLayout.VERTICAL); time = new TextView(context); - time.setTextSize(14); + time.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); time.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); hourTime = new TextView(context); - hourTime.setTextSize(14); + hourTime.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); hourTime.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); chevron = new ImageView(context); @@ -262,7 +263,7 @@ public class LegendSignatureView extends FrameLayout { percentage.getLayoutParams().width = AndroidUtilities.dp(36); percentage.setVisibility(GONE); percentage.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - percentage.setTextSize(13); + percentage.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); } root.addView(signature = new TextView(getContext())); @@ -273,10 +274,10 @@ public class LegendSignatureView extends FrameLayout { value.setGravity(Gravity.END); 
value.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - value.setTextSize(13); + value.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); value.setMinEms(4); value.setMaxEms(4); - signature.setTextSize(13); + signature.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java index eca004c57..1c1b0df4c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java @@ -20,6 +20,7 @@ import android.app.DatePickerDialog; import android.app.Dialog; import android.content.ClipData; import android.content.Context; +import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; @@ -79,6 +80,7 @@ import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ScrollView; +import android.widget.Space; import android.widget.TextView; import android.widget.Toast; @@ -94,6 +96,7 @@ import androidx.recyclerview.widget.RecyclerView; import com.google.android.exoplayer2.ui.AspectRatioFrameLayout; import org.telegram.PhoneFormat.PhoneFormat; +import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildConfig; @@ -123,6 +126,7 @@ import org.telegram.messenger.Utilities; import org.telegram.messenger.VideoEditedInfo; import org.telegram.messenger.browser.Browser; import org.telegram.messenger.support.SparseLongArray; +import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; @@ -205,6 +209,7 @@ import org.telegram.ui.Components.URLSpanNoUnderline; import 
org.telegram.ui.Components.URLSpanReplacement; import org.telegram.ui.Components.URLSpanUserMention; import org.telegram.ui.Components.UndoView; +import org.telegram.ui.Components.ViewHelper; import org.telegram.ui.Components.voip.VoIPHelper; import java.io.BufferedWriter; @@ -229,6 +234,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not protected TLRPC.EncryptedChat currentEncryptedChat; private boolean userBlocked; + private int chatInviterId; + private ArrayList chatMessageCellsCache = new ArrayList<>(); private HashMap alredyPlayedStickers = new HashMap<>(); @@ -239,6 +246,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private View progressView2; private FrameLayout bottomOverlay; protected ChatActivityEnterView chatActivityEnterView; + private ChatActivityEnterTopView chatActivityEnterTopView; private int chatActivityEnterViewAnimateFromTop; private boolean chatActivityEnterViewAnimateBeforeSending; private View timeItem2; @@ -333,6 +341,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private int pinnedCounterTextViewX; private AnimatorSet[] pinnedNextAnimation = new AnimatorSet[2]; private ImageView closePinned; + private RadialProgressView pinnedProgress; private ImageView pinnedListButton; private AnimatorSet pinnedListAnimator; private FrameLayout alertView; @@ -348,7 +357,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private ImageView searchDownButton; private SimpleTextView searchCountText; private ChatActionCell floatingDateView; - private ChatActionCell distanseTopView; + private ChatActionCell infoTopView; private int hideDateDelay = 500; private InstantCameraView instantCameraView; private View overlayView; @@ -547,6 +556,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private int createUnreadMessageAfterId; private boolean createUnreadMessageAfterIdLoading; 
private boolean loadingFromOldPosition; + private float alertViewEnterProgress; private boolean first = true; private int first_unread_id; @@ -559,6 +569,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private String currentPicturePath; + private ChatObject.Call groupCall; + private boolean createGroupCall; protected TLRPC.ChatFull chatInfo; protected TLRPC.UserFull userInfo; @@ -622,13 +634,14 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private int distanceToPeer; private int chatListViewPaddingTop; + private int chatListViewPaddingVisibleOffset; private int contentPaddingTop; private float contentPanTranslation; private float floatingDateViewOffset; private float topChatPanelViewOffset; private float pinnedMessageEnterOffset; - private float distanceTopViewOffset; + private float topViewOffset; protected TLRPC.Document preloadedGreetingsSticker; private float bottomPanelTranslationY; private boolean invalidateChatListViewTopPadding; @@ -657,6 +670,15 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not NotificationCenter.didApplyNewTheme }; + private final DialogInterface.OnCancelListener postponedScrollCancelListener = dialog -> { + postponedScrollIsCanceled = true; + postponedScrollMessageId = 0; + nextScrollToMessageId = 0; + forceNextPinnedMessageId = 0; + invalidateMessagesVisiblePart(); + showPinnedProgress(false); + }; + private NotificationCenter.PostponeNotificationCallback postponeNotificationsWhileLoadingCallback = new NotificationCenter.PostponeNotificationCallback() { @Override public boolean needPostpone(int id, int currentAccount, Object[] args) { @@ -672,11 +694,17 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private int chatEmojiViewPadding; private int fixedKeyboardHeight = -1; private boolean invalidateMessagesVisiblePart; + private boolean scrollByTouch; + private ChatActionCell infoTopView1; 
public int getChatListViewPadding() { return chatListViewPaddingTop; } + public FragmentContextView getFragmentContextView() { + return fragmentContextView; + } + private interface ChatActivityDelegate { void openReplyMessage(int mid); @@ -909,60 +937,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public PhotoViewer.PlaceProviderObject getPlaceForPhoto(MessageObject messageObject, TLRPC.FileLocation fileLocation, int index, boolean needPreview) { - int count = chatListView.getChildCount(); - - for (int a = 0; a < count; a++) { - ImageReceiver imageReceiver = null; - View view = chatListView.getChildAt(a); - if (view instanceof ChatMessageCell) { - if (messageObject != null) { - ChatMessageCell cell = (ChatMessageCell) view; - MessageObject message = cell.getMessageObject(); - if (message != null && message.getId() == messageObject.getId()) { - imageReceiver = cell.getPhotoImage(); - } - } - } else if (view instanceof ChatActionCell) { - ChatActionCell cell = (ChatActionCell) view; - MessageObject message = cell.getMessageObject(); - if (message != null) { - if (messageObject != null) { - if (message.getId() == messageObject.getId()) { - imageReceiver = cell.getPhotoImage(); - } - } else if (fileLocation != null && message.photoThumbs != null) { - for (int b = 0; b < message.photoThumbs.size(); b++) { - TLRPC.PhotoSize photoSize = message.photoThumbs.get(b); - if (photoSize.location != null && photoSize.location.volume_id == fileLocation.volume_id && photoSize.location.local_id == fileLocation.local_id) { - imageReceiver = cell.getPhotoImage(); - break; - } - } - } - } - } - - if (imageReceiver != null) { - int[] coords = new int[2]; - view.getLocationInWindow(coords); - PhotoViewer.PlaceProviderObject object = new PhotoViewer.PlaceProviderObject(); - object.viewX = coords[0]; - object.viewY = coords[1] - (Build.VERSION.SDK_INT >= 21 ? 
0 : AndroidUtilities.statusBarHeight); - object.parentView = chatListView; - object.animatingImageView = !SharedConfig.smoothKeyboard && pagedownButton != null && pagedownButton.getTag() != null && view instanceof ChatMessageCell ? animatingImageView : null; - object.imageReceiver = imageReceiver; - if (needPreview) { - object.thumb = imageReceiver.getBitmapSafe(); - } - object.radius = imageReceiver.getRoundRadius(); - if (view instanceof ChatActionCell && currentChat != null) { - object.dialogId = -currentChat.id; - } - object.clipTopAddition = chatListViewPaddingTop - AndroidUtilities.dp(4); - return object; - } - } - return null; + return ChatActivity.this.getPlaceForPhoto(messageObject, fileLocation, needPreview, false); } }; @@ -1254,6 +1229,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not getNotificationCenter().addObserver(this, NotificationCenter.commentsRead); getNotificationCenter().addObserver(this, NotificationCenter.changeRepliesCounter); getNotificationCenter().addObserver(this, NotificationCenter.messagesRead); + getNotificationCenter().addObserver(this, NotificationCenter.didLoadChatInviter); + getNotificationCenter().addObserver(this, NotificationCenter.groupCallUpdated); } else { getNotificationCenter().addObserver(this, NotificationCenter.threadMessagesRead); } @@ -1362,9 +1339,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not boolean loadInfo = false; if (currentChat != null) { chatInfo = getMessagesController().getChatFull(currentChat.id); + groupCall = getMessagesController().getGroupCall(currentChat.id, true); if (currentChat.megagroup && !getMessagesController().isChannelAdminsLoaded(currentChat.id)) { getMessagesController().loadChannelAdmins(currentChat.id, true); } + fillInviterId(false); if (chatMode != MODE_PINNED) { getMessagesStorage().loadChatInfo(currentChat.id, ChatObject.isChannel(currentChat), null, true, false, startLoadFromMessageId); } @@ -1461,6 
+1440,33 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return true; } + private void fillInviterId(boolean load) { + if (currentChat == null || chatInfo == null || ChatObject.isNotInChat(currentChat) || currentChat.creator) { + return; + } + if (chatInfo.inviterId != 0) { + chatInviterId = chatInfo.inviterId; + return; + } + if (chatInfo.participants != null) { + if (chatInfo.participants.self_participant != null) { + chatInviterId = chatInfo.participants.self_participant.inviter_id; + return; + } + int selfId = getUserConfig().getClientUserId(); + for (int a = 0, N = chatInfo.participants.participants.size(); a < N; a++) { + TLRPC.ChatParticipant participant = chatInfo.participants.participants.get(a); + if (participant.user_id == selfId) { + chatInviterId = participant.inviter_id; + return; + } + } + } + if (load && chatInviterId == 0) { + getMessagesController().checkChatInviter(currentChat.id, false); + } + } + private void hideUndoViews() { if (undoView != null) { undoView.hide(true, 0); @@ -1513,6 +1519,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not getNotificationCenter().removeObserver(this, NotificationCenter.messageReceivedByAck); getNotificationCenter().removeObserver(this, NotificationCenter.messageSendError); getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.didLoadChatInviter); + getNotificationCenter().removeObserver(this, NotificationCenter.groupCallUpdated); getNotificationCenter().removeObserver(this, NotificationCenter.encryptedChatUpdated); getNotificationCenter().removeObserver(this, NotificationCenter.messagesReadEncrypted); getNotificationCenter().removeObserver(this, NotificationCenter.removeAllMessagesFromDialog); @@ -1616,7 +1624,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not selectedMessagesCanStarIds[a].clear(); } 
scheduledOrNoSoundHint = null; - distanseTopView = null; + infoTopView = null; aspectRatioFrameLayout = null; videoTextureView = null; searchAsListHint = null; @@ -2234,7 +2242,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (System.currentTimeMillis() - activityResumeTime < 250) { return false; } - if ((ChatActivity.this == actionBarLayout.getLastFragment() && actionBarLayout.isTransitionAnimationInProgress()) || actionBarLayout.isPreviewOpenAnimationInProgress() || isPaused || !openAnimationEnded || (chatAttachAlert != null && chatAttachAlert.isShowing())) { return false; } @@ -2326,6 +2333,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (scrimView != null && (child == pagedownButton || child == mentiondownButton || child == floatingDateView || child == fireworksOverlay)) { return false; } + if (child == fragmentContextView && fragmentContextView.getCurrentStyle() == 3) { + return true; + } if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) != null ) { boolean needBlur; if (((int) getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND)) == BlurBehindDrawable.STATIC_CONTENT) { @@ -2425,6 +2435,14 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not updateMessagesVisiblePart(false); } super.dispatchDraw(canvas); + if (fragmentContextView != null && fragmentContextView.getCurrentStyle() == 3) { + canvas.save(); + canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); + fragmentContextView.setDrawOverlay(true); + fragmentContextView.draw(canvas); + fragmentContextView.setDrawOverlay(false); + canvas.restore(); + } if (chatActivityEnterView != null) { if (chatActivityEnterView.pannelAniamationInProgress() && chatActivityEnterView.getEmojiPadding() < bottomPanelTranslationY) { int color = Theme.getColor(Theme.key_chat_emojiPanelBackground); @@ -2441,7 +2459,7 @@ public class ChatActivity extends BaseFragment implements 
NotificationCenter.Not } if (scrimView != null) { canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); - float listTop = chatListView.getY() + chatListViewPaddingTop - AndroidUtilities.dp(4); + float listTop = chatListView.getY() + chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); MessageObject.GroupedMessages scrimGroup; if (scrimView instanceof ChatMessageCell) { scrimGroup = ((ChatMessageCell) scrimView).getCurrentMessagesGroup(); @@ -2483,8 +2501,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not b += scrimGroup.transitionParams.cell.getTranslationY(); } - if (t < chatListViewPaddingTop - AndroidUtilities.dp(20)) { - t = chatListViewPaddingTop - AndroidUtilities.dp(20); + if (t < chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20)) { + t = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20); } if (b > chatListView.getMeasuredHeight() + AndroidUtilities.dp(20)) { @@ -2731,6 +2749,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (fixPaddingsInLayout) { globalIgnoreLayout = true; invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); fixPaddingsInLayout = false; chatListView.measure(MeasureSpec.makeMeasureSpec(chatListView.getMeasuredWidth(), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(chatListView.getMeasuredHeight(), MeasureSpec.EXACTLY)); globalIgnoreLayout = false; @@ -2841,13 +2860,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } else if (child == gifHintTextView || child == voiceHintTextView || child == mediaBanTooltip) { childTop -= inputFieldHeight; - } else if (child == chatListView) { + } else if (child == chatListView || child == floatingDateView || child == infoTopView) { if (!inPreviewMode) { childTop -= (inputFieldHeight - AndroidUtilities.dp(51)); } - childTop -= paddingBottom; - if 
(keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { childTop -= keyboardSize; } @@ -2903,6 +2920,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not public void setPadding(int left, int top, int right, int bottom) { contentPaddingTop = top; invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); } }; @@ -2923,7 +2941,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not greetingsViewContainer = new ChatGreetingsView(context, currentUser, distance, preloadedGreetingsSticker); greetingsViewContainer.setListener((sticker) -> { animatingDocuments.put(sticker, 0); - SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, dialog_id, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, true, 0); }); emptyViewContainer.addView(greetingsViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 68, 0, 68, 0)); } else if (currentEncryptedChat == null) { @@ -3020,6 +3038,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not emptyViewContainer.setTranslationY(translationY / 1.7f); } invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); } @Override @@ -3239,6 +3258,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public boolean onTouchEvent(MotionEvent e) { textSelectionHelper.checkSelectionCancel(e); + if (e != null && e.getAction() == MotionEvent.ACTION_DOWN) { + scrollByTouch = true; + } if (isFastScrollAnimationRunning()) { return false; } @@ -3331,7 +3353,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not int count = getChildCount(); canvas.save(); - canvas.clipRect(0, chatListViewPaddingTop - AndroidUtilities.dp(4), getMeasuredWidth(), getMeasuredHeight()); + canvas.clipRect(0, 
chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4), getMeasuredWidth(), getMeasuredHeight()); for (int a = 0; a < count; a++) { View child = getChildAt(a); @@ -3500,8 +3522,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not b += group.transitionParams.cell.getTranslationY(); } - if (t < chatListViewPaddingTop - AndroidUtilities.dp(20)) { - t = chatListViewPaddingTop - AndroidUtilities.dp(20); + if (t < chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20)) { + t = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20); } if (b > chatListView.getMeasuredHeight() + AndroidUtilities.dp(20)) { @@ -3541,7 +3563,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not int clipBottom = 0; boolean skipDraw = child == scrimView; ChatMessageCell cell; - float cilpTop = chatListViewPaddingTop - AndroidUtilities.dp(4); + float cilpTop = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); if (child.getY() > getMeasuredHeight() || child.getY() + child.getMeasuredHeight() < cilpTop) { skipDraw = true; @@ -3776,7 +3798,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (videoPlayerContainer != null && (message.isRoundVideo() || message.isVideo()) && MediaController.getInstance().isPlayingMessage(message)) { ImageReceiver imageReceiver = chatMessageCell.getPhotoImage(); float newX = imageReceiver.getImageX() + chatMessageCell.getX(); - float newY = fragmentView.getPaddingTop() + chatMessageCell.getY() + imageReceiver.getImageY() + chatListView.getTranslationY() + (inPreviewMode ? 
AndroidUtilities.statusBarHeight : 0); + float newY = chatMessageCell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); if (videoPlayerContainer.getTranslationX() != newX || videoPlayerContainer.getTranslationY() != newY) { videoPlayerContainer.setTranslationX(newX); videoPlayerContainer.setTranslationY(newY); @@ -4047,10 +4069,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not AndroidUtilities.cancelRunOnUIThread(finishRunnable); } AndroidUtilities.runOnUIThread(finishRunnable = () -> { - if (nextScrollToMessageId != 0) { - scrollToMessageId(nextScrollToMessageId, nextScrollFromMessageId, nextScrollSelect, nextScrollLoadIndex, nextScrollForce); - nextScrollToMessageId = 0; - } if (scrollAnimationIndex != -1) { getNotificationCenter().onAnimationFinish(scrollAnimationIndex); scrollAnimationIndex = -1; @@ -4158,6 +4176,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) { + scrollByTouch = false; LinearSmoothScrollerCustom linearSmoothScroller = new LinearSmoothScrollerCustom(recyclerView.getContext(), LinearSmoothScrollerCustom.POSITION_MIDDLE); linearSmoothScroller.setTargetPosition(position); startSmoothScroll(linearSmoothScroller); @@ -4310,6 +4329,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatListView.setOverScrollMode(RecyclerView.OVER_SCROLL_ALWAYS); textSelectionHelper.stopScrolling(); updateVisibleRows(); + scrollByTouch = false; } else { if (newState == RecyclerView.SCROLL_STATE_SETTLING) { wasManualScroll = true; @@ -4327,14 +4347,42 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } + @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { chatListView.invalidate(); scrollUp = dy < 0; - if (recyclerView.getScrollState() == 
RecyclerView.SCROLL_STATE_DRAGGING) { - if ((!scrollUp || forceScrollToFirst) && forceNextPinnedMessageId != 0) { - forceNextPinnedMessageId = 0; + int firstVisibleItem = chatLayoutManager.findFirstVisibleItemPosition(); + if (dy != 0 && (scrollByTouch && recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_SETTLING) || recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING) { + if (forceNextPinnedMessageId != 0) { + if ((!scrollUp || forceScrollToFirst)) { + forceNextPinnedMessageId = 0; + } else if (!chatListView.isFastScrollAnimationRunning() && firstVisibleItem != RecyclerView.NO_POSITION) { + int lastVisibleItem = chatLayoutManager.findLastVisibleItemPosition(); + MessageObject messageObject = null; + boolean foundForceNextPinnedView = false; + for (int i = lastVisibleItem; i >= firstVisibleItem; i--) { + View view = chatLayoutManager.findViewByPosition(i); + if (view instanceof ChatMessageCell) { + messageObject = ((ChatMessageCell) view).getMessageObject(); + } else if (view instanceof ChatActionCell) { + messageObject = ((ChatActionCell) view).getMessageObject(); + } + if (messageObject != null) { + if (forceNextPinnedMessageId == messageObject.getId()) { + foundForceNextPinnedView = true; + break; + } + } + } + if (!foundForceNextPinnedView && messageObject != null && messageObject.getId() < forceNextPinnedMessageId) { + forceNextPinnedMessageId = 0; + } + } + } + } + if (recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING) { forceScrollToFirst = false; if (!wasManualScroll && dy != 0) { wasManualScroll = true; @@ -4351,7 +4399,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not showFloatingDateView(true); } checkScrollForLoad(true); - int firstVisibleItem = chatLayoutManager.findFirstVisibleItemPosition(); if (firstVisibleItem != RecyclerView.NO_POSITION) { int totalItemCount = chatAdapter.getItemCount(); if (firstVisibleItem == 0 && forwardEndReached[0]) { @@ -4406,6 +4453,16 @@ public 
class ChatActivity extends BaseFragment implements NotificationCenter.Not progressView.addView(progressBar, LayoutHelper.createFrame(32, 32, Gravity.CENTER)); floatingDateView = new ChatActionCell(context) { + + @Override + public void setTranslationY(float translationY) { + if (getTranslationY() != translationY) { + invalidate(); + } + super.setTranslationY(translationY); + + } + @Override public boolean onInterceptTouchEvent(MotionEvent ev) { if (getAlpha() == 0 || actionBar.isActionModeShowed()) { @@ -4421,6 +4478,22 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } return super.onTouchEvent(event); } + + @Override + protected void onDraw(Canvas canvas) { + float clipTop = chatListView.getY() + chatListViewPaddingTop - getY(); + clipTop -= AndroidUtilities.dp(4); + if (clipTop > 0) { + if (clipTop < getMeasuredHeight()) { + canvas.save(); + canvas.clipRect(0, clipTop, getMeasuredWidth(), getMeasuredHeight()); + super.onDraw(canvas); + canvas.restore(); + } + } else { + super.onDraw(canvas); + } + } }; floatingDateView.setCustomDate((int) (System.currentTimeMillis() / 1000), false, false); floatingDateView.setAlpha(0.0f); @@ -4506,20 +4579,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not pinnedMessageView.getBackground().setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.MULTIPLY)); contentView.addView(pinnedMessageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.TOP | Gravity.LEFT)); pinnedMessageView.setOnClickListener(v -> { - if (chatListView.isFastScrollAnimationRunning()) { //TODO remove later - return; - } wasManualScroll = true; if (isThreadChat()) { - scrollToMessageId(threadMessageId, 0, true, 0, true); + scrollToMessageId(threadMessageId, 0, true, 0, true, 0); } else if (currentPinnedMessageId != 0) { - int currentPinned; - /*if (forceNextPinnedMessageId != 0 && 
chatListView.isFastScrollAnimationRunning()) { - currentPinned = findClosest(pinnedMessageIds, forceNextPinnedMessageId, currentPinnedMessageIndex); - } else {*/ - currentPinned = currentPinnedMessageId; - //} - scrollToMessageId(currentPinned, 0, true, 0, true); + int currentPinned = currentPinnedMessageId; + + int forceNextPinnedMessageId = 0; if (!pinnedMessageIds.isEmpty()) { if (currentPinned == pinnedMessageIds.get(pinnedMessageIds.size() - 1)) { forceNextPinnedMessageId = pinnedMessageIds.get(0) + 1; @@ -4528,8 +4594,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not forceNextPinnedMessageId = currentPinned - 1; forceScrollToFirst = false; } - updateMessagesVisiblePart(false); } + this.forceNextPinnedMessageId = forceNextPinnedMessageId; + if (!forceScrollToFirst) { + forceNextPinnedMessageId = -forceNextPinnedMessageId; + } + scrollToMessageId(currentPinned, 0, true, 0, true, forceNextPinnedMessageId); + updateMessagesVisiblePart(false); } }); @@ -4613,6 +4684,14 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not closePinned.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.MULTIPLY)); closePinned.setScaleType(ImageView.ScaleType.CENTER); closePinned.setContentDescription(LocaleController.getString("Close", R.string.Close)); + + pinnedProgress = new RadialProgressView(context); + pinnedProgress.setVisibility(View.GONE); + pinnedProgress.setSize(AndroidUtilities.dp(16)); + pinnedProgress.setStrokeWidth(2f); + pinnedProgress.setProgressColor(Theme.getColor(Theme.key_chat_topPanelLine)); + pinnedMessageView.addView(pinnedProgress, LayoutHelper.createFrame(36, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 2, 0)); + if (threadMessageId != 0) { closePinned.setVisibility(View.GONE); } @@ -4713,7 +4792,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not reportSpamButton = new TextView(context); 
reportSpamButton.setTextColor(Theme.getColor(Theme.key_chat_reportSpam)); if (Build.VERSION.SDK_INT >= 21) { - reportSpamButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_chat_reportSpam) & 0x19ffffff, 5)); + reportSpamButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_chat_reportSpam) & 0x19ffffff, 2)); } reportSpamButton.setTag(Theme.key_chat_reportSpam); reportSpamButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); @@ -4786,7 +4865,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not alertView = new FrameLayout(context); alertView.setTag(1); - alertView.setTranslationY(-AndroidUtilities.dp(50)); alertView.setVisibility(View.GONE); alertView.setBackgroundResource(R.drawable.blockpanel); alertView.getBackground().setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.MULTIPLY)); @@ -4816,9 +4894,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not wasManualScroll = true; textSelectionHelper.cancelTextSelectionRunnable(); if (createUnreadMessageAfterId != 0) { - scrollToMessageId(createUnreadMessageAfterId, 0, false, returnToLoadIndex, true); + scrollToMessageId(createUnreadMessageAfterId, 0, false, returnToLoadIndex, true, 0); } else if (returnToMessageId > 0) { - scrollToMessageId(returnToMessageId, 0, true, returnToLoadIndex, true); + scrollToMessageId(returnToMessageId, 0, true, returnToLoadIndex, true, 0); } else { scrollToLastMessage(); if (!pinnedMessageIds.isEmpty()) { @@ -4841,7 +4919,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not hasAllMentionsLocal = false; loadLastUnreadMention(); } else { - scrollToMessageId(param, 0, false, 0, true); + scrollToMessageId(param, 0, false, 0, true, 0); } }); } else { @@ -4878,7 +4956,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not object.messageOwner.media_unread = true; 
object.messageOwner.mentioned = true; } - scrollToMessageId(id, 0, false, 0, true); + scrollToMessageId(id, 0, false, 0, true, 0); } })); } @@ -4912,17 +4990,17 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return; } if (mentionLayoutManager.getReverseLayout()) { - int top = mentionListViewScrollOffsetY + AndroidUtilities.dp(2); - int bottom = top + Theme.chat_composeShadowDrawable.getIntrinsicHeight(); - Theme.chat_composeShadowDrawable.setBounds(0, bottom, getMeasuredWidth(), top); + float top = mentionListView.getY() + mentionListViewScrollOffsetY + AndroidUtilities.dp(2); + float bottom = top + Theme.chat_composeShadowDrawable.getIntrinsicHeight(); + Theme.chat_composeShadowDrawable.setBounds(0, (int) bottom, getMeasuredWidth(), (int) top); Theme.chat_composeShadowDrawable.draw(canvas); canvas.drawRect(0, 0, getMeasuredWidth(), top, Theme.chat_composeBackgroundPaint); } else { - int top; + int top = (int) mentionListView.getY(); if (mentionsAdapter.isBotContext() && mentionsAdapter.isMediaLayout() && mentionsAdapter.getBotContextSwitch() == null) { - top = mentionListViewScrollOffsetY - AndroidUtilities.dp(4); + top += mentionListViewScrollOffsetY - AndroidUtilities.dp(4); } else { - top = mentionListViewScrollOffsetY - AndroidUtilities.dp(2); + top += mentionListViewScrollOffsetY - AndroidUtilities.dp(2); } int bottom = top + Theme.chat_composeShadowDrawable.getIntrinsicHeight(); Theme.chat_composeShadowDrawable.setBounds(0, top, getMeasuredWidth(), bottom); @@ -5016,6 +5094,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not lastWidth = width; mentionListViewUpdateLayout(); } + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + mentionContainer.invalidate(); + } }; mentionListView.setOnTouchListener((v, event) -> ContentPreviewViewer.getInstance().onTouch(event, mentionListView, 0, mentionsOnItemClickListener, null)); 
mentionListView.setTag(2); @@ -5024,6 +5108,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not public boolean supportsPredictiveItemAnimations() { return false; } + + @Override + public void setReverseLayout(boolean reverseLayout) { + super.setReverseLayout(reverseLayout); + invalidateChatListViewTopPadding(); + } }; mentionLayoutManager.setOrientation(LinearLayoutManager.VERTICAL); mentionGridLayoutManager = new ExtendedGridLayoutManager(context, 100) { @@ -5284,8 +5374,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not int start = mentionsAdapter.getResultStartPosition(); int len = mentionsAdapter.getResultLength(); if (object instanceof TLRPC.Chat) { + TLRPC.Chat chat = (TLRPC.Chat) object; if (searchingForUser && searchContainer.getVisibility() == View.VISIBLE) { - searchUserMessages(null, (TLRPC.Chat) object); + searchUserMessages(null, chat); + } else { + if (chat.username != null) { + chatActivityEnterView.replaceWithText(start, len, "@" + chat.username + " ", false); + } } } else if (object instanceof TLRPC.User) { TLRPC.User user = (TLRPC.User) object; @@ -5471,11 +5566,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not mentiondownButton.addView(mentiondownButtonCounter, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 23, Gravity.TOP | Gravity.CENTER_HORIZONTAL)); mentiondownButton.setContentDescription(LocaleController.getString("AccDescrMentionDown", R.string.AccDescrMentionDown)); - if (!AndroidUtilities.isTablet() || AndroidUtilities.isSmallTablet()) { - contentView.addView(fragmentLocationContextView = new FragmentContextView(context, this, true), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); - contentView.addView(fragmentContextView = new FragmentContextView(context, this, false), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); - 
fragmentContextView.setAdditionalContextView(fragmentLocationContextView); - fragmentLocationContextView.setAdditionalContextView(fragmentContextView); + contentView.addView(fragmentLocationContextView = new FragmentContextView(context, this, true), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); + contentView.addView(fragmentContextView = new FragmentContextView(context, this, false), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); + fragmentContextView.setAdditionalContextView(fragmentLocationContextView); + fragmentLocationContextView.setAdditionalContextView(fragmentContextView); + + if (chatMode != 0) { + fragmentContextView.setSupportsCalls(false); } messagesSearchListView = new RecyclerListView(context); @@ -5572,6 +5669,14 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return super.dispatchTouchEvent(ev); } + @Override + protected boolean pannelAnimationEnabled() { + if (!openAnimationEnded) { + return false; + } + return true; + } + @Override public void checkAnimation() { if (actionBar.isActionModeShowed()) { @@ -5852,7 +5957,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not mentionsAdapter.setNeedBotContext(currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); if (editingMessageObject != null) { AndroidUtilities.runOnUIThread(() -> hideFieldPanel(true), 30); - } boolean waitingForKeyboard = false; if (chatActivityEnterView.isPopupShowing()) { @@ -5965,7 +6069,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not public void scrollToSendingMessage() { int id = getSendMessagesHelper().getSendingMessageId(dialog_id); if (id != 0) { - scrollToMessageId(id, 0, true, 0, true); + scrollToMessageId(id, 0, true, 0, true, 0); } } @@ -6042,7 +6146,7 @@ public class ChatActivity extends BaseFragment implements 
NotificationCenter.Not } contentView.addView(chatActivityEnterView, contentView.getChildCount() - 1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM)); - FrameLayout replyLayout = new FrameLayout(context) { + chatActivityEnterTopView = new ChatActivityEnterTopView(context) { @Override public void setTranslationY(float translationY) { super.setTranslationY(translationY); @@ -6066,6 +6170,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not mentiondownButton.setTranslationY(pagedownButton.getVisibility() != VISIBLE ? translationY : translationY - AndroidUtilities.dp(72)); } } + invalidateChatListViewTopPadding(); invalidateMessagesVisiblePart(); if (fragmentView != null) { fragmentView.invalidate(); @@ -6103,7 +6208,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not replyLineView = new View(context); replyLineView.setBackgroundColor(Theme.getColor(Theme.key_chat_replyPanelLine)); - chatActivityEnterView.addTopView(replyLayout, replyLineView, 48); + chatActivityEnterView.addTopView(chatActivityEnterTopView, replyLineView, 48); + + final FrameLayout replyLayout = new FrameLayout(context); + chatActivityEnterTopView.addReplyView(replyLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, 0, 52, 0)); replyLayout.setOnClickListener(v -> { if (forwardingMessages != null && !forwardingMessages.isEmpty()) { @@ -6121,10 +6229,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not args.putInt("hasPoll", hasPoll); args.putInt("messagesCount", forwardingMessages.size()); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate(this); + fragment.setDelegate(ChatActivity.this); presentFragment(fragment); } else if (replyingMessageObject != null) { - scrollToMessageId(replyingMessageObject.getId(), 0, true, 0, true); + 
scrollToMessageId(replyingMessageObject.getId(), 0, true, 0, true, 0); } else if (editingMessageObject != null) { if (editingMessageObject.canEditMedia() && editingMessageObjectReqId == 0) { if (chatAttachAlert == null) { @@ -6133,7 +6241,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatAttachAlert.setEditingMessageObject(editingMessageObject); openAttachMenu(); } else { - scrollToMessageId(editingMessageObject.getId(), 0, true, 0, true); + scrollToMessageId(editingMessageObject.getId(), 0, true, 0, true, 0); } } }); @@ -6150,7 +6258,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (Build.VERSION.SDK_INT >= 21) { replyCloseImageView.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_inappPlayerClose) & 0x19ffffff, 1, AndroidUtilities.dp(18))); } - replyLayout.addView(replyCloseImageView, LayoutHelper.createFrame(52, 46, Gravity.RIGHT | Gravity.TOP, 0, 0.5f, 0, 0)); + chatActivityEnterTopView.addView(replyCloseImageView, LayoutHelper.createFrame(52, 46, Gravity.RIGHT | Gravity.TOP, 0, 0.5f, 0, 0)); replyCloseImageView.setOnClickListener(v -> { if (forwardingMessages != null) { forwardingMessages.clear(); @@ -6162,12 +6270,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not replyNameTextView.setTextSize(14); replyNameTextView.setTextColor(Theme.getColor(Theme.key_chat_replyPanelName)); replyNameTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - replyLayout.addView(replyNameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 6, 52, 0)); + replyLayout.addView(replyNameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 6, 0, 0)); replyObjectTextView = new SimpleTextView(context); replyObjectTextView.setTextSize(14); replyObjectTextView.setTextColor(Theme.getColor(Theme.key_chat_replyPanelMessage)); - 
replyLayout.addView(replyObjectTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 24, 52, 0)); + replyLayout.addView(replyObjectTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 24, 0, 0)); replyImageView = new BackupImageView(context); replyImageView.setRoundRadius(AndroidUtilities.dp(2)); @@ -6177,10 +6285,73 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not stickersPanel.setVisibility(View.GONE); contentView.addView(stickersPanel, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 81.5f, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 38)); + final ChatActivityEnterTopView.EditView editView = new ChatActivityEnterTopView.EditView(context); + editView.setMotionEventSplittingEnabled(false); + editView.setOrientation(LinearLayout.HORIZONTAL); + editView.setOnClickListener(v -> { + if (editingMessageObject != null) { + scrollToMessageId(editingMessageObject.getId(), 0, true, 0, true, 0); + } + }); + chatActivityEnterTopView.addEditView(editView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, 0, 48, 0)); + + for (int i = 0; i < 2; i++) { + final boolean firstButton = i == 0; + + final ChatActivityEnterTopView.EditViewButton button = new ChatActivityEnterTopView.EditViewButton(context) { + @Override + public void setEditButton(boolean editButton) { + super.setEditButton(editButton); + if (firstButton) { + getTextView().setMaxWidth(editButton ? AndroidUtilities.dp(116) : Integer.MAX_VALUE); + } + } + + @Override + public void updateColors() { + final int leftInset = firstButton ? 
AndroidUtilities.dp(14) : 0; + setBackground(Theme.createCircleSelectorDrawable(Theme.getColor(Theme.key_chat_replyPanelName) & 0x19ffffff, leftInset, 0)); + getImageView().setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_replyPanelName), PorterDuff.Mode.MULTIPLY)); + getTextView().setTextColor(Theme.getColor(Theme.key_chat_replyPanelName)); + } + }; + button.setOrientation(LinearLayout.HORIZONTAL); + ViewHelper.setPadding(button, 10, 0, 10, 0); + editView.addButton(button, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT)); + + final ImageView imageView = new ImageView(context); + imageView.setScaleType(ImageView.ScaleType.CENTER); + imageView.setImageResource(firstButton ? R.drawable.msg_photoeditor : R.drawable.msg_replace); + button.addImageView(imageView, LayoutHelper.createLinear(24, LayoutHelper.MATCH_PARENT)); + + button.addView(new Space(context), LayoutHelper.createLinear(10, LayoutHelper.MATCH_PARENT)); + + final TextView textView = new TextView(context); + textView.setMaxLines(1); + textView.setSingleLine(true); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setGravity(Gravity.LEFT | Gravity.CENTER_VERTICAL); + textView.setEllipsize(TextUtils.TruncateAt.END); + button.addTextView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT)); + + button.updateColors(); + button.setOnClickListener(v -> { + if (editingMessageObject == null || !editingMessageObject.canEditMedia() || editingMessageObjectReqId != 0) { + return; + } + if (button.isEditButton()) { + openEditingMessageInPhotoEditor(); + } else { + replyLayout.callOnClick(); + } + }); + } + final ContentPreviewViewer.ContentPreviewViewerDelegate contentPreviewViewerDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { @Override - public void sendSticker(TLRPC.Document sticker, Object parent, boolean notify, 
int scheduleDate) { - chatActivityEnterView.onStickerSelected(sticker, parent, true, notify, scheduleDate); + public void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { + chatActivityEnterView.onStickerSelected(sticker, query, parent, true, notify, scheduleDate); } @Override @@ -6456,7 +6627,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatInviteRunnable = null; } showBottomOverlayProgress(true, true); - getMessagesController().addUserToChat(currentChat.id, getUserConfig().getCurrentUser(), null, 0, null, ChatActivity.this, null); + getMessagesController().addUserToChat(currentChat.id, getUserConfig().getCurrentUser(), 0, null, ChatActivity.this, null); NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.closeSearchByActiveAction); if (hasReportSpam() && reportSpamButton.getTag(R.id.object_tag) != null) { @@ -6560,7 +6731,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not updateSecretStatus(); updateTopPanel(false); updatePinnedMessageView(false); - updateDistanceView(false); + updateInfoTopView(false); chatScrollHelper = new RecyclerAnimationScrollHelper(chatListView, chatLayoutManager); chatScrollHelper.setScrollListener(this::invalidateMessagesVisiblePart); @@ -6647,6 +6818,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else { name = searchingChatMessages.title; } + if (name == null) { + return; + } if (name.length() > 10) { name = name.substring(0, 10); } @@ -6661,45 +6835,72 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not getMediaDataController().searchMessagesInChat("", dialog_id, mergeDialogId, classGuid, 0, threadMessageId, searchingUserMessages, searchingChatMessages); } - Animator distanceViewAnimator; + private Animator infoTopViewAnimator; - private void updateDistanceView(boolean animated) { + private void 
updateInfoTopView(boolean animated) { SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); distanceToPeer = preferences.getInt("dialog_bar_distance" + dialog_id, -1); + CharSequence text = null; + View.OnClickListener onClickListener = null; if (distanceToPeer >= 0 && currentUser != null) { - if (distanseTopView == null) { - distanseTopView = new ChatActionCell(contentView.getContext()); - distanseTopView.setCustomText(LocaleController.formatString("ChatDistanceToPeer", R.string.ChatDistanceToPeer, currentUser.first_name, LocaleController.formatDistance(distanceToPeer, 0))); - distanseTopView.setOnClickListener(v -> presentFragment(new PeopleNearbyActivity())); - contentView.addView(distanseTopView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 4, 0, 0)); + text = LocaleController.formatString("ChatDistanceToPeer", R.string.ChatDistanceToPeer, currentUser.first_name, LocaleController.formatDistance(distanceToPeer, 0)); + onClickListener = v -> presentFragment(new PeopleNearbyActivity()); + } else if (currentChat != null && chatInviterId != 0) { + boolean show = preferences.getInt("dialog_bar_vis3" + dialog_id, 0) == 2; + boolean showReport = preferences.getBoolean("dialog_bar_report" + dialog_id, false); + boolean showBlock = preferences.getBoolean("dialog_bar_block" + dialog_id, false); + if (show && (showReport || showBlock)) { + TLRPC.User user = getMessagesController().getUser(chatInviterId); + if (user != null) { + text = ChatObject.isChannel(currentChat) && !currentChat.megagroup ? 
LocaleController.getString("ActionUserInvitedToChannel", R.string.ActionUserInvitedToChannel) : LocaleController.getString("ActionUserInvitedToGroup", R.string.ActionUserInvitedToGroup); + text = MessageObject.replaceWithLink(text, "un1", user); + onClickListener = (v) -> { + Bundle args = new Bundle(); + args.putInt("user_id", chatInviterId); + presentFragment(new ProfileActivity(args)); + }; + } + } else { + hideInfoView(); + } + } + if (text != null) { + if (infoTopViewAnimator != null) { + infoTopViewAnimator.cancel(); + } + if (infoTopView == null) { + infoTopView = new ChatActionCell(contentView.getContext()); + infoTopView.setCustomText(text); + infoTopView.setOnClickListener(onClickListener); + contentView.addView(infoTopView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 0, 0, 0)); } if (animated) { - if (distanseTopView.getTag() == null) { + if (infoTopView.getTag() == null) { ValueAnimator a = ValueAnimator.ofFloat(0, 1f); - distanseTopView.setTag(1); - distanseTopView.setAlpha(0f); - View distanceTopViewFinal = distanseTopView; + infoTopView.setTag(1); + infoTopView.setAlpha(0f); + View distanceTopViewFinal = infoTopView; a.addUpdateListener(animation -> { float alpha = (float) animation.getAnimatedValue(); - distanceTopViewOffset = (alpha) * AndroidUtilities.dp(30); + topViewOffset = (alpha) * AndroidUtilities.dp(30); invalidateChatListViewTopPadding(); distanceTopViewFinal.setAlpha(alpha); }); a.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - distanceTopViewOffset = AndroidUtilities.dp(30); + topViewOffset = AndroidUtilities.dp(30); invalidateChatListViewTopPadding(); } }); a.setDuration(150); - distanceViewAnimator = a; + infoTopViewAnimator = a; a.start(); } } else { - distanseTopView.setTag(1); - distanceTopViewOffset = AndroidUtilities.dp(30); + infoTopView.setTag(1); + topViewOffset = AndroidUtilities.dp(30); 
invalidateChatListViewTopPadding(); } } @@ -6741,7 +6942,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not fragment.chatActivityDelegate = new ChatActivityDelegate() { @Override public void openReplyMessage(int mid) { - scrollToMessageId(mid, 0, true, 0, true); + scrollToMessageId(mid, 0, true, 0, true, 0); } @Override @@ -6867,44 +7068,44 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } - private void hideDistanceView() { + private void hideInfoView() { if (distanceToPeer >= 0) { distanceToPeer = -1; SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); preferences.edit().putInt("dialog_bar_distance" + dialog_id, -2).commit(); + } - if (distanceViewAnimator != null) { - distanceViewAnimator.cancel(); - } - - if (distanseTopView != null) { - View distanseTopViewFinal = distanseTopView; - distanseTopView = null; - - ValueAnimator a = ValueAnimator.ofFloat(1f, 0); - a.addUpdateListener(animation -> { - float alpha = (float) animation.getAnimatedValue(); - distanceTopViewOffset = (alpha) * AndroidUtilities.dp(30); - invalidateChatListViewTopPadding(); - distanseTopViewFinal.setAlpha(alpha); - }); - a.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - distanceTopViewOffset = 0; - if (animation == distanceViewAnimator) { - ViewGroup parent = (ViewGroup) distanseTopViewFinal.getParent(); - if (parent != null) { - parent.removeView(distanseTopViewFinal); - } - distanceViewAnimator = null; + if (infoTopViewAnimator != null) { + infoTopViewAnimator.cancel(); + } + if (infoTopView != null && infoTopView.getTag() != null) { + infoTopView.setTag(null); + View topViewFinal = infoTopView; + ValueAnimator a = ValueAnimator.ofFloat(1f, 0); + a.addUpdateListener(animation -> { + float alpha = (float) animation.getAnimatedValue(); + topViewOffset = (alpha) * AndroidUtilities.dp(30); + 
invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); + topViewFinal.setAlpha(alpha); + }); + a.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + topViewOffset = 0; + if (animation == infoTopViewAnimator) { + ViewGroup parent = (ViewGroup) topViewFinal.getParent(); + if (parent != null) { + parent.removeView(topViewFinal); } + infoTopView = null; + infoTopViewAnimator = null; } - }); - a.setDuration(150); - distanceViewAnimator = a; - a.start(); - } + } + }); + a.setDuration(150); + infoTopViewAnimator = a; + a.start(); } } @@ -6918,7 +7119,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not pinnedViewH = Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); } int oldPadding = chatListViewPaddingTop; - chatListViewPaddingTop = (int) (AndroidUtilities.dp(4) + contentPaddingTop + distanceTopViewOffset + topPanelViewH + pinnedViewH); + chatListViewPaddingTop = (int) (AndroidUtilities.dp(4) + contentPaddingTop + topPanelViewH + pinnedViewH); + chatListViewPaddingVisibleOffset = 0; chatListViewPaddingTop += contentPanTranslation + bottomPanelTranslationY; if (bottomPanelTranslationY == 0 && !chatActivityEnterView.pannelAniamationInProgress() && contentView.getLayoutParams().height < 0) { chatListViewPaddingTop += contentView.getKeyboardHeight() <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !inBubbleMode ? 
chatActivityEnterView.getEmojiPadding() : contentView.getKeyboardHeight(); @@ -6928,10 +7130,22 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatListViewPaddingTop += chatActivityEnterView.getHeightWithTopView() - AndroidUtilities.dp(51) - chatActivityEnterView.getAnimatedTop(); } else if (!chatActivityEnterView.pannelAniamationInProgress()) { chatListViewPaddingTop += chatActivityEnterView.getHeightWithTopView() - AndroidUtilities.dp(51); - chatListViewPaddingTop -= chatListView.getTranslationY(); + if (chatActivityEnterView.currentTopViewAnimation == null) { + chatListViewPaddingTop -= chatListView.getTranslationY(); + } } } + if (infoTopView != null) { + infoTopView.setTranslationY(chatListViewPaddingTop - AndroidUtilities.dp(30) + topViewOffset); + chatListViewPaddingTop += topViewOffset; + chatListViewPaddingVisibleOffset += topViewOffset; + } + + if (floatingDateView != null) { + floatingDateView.setTranslationY(chatListView.getTranslationY() + chatListViewPaddingTop + floatingDateViewOffset - AndroidUtilities.dp(4)); + } + int p = chatListView.getMeasuredHeight() * 2 / 3; if (chatListView != null && chatLayoutManager != null && chatAdapter != null) { @@ -6968,7 +7182,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not invalidateMessagesVisiblePart(); } - chatListView.setTopGlowOffset(chatListViewPaddingTop - AndroidUtilities.dp(4)); + chatListView.setTopGlowOffset(chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4)); if (oldPadding != chatListViewPaddingTop) { int n = chatListView.getChildCount(); @@ -6998,33 +7212,45 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private void invalidateChatListViewTopPadding() { invalidateChatListViewTopPadding = true; - contentView.invalidate(); - chatListView.invalidate(); + if (contentView != null) { + contentView.invalidate(); + } + if (chatListView != null) { + 
chatListView.invalidate(); + } float topPanelViewH = Math.max(0, AndroidUtilities.dp(48) + topChatPanelViewOffset); - float pinnedViewH = 0; - if (pinnedMessageView != null && pinnedMessageView.getVisibility() == View.VISIBLE) { - pinnedViewH = Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); - } - int viewsOffset = (int) (contentPaddingTop + topPanelViewH + pinnedViewH); if (pinnedMessageView != null) { pinnedMessageView.setTranslationY(contentPanTranslation + pinnedMessageEnterOffset + contentPaddingTop + topPanelViewH); } - if (floatingDateView != null) { - floatingDateView.setTranslationY(contentPanTranslation + floatingDateViewOffset + viewsOffset + distanceTopViewOffset); - } if (fragmentContextView != null) { - fragmentContextView.setTranslationY(contentPanTranslation + contentPaddingTop); + float from = 0; + if (fragmentLocationContextView != null && fragmentLocationContextView.getVisibility() == View.VISIBLE) { + from += AndroidUtilities.dp(36); + } + fragmentContextView.setTranslationY(contentPanTranslation + from + fragmentContextView.getTopPadding()); } if (fragmentLocationContextView != null) { - fragmentLocationContextView.setTranslationY(contentPanTranslation + contentPaddingTop); + float from = 0; + if (fragmentContextView != null && fragmentContextView.getVisibility() == View.VISIBLE) { + from += AndroidUtilities.dp(fragmentContextView.getStyleHeight()) + fragmentContextView.getTopPadding(); + } + fragmentLocationContextView.setTranslationY(contentPanTranslation + from + fragmentLocationContextView.getTopPadding()); } if (topChatPanelView != null) { topChatPanelView.setTranslationY(contentPanTranslation + contentPaddingTop + topChatPanelViewOffset); } - if (distanseTopView != null) { - distanseTopView.setTranslationY(contentPanTranslation + contentPaddingTop + AndroidUtilities.dp(50) + topChatPanelViewOffset); + if (mentionListView != null && mentionLayoutManager != null) { + if (mentionLayoutManager.getReverseLayout()) { + 
mentionListView.setTranslationY(contentPanTranslation + contentPaddingTop); + } else { + mentionListView.setTranslationY(contentPanTranslation); + } + } + + if (alertView != null && alertView.getVisibility() == View.VISIBLE) { + alertView.setTranslationY(contentPanTranslation + contentPaddingTop - AndroidUtilities.dp(50) * (1f - alertViewEnterProgress)); } } @@ -7335,7 +7561,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not for (int a = messages.size() - 1; a >= 0; a--) { MessageObject message = messages.get(a); if (message.messageOwner.date >= date && message.getId() != 0) { - scrollToMessageId(message.getId(), 0, false, message.getDialogId() == mergeDialogId ? 1 : 0, true); + scrollToMessageId(message.getId(), 0, false, message.getDialogId() == mergeDialogId ? 1 : 0, true, 0); break; } } @@ -7360,9 +7586,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (progressDialog != null) { progressDialog.dismiss(); } + updatePinnedListButton(false); progressDialog = new AlertDialog(getParentActivity(), 3); - progressDialog.setOnCancelListener(dialog -> postponedScrollIsCanceled = true); - progressDialog.showDelayed(400); + progressDialog.setOnCancelListener(postponedScrollCancelListener); + progressDialog.showDelayed(1000); postponedScrollToLastMessageQueryIndex = lastLoadIndex; waitingForLoad.add(lastLoadIndex); @@ -7413,10 +7640,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } super.dismissInternal(); - if (openKeyboardOnAttachMenuClose) { - AndroidUtilities.runOnUIThread(() -> chatActivityEnterView.openKeyboard(), 50); - openKeyboardOnAttachMenuClose = false; - } + onEditTextDialogClose(false); } }; chatAttachAlert.setDelegate(new ChatAttachAlert.ChatAttachViewDelegate() { @@ -7493,14 +7717,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public boolean needEnterComment() { - boolean keyboardVisible = 
chatActivityEnterView.isKeyboardVisible(); - if (keyboardVisible) { - chatActivityEnterView.showEmojiView(); - openKeyboardOnAttachMenuClose = true; - } - AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); - fragmentView.requestLayout(); - return keyboardVisible; + return needEnterText(); } @Override @@ -7511,6 +7728,27 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } + public boolean needEnterText() { + boolean keyboardVisible = chatActivityEnterView.isKeyboardVisible(); + if (keyboardVisible) { + chatActivityEnterView.showEmojiView(); + openKeyboardOnAttachMenuClose = true; + } + AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); + fragmentView.requestLayout(); + return keyboardVisible; + } + + public void onEditTextDialogClose(boolean resetAdjust) { + if (openKeyboardOnAttachMenuClose) { + AndroidUtilities.runOnUIThread(() -> chatActivityEnterView.openKeyboard(), 50); + openKeyboardOnAttachMenuClose = false; + } + if (resetAdjust) { + AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + } + } + public void doOnIdle(Runnable runnable) { NotificationCenter.getInstance(currentAccount).doOnIdle(runnable); } @@ -7628,7 +7866,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not fragment.chatActivityDelegate = new ChatActivityDelegate() { @Override public void openReplyMessage(int mid) { - scrollToMessageId(mid, 0, true, 0, true); + scrollToMessageId(mid, 0, true, 0, true, 0); } @Override @@ -7713,9 +7951,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } TLRPC.TL_document document = (TLRPC.TL_document) item; if (chatMode == MODE_SCHEDULED) { - AlertsCreator.createScheduleDatePickerDialog(getParentActivity(), dialog_id, (notify, scheduleDate) -> SendMessagesHelper.getInstance(currentAccount).sendSticker(document, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, 
scheduleDate)); + String query = stickersAdapter.getQuery(); + AlertsCreator.createScheduleDatePickerDialog(getParentActivity(), dialog_id, (notify, scheduleDate) -> SendMessagesHelper.getInstance(currentAccount).sendSticker(document, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate)); } else { - getSendMessagesHelper().sendSticker(document, dialog_id, replyingMessageObject, getThreadMessage(), parent, true, 0); + getSendMessagesHelper().sendSticker(document, stickersAdapter.getQuery(), dialog_id, replyingMessageObject, getThreadMessage(), parent, true, 0); } hideFieldPanel(false); chatActivityEnterView.addStickerToRecent(document); @@ -8923,6 +9162,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not messageObjectToReply = null; openKeyboard = true; } + chatActivityEnterTopView.setEditMode(false); if (messageObjectToEdit != null) { forwardingMessages = null; if (threadMessageId == 0) { @@ -8930,28 +9170,57 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityEnterView.setReplyingMessageObject(null); } editingMessageObject = messageObjectToEdit; - chatActivityEnterView.setEditingMessageObject(messageObjectToEdit, !messageObjectToEdit.isMediaEmpty()); + final boolean mediaEmpty = messageObjectToEdit.isMediaEmpty(); + chatActivityEnterView.setEditingMessageObject(messageObjectToEdit, !mediaEmpty); if (foundWebPage != null) { return; } chatActivityEnterView.setForceShowSendButton(false, false); - replyIconImageView.setImageResource(R.drawable.group_edit); - replyIconImageView.setContentDescription(LocaleController.getString("AccDescrEditing", R.string.AccDescrEditing)); - replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelEdit", R.string.AccDescrCancelEdit)); - if (messageObjectToEdit.isMediaEmpty()) { - replyNameTextView.setText(LocaleController.getString("EditMessage", R.string.EditMessage)); - } else { - 
replyNameTextView.setText(LocaleController.getString("EditCaption", R.string.EditCaption)); - } - if (messageObjectToEdit.canEditMedia()) { - replyObjectTextView.setText(LocaleController.getString("EditMessageMedia", R.string.EditMessageMedia)); - } else if (messageObjectToEdit.messageText != null || messageObjectToEdit.caption != null) { - String mess = messageObjectToEdit.caption != null ? messageObjectToEdit.caption.toString() : messageObjectToEdit.messageText.toString(); - if (mess.length() > 150) { - mess = mess.substring(0, 150); + final boolean canEditMedia = messageObjectToEdit.canEditMedia(); + if (!mediaEmpty && canEditMedia) { + String editButtonText = null; + String replaceButtonText; + if (messageObjectToEdit.isPhoto()) { + editButtonText = LocaleController.getString("EditMessageEditPhoto", R.string.EditMessageEditPhoto); + replaceButtonText = LocaleController.getString("EditMessageReplacePhoto", R.string.EditMessageReplacePhoto); + } else if (messageObjectToEdit.isVideo()) { + editButtonText = LocaleController.getString("EditMessageEditVideo", R.string.EditMessageEditVideo); + replaceButtonText = LocaleController.getString("EditMessageReplaceVideo", R.string.EditMessageReplaceVideo); + } else if (messageObjectToEdit.isGif()) { + replaceButtonText = LocaleController.getString("EditMessageReplaceGif", R.string.EditMessageReplaceGif); + } else if (messageObjectToEdit.isMusic()) { + replaceButtonText = LocaleController.getString("EditMessageReplaceAudio", R.string.EditMessageReplaceAudio); + } else { + replaceButtonText = LocaleController.getString("EditMessageReplaceFile", R.string.EditMessageReplaceFile); + } + final ChatActivityEnterTopView.EditViewButton[] buttons = chatActivityEnterTopView.getEditView().getButtons(); + buttons[0].setEditButton(editButtonText != null); + buttons[0].getTextView().setText(editButtonText != null ? editButtonText : replaceButtonText); + buttons[0].getImageView().setImageResource(editButtonText != null ? 
R.drawable.msg_photoeditor : R.drawable.msg_replace); + buttons[1].setVisibility(editButtonText != null ? View.VISIBLE : View.GONE); + if (editButtonText != null) { + buttons[1].getTextView().setText(replaceButtonText); + } + chatActivityEnterTopView.setEditMode(true); + } else { + replyIconImageView.setImageResource(R.drawable.group_edit); + replyIconImageView.setContentDescription(LocaleController.getString("AccDescrEditing", R.string.AccDescrEditing)); + replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelEdit", R.string.AccDescrCancelEdit)); + if (mediaEmpty) { + replyNameTextView.setText(LocaleController.getString("EditMessage", R.string.EditMessage)); + } else { + replyNameTextView.setText(LocaleController.getString("EditCaption", R.string.EditCaption)); + } + if (canEditMedia) { + replyObjectTextView.setText(LocaleController.getString("EditMessageMedia", R.string.EditMessageMedia)); + } else if (messageObjectToEdit.messageText != null || messageObjectToEdit.caption != null) { + String mess = messageObjectToEdit.caption != null ? 
messageObjectToEdit.caption.toString() : messageObjectToEdit.messageText.toString(); + if (mess.length() > 150) { + mess = mess.substring(0, 150); + } + mess = mess.replace('\n', ' '); + replyObjectTextView.setText(Emoji.replaceEmoji(mess, replyObjectTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false)); } - mess = mess.replace('\n', ' '); - replyObjectTextView.setText(Emoji.replaceEmoji(mess, replyObjectTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false)); } } else if (messageObjectToReply != null) { forwardingMessages = null; @@ -9184,7 +9453,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (messageObjectToReply != null) { thumbMediaMessageObject = messageObjectToReply; } else if (messageObjectToEdit != null) { - thumbMediaMessageObject = messageObjectToEdit; + if (!chatActivityEnterTopView.isEditMode()) { + thumbMediaMessageObject = messageObjectToEdit; + } else { + thumbMediaMessageObject = null; + } } else { thumbMediaMessageObject = null; } @@ -9347,6 +9620,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return; } forceNextPinnedMessageId = 0; + nextScrollToMessageId = 0; forceScrollToFirst = false; chatScrollHelper.setScrollDirection(RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN); if (forwardEndReached[0] && first_unread_id == 0 && startLoadFromMessageId == 0) { @@ -9364,9 +9638,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (progressDialog != null) { progressDialog.dismiss(); } + updatePinnedListButton(false); progressDialog = new AlertDialog(getParentActivity(), 3); - progressDialog.setOnCancelListener(dialog -> postponedScrollIsCanceled = true); - progressDialog.showDelayed(400); + progressDialog.setOnCancelListener(postponedScrollCancelListener); + progressDialog.showDelayed(1000); postponedScrollToLastMessageQueryIndex = lastLoadIndex; postponedScrollMessageId = 0; @@ -9392,7 +9667,8 @@ 
public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (videoPlayerContainer != null && (messageObject.isRoundVideo() || messageObject.isVideo()) && MediaController.getInstance().isPlayingMessage(messageObject)) { ImageReceiver imageReceiver = messageCell.getPhotoImage(); videoPlayerContainer.setTranslationX(imageReceiver.getImageX() + messageCell.getX()); - videoPlayerContainer.setTranslationY(fragmentView.getPaddingTop() + messageCell.getY() + imageReceiver.getImageY() + chatListView.getTranslationY() + (inPreviewMode ? AndroidUtilities.statusBarHeight : 0)); + float translationY = messageCell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); + videoPlayerContainer.setTranslationY(translationY); FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) videoPlayerContainer.getLayoutParams(); if (messageObject.isRoundVideo()) { videoPlayerContainer.setTag(R.id.parent_tag, null); @@ -9435,14 +9711,14 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (checkTextureViewPosition || PipRoundVideoView.getInstance() != null) { MediaController.getInstance().setCurrentVideoVisible(false); } else { - scrollToMessageId(messageObject.getId(), 0, false, 0, true); + scrollToMessageId(messageObject.getId(), 0, false, 0, true, 0); } } } } else { MediaController.getInstance().setCurrentVideoVisible(true); if (messageObject.isRoundVideo() || scrollToVideo) { - scrollToMessageId(messageObject.getId(), 0, false, 0, true); + scrollToMessageId(messageObject.getId(), 0, false, 0, true, 0); } else { chatListView.invalidate(); } @@ -9549,7 +9825,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not MessageObject messageObject = null; int top = (int) view.getY(); int bottom = top + view.getMeasuredHeight(); - if (bottom <= cilpTop || top > chatListView.getMeasuredHeight()) { + if (bottom <= cilpTop - chatListViewPaddingVisibleOffset || top > 
chatListView.getMeasuredHeight()) { continue; } @@ -9574,7 +9850,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not foundTextureViewMessage = false; } else { videoPlayerContainer.setTranslationX(imageReceiver.getImageX() + messageCell.getX()); - videoPlayerContainer.setTranslationY(fragmentView.getPaddingTop() + top + imageReceiver.getImageY() + chatListView.getTranslationY() + (inPreviewMode ? AndroidUtilities.statusBarHeight : 0)); + + float translationY = messageCell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); + videoPlayerContainer.setTranslationY(translationY); fragmentView.invalidate(); videoPlayerContainer.invalidate(); foundTextureViewMessage = true; @@ -9603,6 +9881,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not pollsToCheck.add(messageObject); } } + if (bottom <= cilpTop) { + if (view instanceof ChatActionCell && messageObject.isDateObject) { + view.setAlpha(0); + } + continue; + } int position = view.getBottom(); if (position < minPositionHolder) { minPositionHolder = position; @@ -9632,7 +9916,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } if (!pinnedMessageIds.isEmpty()) { - currentPinnedMessageId = findClosest(pinnedMessageIds, forceNextPinnedMessageId != 0 && (maxVisibleId > forceNextPinnedMessageId || forceScrollToFirst || chatListView.isFastScrollAnimationRunning() || postponedScrollToLastMessageQueryIndex != 0) ? forceNextPinnedMessageId : maxVisibleId, currentPinnedMessageIndex); + currentPinnedMessageId = findClosest(pinnedMessageIds, forceNextPinnedMessageId != 0 ? 
forceNextPinnedMessageId : maxVisibleId, currentPinnedMessageIndex); if (!loadingPinnedMessagesList && !pinnedEndReached && !pinnedMessageIds.isEmpty() && currentPinnedMessageIndex[0] > pinnedMessageIds.size() - 2) { getMediaDataController().loadPinnedMessages(dialog_id, pinnedMessageIds.get(pinnedMessageIds.size() - 1), 0); loadingPinnedMessagesList = true; @@ -9768,13 +10052,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } if (floatingDateView != null) { - float topPanelViewH = Math.max(0, AndroidUtilities.dp(48) + topChatPanelViewOffset); - float pinnedViewH = 0; - if (pinnedMessageView != null && pinnedMessageView.getVisibility() == View.VISIBLE) { - pinnedViewH = Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); - } - int viewsOffset = (int) (contentPaddingTop + topPanelViewH + pinnedViewH); - floatingDateView.setTranslationY(contentPanTranslation + floatingDateViewOffset + viewsOffset + distanceTopViewOffset); + floatingDateView.setTranslationY(chatListView.getTranslationY() + chatListViewPaddingTop + floatingDateViewOffset - AndroidUtilities.dp(4)); } invalidateChatListViewTopPadding(); if (!firstLoading && !paused && !inPreviewMode && fragmentOpened && chatMode == 0 && !getMessagesController().ignoreSetOnline) { @@ -9945,21 +10223,32 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private boolean nextScrollSelect; private int nextScrollLoadIndex; private boolean nextScrollForce; + private int nextScrollForcePinnedMessageId; - public void scrollToMessageId(int id, int fromMessageId, boolean select, int loadIndex, boolean forceScroll) { - if (id == 0 || chatListView.isFastScrollAnimationRunning() || (chatListItemAniamtor != null && chatListItemAniamtor.isRunning()) || getParentActivity() == null) { - if (id != 0 && getParentActivity() != null) { + private boolean pinnedPorgressIsShowing; + Runnable updatePinnedProgressRunnable; + + public void scrollToMessageId(int id, 
int fromMessageId, boolean select, int loadIndex, boolean forceScroll, int forcePinnedMessageId) { + if (id == 0 || NotificationCenter.getInstance(currentAccount).isAnimationInProgress() || getParentActivity() == null) { + if (NotificationCenter.getInstance(currentAccount).isAnimationInProgress()) { nextScrollToMessageId = id; nextScrollFromMessageId = fromMessageId; nextScrollSelect = select; nextScrollLoadIndex = loadIndex; nextScrollForce = forceScroll; + nextScrollForcePinnedMessageId = forcePinnedMessageId; + NotificationCenter.getInstance(currentAccount).doOnIdle(() -> { + if (nextScrollToMessageId != 0) { + scrollToMessageId(nextScrollToMessageId, nextScrollFromMessageId, nextScrollSelect, nextScrollLoadIndex, nextScrollForce, nextScrollForcePinnedMessageId); + nextScrollToMessageId = 0; + } + }); } return; } - forceNextPinnedMessageId = 0; - forceScrollToFirst = false; + forceNextPinnedMessageId = Math.abs(forcePinnedMessageId); + forceScrollToFirst = forcePinnedMessageId > 0; wasManualScroll = true; MessageObject object = messagesDict[loadIndex].get(id); boolean query = false; @@ -10048,6 +10337,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not scrollY = maxScrollOffset; } if (scrollY != 0) { + scrollByTouch = false; chatListView.smoothScrollBy(0, scrollY); chatListView.setOverScrollMode(RecyclerListView.OVER_SCROLL_NEVER); } @@ -10059,6 +10349,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatScrollHelperCallback.scrollTo = object; chatScrollHelperCallback.lastBottom = false; chatScrollHelperCallback.lastItemOffset = yOffset; + chatScrollHelperCallback.lastPadding = chatListViewPaddingTop; chatScrollHelper.setScrollDirection(scrollDirection); chatScrollHelper.scrollToPosition(position, yOffset, false, true); canShowPagedownButton = true; @@ -10079,8 +10370,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (progressDialog != null) { 
progressDialog.dismiss(); } + + showPinnedProgress(forceNextPinnedMessageId != 0); + progressDialog = new AlertDialog(getParentActivity(), 3); - progressDialog.setOnCancelListener(dialog -> postponedScrollIsCanceled = true); + progressDialog.setOnShowListener(dialogInterface -> showPinnedProgress(false)); + progressDialog.setOnCancelListener(postponedScrollCancelListener); progressDialog.showDelayed(400); waitingForLoad.clear(); @@ -10108,6 +10403,26 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not needSelectFromMessageId = select; } + + private void showPinnedProgress(boolean show) { + if (show) { + if (updatePinnedProgressRunnable == null) { + updatePinnedProgressRunnable = () -> { + pinnedPorgressIsShowing = true; + updatePinnedListButton(true); + }; + AndroidUtilities.runOnUIThread(updatePinnedProgressRunnable, 100); + } + } else { + if (updatePinnedProgressRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(updatePinnedProgressRunnable); + } + updatePinnedProgressRunnable = null; + pinnedPorgressIsShowing = false; + updatePinnedListButton(true); + } + } + private void updatePagedownButtonVisibility(boolean animated) { if (pagedownButton == null) { return; @@ -10337,7 +10652,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not processSelectedAttach(attach_photo); } else if (requestCode == 20 && grantResults != null && grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { processSelectedAttach(attach_video); - } else if ((requestCode == 101 || requestCode == 102) && currentUser != null) { + } else if ((requestCode == 101 || requestCode == 102) && currentUser != null || requestCode == 103 && currentChat != null) { boolean allGranted = true; for (int a = 0; a < grantResults.length; a++) { if (grantResults[a] != PackageManager.PERMISSION_GRANTED) { @@ -10346,7 +10661,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } if 
(grantResults.length > 0 && allGranted) { - VoIPHelper.startCall(currentUser, requestCode == 102, userInfo != null && userInfo.video_calls_available, getParentActivity(), getMessagesController().getUserFull(currentUser.id)); + if (requestCode == 103) { + VoIPHelper.startCall(currentChat, createGroupCall, getParentActivity()); + } else { + VoIPHelper.startCall(currentUser, requestCode == 102, userInfo != null && userInfo.video_calls_available, getParentActivity(), getMessagesController().getUserFull(currentUser.id)); + } } else { VoIPHelper.permissionDenied(getParentActivity(), null, requestCode); } @@ -11004,6 +11323,143 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } + private void openEditingMessageInPhotoEditor() { + if (editingMessageObject == null || !editingMessageObject.canEditMedia() || editingMessageObjectReqId != 0) { + return; + } + if (!editingMessageObject.isPhoto() && !editingMessageObject.isVideo()) { + return; + } + final MessageObject object = editingMessageObject; + File file = null; + if (!TextUtils.isEmpty(object.messageOwner.attachPath)) { + file = new File(object.messageOwner.attachPath); + if (!file.exists()) { + file = null; + } + } + if (file == null) { + file = FileLoader.getPathToMessage(object.messageOwner); + } + if (!file.exists()) { + return; + } + PhotoViewer.getInstance().setParentActivity(getParentActivity()); + final ArrayList photos = new ArrayList<>(); + final MediaController.PhotoEntry entry = new MediaController.PhotoEntry(0, 0, 0, file.getAbsolutePath(), 0, object.isVideo(), 0, 0, 0); + entry.caption = chatActivityEnterView.getFieldText(); + photos.add(entry); + PhotoViewer.getInstance().openPhotoForSelect(photos, 0, 2, false, new PhotoViewer.EmptyPhotoViewerProvider() { + @Override + public PhotoViewer.PlaceProviderObject getPlaceForPhoto(MessageObject messageObject, TLRPC.FileLocation fileLocation, int index, boolean needPreview) { + return ChatActivity.this.getPlaceForPhoto(object, 
null, needPreview, true); + } + + @Override + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + if (editingMessageObject != object) { + return; + } + if (entry.isCropped || entry.isPainted || entry.isFiltered || videoEditedInfo != null) { + sendMedia(entry, videoEditedInfo, notify, scheduleDate); + } else { + chatActivityEnterView.doneEditingMessage(); + } + } + + @Override + public boolean canCaptureMorePhotos() { + return false; + } + + @Override + public boolean allowSendingSubmenu() { + return false; + } + + @Override + public MessageObject getEditingMessageObject() { + return editingMessageObject == object ? object : null; + } + + @Override + public void onCaptionChanged(CharSequence caption) { + if (editingMessageObject == object) { + chatActivityEnterView.setFieldText(caption, true); + } + } + + @Override + public boolean closeKeyboard() { + if (chatActivityEnterView != null && isKeyboardVisible()) { + chatActivityEnterView.closeKeyboard(); + return true; + } + return false; + } + }, this); + } + + private PhotoViewer.PlaceProviderObject getPlaceForPhoto(MessageObject messageObject, TLRPC.FileLocation fileLocation, boolean needPreview, boolean onlyIfVisible) { + int count = chatListView.getChildCount(); + + for (int a = 0; a < count; a++) { + ImageReceiver imageReceiver = null; + View view = chatListView.getChildAt(a); + if (view instanceof ChatMessageCell) { + if (messageObject != null) { + ChatMessageCell cell = (ChatMessageCell) view; + MessageObject message = cell.getMessageObject(); + if (message != null && message.getId() == messageObject.getId()) { + imageReceiver = cell.getPhotoImage(); + } + } + } else if (view instanceof ChatActionCell) { + ChatActionCell cell = (ChatActionCell) view; + MessageObject message = cell.getMessageObject(); + if (message != null) { + if (messageObject != null) { + if (message.getId() == messageObject.getId()) { + imageReceiver = cell.getPhotoImage(); + } + } 
else if (fileLocation != null && message.photoThumbs != null) { + for (int b = 0; b < message.photoThumbs.size(); b++) { + TLRPC.PhotoSize photoSize = message.photoThumbs.get(b); + if (photoSize.location != null && photoSize.location.volume_id == fileLocation.volume_id && photoSize.location.local_id == fileLocation.local_id) { + imageReceiver = cell.getPhotoImage(); + break; + } + } + } + } + } + + if (imageReceiver != null) { + if (onlyIfVisible && view.getY() + imageReceiver.getImageY2() < chatListViewPaddingTop - AndroidUtilities.dp(4)) { + return null; + } + int[] coords = new int[2]; + view.getLocationInWindow(coords); + PhotoViewer.PlaceProviderObject object = new PhotoViewer.PlaceProviderObject(); + object.viewX = coords[0]; + object.viewY = coords[1] - (Build.VERSION.SDK_INT >= 21 ? 0 : AndroidUtilities.statusBarHeight); + object.parentView = chatListView; + object.animatingImageView = !SharedConfig.smoothKeyboard && pagedownButton != null && pagedownButton.getTag() != null && view instanceof ChatMessageCell ? 
animatingImageView : null; + object.imageReceiver = imageReceiver; + if (needPreview) { + object.thumb = imageReceiver.getBitmapSafe(); + } + object.radius = imageReceiver.getRoundRadius(); + if (view instanceof ChatActionCell && currentChat != null) { + object.dialogId = -currentChat.id; + } + object.clipTopAddition = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); + return object; + } + } + return null; + } + private void showAttachmentError() { if (getParentActivity() == null) { return; @@ -11162,7 +11618,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not doNotRemoveLoadIndex = false; } if (!doNotRemoveLoadIndex && !fragmentBeginToShow && !paused) { - int[] alowedNotifications = new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, + int[] alowedNotifications = new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.groupCallUpdated, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, NotificationCenter.closeChats, NotificationCenter.botKeyboardDidLoad, NotificationCenter.userInfoDidLoad, NotificationCenter.pinnedInfoDidLoad, NotificationCenter.needDeleteDialog/*, NotificationCenter.botInfoDidLoad*/}; if (transitionAnimationIndex == 0) { transitionAnimationIndex = getNotificationCenter().setAnimationInProgress(transitionAnimationIndex, alowedNotifications); @@ -11276,7 +11732,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not postponedScrollMessageId = startLoadFromMessageId; } - progressDialog.dismiss(); + if (progressDialog != null) { + progressDialog.dismiss(); + } + showPinnedProgress(false); if (postponedScrollIsCanceled) { return; } @@ -11960,7 +12419,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not showProgressView(false); } if (newRowsCount == 0 && mergeDialogId != 0 && loadIndex == 0) { - 
getNotificationCenter().updateAllowedNotifications(transitionAnimationIndex, new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, + getNotificationCenter().updateAllowedNotifications(transitionAnimationIndex, new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.groupCallUpdated, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, NotificationCenter.closeChats, NotificationCenter.messagesDidLoad, NotificationCenter.botKeyboardDidLoad, NotificationCenter.userInfoDidLoad, NotificationCenter.pinnedInfoDidLoad, NotificationCenter.needDeleteDialog/*, NotificationCenter.botInfoDidLoad*/}); } if (showDateAfter) { @@ -11973,10 +12432,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (progressDialog != null) { progressDialog.dismiss(); } + updatePinnedListButton(false); if (postponedScrollMessageId == 0) { chatScrollHelperCallback.scrollTo = null; chatScrollHelperCallback.lastBottom = true; chatScrollHelperCallback.lastItemOffset = 0; + chatScrollHelperCallback.lastPadding = chatListViewPaddingTop; chatScrollHelper.scrollToPosition(0, 0, true, true); } else { MessageObject object = messagesDict[loadIndex].get(postponedScrollMessageId); @@ -12015,6 +12476,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatScrollHelperCallback.scrollTo = object; chatScrollHelperCallback.lastBottom = false; chatScrollHelperCallback.lastItemOffset = yOffset; + chatScrollHelperCallback.lastPadding = chatListViewPaddingTop; chatScrollHelper.scrollToPosition(chatAdapter.messagesStartRow + k, yOffset, false, true); } } @@ -12517,6 +12979,23 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not obj.messageOwner.send_state = MessageObject.MESSAGE_SEND_STATE_SEND_ERROR; updateVisibleRows(); } + } else if (id == NotificationCenter.groupCallUpdated) { + Integer chatId = (Integer) 
args[0]; + if (dialog_id == -chatId) { + groupCall = getMessagesController().getGroupCall(currentChat.id, false); + if (fragmentContextView != null) { + fragmentContextView.checkCall(openAnimationStartTime == 0 || SystemClock.elapsedRealtime() < openAnimationStartTime + 150); + } + } + } else if (id == NotificationCenter.didLoadChatInviter) { + int chatId = (Integer) args[0]; + if (dialog_id == -chatId && chatInviterId == 0) { + chatInviterId = (Integer) args[1]; + if (chatInfo != null) { + chatInfo.inviterId = chatInviterId; + } + updateInfoTopView(openAnimationStartTime != 0 && SystemClock.elapsedRealtime() >= openAnimationStartTime + 150); + } } else if (id == NotificationCenter.chatInfoDidLoad) { TLRPC.ChatFull chatFull = (TLRPC.ChatFull) args[0]; if (currentChat != null && chatFull.id == currentChat.id) { @@ -12538,6 +13017,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } int prevLinkedChatId = chatInfo != null ? chatInfo.linked_chat_id : 0; chatInfo = chatFull; + groupCall = getMessagesController().getGroupCall(currentChat.id, true); + if (ChatObject.isChannel(currentChat) && currentChat.megagroup && fragmentContextView != null) { + fragmentContextView.checkCall(openAnimationStartTime == 0 || SystemClock.elapsedRealtime() < openAnimationStartTime + 150); + } if (prevLinkedChatId != chatInfo.linked_chat_id) { if (prevLinkedChatId != 0) { TLRPC.Chat chat = getMessagesController().getChat(prevLinkedChatId); @@ -12550,6 +13033,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } } + if (chatInviterId == 0) { + fillInviterId(true); + updateInfoTopView(openAnimationStartTime != 0 && SystemClock.elapsedRealtime() >= openAnimationStartTime + 150); + } if (chatActivityEnterView != null) { chatActivityEnterView.setChatInfo(chatInfo); } @@ -13303,7 +13790,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not int messageId = (Integer) args[1]; long did = (Long) 
args[3]; if (messageId != 0) { - scrollToMessageId(messageId, 0, true, did == dialog_id ? 0 : 1, true); + scrollToMessageId(messageId, 0, true, did == dialog_id ? 0 : 1, true, 0); } else { updateVisibleRows(); } @@ -13470,7 +13957,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not long did = (Long) args[0]; if (did == dialog_id || currentUser != null && currentUser.id == did) { updateTopPanel(!paused); - updateDistanceView(true); + updateInfoTopView(true); } } else if (id == NotificationCenter.newDraftReceived) { long did = (Long) args[0]; @@ -13710,7 +14197,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not groupedMessages.messages.get(b).animateComments = true; } } - } else { + } else if (chatAdapter != null) { int row = messages.indexOf(obj); if (row >= 0) { if (updatedRows == null) { @@ -13940,9 +14427,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionPaymentSent) { messageObject.generatePaymentSentMessageText(null); } - if (messageObject.isMegagroup() && messageObject.replyMessageObject != null && messageObject.replyMessageObject.messageOwner != null) { - messageObject.replyMessageObject.messageOwner.flags |= TLRPC.MESSAGE_FLAG_MEGAGROUP; - } } } @@ -14207,7 +14691,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (obj.isOut() && !obj.messageOwner.from_scheduled) { removeUnreadPlane(true); - hideDistanceView(); + hideInfoView(); hasFromMe = true; } @@ -14396,7 +14880,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (chatLayoutManager != null && index > 0 && (chatLayoutManager.findViewByPosition(chatAdapter.messagesStartRow + index) != null || chatLayoutManager.findViewByPosition(chatAdapter.messagesStartRow + index - 1) != null)) { 
chatLayoutManager.scrollToPositionWithOffset(chatAdapter.messagesStartRow + messages.indexOf(messageObject), getScrollOffsetForMessage(messageObject), false); } else { - AndroidUtilities.runOnUIThread(() -> scrollToMessageId(mid, 0, false, 0, true)); + AndroidUtilities.runOnUIThread(() -> scrollToMessageId(mid, 0, false, 0, true, 0)); } } @@ -14848,7 +15332,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not getNotificationCenter().postNotificationName(NotificationCenter.closeChats); final Bundle bundle = new Bundle(); bundle.putInt("chat_id", obj.messageOwner.action.channel_id); - actionBarLayout.presentFragment(new ChatActivity(bundle), true); + actionBarLayout.addFragmentToStack(new ChatActivity(bundle), actionBarLayout.fragmentsStack.size() - 1); + lastFragment.finishFragment(); }); } AndroidUtilities.runOnUIThread(() -> getMessagesController().loadFullChat(channelId, 0, true), 1000); @@ -14927,6 +15412,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public boolean needDelayOpenAnimation() { + if (chatMode != MODE_SCHEDULED && getParentLayout().fragmentsStack.size() > 1) { + BaseFragment previousFragment = getParentLayout().fragmentsStack.get(getParentLayout().fragmentsStack.size() - 2); + if (previousFragment instanceof ChatActivity && ((ChatActivity) previousFragment).isKeyboardVisible()) { + return false; + } + } return firstLoading; } @@ -14943,6 +15434,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not hideUndoViews(); } + public void saveKeyboardPositionBeforeTransition() { + if (chatActivityEnterView != null && contentView != null && chatActivityEnterView.getAdjustPanLayoutHelper() != null && !chatActivityEnterView.getAdjustPanLayoutHelper().animationInProgress()) { + fixedKeyboardHeight = contentView.getKeyboardHeight(); + } + } + @Override public void onTransitionAnimationStart(boolean isOpen, boolean backward) { int[] 
alowedNotifications = null; @@ -14976,9 +15473,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } transitionAnimationIndex = getNotificationCenter().setAnimationInProgress(transitionAnimationIndex, alowedNotifications); - if (chatActivityEnterView != null && contentView != null && chatActivityEnterView.getAdjustPanLayoutHelper() != null && !chatActivityEnterView.getAdjustPanLayoutHelper().animationInProgress()) { - fixedKeyboardHeight = contentView.getKeyboardHeight(); - } } @Override @@ -15358,14 +15852,25 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not alertViewAnimator = null; } + if (alertView.getVisibility() != View.VISIBLE) { + alertViewEnterProgress = 0; + invalidateChatListViewTopPadding(); + } alertView.setVisibility(View.VISIBLE); alertViewAnimator = new AnimatorSet(); - alertViewAnimator.playTogether(ObjectAnimator.ofFloat(alertView, View.TRANSLATION_Y, 0)); + ValueAnimator animator = ValueAnimator.ofFloat(alertViewEnterProgress, 1f); + animator.addUpdateListener(valueAnimator -> { + alertViewEnterProgress = (float) valueAnimator.getAnimatedValue(); + invalidateChatListViewTopPadding(); + }); + alertViewAnimator.playTogether(animator); alertViewAnimator.setDuration(200); alertViewAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { if (alertViewAnimator != null && alertViewAnimator.equals(animation)) { + alertViewEnterProgress = 1f; + invalidateChatListViewTopPadding(); alertViewAnimator = null; } } @@ -15397,13 +15902,20 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not alertViewAnimator = null; } alertViewAnimator = new AnimatorSet(); - alertViewAnimator.playTogether(ObjectAnimator.ofFloat(alertView, View.TRANSLATION_Y, -AndroidUtilities.dp(50))); + ValueAnimator animator = ValueAnimator.ofFloat(alertViewEnterProgress, 0f); + animator.addUpdateListener(valueAnimator -> { + 
alertViewEnterProgress = (float) valueAnimator.getAnimatedValue(); + invalidateChatListViewTopPadding(); + }); + alertViewAnimator.playTogether(animator); alertViewAnimator.setDuration(200); alertViewAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { if (alertViewAnimator != null && alertViewAnimator.equals(animation)) { alertView.setVisibility(View.GONE); + alertViewEnterProgress = 0; + invalidateChatListViewTopPadding(); alertViewAnimator = null; } } @@ -15441,6 +15953,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not animator.addUpdateListener(animation -> { pinnedMessageEnterOffset = (float) animation.getAnimatedValue(); invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); chatListView.invalidate(); }); pinnedMessageViewAnimator.playTogether(animator); @@ -15482,49 +15995,71 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } boolean show = pinnedMessageIds.size() > 1; boolean visible = pinnedListButton.getTag() != null; - if (show != visible) { + boolean progressIsVisible = pinnedProgress.getTag() != null; + + if (show != visible || progressIsVisible != pinnedPorgressIsShowing) { if (pinnedListAnimator != null) { pinnedListAnimator.cancel(); pinnedListAnimator = null; } + boolean showClosed = !show && !pinnedPorgressIsShowing; + boolean showPinned = show && !pinnedPorgressIsShowing; + if (animated) { if (show) { pinnedListButton.setVisibility(View.VISIBLE); } else { closePinned.setVisibility(View.VISIBLE); } + if (pinnedPorgressIsShowing) { + pinnedProgress.setVisibility(View.VISIBLE); + pinnedProgress.setAlpha(0); + pinnedProgress.setScaleX(0.4f); + pinnedProgress.setScaleY(0.4f); + } pinnedListAnimator = new AnimatorSet(); + pinnedListAnimator.playTogether( - ObjectAnimator.ofFloat(pinnedListButton, View.ALPHA, show ? 1.0f : 0.0f), - ObjectAnimator.ofFloat(pinnedListButton, View.SCALE_X, show ? 
1.0f : 0.4f), - ObjectAnimator.ofFloat(pinnedListButton, View.SCALE_Y, show ? 1.0f : 0.4f), - ObjectAnimator.ofFloat(closePinned, View.ALPHA, show ? 0.0f : 1.0f), - ObjectAnimator.ofFloat(closePinned, View.SCALE_X, show ? 0.4f : 1.0f), - ObjectAnimator.ofFloat(closePinned, View.SCALE_Y, show ? 0.4f : 1.0f)); + ObjectAnimator.ofFloat(pinnedListButton, View.ALPHA, showPinned ? 1.0f : 0.0f), + ObjectAnimator.ofFloat(pinnedListButton, View.SCALE_X, showPinned ? 1.0f : 0.4f), + ObjectAnimator.ofFloat(pinnedListButton, View.SCALE_Y, showPinned ? 1.0f : 0.4f), + ObjectAnimator.ofFloat(closePinned, View.ALPHA, showClosed ? 1.0f : 0.0f), + ObjectAnimator.ofFloat(closePinned, View.SCALE_X, showClosed ? 1.0f : 0.4f), + ObjectAnimator.ofFloat(closePinned, View.SCALE_Y, showClosed ? 1.0f : 0.4f), + ObjectAnimator.ofFloat(pinnedProgress, View.ALPHA, !pinnedPorgressIsShowing ? 0.0f : 1.0f), + ObjectAnimator.ofFloat(pinnedProgress, View.SCALE_X, !pinnedPorgressIsShowing ? 0.4f : 1.0f), + ObjectAnimator.ofFloat(pinnedProgress, View.SCALE_Y, !pinnedPorgressIsShowing ? 0.4f : 1.0f) + ); + pinnedListAnimator.setDuration(180); pinnedListAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { pinnedListAnimator = null; - if (show) { - closePinned.setVisibility(View.INVISIBLE); - } else { - pinnedListButton.setVisibility(View.INVISIBLE); - } + + closePinned.setVisibility(showClosed ? View.VISIBLE : View.INVISIBLE); + pinnedListButton.setVisibility(showPinned ? View.VISIBLE : View.INVISIBLE); + pinnedProgress.setVisibility(pinnedPorgressIsShowing ? View.VISIBLE : View.INVISIBLE); } }); pinnedListAnimator.start(); } else { - closePinned.setAlpha(show ? 0.0f : 1.0f); - closePinned.setScaleX(show ? 0.4f : 1.0f); - closePinned.setScaleY(show ? 0.4f : 1.0f); - closePinned.setVisibility(show ? View.INVISIBLE : View.VISIBLE); - pinnedListButton.setAlpha(show ? 1.0f : 0.0f); - pinnedListButton.setScaleX(show ? 
1.0f : 0.4f); - pinnedListButton.setScaleY(show ? 1.0f : 0.4f); - pinnedListButton.setVisibility(show ? View.VISIBLE : View.INVISIBLE); + closePinned.setAlpha(showClosed ? 1.0f : 0.0f); + closePinned.setScaleX(showClosed ? 1.0f : 0.4f); + closePinned.setScaleY(showClosed ? 1.0f : 0.4f); + closePinned.setVisibility(showClosed ? View.VISIBLE : View.INVISIBLE); + pinnedListButton.setAlpha(showPinned ? 1.0f : 0.0f); + pinnedListButton.setScaleX(showPinned ? 1.0f : 0.4f); + pinnedListButton.setScaleY(showPinned ? 1.0f : 0.4f); + pinnedListButton.setVisibility(showPinned ? View.VISIBLE : View.INVISIBLE); + + pinnedProgress.setAlpha(pinnedPorgressIsShowing ? 1.0f : 0.0f); + pinnedProgress.setScaleX(pinnedPorgressIsShowing ? 1.0f : 0.4f); + pinnedProgress.setScaleY(pinnedPorgressIsShowing ? 1.0f : 0.4f); + pinnedProgress.setVisibility(pinnedPorgressIsShowing ? View.VISIBLE : View.GONE); } pinnedListButton.setTag(show ? 1 : null); + pinnedProgress.setTag(pinnedPorgressIsShowing ? 1 : null); } if (pinnedLineView != null) { if (isThreadChat()) { @@ -15582,7 +16117,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not public void onAnimationUpdate(ValueAnimator animation) { pinnedMessageEnterOffset = (float) animation.getAnimatedValue(); invalidateChatListViewTopPadding(); - chatListView.invalidate(); + invalidateMessagesVisiblePart(); } }); pinnedMessageView.setVisibility(View.VISIBLE); @@ -15608,6 +16143,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else { pinnedMessageEnterOffset = 0; invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); pinnedMessageView.setVisibility(View.VISIBLE); } } @@ -16025,7 +16561,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not pinnedLineView.set(0, 1, false); } else { int position = Collections.binarySearch(pinnedMessageIds, currentPinnedMessageId, Comparator.reverseOrder()); - pinnedLineView.set(pinnedMessageIds.size() 
- 1 - position, pinnedMessageIds.size(), animateToNext != 0); + pinnedLineView.set(pinnedMessageIds.size() - 1 - position, pinnedMessageIds.size(), animated); } } else { pinnedCounterTextView.setVisibility(loadedPinnedMessagesCount == 2 || currentPinnedMessageIndex[0] == 0 ? View.INVISIBLE : View.VISIBLE); @@ -16170,6 +16706,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not animator.addUpdateListener(animation -> { topChatPanelViewOffset = (float) animation.getAnimatedValue(); invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); }); reportSpamViewAnimator.playTogether(animator); reportSpamViewAnimator.setDuration(200); @@ -16192,6 +16729,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else { topChatPanelViewOffset = 0; invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); } } } else { @@ -16211,6 +16749,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not animator.addUpdateListener(animation -> { topChatPanelViewOffset = (float) animation.getAnimatedValue(); invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); }); reportSpamViewAnimator.playTogether(animator); reportSpamViewAnimator.setDuration(200); @@ -16234,6 +16773,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else { topChatPanelViewOffset = -AndroidUtilities.dp(50); invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); } } } @@ -16256,6 +16796,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not AndroidUtilities.runOnUIThread(checkPaddingsRunnable = () -> { checkPaddingsRunnable = null; invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); }); } } @@ -16505,7 +17046,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not getMediaDataController().saveDraft(dialog_id, threadMessageId, message[0], entities, 
replyMessage != null ? replyMessage.messageOwner : null, !searchWebpage); getMessagesController().cancelTyping(0, dialog_id, threadMessageId); - if (!pausedOnLastMessage) { + if (!pausedOnLastMessage && !firstLoading) { SharedPreferences.Editor editor = MessagesController.getNotificationsSettings(currentAccount).edit(); int messageId = 0; int offset = 0; @@ -16728,7 +17269,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } - if (AndroidUtilities.isTablet()) { + /*if (AndroidUtilities.isTablet()) { if (AndroidUtilities.isSmallTablet() && ApplicationLoader.applicationContext.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) { actionBar.setBackButtonDrawable(new BackDrawable(false)); if (fragmentContextView != null && fragmentContextView.getParent() == null) { @@ -16742,7 +17283,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } return false; - } + }*/ return true; } @@ -16961,11 +17502,37 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (single) { if (message.messageOwner.action instanceof TLRPC.TL_messageActionPinMessage) { if (message.getReplyMsgId() != 0) { - scrollToMessageId(message.getReplyMsgId(), message.messageOwner.id, true, 0, false); + scrollToMessageId(message.getReplyMsgId(), message.messageOwner.id, true, message.getDialogId() == mergeDialogId ? 
1 : 0, false, 0); } else { Toast.makeText(getParentActivity(), LocaleController.getString("MessageNotFound", R.string.MessageNotFound), Toast.LENGTH_SHORT).show(); } return; + } else if (message.messageOwner.action instanceof TLRPC.TL_messageActionGroupCall || message.messageOwner.action instanceof TLRPC.TL_messageActionInviteToGroupCall) { + if (getParentActivity() == null) { + return; + } + VoIPService sharedInstance = VoIPService.getSharedInstance(); + if (sharedInstance != null) { + if (sharedInstance.groupCall != null && message.messageOwner.action.call.id == sharedInstance.groupCall.call.id) { + if (getParentActivity() instanceof LaunchActivity) { + GroupCallActivity.create((LaunchActivity) getParentActivity(), AccountInstance.getInstance(currentAccount)); + } else { + Intent intent = new Intent(getParentActivity(), LaunchActivity.class).setAction("voip_chat"); + intent.putExtra("currentAccount", VoIPService.getSharedInstance().getAccount()); + getParentActivity().startActivity(intent); + } + } else { + createGroupCall = getGroupCall() == null; + VoIPHelper.startCall(currentChat, createGroupCall, getParentActivity()); + } + return; + } else if (fragmentContextView != null && getGroupCall() != null) { + fragmentContextView.callOnClick(); + return; + } else if (ChatObject.canManageCalls(currentChat)) { + VoIPHelper.showGroupCallAlert(ChatActivity.this, currentChat, true); + return; + } } } if (threadMessageObjects != null && threadMessageObjects.contains(message)) { @@ -17064,6 +17631,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not icons.add(R.drawable.msg_delete); } else if (type == 1) { if (currentChat != null) { + /*if (selectedObject.messageOwner.action instanceof TLRPC.TL_messageActionGroupCall) { + items.add(LocaleController.getString("VoipGroupJoinCall", R.string.VoipGroupJoinCall)); + options.add(29); + icons.add(R.drawable.msg_callback); + }*/ if (allowChatActions) { items.add(LocaleController.getString("Reply", 
R.string.Reply)); options.add(8); @@ -17503,7 +18075,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not linearLayout.setOrientation(LinearLayout.VERTICAL); scrimPopupWindowItems = new ActionBarMenuSubItem[items.size()]; for (int a = 0, N = items.size(); a < N; a++) { - ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getParentActivity()); + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getParentActivity(), a == 0, a == N - 1); cell.setTextAndIcon(items.get(a), icons.get(a)); scrimPopupWindowItems[a] = cell; linearLayout.addView(cell); @@ -17741,10 +18313,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityEnterView.setEditingMessageObject(null, false); hideFieldPanel(true); } - } else { - if (chatActivityEnterView != null) { - chatActivityEnterView.showEditDoneProgress(false, true); - } } })); } else { @@ -17946,7 +18514,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not BulletinFactory.of(this).createDownloadBulletin(fileType, filesAmount).show(); } else { saveMessageToGallery(selectedObject); - BulletinFactory.createSaveToGalleryBulletin(this, selectedObject.isVideo()).show(); + if (getParentActivity() != null) { + BulletinFactory.createSaveToGalleryBulletin(this, selectedObject.isVideo()).show(); + } } break; } @@ -18289,10 +18859,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); ClipData clip = ClipData.newPlainText("label", exportedMessageLink.link); clipboard.setPrimaryClip(clip); - if (!isThreadChat() && exportedMessageLink.link.contains("/c/")) { - Toast.makeText(ApplicationLoader.applicationContext, LocaleController.getString("LinkCopiedPrivate", R.string.LinkCopiedPrivate), Toast.LENGTH_SHORT).show(); - } else { - 
Toast.makeText(ApplicationLoader.applicationContext, LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + if (BulletinFactory.canShowBulletin(ChatActivity.this)) { + BulletinFactory.of(ChatActivity.this).createCopyLinkBulletin(!isThreadChat() && exportedMessageLink.link.contains("/c/")).show(); } } catch (Exception e) { FileLog.e(e); @@ -18441,9 +19009,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not MessageObject.GroupedMessages group = selectedObjectGroup; AlertsCreator.createScheduleDatePickerDialog(getParentActivity(), dialog_id, message.messageOwner.date, (notify, scheduleDate) -> { if (group != null && !group.messages.isEmpty()) { - SendMessagesHelper.getInstance(currentAccount).editMessage(group.messages.get(0), null, false, ChatActivity.this, null, scheduleDate, null); + SendMessagesHelper.getInstance(currentAccount).editMessage(group.messages.get(0), null, false, ChatActivity.this, null, scheduleDate); } else { - SendMessagesHelper.getInstance(currentAccount).editMessage(message, null, false, ChatActivity.this, null, scheduleDate, null); + SendMessagesHelper.getInstance(currentAccount).editMessage(message, null, false, ChatActivity.this, null, scheduleDate); } }, null); break; @@ -18867,6 +19435,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return chatInfo; } + public ChatObject.Call getGroupCall() { + return chatMode == 0 && groupCall != null && groupCall.call instanceof TLRPC.TL_groupCall ? 
groupCall : null; + } + public TLRPC.UserFull getCurrentUserInfo() { return userInfo; } @@ -18954,8 +19526,19 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return; } - if (history != null && maxReadId != 1 && maxReadId != 0 && maxReadId != discussionMessage.read_inbox_max_id) { - history = null; + if (history != null) { + if (maxReadId != 1 && maxReadId != 0 && maxReadId != discussionMessage.read_inbox_max_id) { + history = null; + } else if (!history.messages.isEmpty() && discussionMessage != null && !discussionMessage.messages.isEmpty()) { + TLRPC.Message message = history.messages.get(0); + int replyId = message != null && message.reply_to != null ? (message.reply_to.reply_to_top_id != 0 ? message.reply_to.reply_to_top_id : message.reply_to.reply_to_msg_id) : 0; + if (replyId != discussionMessage.messages.get(discussionMessage.messages.size() - 1).id) { + history = null; + } + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("processLoadedDiscussionMessage reset history"); + } } boolean chatOpened = false; @@ -18998,7 +19581,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } TLRPC.messages_Messages historyFinal = history; int fnidFinal = fnid; - Utilities.stageQueue.postRunnable(() -> getMessagesController().processLoadedMessages(historyFinal, dialogId, 0, 30, maxReadId, 0, false, chatActivity.getClassGuid(), fnidFinal, 0, 0, 0, 2, true, false, 0, arrayList.get(arrayList.size() - 1).getId(), 1, false, 0)); + Utilities.stageQueue.postRunnable(() -> getMessagesController().processLoadedMessages(historyFinal, historyFinal.messages.size(), dialogId, 0, 30, maxReadId, 0, false, chatActivity.getClassGuid(), fnidFinal, 0, 0, 0, 2, true, false, 0, arrayList.get(arrayList.size() - 1).getId(), 1, false, 0, true)); } } @@ -19022,6 +19605,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not req.peer = MessagesController.getInputPeer(chat); req.msg_id = messageId; + if 
(BuildVars.LOGS_ENABLED) { + FileLog.d("getDiscussionMessage chat = " + chat.id + " msg_id = " + messageId); + } + commentLoadingMessageId = 0; savedDiscussionMessage = null; savedNoDiscussion = false; @@ -19049,6 +19636,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not TLRPC.Chat linkedChat = getMessagesController().getChat(linkedChatId); if (linkedChat != null) { int count = 30; + if (BuildVars.LOGS_ENABLED) { + FileLog.d("load replies for " + linkedChat.id + " msg_id = " + maxReadId); + } TLRPC.TL_messages_getReplies getReplies = new TLRPC.TL_messages_getReplies(); getReplies.peer = MessagesController.getInputPeer(linkedChat); getReplies.msg_id = maxReadId; @@ -19932,7 +20522,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private void openChat(ChatMessageCell cell, TLRPC.Chat chat, int postId) { if (currentChat != null && chat.id == currentChat.id) { - scrollToMessageId(postId, cell.getMessageObject().getId(), true, 0, true); + scrollToMessageId(postId, cell.getMessageObject().getId(), true, 0, true, 0); } else if (currentChat == null || chat.id != currentChat.id || isThreadChat()) { Bundle args = new Bundle(); args.putInt("chat_id", chat.id); @@ -20051,7 +20641,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityDelegate.openReplyMessage(id); finishFragment(); } else { - scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 1 : 0, true); + scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 1 : 0, true, 0); } } @@ -20313,7 +20903,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public void didPressReplyMessage(ChatActionCell cell, int id) { MessageObject messageObject = cell.getMessageObject(); - scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 
1 : 0, true); + scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 1 : 0, true, 0); } @Override @@ -20790,7 +21380,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (BuildVars.LOGS_ENABLED) { FileLog.d("notify item changed " + position); } - if (chatListView.getItemAnimator() != chatListItemAniamtor) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAniamtor) { chatListView.setItemAnimator(chatListItemAniamtor); } updateRowsInternal(); @@ -20806,7 +21398,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (BuildVars.LOGS_ENABLED) { FileLog.d("notify item range changed " + positionStart + ":" + itemCount); } - if (chatListView.getItemAnimator() != chatListItemAniamtor) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAniamtor) { chatListView.setItemAnimator(chatListItemAniamtor); } updateRowsInternal(); @@ -20822,7 +21416,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (BuildVars.LOGS_ENABLED) { FileLog.d("notify item inserted " + position); } - if (chatListView.getItemAnimator() != chatListItemAniamtor) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAniamtor) { chatListView.setItemAnimator(chatListItemAniamtor); } updateRowsInternal(); @@ -20838,7 +21434,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (BuildVars.LOGS_ENABLED) { FileLog.d("notify item moved" + fromPosition + ":" + toPosition); } - if (chatListView.getItemAnimator() != chatListItemAniamtor) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAniamtor) { 
chatListView.setItemAnimator(chatListItemAniamtor); } updateRowsInternal(); @@ -20854,7 +21452,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (BuildVars.LOGS_ENABLED) { FileLog.d("notify item range inserted" + positionStart + ":" + itemCount); } - if (chatListView.getItemAnimator() != chatListItemAniamtor) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAniamtor) { chatListView.setItemAnimator(chatListItemAniamtor); } updateRowsInternal(); @@ -20880,7 +21480,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (BuildVars.LOGS_ENABLED) { FileLog.d("notify item removed " + position); } - if (chatListView.getItemAnimator() != chatListItemAniamtor) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAniamtor) { chatListView.setItemAnimator(chatListItemAniamtor); } updateRowsInternal(); @@ -20896,7 +21498,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (BuildVars.LOGS_ENABLED) { FileLog.d("notify item range removed" + positionStart + ":" + itemCount); } - if (chatListView.getItemAnimator() != chatListItemAniamtor) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAniamtor) { chatListView.setItemAnimator(chatListItemAniamtor); } updateRowsInternal(); @@ -20932,7 +21536,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityDelegate.openReplyMessage(messageId); finishFragment(); } else { - scrollToMessageId(messageId, fromMessageId, true, 0, false); + scrollToMessageId(messageId, fromMessageId, true, 0, false, 0); } } return true; @@ -20956,7 +21560,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not 
chatActivityDelegate.openReplyMessage(messageId); finishFragment(); } else { - scrollToMessageId(messageId, fromMessageId, true, 0, false); + scrollToMessageId(messageId, fromMessageId, true, 0, false, 0); } } return true; @@ -20975,7 +21579,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityDelegate.openReplyMessage(messageId); finishFragment(); } else { - scrollToMessageId(messageId, fromMessageId, true, 0, false); + scrollToMessageId(messageId, fromMessageId, true, 0, false, 0); } return true; } @@ -21002,7 +21606,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityDelegate.openReplyMessage(messageId); finishFragment(); } else { - scrollToMessageId(messageId, fromMessageId, true, 0, false); + scrollToMessageId(messageId, fromMessageId, true, 0, false, 0); } return true; } @@ -21021,6 +21625,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private MessageObject scrollTo; private int lastItemOffset; private boolean lastBottom; + private int lastPadding; int animationIndex; @@ -21036,7 +21641,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatAdapter.updateRowsSafe(); int lastItemPosition = chatAdapter.messagesStartRow + messages.indexOf(scrollTo); if (lastItemPosition >= 0) { - chatLayoutManager.scrollToPositionWithOffset(lastItemPosition, lastItemOffset, lastBottom); + chatLayoutManager.scrollToPositionWithOffset(lastItemPosition, lastItemOffset + lastPadding - chatListViewPaddingTop, lastBottom); } } else { chatAdapter.updateRowsSafe(); @@ -21048,13 +21653,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not updateVisibleRows(); - AndroidUtilities.runOnUIThread(() -> { - if (nextScrollToMessageId != 0) { - scrollToMessageId(nextScrollToMessageId, nextScrollFromMessageId, nextScrollSelect, nextScrollLoadIndex, nextScrollForce); - nextScrollToMessageId = 0; - } - 
getNotificationCenter().onAnimationFinish(animationIndex); - }); + AndroidUtilities.runOnUIThread(() -> getNotificationCenter().onAnimationFinish(animationIndex)); } @Override @@ -21117,6 +21716,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (pinnedLineView != null) { pinnedLineView.updateColors(); } + if (chatActivityEnterTopView != null && chatActivityEnterTopView.getEditView() != null) { + chatActivityEnterTopView.getEditView().updateColors(); + } }; ArrayList themeDescriptions = new ArrayList<>(); @@ -21417,7 +22019,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"notifyButton"}, null, null, null, Theme.key_chat_messagePanelVideoFrame)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView, ThemeDescription.FLAG_USEBACKGROUNDDRAWABLE | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, new Class[]{ChatActivityEnterView.class}, new String[]{"notifyButton"}, null, null, null, Theme.key_listSelector)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"videoTimelineView"}, null, null, null, Theme.key_chat_messagePanelSend)); - themeDescriptions.add(new ThemeDescription(chatActivityEnterView, ThemeDescription.FLAG_IMAGECOLOR, new Class[]{ChatActivityEnterView.class}, new String[]{"doneButtonImage"}, null, null, null, Theme.key_chat_messagePanelBackground)); + //themeDescriptions.add(new ThemeDescription(chatActivityEnterView, ThemeDescription.FLAG_IMAGECOLOR, new Class[]{ChatActivityEnterView.class}, new String[]{"doneButtonImage"}, null, null, null, Theme.key_chat_messagePanelBackground)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"micDrawable"}, null, null, null, 
Theme.key_chat_messagePanelVoicePressed)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"cameraDrawable"}, null, null, null, Theme.key_chat_messagePanelVoicePressed)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"sendDrawable"}, null, null, null, Theme.key_chat_messagePanelVoicePressed)); @@ -21485,6 +22087,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_IMAGECOLOR, new Class[]{FragmentContextView.class}, new String[]{"playButton"}, null, null, null, Theme.key_inappPlayerPlayPause)); themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_TEXTCOLOR | ThemeDescription.FLAG_CHECKTAG, new Class[]{FragmentContextView.class}, new String[]{"titleTextView"}, null, null, null, Theme.key_inappPlayerTitle)); themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_TEXTCOLOR | ThemeDescription.FLAG_FASTSCROLL, new Class[]{FragmentContextView.class}, new String[]{"titleTextView"}, null, null, null, Theme.key_inappPlayerPerformer)); + themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_TEXTCOLOR | ThemeDescription.FLAG_FASTSCROLL, new Class[]{FragmentContextView.class}, new String[]{"subtitleTextView"}, null, null, null, Theme.key_inappPlayerClose)); themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_IMAGECOLOR, new Class[]{FragmentContextView.class}, new String[]{"closeButton"}, null, null, null, Theme.key_inappPlayerClose)); themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_BACKGROUND | ThemeDescription.FLAG_CHECKTAG, new Class[]{FragmentContextView.class}, new String[]{"frameLayout"}, null, null, null, Theme.key_returnToCallBackground)); @@ -21514,6 +22117,7 @@ public class 
ChatActivity extends BaseFragment implements NotificationCenter.Not themeDescriptions.add(new ThemeDescription(replyObjectTextView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_chat_replyPanelMessage)); themeDescriptions.add(new ThemeDescription(replyIconImageView, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_chat_replyPanelIcons)); themeDescriptions.add(new ThemeDescription(replyCloseImageView, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_chat_replyPanelClose)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_replyPanelName)); themeDescriptions.add(new ThemeDescription(searchUpButton, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_chat_searchPanelIcons)); themeDescriptions.add(new ThemeDescription(searchUpButton, ThemeDescription.FLAG_BACKGROUNDFILTER | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, null, null, null, null, Theme.key_actionBarActionModeDefaultSelector)); @@ -21632,6 +22236,26 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_chat_outTextSelectionHighlight)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_chat_inTextSelectionHighlight)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_chat_TextSelectionCursor)); + + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayGreen1)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayGreen2)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayBlue1)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayBlue2)); + themeDescriptions.add(new 
ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelGreen1)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelGreen2)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelBlue1)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelBlue2)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelGray)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientMuted)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientMuted2)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientUnmuted)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientUnmuted2)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_mutedByAdminGradient)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_mutedByAdminGradient2)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_mutedByAdminGradient3)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertMutedByAdmin)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertMutedByAdmin2)); + return themeDescriptions; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivityEnterTopView.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivityEnterTopView.java new file mode 100644 index 000000000..41999a227 --- /dev/null +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivityEnterTopView.java @@ -0,0 +1,116 @@ +package org.telegram.ui; + +import android.content.Context; +import android.view.View; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.TextView; + +public class ChatActivityEnterTopView extends FrameLayout { + + private View replyView; + private EditView editView; + private boolean editMode; + + public ChatActivityEnterTopView(Context context) { + super(context); + } + + public void addReplyView(View replyView, LayoutParams layoutParams) { + if (this.replyView == null) { + addView(this.replyView = replyView, layoutParams); + } + } + + public void addEditView(EditView editView, LayoutParams layoutParams) { + if (this.editView == null) { + this.editView = editView; + editView.setVisibility(GONE); + addView(editView, layoutParams); + } + } + + public void setEditMode(boolean editMode) { + if (editMode != this.editMode) { + this.editMode = editMode; + replyView.setVisibility(editMode ? GONE : VISIBLE); + editView.setVisibility(editMode ? 
VISIBLE : GONE); + } + } + + public boolean isEditMode() { + return editMode; + } + + public EditView getEditView() { + return editView; + } + + public static class EditView extends LinearLayout { + + private EditViewButton[] buttons = new EditViewButton[2]; + + public EditView(Context context) { + super(context); + } + + public void addButton(EditViewButton button, LayoutParams layoutParams) { + final int childCount = getChildCount(); + if (childCount < 2) { + addView(buttons[childCount] = button, layoutParams); + } + } + + public EditViewButton[] getButtons() { + return buttons; + } + + public void updateColors() { + for (EditViewButton button : buttons) { + button.updateColors(); + } + } + } + + public static abstract class EditViewButton extends LinearLayout { + + private ImageView imageView; + private TextView textView; + private boolean editButton; + + public EditViewButton(Context context) { + super(context); + } + + public void addImageView(ImageView imageView, LayoutParams layoutParams) { + if (this.imageView == null) { + addView(this.imageView = imageView, layoutParams); + } + } + + public void addTextView(TextView textView, LayoutParams layoutParams) { + if (this.textView == null) { + addView(this.textView = textView, layoutParams); + } + } + + public ImageView getImageView() { + return imageView; + } + + public TextView getTextView() { + return textView; + } + + public void setEditButton(boolean editButton) { + this.editButton = editButton; + } + + public boolean isEditButton() { + return editButton; + } + + public abstract void updateColors(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatEditTypeActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatEditTypeActivity.java index 7d99d04ee..d17584b19 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatEditTypeActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatEditTypeActivity.java @@ -22,7 +22,6 @@ import android.view.ViewGroup; import 
android.view.inputmethod.EditorInfo; import android.widget.LinearLayout; import android.widget.ScrollView; -import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; @@ -48,6 +47,7 @@ import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextBlockCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.TextSettingsCell; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.EditTextBoldCursor; import org.telegram.ui.Components.LayoutHelper; @@ -351,7 +351,7 @@ public class ChatEditTypeActivity extends BaseFragment implements NotificationCe android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", invite.link); clipboard.setPrimaryClip(clip); - Toast.makeText(getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + BulletinFactory.createCopyLinkBulletin(this).show(); } catch (Exception e) { FileLog.e(e); } @@ -369,7 +369,7 @@ public class ChatEditTypeActivity extends BaseFragment implements NotificationCe android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", invite.link); clipboard.setPrimaryClip(clip); - Toast.makeText(getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + BulletinFactory.createCopyLinkBulletin(this).show(); } catch (Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java index 
c9e979f67..97881b01e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java @@ -114,6 +114,7 @@ public class ChatRightsEditActivity extends BaseFragment { private int sendStickersRow; private int sendPollsRow; private int embedLinksRow; + private int startVoiceChatRow; private int untilSectionRow; private int untilDateRow; @@ -153,6 +154,9 @@ public class ChatRightsEditActivity extends BaseFragment { myAdminRights.change_info = myAdminRights.post_messages = myAdminRights.edit_messages = myAdminRights.delete_messages = myAdminRights.ban_users = myAdminRights.invite_users = myAdminRights.pin_messages = myAdminRights.add_admins = true; + if (!isChannel) { + myAdminRights.manage_call = true; + } } if (type == TYPE_ADMIN) { adminRights = new TLRPC.TL_chatAdminRights(); @@ -161,6 +165,7 @@ public class ChatRightsEditActivity extends BaseFragment { adminRights.post_messages = myAdminRights.post_messages; adminRights.edit_messages = myAdminRights.edit_messages; adminRights.delete_messages = myAdminRights.delete_messages; + adminRights.manage_call = myAdminRights.manage_call; adminRights.ban_users = myAdminRights.ban_users; adminRights.invite_users = myAdminRights.invite_users; adminRights.pin_messages = myAdminRights.pin_messages; @@ -170,6 +175,7 @@ public class ChatRightsEditActivity extends BaseFragment { adminRights.post_messages = rightsAdmin.post_messages; adminRights.edit_messages = rightsAdmin.edit_messages; adminRights.delete_messages = rightsAdmin.delete_messages; + adminRights.manage_call = rightsAdmin.manage_call; adminRights.ban_users = rightsAdmin.ban_users; adminRights.invite_users = rightsAdmin.invite_users; adminRights.pin_messages = rightsAdmin.pin_messages; @@ -178,7 +184,7 @@ public class ChatRightsEditActivity extends BaseFragment { initialIsSet = adminRights.change_info || adminRights.post_messages || adminRights.edit_messages || 
adminRights.delete_messages || adminRights.ban_users || adminRights.invite_users || - adminRights.pin_messages || adminRights.add_admins || adminRights.anonymous; + adminRights.pin_messages || adminRights.add_admins || adminRights.manage_call || adminRights.anonymous; } } else { defaultBannedRights = rightsBannedDefault; @@ -503,6 +509,8 @@ public class ChatRightsEditActivity extends BaseFragment { adminRights.anonymous = !adminRights.anonymous; } else if (position == banUsersRow) { adminRights.ban_users = !adminRights.ban_users; + } else if (position == startVoiceChatRow) { + adminRights.manage_call = !adminRights.manage_call; } else if (position == addUsersRow) { if (currentType == TYPE_ADMIN) { adminRights.invite_users = !adminRights.invite_users; @@ -593,15 +601,15 @@ public class ChatRightsEditActivity extends BaseFragment { } private boolean isDefaultAdminRights() { - return adminRights.change_info && adminRights.delete_messages && adminRights.ban_users && adminRights.invite_users && adminRights.pin_messages && !adminRights.add_admins && !adminRights.anonymous || - !adminRights.change_info && !adminRights.delete_messages && !adminRights.ban_users && !adminRights.invite_users && !adminRights.pin_messages && !adminRights.add_admins && !adminRights.anonymous; + return adminRights.change_info && adminRights.delete_messages && adminRights.ban_users && adminRights.invite_users && adminRights.pin_messages && (isChannel || adminRights.manage_call) && !adminRights.add_admins && !adminRights.anonymous || + !adminRights.change_info && !adminRights.delete_messages && !adminRights.ban_users && !adminRights.invite_users && !adminRights.pin_messages && !adminRights.manage_call && !adminRights.add_admins && !adminRights.anonymous; } private boolean hasAllAdminRights() { if (isChannel) { return adminRights.change_info && adminRights.post_messages && adminRights.edit_messages && adminRights.delete_messages && adminRights.invite_users && adminRights.add_admins; } else { - 
return adminRights.change_info && adminRights.delete_messages && adminRights.ban_users && adminRights.invite_users && adminRights.pin_messages && adminRights.add_admins; + return adminRights.change_info && adminRights.delete_messages && adminRights.ban_users && adminRights.invite_users && adminRights.pin_messages && adminRights.add_admins && adminRights.manage_call; } } @@ -789,6 +797,7 @@ public class ChatRightsEditActivity extends BaseFragment { sendStickersRow = -1; sendPollsRow = -1; embedLinksRow = -1; + startVoiceChatRow = -1; untilSectionRow = -1; untilDateRow = -1; @@ -807,6 +816,7 @@ public class ChatRightsEditActivity extends BaseFragment { banUsersRow = rowCount++; addUsersRow = rowCount++; pinMessagesRow = rowCount++; + startVoiceChatRow = rowCount++; addAdminsRow = rowCount++; anonymousRow = rowCount++; } @@ -907,7 +917,7 @@ public class ChatRightsEditActivity extends BaseFragment { delegate.didSetRights( adminRights.change_info || adminRights.post_messages || adminRights.edit_messages || adminRights.delete_messages || adminRights.ban_users || adminRights.invite_users || - adminRights.pin_messages || adminRights.add_admins || adminRights.anonymous ? 1 : 0, adminRights, bannedRights, currentRank); + adminRights.pin_messages || adminRights.add_admins || adminRights.anonymous || adminRights.manage_call ? 
1 : 0, adminRights, bannedRights, currentRank); } } else if (currentType == TYPE_BANNED) { MessagesController.getInstance(currentAccount).setUserBannedRole(chatId, currentUser, bannedRights, isChannel, getFragmentForAlert(1)); @@ -1002,6 +1012,8 @@ public class ChatRightsEditActivity extends BaseFragment { return myAdminRights.edit_messages; } else if (position == deleteMessagesRow) { return myAdminRights.delete_messages; + } else if (position == startVoiceChatRow) { + return myAdminRights.manage_call; } else if (position == addAdminsRow) { return myAdminRights.add_admins; } else if (position == anonymousRow) { @@ -1168,6 +1180,8 @@ public class ChatRightsEditActivity extends BaseFragment { checkCell.setTextAndCheck(LocaleController.getString("EditAdminSendAnonymously", R.string.EditAdminSendAnonymously), adminRights.anonymous, false); } else if (position == banUsersRow) { checkCell.setTextAndCheck(LocaleController.getString("EditAdminBanUsers", R.string.EditAdminBanUsers), adminRights.ban_users, true); + } else if (position == startVoiceChatRow) { + checkCell.setTextAndCheck(LocaleController.getString("StartVoipChatPermission", R.string.StartVoipChatPermission), adminRights.manage_call, true); } else if (position == addUsersRow) { if (currentType == TYPE_ADMIN) { if (ChatObject.isActionBannedByDefault(currentChat, ChatObject.ACTION_INVITE)) { @@ -1276,7 +1290,7 @@ public class ChatRightsEditActivity extends BaseFragment { } else if (position == changeInfoRow || position == postMessagesRow || position == editMesagesRow || position == deleteMessagesRow || position == addAdminsRow || position == banUsersRow || position == addUsersRow || position == pinMessagesRow || position == sendMessagesRow || position == sendMediaRow || position == sendStickersRow || position == embedLinksRow || - position == sendPollsRow || position == anonymousRow) { + position == sendPollsRow || position == anonymousRow || position == startVoiceChatRow) { return 4; } else if (position == 
cantEditInfoRow || position == rankInfoRow) { return 1; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java index b9a8402db..30b999ddd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java @@ -8,6 +8,8 @@ package org.telegram.ui; +import android.animation.AnimatorSet; +import android.animation.ObjectAnimator; import android.content.Context; import android.graphics.Canvas; import android.graphics.Paint; @@ -25,6 +27,7 @@ import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; +import android.view.ViewTreeObserver; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.EditText; import android.widget.FrameLayout; @@ -33,6 +36,10 @@ import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; +import androidx.recyclerview.widget.SimpleItemAnimator; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.FileLog; @@ -56,30 +63,28 @@ import org.telegram.ui.Cells.GraySectionCell; import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.LoadingCell; import org.telegram.ui.Cells.ManageChatTextCell; +import org.telegram.ui.Cells.ManageChatUserCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextCheckCell2; import org.telegram.ui.Cells.TextInfoPrivacyCell; -import org.telegram.ui.Cells.ManageChatUserCell; import org.telegram.ui.Cells.TextSettingsCell; import org.telegram.ui.Components.BulletinFactory; -import org.telegram.ui.Components.EmptyTextProgressView; +import org.telegram.ui.Components.FlickerLoadingView; import 
org.telegram.ui.Components.IntSeekBarAccessibilityDelegate; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.RadialProgressView; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.SeekBarAccessibilityDelegate; +import org.telegram.ui.Components.StickerEmptyView; import org.telegram.ui.Components.UndoView; import java.util.ArrayList; import java.util.Collections; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.RecyclerView; -import androidx.recyclerview.widget.SimpleItemAnimator; - public class ChatUsersActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { private ListAdapter listViewAdapter; - private EmptyTextProgressView emptyView; + private StickerEmptyView emptyView; private RecyclerListView listView; private SearchAdapter searchListViewAdapter; private ActionBarMenuItem searchItem; @@ -105,6 +110,8 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente private boolean loadingUsers; private boolean firstLoaded; + private SparseArray ignoredUsers; + private int permissionsSectionRow; private int sendMessagesRow; private int sendMediaRow; @@ -143,6 +150,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente private int blockedEmptyRow; private int rowCount; private int selectType; + private int lastEmptyViewRow; private int delayResults; @@ -162,10 +170,22 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente public final static int TYPE_ADMIN = 1; public final static int TYPE_USERS = 2; public final static int TYPE_KICKED = 3; + private boolean openTransitionEnded; + private FlickerLoadingView flickerLoadingView; + private View progressBar; public interface ChatUsersActivityDelegate { - void didAddParticipantToList(int uid, TLObject participant); - void didChangeOwner(TLRPC.User user); + default void 
didAddParticipantToList(int uid, TLObject participant) { + + } + + default void didChangeOwner(TLRPC.User user) { + + } + + default void didSelectUser(int uid) { + + } } private class ChooseView extends View { @@ -434,6 +454,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente slowmodeSelectRow = -1; slowmodeInfoRow = -1; loadingProgressRow = -1; + lastEmptyViewRow = -1; rowCount = 0; if (type == TYPE_KICKED) { @@ -466,7 +487,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente } if (loadingUsers && !firstLoaded) { - loadingProgressRow = rowCount++; + //loadingProgressRow = rowCount++; } else { if (!participants.isEmpty()) { participantsStartRow = rowCount; @@ -476,6 +497,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente if (addNewRow != -1 || participantsStartRow != -1) { addNewSectionRow = rowCount++; } + lastEmptyViewRow = rowCount++; } } else if (type == TYPE_BANNED) { if (ChatObject.canBlockUsers(currentChat)) { @@ -485,7 +507,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente } } if (loadingUsers && !firstLoaded) { - loadingProgressRow = rowCount++; + // loadingProgressRow = rowCount++; } else { if (!participants.isEmpty()) { restricted1SectionRow = rowCount++; @@ -496,12 +518,14 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente if (participantsStartRow != -1) { if (participantsInfoRow == -1) { participantsInfoRow = rowCount++; + lastEmptyViewRow = rowCount++; } else { addNewSectionRow = rowCount++; } } else { blockedEmptyRow = rowCount++; } + lastEmptyViewRow = rowCount++; } } else if (type == TYPE_ADMIN) { if (ChatObject.isChannel(currentChat) && currentChat.megagroup && (info == null || info.participants_count <= 200)) { @@ -513,7 +537,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente addNewRow = rowCount++; } if (loadingUsers && !firstLoaded) { - 
loadingProgressRow = rowCount++; + // loadingProgressRow = rowCount++; } else { if (!participants.isEmpty()) { participantsStartRow = rowCount; @@ -521,6 +545,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente participantsEndRow = rowCount; } participantsInfoRow = rowCount++; + lastEmptyViewRow = rowCount++; } } else if (type == TYPE_USERS) { if (selectType == 0 && ChatObject.canAddUsers(currentChat)) { @@ -531,7 +556,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente addNewRow = rowCount++; } if (loadingUsers && !firstLoaded) { - loadingProgressRow = rowCount++; + // loadingProgressRow = rowCount++; } else { boolean hasAnyOther = false; if (!contacts.isEmpty()) { @@ -558,6 +583,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente } if (rowCount != 0) { participantsInfoRow = rowCount++; + lastEmptyViewRow = rowCount++; } } } @@ -625,7 +651,6 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente @Override public void onSearchExpand() { searching = true; - emptyView.setShowAtCenter(true); if (doneItem != null) { doneItem.setVisibility(View.GONE); } @@ -639,7 +664,6 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente listViewAdapter.notifyDataSetChanged(); listView.setFastScrollVisible(true); listView.setVerticalScrollBarEnabled(false); - emptyView.setShowAtCenter(false); if (doneItem != null) { doneItem.setVisibility(View.VISIBLE); } @@ -651,7 +675,16 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente return; } String text = editText.getText().toString(); + int oldItemsCount = listView.getAdapter() == null ? 
0 : listView.getAdapter().getItemCount(); searchListViewAdapter.searchUsers(text); + if (TextUtils.isEmpty(text) && listView != null && listView.getAdapter() != listViewAdapter) { + listView.setAdapter(listViewAdapter); + if (oldItemsCount == 0) { + showItemsAnimated(0); + } + } + progressBar.setVisibility(View.GONE); + flickerLoadingView.setVisibility(View.VISIBLE); } }); if (type == TYPE_KICKED) { @@ -666,15 +699,31 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente } fragmentView = new FrameLayout(context); - fragmentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); + fragmentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); FrameLayout frameLayout = (FrameLayout) fragmentView; - emptyView = new EmptyTextProgressView(context); - if (type == TYPE_BANNED || type == TYPE_USERS || type == TYPE_KICKED) { - emptyView.setText(LocaleController.getString("NoResult", R.string.NoResult)); + FrameLayout progressLayout = new FrameLayout(context); + flickerLoadingView = new FlickerLoadingView(context); + flickerLoadingView.setViewType(FlickerLoadingView.USERS_TYPE); + flickerLoadingView.showDate(false); + flickerLoadingView.setUseHeaderOffset(true); + progressLayout.addView(flickerLoadingView); + + progressBar = new RadialProgressView(context); + progressLayout.addView(progressBar, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + frameLayout.addView(progressLayout); + + if (type == 3) { + flickerLoadingView.setVisibility(View.GONE); + } else { + progressBar.setVisibility(View.GONE); } - emptyView.setShowAtCenter(true); + emptyView = new StickerEmptyView(context, progressLayout, StickerEmptyView.STICKER_TYPE_SEARCH); + emptyView.title.setText(LocaleController.getString("NoResult", R.string.NoResult)); + emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitle2", R.string.SearchEmptyViewFilteredSubtitle2)); 
emptyView.setVisibility(View.GONE); + emptyView.setAnimateLayoutChange(true); + emptyView.showProgress(true, false); frameLayout.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); listView = new RecyclerListView(context); @@ -743,7 +792,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente public void didSelectUsers(ArrayList users, int fwdCount) { for (int a = 0, N = users.size(); a < N; a++) { TLRPC.User user = users.get(a); - getMessagesController().addUserToChat(chatId, user, null, fwdCount, null, ChatUsersActivity.this, null); + getMessagesController().addUserToChat(chatId, user, fwdCount, null, ChatUsersActivity.this, null); } } @@ -875,6 +924,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente adminRights.change_info = adminRights.post_messages = adminRights.edit_messages = adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = adminRights.pin_messages = adminRights.add_admins = true; + if (!isChannel) { + adminRights.manage_call = true; + } } } } else if (participant instanceof TLRPC.ChatParticipant) { @@ -886,6 +938,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente adminRights.change_info = adminRights.post_messages = adminRights.edit_messages = adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = adminRights.pin_messages = adminRights.add_admins = true; + if (!isChannel) { + adminRights.manage_call = true; + } } } } else { @@ -1011,14 +1066,10 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente undoView = new UndoView(context); frameLayout.addView(undoView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 8, 0, 8, 8)); - if (loadingUsers) { - emptyView.showProgress(); - } else { - emptyView.showTextView(); - } updateRows(); listView.setEmptyView(emptyView); + 
listView.setAnimateEmptyView(true, 0); if (needOpenSearch) { searchItem.openSearch(false); @@ -1026,6 +1077,39 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente return fragmentView; } + + private void showItemsAnimated(int from) { + if (isPaused || !openTransitionEnded) { + return; + } + listView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + int n = listView.getChildCount(); + AnimatorSet animatorSet = new AnimatorSet(); + for (int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + if (listView.getChildAdapterPosition(child) < from) { + continue; + } + child.setAlpha(0); + int s = Math.min(listView.getMeasuredHeight(), Math.max(0, child.getTop())); + int delay = (int) ((s / (float) listView.getMeasuredHeight()) * 100); + ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); + a.setStartDelay(delay); + a.setDuration(200); + animatorSet.playTogether(a); + } + animatorSet.start(); + return true; + } + }); + } + + public void setIgnoresUsers(SparseArray participants) { + ignoredUsers = participants; + } private void onOwnerChaged(TLRPC.User user) { undoView.showWithAction(-chatId, isChannel ? 
UndoView.ACTION_OWNER_TRANSFERED_CHANNEL : UndoView.ACTION_OWNER_TRANSFERED_GROUP, user); @@ -1070,6 +1154,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente admin.admin_rights.change_info = admin.admin_rights.post_messages = admin.admin_rights.edit_messages = admin.admin_rights.delete_messages = admin.admin_rights.ban_users = admin.admin_rights.invite_users = admin.admin_rights.pin_messages = admin.admin_rights.add_admins = true; + if (!isChannel) { + admin.admin_rights.manage_call = true; + } map.put(selfUserId, admin); int index = arrayList.indexOf(object); @@ -1410,6 +1497,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente if (actions.get(i) == 2) { getMessagesController().deleteUserFromChat(chatId, user, null); removeParticipants(userId); + if (currentChat != null && user != null && BulletinFactory.canShowBulletin(this)) { + BulletinFactory.createRemoveFromChatBulletin(this, user.first_name, currentChat.title).show(); + } } else { if (actions.get(i) == 1 && canEditAdmin && (participant instanceof TLRPC.TL_channelParticipantAdmin || participant instanceof TLRPC.TL_chatParticipantAdmin)) { AlertDialog.Builder builder2 = new AlertDialog.Builder(getParentActivity()); @@ -1526,7 +1616,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente presentFragment(fragment); } else if (type == TYPE_BANNED) { TLRPC.User user = getMessagesController().getUser(userId); - getMessagesController().addUserToChat(chatId, user, null, 0, null, ChatUsersActivity.this, null); + getMessagesController().addUserToChat(chatId, user, 0, null, ChatUsersActivity.this, null); } } else if (i == 1) { TLRPC.TL_channels_editBanned req = new TLRPC.TL_channels_editBanned(); @@ -1808,6 +1898,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente if (selectType != 0 && participant.user_id == selfUserId) { continue; } + if (ignoredUsers != null && 
ignoredUsers.indexOfKey(participant.user_id) >= 0) { + continue; + } if (selectType == 1) { if (getContactsController().isContact(participant.user_id)) { contacts.add(participant); @@ -1843,8 +1936,8 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente } } else { loadingUsers = true; - if (emptyView != null && !firstLoaded) { - emptyView.showProgress(); + if (emptyView != null) { + emptyView.showProgress(true, false); } if (listViewAdapter != null) { listViewAdapter.notifyDataSetChanged(); @@ -1920,18 +2013,10 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente objects = participants; map = participantsMap; } - if (delayResults <= 0) { - if (emptyView != null) { - emptyView.showTextView(); - } - } } else { objects = participants; map = participantsMap; participantsMap.clear(); - if (emptyView != null) { - emptyView.showTextView(); - } } objects.clear(); objects.addAll(res.participants); @@ -1947,6 +2032,8 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente remove = true; } else if (selectType == 1 && UserObject.isDeleted(getMessagesController().getUser(participant.user_id))) { remove = true; + } else if (ignoredUsers != null && ignoredUsers.indexOfKey(participant.user_id) >= 0) { + remove = true; } if (remove) { participants.remove(a); @@ -2020,10 +2107,14 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente if (type != TYPE_USERS || delayResults <= 0) { loadingUsers = false; firstLoaded = true; + showItemsAnimated(listViewAdapter != null ? 
listViewAdapter.getItemCount() : 0); } updateRows(); if (listViewAdapter != null) { listViewAdapter.notifyDataSetChanged(); + if (emptyView != null && listViewAdapter.getItemCount() == 0 && firstLoaded) { + emptyView.showProgress(false, true); + } } })); getConnectionsManager().bindRequestToGuid(reqId, classGuid); @@ -2037,6 +2128,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente if (listViewAdapter != null) { listViewAdapter.notifyDataSetChanged(); } + if (emptyView != null) { + emptyView.requestLayout(); + } } @Override @@ -2054,8 +2148,15 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente } } + public int getSelectType() { + return selectType; + } + @Override protected void onTransitionAnimationEnd(boolean isOpen, boolean backward) { + if (isOpen) { + openTransitionEnded = true; + } if (isOpen && !backward && needOpenSearch) { searchItem.getSearchField().requestFocus(); AndroidUtilities.showKeyboard(searchItem.getSearchField()); @@ -2071,6 +2172,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente private SearchAdapterHelper searchAdapterHelper; private Runnable searchRunnable; private int totalCount = 0; + private boolean searchInProgress; private int groupStartRow; private int contactsStartRow; @@ -2079,7 +2181,20 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente public SearchAdapter(Context context) { mContext = context; searchAdapterHelper = new SearchAdapterHelper(true); - searchAdapterHelper.setDelegate(searchId -> notifyDataSetChanged()); + searchAdapterHelper.setDelegate(searchId -> { + if (!searchAdapterHelper.isSearchInProgress()) { + int oldItemCount = getItemCount(); + notifyDataSetChanged(); + if (getItemCount() > oldItemCount) { + showItemsAnimated(oldItemCount); + } + if (!searchInProgress) { + if (getItemCount() == 0 && searchId != 0) { + emptyView.showProgress(false, true); + } + } + } + }); } public void 
searchUsers(final String query) { @@ -2087,14 +2202,16 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente Utilities.searchQueue.cancelRunnable(searchRunnable); searchRunnable = null; } - if (TextUtils.isEmpty(query)) { - searchResult.clear(); - searchResultMap.clear(); - searchResultNames.clear(); - searchAdapterHelper.mergeResults(null); - searchAdapterHelper.queryServerSearch(null, type != 0, false, true, false, false, ChatObject.isChannel(currentChat) ? chatId : 0, false, type, 0); - notifyDataSetChanged(); - } else { + searchResult.clear(); + searchResultMap.clear(); + searchResultNames.clear(); + searchAdapterHelper.mergeResults(null); + searchAdapterHelper.queryServerSearch(null, type != 0, false, true, false, false, ChatObject.isChannel(currentChat) ? chatId : 0, false, type, 0); + notifyDataSetChanged(); + + if (!TextUtils.isEmpty(query)) { + searchInProgress = true; + emptyView.showProgress(true, true); Utilities.searchQueue.postRunnable(searchRunnable = () -> processSearch(query), 300); } } @@ -2103,11 +2220,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente AndroidUtilities.runOnUIThread(() -> { searchRunnable = null; - int kickedType; final ArrayList participantsCopy = !ChatObject.isChannel(currentChat) && info != null ? new ArrayList<>(info.participants.participants) : null; final ArrayList contactsCopy = selectType == 1 ? new ArrayList<>(getContactsController().contacts) : null; - searchAdapterHelper.queryServerSearch(query, selectType != 0, false, true, false, false, ChatObject.isChannel(currentChat) ? 
chatId : 0, false, type, 0); if (participantsCopy != null || contactsCopy != null) { Utilities.searchQueue.postRunnable(() -> { String search1 = query.trim().toLowerCase(); @@ -2209,7 +2324,10 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente } updateSearchResults(resultArray, resultMap, resultArrayNames, resultArray2); }); + } else { + searchInProgress = false; } + searchAdapterHelper.queryServerSearch(query, selectType != 0, false, true, false, false, ChatObject.isChannel(currentChat) ? chatId : 0, false, type, 1); }); } @@ -2218,6 +2336,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente if (!searching) { return; } + searchInProgress = false; searchResult = users; searchResultMap = usersMap; searchResultNames = names; @@ -2227,7 +2346,16 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente search.clear(); search.addAll(participants); } + int oldItemCount = getItemCount(); notifyDataSetChanged(); + if (getItemCount() > oldItemCount) { + showItemsAnimated(oldItemCount); + } + if (!searchAdapterHelper.isSearchInProgress()) { + if (getItemCount() == 0) { + emptyView.showProgress(false, true); + } + } }); } @@ -2325,9 +2453,9 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente View view; switch (viewType) { case 0: - view = new ManageChatUserCell(mContext, 2, 2, selectType == 0); - view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - ((ManageChatUserCell) view).setDelegate((cell, click) -> { + ManageChatUserCell manageChatUserCell = new ManageChatUserCell(mContext, 2, 2, selectType == 0); + manageChatUserCell.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + manageChatUserCell.setDelegate((cell, click) -> { TLObject object = getItem((Integer) cell.getTag()); if (object instanceof TLRPC.ChannelParticipant) { TLRPC.ChannelParticipant participant = (TLRPC.ChannelParticipant) object; @@ -2336,6 
+2464,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente return false; } }); + view = manageChatUserCell; break; case 1: default: @@ -2519,29 +2648,50 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente View view; switch (viewType) { case 0: - view = new ManageChatUserCell(mContext, type == TYPE_BANNED || type == TYPE_KICKED ? 7 : 6, type == TYPE_BANNED || type == TYPE_KICKED ? 6 : 2, selectType == 0); - view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - ((ManageChatUserCell) view).setDelegate((cell, click) -> { + ManageChatUserCell manageChatUserCell = new ManageChatUserCell(mContext, type == TYPE_BANNED || type == TYPE_KICKED ? 7 : 6, type == TYPE_BANNED || type == TYPE_KICKED ? 6 : 2, selectType == 0); + manageChatUserCell.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + manageChatUserCell.setDelegate((cell, click) -> { TLObject participant = listViewAdapter.getItem((Integer) cell.getTag()); return createMenuForParticipant(participant, !click); }); + view = manageChatUserCell; break; case 1: - view = new TextInfoPrivacyCell(mContext); + view = new TextInfoPrivacyCell(mContext) { + @Override + protected void dispatchDraw(Canvas canvas) { + canvas.drawColor(Theme.getColor(Theme.key_windowBackgroundGray)); + super.dispatchDraw(canvas); + } + }; break; case 2: view = new ManageChatTextCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; case 3: - view = new ShadowSectionCell(mContext); + view = new ShadowSectionCell(mContext) { + @Override + protected void dispatchDraw(Canvas canvas) { + canvas.drawColor(Theme.getColor(Theme.key_windowBackgroundGray)); + super.dispatchDraw(canvas); + } + }; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); break; case 4: view = new FrameLayout(mContext) { + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { 
super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(heightMeasureSpec) - AndroidUtilities.dp(56), MeasureSpec.EXACTLY)); } + + @Override + protected void dispatchDraw(Canvas canvas) { + canvas.drawColor(Theme.getColor(Theme.key_windowBackgroundGray)); + super.dispatchDraw(canvas); + } }; FrameLayout frameLayout = (FrameLayout) view; frameLayout.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); @@ -2597,6 +2747,25 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente case 10: view = new LoadingCell(mContext, AndroidUtilities.dp(40), AndroidUtilities.dp(120)); break; + case 11: + view = new View(mContext) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int h = 0; + for (int i = 0; i < listView.getChildCount(); i++) { + if (listView.getChildAt(i) != this) { + h += listView.getChildAt(i).getMeasuredHeight(); + } + } + setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), Math.max(0, listView.getMeasuredHeight() - h)); + } + + @Override + protected void onDraw(Canvas canvas) { + canvas.drawColor(Theme.getColor(Theme.key_windowBackgroundGray)); + } + }; + break; case 9: default: view = new ChooseView(mContext); @@ -2644,7 +2813,7 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente banned = participant instanceof TLRPC.TL_channelParticipantBanned; creator = participant instanceof TLRPC.TL_channelParticipantCreator; admin = participant instanceof TLRPC.TL_channelParticipantAdmin; - } else { + } else if (item instanceof TLRPC.ChatParticipant) { TLRPC.ChatParticipant participant = (TLRPC.ChatParticipant) item; userId = participant.user_id; joined = participant.date; @@ -2654,6 +2823,8 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente banned = false; creator = participant instanceof TLRPC.TL_chatParticipantCreator; admin = 
participant instanceof TLRPC.TL_chatParticipantAdmin; + } else { + return; } TLRPC.User user = getMessagesController().getUser(userId); if (user != null) { @@ -2884,6 +3055,8 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente return 9; } else if (position == loadingProgressRow) { return 10; + } else if (position == lastEmptyViewRow) { + return 11; } return 0; } @@ -2950,14 +3123,6 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ManageChatUserCell.class}, new String[]{"nameTextView"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ManageChatUserCell.class}, new String[]{"statusColor"}, null, null, cellDelegate, Theme.key_windowBackgroundWhiteGrayText)); themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ManageChatUserCell.class}, new String[]{"statusOnlineColor"}, null, null, cellDelegate, Theme.key_windowBackgroundWhiteBlueText)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ManageChatUserCell.class}, null, Theme.avatarDrawables, null, Theme.key_avatar_text)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundRed)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundOrange)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundViolet)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundGreen)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundCyan)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundBlue)); - themeDescriptions.add(new 
ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundPink)); themeDescriptions.add(new ThemeDescription(undoView, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_undo_background)); themeDescriptions.add(new ThemeDescription(undoView, 0, new Class[]{UndoView.class}, new String[]{"undoImageView"}, null, null, null, Theme.key_undo_cancelColor)); @@ -2972,6 +3137,20 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_CHECKTAG, new Class[]{ManageChatTextCell.class}, new String[]{"imageView"}, null, null, null, Theme.key_windowBackgroundWhiteBlueButton)); themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_CHECKTAG, new Class[]{ManageChatTextCell.class}, new String[]{"textView"}, null, null, null, Theme.key_windowBackgroundWhiteBlueIcon)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{StickerEmptyView.class}, new String[]{"title"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{StickerEmptyView.class}, new String[]{"subtitle"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + themeDescriptions.add(new ThemeDescription(emptyView.title, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + themeDescriptions.add(new ThemeDescription(emptyView.subtitle, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); + + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ManageChatUserCell.class}, null, Theme.avatarDrawables, null, Theme.key_avatar_text)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundRed)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, 
Theme.key_avatar_backgroundOrange)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundViolet)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundGreen)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundCyan)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundBlue)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundPink)); + return themeDescriptions; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AdminLogFilterAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AdminLogFilterAlert.java index b3f93613a..bb1c16062 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AdminLogFilterAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AdminLogFilterAlert.java @@ -72,6 +72,7 @@ public class AdminLogFilterAlert extends BottomSheet { private int editRow; private int pinnedRow; private int leavingRow; + private int callsRow; private int allAdminsRow; public AdminLogFilterAlert(Context context, TLRPC.TL_channelAdminLogEventsFilter filter, SparseArray admins, boolean megagroup) { @@ -92,6 +93,7 @@ public class AdminLogFilterAlert extends BottomSheet { currentFilter.pinned = filter.pinned; currentFilter.edit = filter.edit; currentFilter.delete = filter.delete; + currentFilter.group_call = filter.group_call; } if (admins != null) { selectedAdmins = admins.clone(); @@ -114,7 +116,8 @@ public class AdminLogFilterAlert extends BottomSheet { } else { pinnedRow = -1; } - leavingRow = rowCount; + leavingRow = rowCount++; + callsRow = rowCount; rowCount += 2; allAdminsRow = rowCount; @@ -144,7 +147,7 @@ public class AdminLogFilterAlert extends BottomSheet { height -= 
AndroidUtilities.statusBarHeight; } int measuredWidth = getMeasuredWidth(); - int contentSize = AndroidUtilities.dp(48) + (isMegagroup ? 9 : 7) * AndroidUtilities.dp(48) + backgroundPaddingTop; + int contentSize = AndroidUtilities.dp(48) + (isMegagroup ? 10 : 7) * AndroidUtilities.dp(48) + backgroundPaddingTop; if (currentAdmins != null) { contentSize += (currentAdmins.size() + 1) * AndroidUtilities.dp(48) + AndroidUtilities.dp(20); } @@ -225,7 +228,7 @@ public class AdminLogFilterAlert extends BottomSheet { currentFilter.join = currentFilter.leave = currentFilter.invite = currentFilter.ban = currentFilter.unban = currentFilter.kick = currentFilter.unkick = currentFilter.promote = currentFilter.demote = currentFilter.info = currentFilter.settings = currentFilter.pinned = - currentFilter.edit = currentFilter.delete = false; + currentFilter.edit = currentFilter.delete = currentFilter.group_call = false; } else { currentFilter = null; } @@ -260,7 +263,7 @@ public class AdminLogFilterAlert extends BottomSheet { currentFilter.join = currentFilter.leave = currentFilter.invite = currentFilter.ban = currentFilter.unban = currentFilter.kick = currentFilter.unkick = currentFilter.promote = currentFilter.demote = currentFilter.info = currentFilter.settings = currentFilter.pinned = - currentFilter.edit = currentFilter.delete = true; + currentFilter.edit = currentFilter.delete = currentFilter.group_call = true; RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(0); if (holder != null) { ((CheckBoxCell) holder.itemView).setChecked(false, true); @@ -282,6 +285,8 @@ public class AdminLogFilterAlert extends BottomSheet { currentFilter.pinned = !currentFilter.pinned; } else if (position == leavingRow) { currentFilter.leave = !currentFilter.leave; + } else if (position == callsRow) { + currentFilter.group_call = !currentFilter.group_call; } } if (currentFilter != null && !currentFilter.join && !currentFilter.leave && @@ -289,7 +294,7 @@ public class 
AdminLogFilterAlert extends BottomSheet { !currentFilter.unban && !currentFilter.kick && !currentFilter.unkick && !currentFilter.promote && !currentFilter.demote && !currentFilter.info && !currentFilter.settings && !currentFilter.pinned && !currentFilter.edit && - !currentFilter.delete) { + !currentFilter.delete && !currentFilter.group_call) { saveButton.setEnabled(false); saveButton.setAlpha(0.5f); } else { @@ -381,7 +386,7 @@ public class AdminLogFilterAlert extends BottomSheet { @Override public int getItemCount() { - return (isMegagroup ? 9 : 7) + (currentAdmins != null ? 2 + currentAdmins.size() : 0); + return (isMegagroup ? 10 : 7) + (currentAdmins != null ? 2 + currentAdmins.size() : 0); } @Override @@ -445,6 +450,8 @@ public class AdminLogFilterAlert extends BottomSheet { cell.setChecked(currentFilter == null || currentFilter.pinned, false); } else if (position == leavingRow) { cell.setChecked(currentFilter == null || currentFilter.leave, false); + } else if (position == callsRow) { + cell.setChecked(currentFilter == null || currentFilter.group_call, false); } else if (position == allAdminsRow) { cell.setChecked(selectedAdmins == null, false); } @@ -485,7 +492,9 @@ public class AdminLogFilterAlert extends BottomSheet { } else if (position == pinnedRow) { cell.setText(LocaleController.getString("EventLogFilterPinnedMessages", R.string.EventLogFilterPinnedMessages), "", currentFilter == null || currentFilter.pinned, true); } else if (position == leavingRow) { - cell.setText(LocaleController.getString("EventLogFilterLeavingMembers", R.string.EventLogFilterLeavingMembers), "", currentFilter == null || currentFilter.leave, false); + cell.setText(LocaleController.getString("EventLogFilterLeavingMembers", R.string.EventLogFilterLeavingMembers), "", currentFilter == null || currentFilter.leave, true); + } else if (position == callsRow) { + cell.setText(LocaleController.getString("EventLogFilterCalls", R.string.EventLogFilterCalls), "", currentFilter == null || 
currentFilter.group_call, false); } else if (position == allAdminsRow) { cell.setText(LocaleController.getString("EventLogAllAdmins", R.string.EventLogAllAdmins), "", selectedAdmins == null, true); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java index a59e7eac5..b8b270bd2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java @@ -13,17 +13,21 @@ import android.app.Activity; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; +import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.graphics.Color; import android.graphics.Outline; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; +import android.graphics.drawable.GradientDrawable; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Vibrator; +import android.provider.Settings; import android.text.Html; import android.text.InputType; import android.text.Spannable; @@ -62,6 +66,7 @@ import org.telegram.messenger.NotificationsController; import org.telegram.messenger.R; import org.telegram.messenger.SecretChatHelper; import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; @@ -126,7 +131,8 @@ public class AlertsCreator { request instanceof TLRPC.TL_channels_editBanned || request instanceof TLRPC.TL_messages_editChatDefaultBannedRights || request instanceof TLRPC.TL_messages_editChatAdmin || - request instanceof TLRPC.TL_messages_migrateChat) { + request 
instanceof TLRPC.TL_messages_migrateChat || + request instanceof TLRPC.TL_phone_inviteToGroupCall) { if (fragment != null && error.text.equals("CHANNELS_TOO_MUCH")) { if (request instanceof TLRPC.TL_channels_joinChannel || request instanceof TLRPC.TL_channels_inviteToChannel) { fragment.presentFragment(new TooManyCommunitiesActivity(TooManyCommunitiesActivity.TYPE_JOIN)); @@ -1134,7 +1140,7 @@ public class AlertsCreator { boolean canDeleteInbox = !secret && user != null && canRevokeInbox && revokeTimeLimit == 0x7fffffff; final boolean[] deleteForAll = new boolean[1]; - if (!second && canDeleteInbox) { + if (!second && canDeleteInbox && !UserObject.isDeleted(user)) { cell[0] = new CheckBoxCell(context, 1); cell[0].setBackgroundDrawable(Theme.getSelectorDrawable(false)); if (clear) { @@ -1551,15 +1557,54 @@ public class AlertsCreator { void didSelectDate(boolean notify, int scheduleDate); } + public static class ScheduleDatePickerColors { + + public final int textColor; + public final int backgroundColor; + + public final int iconColor; + public final int iconSelectorColor; + + public final int subMenuTextColor; + public final int subMenuBackgroundColor; + public final int subMenuSelectorColor; + + public final int buttonTextColor = Theme.getColor(Theme.key_featuredStickers_buttonText); + public final int buttonBackgroundColor = Theme.getColor(Theme.key_featuredStickers_addButton); + public final int buttonBackgroundPressedColor = Theme.getColor(Theme.key_featuredStickers_addButtonPressed); + + private ScheduleDatePickerColors() { + this(Theme.getColor(Theme.key_dialogTextBlack), Theme.getColor(Theme.key_dialogBackground), Theme.getColor(Theme.key_sheet_other), Theme.getColor(Theme.key_player_actionBarSelector), Theme.getColor(Theme.key_actionBarDefaultSubmenuItem), Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground), Theme.getColor(Theme.key_listSelector)); + } + + public ScheduleDatePickerColors(int textColor, int backgroundColor, int iconColor, int 
iconSelectorColor, int subMenuTextColor, int subMenuBackgroundColor, int subMenuSelectorColor) { + this.textColor = textColor; + this.backgroundColor = backgroundColor; + this.iconColor = iconColor; + this.iconSelectorColor = iconSelectorColor; + this.subMenuTextColor = subMenuTextColor; + this.subMenuBackgroundColor = subMenuBackgroundColor; + this.subMenuSelectorColor = subMenuSelectorColor; + } + } + public static BottomSheet.Builder createScheduleDatePickerDialog(Context context, long dialogId, final ScheduleDatePickerDelegate datePickerDelegate) { return createScheduleDatePickerDialog(context, dialogId, -1, datePickerDelegate, null); } + public static BottomSheet.Builder createScheduleDatePickerDialog(Context context, long dialogId, final ScheduleDatePickerDelegate datePickerDelegate, final ScheduleDatePickerColors datePickerColors) { + return createScheduleDatePickerDialog(context, dialogId, -1, datePickerDelegate, null, datePickerColors); + } + public static BottomSheet.Builder createScheduleDatePickerDialog(Context context, long dialogId, final ScheduleDatePickerDelegate datePickerDelegate, final Runnable cancelRunnable) { return createScheduleDatePickerDialog(context, dialogId, -1, datePickerDelegate, cancelRunnable); } public static BottomSheet.Builder createScheduleDatePickerDialog(Context context, long dialogId, long currentDate, final ScheduleDatePickerDelegate datePickerDelegate, final Runnable cancelRunnable) { + return createScheduleDatePickerDialog(context, dialogId, currentDate, datePickerDelegate, cancelRunnable, new ScheduleDatePickerColors()); + } + + public static BottomSheet.Builder createScheduleDatePickerDialog(Context context, long dialogId, long currentDate, final ScheduleDatePickerDelegate datePickerDelegate, final Runnable cancelRunnable, final ScheduleDatePickerColors datePickerColors) { if (context == null) { return null; } @@ -1570,6 +1615,7 @@ public class AlertsCreator { builder.setApplyBottomPadding(false); final NumberPicker 
dayPicker = new NumberPicker(context); + dayPicker.setTextColor(datePickerColors.textColor); dayPicker.setTextOffset(AndroidUtilities.dp(10)); dayPicker.setItemCount(5); final NumberPicker hourPicker = new NumberPicker(context) { @@ -1579,6 +1625,7 @@ public class AlertsCreator { } }; hourPicker.setItemCount(5); + hourPicker.setTextColor(datePickerColors.textColor); hourPicker.setTextOffset(-AndroidUtilities.dp(10)); final NumberPicker minutePicker = new NumberPicker(context) { @Override @@ -1587,6 +1634,7 @@ public class AlertsCreator { } }; minutePicker.setItemCount(5); + minutePicker.setTextColor(datePickerColors.textColor); minutePicker.setTextOffset(-AndroidUtilities.dp(34)); LinearLayout container = new LinearLayout(context) { @@ -1631,7 +1679,7 @@ public class AlertsCreator { } else { titleView.setText(LocaleController.getString("ScheduleMessage", R.string.ScheduleMessage)); } - titleView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + titleView.setTextColor(datePickerColors.textColor); titleView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); titleView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); titleLayout.addView(titleView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 12, 0, 0)); @@ -1645,14 +1693,19 @@ public class AlertsCreator { name = name.substring(0, 10) + "\u2026"; } - ActionBarMenuItem optionsButton = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_sheet_other)); + ActionBarMenuItem optionsButton = new ActionBarMenuItem(context, null, 0, datePickerColors.iconColor); optionsButton.setLongClickEnabled(false); optionsButton.setSubMenuOpenSide(2); optionsButton.setIcon(R.drawable.ic_ab_other); - optionsButton.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_player_actionBarSelector), 1)); + optionsButton.setBackgroundDrawable(Theme.createSelectorDrawable(datePickerColors.iconSelectorColor, 1)); 
titleLayout.addView(optionsButton, LayoutHelper.createFrame(40, 40, Gravity.TOP | Gravity.RIGHT, 0, 8, 5, 0)); optionsButton.addSubItem(1, LocaleController.formatString("ScheduleWhenOnline", R.string.ScheduleWhenOnline, name)); - optionsButton.setOnClickListener(v -> optionsButton.toggleSubMenu()); + optionsButton.setOnClickListener(v -> { + optionsButton.toggleSubMenu(); + optionsButton.setPopupItemsColor(datePickerColors.subMenuTextColor, false); + optionsButton.setupPopupRadialSelectors(datePickerColors.subMenuSelectorColor); + optionsButton.redrawPopup(datePickerColors.subMenuBackgroundColor); + }); optionsButton.setDelegate(id -> { if (id == 1) { datePickerDelegate.didSelectDate(true, 0x7ffffffe); @@ -1742,10 +1795,10 @@ public class AlertsCreator { buttonTextView.setPadding(AndroidUtilities.dp(34), 0, AndroidUtilities.dp(34), 0); buttonTextView.setGravity(Gravity.CENTER); - buttonTextView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + buttonTextView.setTextColor(datePickerColors.buttonTextColor); buttonTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); buttonTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - buttonTextView.setBackgroundDrawable(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), Theme.getColor(Theme.key_featuredStickers_addButton), Theme.getColor(Theme.key_featuredStickers_addButtonPressed))); + buttonTextView.setBackgroundDrawable(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), datePickerColors.buttonBackgroundColor, datePickerColors.buttonBackgroundPressedColor)); container.addView(buttonTextView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.BOTTOM, 16, 15, 16, 16)); buttonTextView.setOnClickListener(v -> { canceled[0] = false; @@ -1761,11 +1814,13 @@ public class AlertsCreator { }); builder.setCustomView(container); - builder.show().setOnDismissListener(dialog -> { + BottomSheet bottomSheet = builder.show(); + 
bottomSheet.setOnDismissListener(dialog -> { if (cancelRunnable != null && canceled[0]) { cancelRunnable.run(); } }); + bottomSheet.setBackgroundColor(datePickerColors.backgroundColor); return builder; } @@ -2249,6 +2304,10 @@ public class AlertsCreator { builder.setTitle(LocaleController.getString("ChannelTooMuchTitle", R.string.ChannelTooMuchTitle)); builder.setMessage(LocaleController.getString("UserChannelTooMuchJoin", R.string.UserChannelTooMuchJoin)); break; + case "USER_ALREADY_PARTICIPANT": + builder.setTitle(LocaleController.getString("VoipGroupVoiceChat", R.string.VoipGroupVoiceChat)); + builder.setMessage(LocaleController.getString("VoipGroupInviteAlreadyParticipant", R.string.VoipGroupInviteAlreadyParticipant)); + break; default: builder.setMessage(LocaleController.getString("ErrorOccurred", R.string.ErrorOccurred) + "\n" + error); break; @@ -2313,12 +2372,16 @@ public class AlertsCreator { SharedPreferences.Editor editor = preferences1.edit(); if (dialog_id != 0) { editor.putInt("color_" + dialog_id, selectedColor[0]); - } else if (globalType == NotificationsController.TYPE_PRIVATE) { - editor.putInt("MessagesLed", selectedColor[0]); - } else if (globalType == NotificationsController.TYPE_GROUP) { - editor.putInt("GroupLed", selectedColor[0]); + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannel(dialog_id); } else { - editor.putInt("ChannelLed", selectedColor[0]); + if (globalType == NotificationsController.TYPE_PRIVATE) { + editor.putInt("MessagesLed", selectedColor[0]); + } else if (globalType == NotificationsController.TYPE_GROUP) { + editor.putInt("GroupLed", selectedColor[0]); + } else { + editor.putInt("ChannelLed", selectedColor[0]); + } + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannelGlobal(globalType); } editor.commit(); if (onSelect != null) { @@ -2356,22 +2419,22 @@ public class AlertsCreator { return builder.create(); } - public static Dialog 
createVibrationSelectDialog(Activity parentActivity, final long dialog_id, final boolean globalGroup, final boolean globalAll, final Runnable onSelect) { + public static Dialog createVibrationSelectDialog(Activity parentActivity, final long dialogId, final boolean globalGroup, final boolean globalAll, final Runnable onSelect) { String prefix; - if (dialog_id != 0) { - prefix = "vibrate_"; + if (dialogId != 0) { + prefix = "vibrate_" + dialogId; } else { prefix = globalGroup ? "vibrate_group" : "vibrate_messages"; } - return createVibrationSelectDialog(parentActivity, dialog_id, prefix, onSelect); + return createVibrationSelectDialog(parentActivity, dialogId, prefix, onSelect); } - public static Dialog createVibrationSelectDialog(Activity parentActivity, final long dialog_id, final String prefKeyPrefix, final Runnable onSelect) { + public static Dialog createVibrationSelectDialog(Activity parentActivity, final long dialogId, final String prefKeyPrefix, final Runnable onSelect) { SharedPreferences preferences = MessagesController.getNotificationsSettings(UserConfig.selectedAccount); final int[] selected = new int[1]; String[] descriptions; - if (dialog_id != 0) { - selected[0] = preferences.getInt(prefKeyPrefix + dialog_id, 0); + if (dialogId != 0) { + selected[0] = preferences.getInt(prefKeyPrefix, 0); if (selected[0] == 3) { selected[0] = 2; } else if (selected[0] == 2) { @@ -2417,16 +2480,17 @@ public class AlertsCreator { final SharedPreferences preferences1 = MessagesController.getNotificationsSettings(UserConfig.selectedAccount); SharedPreferences.Editor editor = preferences1.edit(); - if (dialog_id != 0) { + if (dialogId != 0) { if (selected[0] == 0) { - editor.putInt(prefKeyPrefix + dialog_id, 0); + editor.putInt(prefKeyPrefix, 0); } else if (selected[0] == 1) { - editor.putInt(prefKeyPrefix + dialog_id, 1); + editor.putInt(prefKeyPrefix, 1); } else if (selected[0] == 2) { - editor.putInt(prefKeyPrefix + dialog_id, 3); + editor.putInt(prefKeyPrefix, 3); } 
else if (selected[0] == 3) { - editor.putInt(prefKeyPrefix + dialog_id, 2); + editor.putInt(prefKeyPrefix, 2); } + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannel(dialogId); } else { if (selected[0] == 0) { editor.putInt(prefKeyPrefix, 2); @@ -2439,6 +2503,13 @@ public class AlertsCreator { } else if (selected[0] == 4) { editor.putInt(prefKeyPrefix, 4); } + if (prefKeyPrefix.equals("vibrate_channel")) { + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannelGlobal(NotificationsController.TYPE_CHANNEL); + } else if (prefKeyPrefix.equals("vibrate_group")) { + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannelGlobal(NotificationsController.TYPE_GROUP); + } else { + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannelGlobal(NotificationsController.TYPE_PRIVATE); + } } editor.commit(); builder.getDismissRunnable().run(); @@ -2554,6 +2625,100 @@ public class AlertsCreator { return builder; } + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public static AlertDialog.Builder createDrawOverlayPermissionDialog(Activity activity, DialogInterface.OnClickListener onCancel) { + AlertDialog.Builder builder = new AlertDialog.Builder(activity); + String svg = RLottieDrawable.readRes(null, R.raw.pip_video_request); + + FrameLayout frameLayout = new FrameLayout(activity); + frameLayout.setBackground(new GradientDrawable(GradientDrawable.Orientation.BL_TR, new int[]{0xFF22364F, 0xFF22526A})); + frameLayout.setClipToOutline(true); + frameLayout.setOutlineProvider(new ViewOutlineProvider() { + @Override + public void getOutline(View view, Outline outline) { + outline.setRoundRect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight() + AndroidUtilities.dp(6), AndroidUtilities.dpf2(6)); + } + }); + + float aspectRatio = 472f / 936f; + View background = new View(activity); + background.setBackground(new 
BitmapDrawable(SvgHelper.getBitmap(svg, AndroidUtilities.dp(320), AndroidUtilities.dp(320 * aspectRatio), false))); + frameLayout.addView(background, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, -1, -1, -1, -1)); + + builder.setTopView(frameLayout); + builder.setTitle(LocaleController.getString("PermissionDrawAboveOtherAppsTitle", R.string.PermissionDrawAboveOtherAppsTitle)); + builder.setMessage(LocaleController.getString("PermissionDrawAboveOtherApps", R.string.PermissionDrawAboveOtherApps)); + builder.setPositiveButton(LocaleController.getString("Enable", R.string.Enable), (dialogInterface, i) -> { + if (activity != null) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + activity.startActivity(new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + activity.getPackageName()))); + } + } + }); + builder.notDrawBackgroundOnTopView(true); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), onCancel); + builder.setTopViewAspectRatio(aspectRatio); + return builder; + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public static AlertDialog.Builder createDrawOverlayGroupCallPermissionDialog(Context context) { + AlertDialog.Builder builder = new AlertDialog.Builder(context); + String svg = RLottieDrawable.readRes(null, R.raw.pip_voice_request); + + GroupCallPipButton button = new GroupCallPipButton(context, 0, true); + button.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); + FrameLayout frameLayout = new FrameLayout(context) { + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + button.setTranslationY(getMeasuredHeight() * 0.28f - button.getMeasuredWidth() / 2f); + button.setTranslationX(getMeasuredWidth() * 0.82f - button.getMeasuredWidth() / 2f); + } + }; + frameLayout.setBackground(new GradientDrawable(GradientDrawable.Orientation.BL_TR, new 
int[]{0xFF192A3D, 0xFF19514E})); + frameLayout.setClipToOutline(true); + frameLayout.setOutlineProvider(new ViewOutlineProvider() { + @Override + public void getOutline(View view, Outline outline) { + outline.setRoundRect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight() + AndroidUtilities.dp(6), AndroidUtilities.dpf2(6)); + } + }); + + + float aspectRatio = 540f / 936f; + View background = new View(context); + background.setBackground(new BitmapDrawable(SvgHelper.getBitmap(svg, AndroidUtilities.dp(320), AndroidUtilities.dp(320 * aspectRatio), false))); + frameLayout.addView(background, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, -1, -1, -1, -1)); + + frameLayout.addView(button, LayoutHelper.createFrame(117,117)); + + builder.setTopView(frameLayout); + builder.setTitle(LocaleController.getString("PermissionDrawAboveOtherAppsGroupCallTitle", R.string.PermissionDrawAboveOtherAppsGroupCallTitle)); + builder.setMessage(LocaleController.getString("PermissionDrawAboveOtherAppsGroupCall", R.string.PermissionDrawAboveOtherAppsGroupCall)); + builder.setPositiveButton(LocaleController.getString("Enable", R.string.Enable), (dialogInterface, i) -> { + if (context != null) { + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + Intent intent = new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + context.getPackageName())); + Activity activity = AndroidUtilities.findActivity(context); + if (activity instanceof LaunchActivity) { + activity.startActivityForResult(intent, 105); + } else { + context.startActivity(intent); + } + } + } catch (Exception e) { + FileLog.e(e); + } + } + }); + builder.notDrawBackgroundOnTopView(true); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + builder.setTopViewAspectRatio(aspectRatio); + return builder; + } + public static AlertDialog.Builder createContactsPermissionDialog(Activity parentActivity, MessagesStorage.IntCallback 
callback) { AlertDialog.Builder builder = new AlertDialog.Builder(parentActivity); builder.setTopImage(R.drawable.permissions_contacts, Theme.getColor(Theme.key_dialogTopBackground)); @@ -2709,6 +2874,7 @@ public class AlertsCreator { option = 1; } editor.putInt("priority_" + dialog_id, option); + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannel(dialog_id); } else { int option; if (selected[0] == 0) { @@ -2730,6 +2896,7 @@ public class AlertsCreator { editor.putInt("priority_channel", option); selected[0] = preferences.getInt("priority_channel", 1); } + NotificationsController.getInstance(UserConfig.selectedAccount).deleteNotificationChannelGlobal(globalType); } editor.commit(); builder.getDismissRunnable().run(); @@ -3193,7 +3360,7 @@ public class AlertsCreator { } } } - if (myMessagesCount > 0 && hasNonDiceMessages) { + if (myMessagesCount > 0 && hasNonDiceMessages && (user == null || !UserObject.isDeleted(user))) { hasDeleteForAllCheck = true; FrameLayout frameLayout = new FrameLayout(activity); CheckBoxCell cell = new CheckBoxCell(activity, 1); @@ -3360,7 +3527,7 @@ public class AlertsCreator { } else { message.setText(LocaleController.getString("EnterThemeName", R.string.EnterThemeName)); } - message.setTextSize(16); + message.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); message.setPadding(AndroidUtilities.dp(23), AndroidUtilities.dp(12), AndroidUtilities.dp(23), AndroidUtilities.dp(6)); message.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); linearLayout.addView(message, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java index d8fcd6487..ee9071c27 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java @@ -116,7 +116,9 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable { private DispatchQueue decodeQueue; private float startTime; private float endTime; - + private int renderingHeight; + private int renderingWidth; + private float scaleFactor = 1f; private View parentView; private ArrayList secondParentViews = new ArrayList<>(); @@ -238,13 +240,21 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable { destroyDecoder(nativePtr); nativePtr = 0; } + if (renderingHeight > 0 && renderingWidth > 0 && metaData[0] > 0 && metaData[1] > 0) { + scaleFactor = Math.max(renderingWidth / (float) metaData[0], renderingHeight / (float) metaData[1]); + if (scaleFactor <= 0 || scaleFactor > 0.7) { + scaleFactor = 1; + } + } else { + scaleFactor = 1f; + } decoderCreated = true; } try { if (nativePtr != 0 || metaData[0] == 0 || metaData[1] == 0) { if (backgroundBitmap == null && metaData[0] > 0 && metaData[1] > 0) { try { - backgroundBitmap = Bitmap.createBitmap(metaData[0], metaData[1], Bitmap.Config.ARGB_8888); + backgroundBitmap = Bitmap.createBitmap((int) (metaData[0] * scaleFactor), (int) (metaData[1] * scaleFactor), Bitmap.Config.ARGB_8888); } catch (Throwable e) { FileLog.e(e); } @@ -300,9 +310,15 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable { }; public AnimatedFileDrawable(File file, boolean createDecoder, long streamSize, TLRPC.Document document, ImageLocation location, Object parentObject, long seekTo, int account, boolean preview) { + this(file, createDecoder, streamSize, document, location, parentObject, seekTo ,account, preview, 0, 0); + } + + public AnimatedFileDrawable(File file, boolean createDecoder, long streamSize, TLRPC.Document document, ImageLocation location, Object parentObject, long seekTo, int account, boolean preview, int w, int h) { path = file; streamFileSize = streamSize; 
currentAccount = account; + renderingHeight = h; + renderingWidth = w; getPaint().setFlags(Paint.ANTI_ALIAS_FLAG | Paint.FILTER_BITMAP_FLAG); if (streamSize != 0 && (document != null || location != null)) { stream = new AnimatedFileDrawableStream(document, location, parentObject, account, preview); @@ -313,6 +329,14 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable { destroyDecoder(nativePtr); nativePtr = 0; } + if (renderingHeight > 0 && renderingWidth > 0 && metaData[0] > 0 && metaData[1] > 0) { + scaleFactor = Math.max(renderingWidth / (float) metaData[0], renderingHeight / (float) metaData[1]); + if (scaleFactor <= 0 || scaleFactor > 0.7) { + scaleFactor = 1f; + } + } else { + scaleFactor = 1f; + } decoderCreated = true; } if (seekTo != 0) { @@ -336,7 +360,7 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable { seekToMs(nativePtr, ms, precise); } if (backgroundBitmap == null) { - backgroundBitmap = Bitmap.createBitmap(metaData[0], metaData[1], Bitmap.Config.ARGB_8888); + backgroundBitmap = Bitmap.createBitmap((int) (metaData[0] * scaleFactor), (int) (metaData[1] * scaleFactor), Bitmap.Config.ARGB_8888); } int result; if (precise) { @@ -551,6 +575,8 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable { int height = decoderCreated ? (metaData[2] == 90 || metaData[2] == 270 ? metaData[0] : metaData[1]) : 0; if (height == 0) { return AndroidUtilities.dp(100); + } else { + height *= scaleFactor; } return height; } @@ -560,6 +586,8 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable { int width = decoderCreated ? (metaData[2] == 90 || metaData[2] == 270 ? 
metaData[1] : metaData[0]) : 0; if (width == 0) { return AndroidUtilities.dp(100); + } else { + width *= scaleFactor; } return width; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java index 5907c3b41..42857325f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java @@ -1986,7 +1986,7 @@ public class AudioPlayerAlert extends BottomSheet implements NotificationCenter. protected abstract void onImageUpdated(ImageReceiver imageReceiver); } - private abstract static class ClippingTextViewSwitcher extends FrameLayout { + public abstract static class ClippingTextViewSwitcher extends FrameLayout { private final TextView[] textViews = new TextView[2]; private final float[] clipProgress = new float[]{0f, 0.75f}; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java index e3d724622..5d8587cf4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java @@ -118,6 +118,10 @@ public class AudioVisualizerDrawable { p1.setColor(Theme.getColor(Theme.key_chat_inLoader)); p1.setAlpha(ALPHA); } + this.draw(canvas, cx, cy); + } + + public void draw(Canvas canvas, float cx, float cy) { for (int i = 0; i < 8; i++) { if (animateTo[i] != current[i]) { current[i] += dt[i] * 16; @@ -148,15 +152,10 @@ public class AudioVisualizerDrawable { if (enterProgress == 0 && radiusProgress == 0) { return; } - // float idleProgress = radiusProgress > 0.4f ? 
0 : (1f - radiusProgress / 0.4f); for (int i = 0; i < 3; i++) { tmpWaveform[i] = (int) (current[i] * WAVE_RADIUS); } - - //drawables[0].idleStateDiff = enterProgress * idleProgress * IDLE_AMPLITUDE; - //drawables[1].idleStateDiff = enterProgress * idleProgress * IDLE_AMPLITUDE; - drawables[0].setAdditionals(tmpWaveform); for (int i = 0; i < 3; i++) { @@ -188,11 +187,15 @@ public class AudioVisualizerDrawable { canvas.restore(); } - public void setParentView(ChatMessageCell parentView) { + public void setParentView(View parentView) { this.parentView = parentView; } public View getParentView() { return parentView; } + + public void setColor(int color) { + p1.setColor(color); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java index 0e594d05b..aa3ab5441 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java @@ -361,7 +361,7 @@ public class AvatarDrawable extends Drawable { } else if (drawDeleted && Theme.avatarDrawables[1] != null) { int w = Theme.avatarDrawables[1].getIntrinsicWidth(); int h = Theme.avatarDrawables[1].getIntrinsicHeight(); - if (w > size || h > size) { + if (w > size - AndroidUtilities.dp(6) || h > size - AndroidUtilities.dp(6)) { float scale = size / (float) AndroidUtilities.dp(50); w *= scale; h *= scale; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsImageView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsImageView.java new file mode 100644 index 000000000..1ecf15779 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsImageView.java @@ -0,0 +1,425 @@ +package org.telegram.ui.Components; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; +import 
android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffXfermode; +import android.graphics.drawable.Drawable; +import android.os.SystemClock; +import android.widget.FrameLayout; + +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.TLObject; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Cells.GroupCallUserCell; + +import java.util.Random; + +public class AvatarsImageView extends FrameLayout { + + DrawingState[] currentStates = new DrawingState[3]; + DrawingState[] animatingStates = new DrawingState[3]; + boolean wasDraw; + + float transitionProgress = 1f; + ValueAnimator transitionProgressAnimator; + boolean updateAfterTransition; + + private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint xRefP = new Paint(Paint.ANTI_ALIAS_FLAG); + + Runnable updateDelegate; + int currentStyle; + boolean centered; + + public void commitTransition(boolean animated) { + if (!wasDraw || !animated) { + transitionProgress = 1f; + swapStates(); + return; + } + + DrawingState[] removedStates = new DrawingState[3]; + boolean changed = false; + for (int i = 0; i < 3; i++) { + removedStates[i] = currentStates[i]; + if (currentStates[i].id != animatingStates[i].id) { + changed = true; + } else { + currentStates[i].lastSpeakTime = animatingStates[i].lastSpeakTime; + } + } + if (!changed) { + transitionProgress = 1f; + return; + } + for (int i = 0; i < 3; i++) { + boolean found = false; + 
for (int j = 0; j < 3; j++) { + if (currentStates[j].id == animatingStates[i].id) { + found = true; + removedStates[j] = null; + if (i == j) { + animatingStates[i].animationType = DrawingState.ANIMATION_TYPE_NONE; + GroupCallUserCell.AvatarWavesDrawable wavesDrawable = animatingStates[i].wavesDrawable; + animatingStates[i].wavesDrawable = currentStates[i].wavesDrawable; + currentStates[i].wavesDrawable = wavesDrawable; + } else { + animatingStates[i].animationType = DrawingState.ANIMATION_TYPE_MOVE; + animatingStates[i].moveFromIndex = j; + } + break; + } + } + if (!found) { + animatingStates[i].animationType = DrawingState.ANIMATION_TYPE_IN; + } + } + + for (int i = 0; i < 3; i++) { + if (removedStates[i] != null) { + removedStates[i].animationType = DrawingState.ANIMATION_TYPE_OUT; + } + } + if (transitionProgressAnimator != null) { + transitionProgressAnimator.cancel(); + } + transitionProgress = 0; + transitionProgressAnimator = ValueAnimator.ofFloat(0, 1f); + transitionProgressAnimator.addUpdateListener(valueAnimator -> { + transitionProgress = (float) valueAnimator.getAnimatedValue(); + invalidate(); + }); + transitionProgressAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (transitionProgressAnimator != null) { + transitionProgress = 1f; + swapStates(); + if (updateAfterTransition) { + updateAfterTransition = false; + if (updateDelegate != null) { + updateDelegate.run(); + } + } + invalidate(); + } + transitionProgressAnimator = null; + } + }); + transitionProgressAnimator.setDuration(220); + transitionProgressAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + transitionProgressAnimator.start(); + invalidate(); + } + + private void swapStates() { + for (int i = 0; i < 3; i++) { + DrawingState state = currentStates[i]; + currentStates[i] = animatingStates[i]; + animatingStates[i] = state; + } + } + + public void updateAfterTransitionEnd() { + updateAfterTransition = true; + } + 
+ public void setDelegate(Runnable delegate) { + updateDelegate = delegate; + } + + public void setStyle(int currentStyle) { + this.currentStyle = currentStyle; + invalidate(); + } + + private static class DrawingState { + + public static final int ANIMATION_TYPE_NONE = -1; + public static final int ANIMATION_TYPE_IN = 0; + public static final int ANIMATION_TYPE_OUT = 1; + public static final int ANIMATION_TYPE_MOVE = 2; + + private AvatarDrawable avatarDrawable; + private GroupCallUserCell.AvatarWavesDrawable wavesDrawable; + private long lastUpdateTime; + private long lastSpeakTime; + private ImageReceiver imageReceiver; + TLRPC.TL_groupCallParticipant participant; + + private int id; + + private int animationType; + private int moveFromIndex; + } + + Random random = new Random(); + + public AvatarsImageView(Context context) { + super(context); + for (int a = 0; a < 3; a++) { + currentStates[a] = new DrawingState(); + currentStates[a].imageReceiver = new ImageReceiver(this); + currentStates[a].imageReceiver.setRoundRadius(AndroidUtilities.dp(12)); + currentStates[a].avatarDrawable = new AvatarDrawable(); + currentStates[a].avatarDrawable.setTextSize(AndroidUtilities.dp(9)); + + animatingStates[a] = new DrawingState(); + animatingStates[a].imageReceiver = new ImageReceiver(this); + animatingStates[a].imageReceiver.setRoundRadius(AndroidUtilities.dp(12)); + animatingStates[a].avatarDrawable = new AvatarDrawable(); + animatingStates[a].avatarDrawable.setTextSize(AndroidUtilities.dp(9)); + } + setWillNotDraw(false); + xRefP.setColor(0); + xRefP.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); + } + + public void setObject(int index, int account, TLObject object) { + animatingStates[index].id = 0; + animatingStates[index].participant = null; + if (object == null) { + animatingStates[index].imageReceiver.setImageBitmap((Drawable) null); + invalidate(); + return; + } + TLRPC.User currentUser = null; + TLRPC.Chat currentChat = null; + 
animatingStates[index].lastSpeakTime = -1; + if (object instanceof TLRPC.TL_groupCallParticipant) { + TLRPC.TL_groupCallParticipant participant = (TLRPC.TL_groupCallParticipant) object; + animatingStates[index].participant = participant; + currentUser = MessagesController.getInstance(account).getUser(participant.user_id); + animatingStates[index].avatarDrawable.setInfo(currentUser); + if (currentStyle == 4) { + if (participant.user_id == AccountInstance.getInstance(account).getUserConfig().getClientUserId()) { + animatingStates[index].lastSpeakTime = 0; + } else { + animatingStates[index].lastSpeakTime = participant.active_date; + } + } else { + animatingStates[index].lastSpeakTime = participant.active_date; + } + animatingStates[index].id = participant.user_id; + } else if (object instanceof TLRPC.User) { + currentUser = (TLRPC.User) object; + animatingStates[index].avatarDrawable.setInfo(currentUser); + animatingStates[index].id = currentUser.id; + } else { + currentChat = (TLRPC.Chat) object; + animatingStates[index].avatarDrawable.setInfo(currentChat); + animatingStates[index].id = currentChat.id; + } + if (currentUser != null) { + animatingStates[index].imageReceiver.setImage(ImageLocation.getForUser(currentUser, false), "50_50", animatingStates[index].avatarDrawable, null, currentUser, 0); + } else { + animatingStates[index].imageReceiver.setImage(ImageLocation.getForChat(currentChat, false), "50_50", animatingStates[index].avatarDrawable, null, currentChat, 0); + } + animatingStates[index].imageReceiver.setRoundRadius(AndroidUtilities.dp(currentStyle == 4 ? 16 : 12)); + int size = AndroidUtilities.dp(currentStyle == 4 ? 32 : 24); + animatingStates[index].imageReceiver.setImageCoords(0, 0, size, size); + invalidate(); + } + + @SuppressLint("DrawAllocation") + @Override + protected void onDraw(Canvas canvas) { + wasDraw = true; + + int toAdd = AndroidUtilities.dp(currentStyle == 4 ? 
24 : 20); + int drawCount = 0; + for (int i = 0; i < 3; i++) { + if (currentStates[i].id != 0) { + drawCount++; + } + } + int ax = centered ? (getMeasuredWidth() - drawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 8 : 4)) / 2 : AndroidUtilities.dp(10); + boolean isMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); + if (currentStyle == 4) { + paint.setColor(Theme.getColor(Theme.key_inappPlayerBackground)); + } else if (currentStyle != 3) { + paint.setColor(Theme.getColor(isMuted ? Theme.key_returnToCallMutedBackground : Theme.key_returnToCallBackground)); + } + + int animateToDrawCount = 0; + for (int i = 0; i < 3; i++) { + if (animatingStates[i].id != 0) { + animateToDrawCount++; + } + } + boolean useAlphaLayer = currentStyle == 3 || currentStyle == 4 || currentStyle == 5; + if (useAlphaLayer) { + canvas.saveLayerAlpha(0, 0, getMeasuredWidth(), getMeasuredHeight(), 255, Canvas.ALL_SAVE_FLAG); + } + for (int a = 2; a >= 0; a--) { + for (int k = 0; k < 2; k++) { + if (k == 0 && transitionProgress == 1f) { + continue; + } + DrawingState[] states = k == 0 ? animatingStates : currentStates; + + + if (k == 1 && transitionProgress != 1f && states[a].animationType != DrawingState.ANIMATION_TYPE_OUT) { + continue; + } + ImageReceiver imageReceiver = states[a].imageReceiver; + if (!imageReceiver.hasImageSet()) { + continue; + } + if (k == 0) { + int toAx = centered ? (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 8 : 4)) / 2 : AndroidUtilities.dp(10); + imageReceiver.setImageX(toAx + toAdd * a); + } else { + imageReceiver.setImageX(ax + toAdd * a); + } + + imageReceiver.setImageY(AndroidUtilities.dp(currentStyle == 4 ? 
8 : 6)); + + boolean needRestore = false; + float alpha = 1f; + if (transitionProgress != 1f) { + if (states[a].animationType == DrawingState.ANIMATION_TYPE_OUT) { + canvas.save(); + canvas.scale(1f - transitionProgress, 1f - transitionProgress, imageReceiver.getCenterX(), imageReceiver.getCenterY()); + needRestore = true; + alpha = 1f - transitionProgress; + } else if (states[a].animationType == DrawingState.ANIMATION_TYPE_IN) { + canvas.save(); + canvas.scale(transitionProgress, transitionProgress, imageReceiver.getCenterX(), imageReceiver.getCenterY()); + alpha = transitionProgress; + needRestore = true; + } else if (states[a].animationType == DrawingState.ANIMATION_TYPE_MOVE) { + int toAx = centered ? (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 8 : 4)) / 2 : AndroidUtilities.dp(10); + int toX = toAx + toAdd * a; + int fromX = ax + toAdd * states[a].moveFromIndex; + imageReceiver.setImageX((int) (toX * transitionProgress + fromX * (1f - transitionProgress))); + } else if (states[a].animationType == DrawingState.ANIMATION_TYPE_NONE && centered) { + int toAx = (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 
8 : 4)) / 2; + int toX = toAx + toAdd * a; + int fromX = ax + toAdd * a; + imageReceiver.setImageX((int) (toX * transitionProgress + fromX * (1f - transitionProgress))); + } + } + + float avatarScale = 1f; + if (a != states.length - 1) { + if (currentStyle == 3 || currentStyle == 5) { + canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(13), xRefP); + if (states[a].wavesDrawable == null) { + if (currentStyle == 5) { + states[a].wavesDrawable = new GroupCallUserCell.AvatarWavesDrawable(AndroidUtilities.dp(14), AndroidUtilities.dp(16)); + } else { + states[a].wavesDrawable = new GroupCallUserCell.AvatarWavesDrawable(AndroidUtilities.dp(17), AndroidUtilities.dp(21)); + } + } + if (currentStyle == 5) { + states[a].wavesDrawable.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_speakingText), (int) (255 * 0.3f * alpha))); + } + if (states[a].participant.amplitude > 0) { + states[a].wavesDrawable.setShowWaves(true, this); + float amplitude = states[a].participant.amplitude * 15f; + states[a].wavesDrawable.setAmplitude(amplitude); + } else { + states[a].wavesDrawable.setShowWaves(false, this); + } + if (currentStyle == 5 && (SystemClock.uptimeMillis() - states[a].participant.lastSpeakTime) > 500) { + updateDelegate.run(); + } + states[a].wavesDrawable.update(); + if (currentStyle == 5) { + states[a].wavesDrawable.draw(canvas, imageReceiver.getCenterX(), imageReceiver.getCenterY(), this); + invalidate(); + } + avatarScale = states[a].wavesDrawable.getAvatarScale(); + } else if (currentStyle == 4) { + canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(17), xRefP); + if (states[a].wavesDrawable == null) { + states[a].wavesDrawable = new GroupCallUserCell.AvatarWavesDrawable(AndroidUtilities.dp(17), AndroidUtilities.dp(21)); + } + states[a].wavesDrawable.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_listeningText), (int) (255 * 0.3f * 
alpha))); + long currentTime = System.currentTimeMillis(); + if (currentTime - states[a].lastUpdateTime > 100) { + states[a].lastUpdateTime = currentTime; + if (ConnectionsManager.getInstance(UserConfig.selectedAccount).getCurrentTime() - states[a].lastSpeakTime <= 5) { + states[a].wavesDrawable.setShowWaves(true, this); + states[a].wavesDrawable.setAmplitude(random.nextInt() % 100); + } else { + states[a].wavesDrawable.setShowWaves(false, this); + states[a].wavesDrawable.setAmplitude(0); + } + } + states[a].wavesDrawable.update(); + states[a].wavesDrawable.draw(canvas, imageReceiver.getCenterX(), imageReceiver.getCenterY(), this); + avatarScale = states[a].wavesDrawable.getAvatarScale(); + } else { + int paintAlpha = paint.getAlpha(); + if (alpha != 1f) { + paint.setAlpha((int) (paintAlpha * alpha)); + } + canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(currentStyle == 4 ? 17 : 13), paint); + if (alpha != 1f) { + paint.setAlpha(paintAlpha); + } + } + } + imageReceiver.setAlpha(alpha); + if (avatarScale != 1f) { + canvas.save(); + canvas.scale(avatarScale, avatarScale, imageReceiver.getCenterX(), imageReceiver.getCenterY()); + imageReceiver.draw(canvas); + canvas.restore(); + } else { + imageReceiver.draw(canvas); + } + if (needRestore) { + canvas.restore(); + } + } + } + if (useAlphaLayer) { + canvas.restore(); + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + wasDraw = false; + for (int a = 0; a < 3; a++) { + currentStates[a].imageReceiver.onDetachedFromWindow(); + animatingStates[a].imageReceiver.onDetachedFromWindow(); + } + if (currentStyle == 3) { + Theme.getFragmentContextViewWavesDrawable().setAmplitude(0); + } + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + for (int a = 0; a < 3; a++) { + currentStates[a].imageReceiver.onAttachedToWindow(); + animatingStates[a].imageReceiver.onAttachedToWindow(); + } + } + + public void 
setCentered(boolean centered) { + this.centered = centered; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlobDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlobDrawable.java new file mode 100644 index 000000000..bcfe03d40 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlobDrawable.java @@ -0,0 +1,153 @@ +package org.telegram.ui.Components; + +import android.graphics.Canvas; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.Path; + +import java.util.Random; + +public class BlobDrawable { + + public static float MAX_SPEED = 8.2f; + public static float MIN_SPEED = 0.8f; + public static float AMPLITUDE_SPEED = 0.33f; + + public static float SCALE_BIG = 0.807f; + public static float SCALE_SMALL = 0.704f; + + public static float SCALE_BIG_MIN = 0.878f; + public static float SCALE_SMALL_MIN = 0.926f; + + public static float FORM_BIG_MAX = 0.6f; + public static float FORM_SMALL_MAX = 0.6f; + + public static float GLOBAL_SCALE = 1f; + + public static float FORM_BUTTON_MAX = 0f; + + public static float GRADIENT_SPEED_MIN = 0.5f; + public static float GRADIENT_SPEED_MAX = 0.01f; + + public static float LIGHT_GRADIENT_SIZE = 0.5f; + + public float minRadius; + public float maxRadius; + + private Path path = new Path(); + public Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + private float[] radius; + private float[] angle; + private float[] radiusNext; + private float[] angleNext; + private float[] progress; + private float[] speed; + + + private float[] pointStart = new float[4]; + private float[] pointEnd = new float[4]; + + final Random random = new Random(); + + private final float N; + private final float L; + public float cubicBezierK = 1f; + + private final Matrix m = new Matrix(); + + public BlobDrawable(int n) { + N = n; + L = (float) ((4.0 / 3.0) * Math.tan(Math.PI / (2 * N))); + radius = new float[n]; + angle = new float[n]; + + radiusNext = 
new float[n]; + angleNext = new float[n]; + progress = new float[n]; + speed = new float[n]; + + for (int i = 0; i < N; i++) { + generateBlob(radius, angle, i); + generateBlob(radiusNext, angleNext, i); + progress[i] = 0; + } + } + + private void generateBlob(float[] radius, float[] angle, int i) { + float angleDif = 360f / N * 0.05f; + float radDif = maxRadius - minRadius; + radius[i] = minRadius + Math.abs(((random.nextInt() % 100f) / 100f)) * radDif; + angle[i] = 360f / N * i + ((random.nextInt() % 100f) / 100f) * angleDif; + speed[i] = (float) (0.017 + 0.003 * (Math.abs(random.nextInt() % 100f) / 100f)); + } + + public void update(float amplitude, float speedScale) { + for (int i = 0; i < N; i++) { + progress[i] += (speed[i] * MIN_SPEED) + amplitude * speed[i] * MAX_SPEED * speedScale; + if (progress[i] >= 1f) { + progress[i] = 0; + radius[i] = radiusNext[i]; + angle[i] = angleNext[i]; + generateBlob(radiusNext, angleNext, i); + } + } + } + + public void draw(float cX, float cY, Canvas canvas, Paint paint) { + path.reset(); + + for (int i = 0; i < N; i++) { + float progress = this.progress[i]; + int nextIndex = i + 1 < N ? 
i + 1 : 0; + float progressNext = this.progress[nextIndex]; + float r1 = radius[i] * (1f - progress) + radiusNext[i] * progress; + float r2 = radius[nextIndex] * (1f - progressNext) + radiusNext[nextIndex] * progressNext; + float angle1 = angle[i] * (1f - progress) + angleNext[i] * progress; + float angle2 = angle[nextIndex] * (1f - progressNext) + angleNext[nextIndex] * progressNext; + + float l = L * (Math.min(r1, r2) + (Math.max(r1, r2) - Math.min(r1, r2)) / 2f) * cubicBezierK; + m.reset(); + m.setRotate(angle1, cX, cY); + + pointStart[0] = cX; + pointStart[1] = cY - r1; + pointStart[2] = cX + l; + pointStart[3] = cY - r1; + + m.mapPoints(pointStart); + + pointEnd[0] = cX; + pointEnd[1] = cY - r2; + pointEnd[2] = cX - l; + pointEnd[3] = cY - r2; + + m.reset(); + m.setRotate(angle2, cX, cY); + + m.mapPoints(pointEnd); + + if (i == 0) { + path.moveTo(pointStart[0], pointStart[1]); + } + + path.cubicTo( + pointStart[2], pointStart[3], + pointEnd[2], pointEnd[3], + pointEnd[0], pointEnd[1] + ); + } + + canvas.save(); + canvas.drawPath(path, paint); + canvas.restore(); + } + + public void generateBlob() { + for (int i = 0; i < N; i++) { + generateBlob(radius, angle, i); + generateBlob(radiusNext, angleNext, i); + progress[i] = 0; + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java index 055361dbf..576eaa31c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java @@ -12,11 +12,13 @@ import android.graphics.Rect; import android.graphics.Typeface; import android.graphics.drawable.InsetDrawable; import android.os.Build; +import android.text.TextUtils; import android.util.TypedValue; import android.view.GestureDetector; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; +import android.view.ViewGroup; import 
android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; @@ -24,10 +26,10 @@ import android.widget.TextView; import androidx.annotation.CallSuper; import androidx.annotation.ColorInt; +import androidx.annotation.IntDef; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.core.util.Consumer; -import androidx.core.util.Preconditions; import androidx.core.view.ViewCompat; import androidx.dynamicanimation.animation.DynamicAnimation; import androidx.dynamicanimation.animation.SpringAnimation; @@ -39,11 +41,16 @@ import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.ChatActivity; +import org.telegram.ui.DialogsActivity; +import java.lang.annotation.Retention; import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import static java.lang.annotation.RetentionPolicy.SOURCE; + public final class Bulletin { public static final int DURATION_SHORT = 1500; @@ -53,7 +60,13 @@ public final class Bulletin { return new Bulletin(containerLayout, contentLayout, duration); } + @SuppressLint("RtlHardcoded") public static Bulletin make(@NonNull BaseFragment fragment, @NonNull Layout contentLayout, int duration) { + if (fragment instanceof ChatActivity) { + contentLayout.setWideScreenParams(ViewGroup.LayoutParams.WRAP_CONTENT, Gravity.RIGHT); + } else if (fragment instanceof DialogsActivity) { + contentLayout.setWideScreenParams(ViewGroup.LayoutParams.MATCH_PARENT, Gravity.NO_GRAVITY); + } return new Bulletin(fragment.getLayoutContainer(), contentLayout, duration); } @@ -105,6 +118,11 @@ public final class Bulletin { containerLayout.getParent().requestDisallowInterceptTouchEvent(pressed); } } + + @Override + protected void onHide() { + hide(); + } }; this.containerLayout = containerLayout; this.duration = duration; @@ -114,7 +132,9 @@ public final class 
Bulletin { if (!showing) { showing = true; - Preconditions.checkState(layout.getParent() == parentLayout, "Layout has incorrect parent"); + if (layout.getParent() != parentLayout) { + throw new IllegalStateException("Layout has incorrect parent"); + } if (visibleBulletin != null) { visibleBulletin.hide(); @@ -130,6 +150,9 @@ public final class Bulletin { layout.onShow(); currentDelegate = delegates.get(containerLayout); currentBottomOffset = currentDelegate != null ? currentDelegate.getBottomOffset() : 0; + if (currentDelegate != null) { + currentDelegate.onShow(Bulletin.this); + } if (isTransitionsEnabled()) { if (currentBottomOffset != 0) { ViewCompat.setClipBounds(parentLayout, new Rect(left, top - currentBottomOffset, right, bottom - currentBottomOffset)); @@ -138,6 +161,7 @@ public final class Bulletin { } ensureLayoutTransitionCreated(); layoutTransition.animateEnter(layout, layout::onEnterTransitionStart, () -> { + ViewCompat.setClipBounds(parentLayout, null); layout.onEnterTransitionEnd(); setCanHide(true); }, offset -> { @@ -196,7 +220,7 @@ public final class Bulletin { } public void hide(boolean animated) { - if (showing && canHide) { + if (showing) { showing = false; if (visibleBulletin == this) { @@ -209,11 +233,18 @@ public final class Bulletin { if (ViewCompat.isLaidOut(layout)) { layout.removeCallbacks(hideRunnable); if (animated) { + if (bottomOffset != 0) { + ViewCompat.setClipBounds(parentLayout, new Rect(layout.getLeft(), layout.getTop() - bottomOffset, layout.getRight(), layout.getBottom() - bottomOffset)); + } else { + ViewCompat.setClipBounds(parentLayout, null); + } ensureLayoutTransitionCreated(); layoutTransition.animateExit(layout, layout::onExitTransitionStart, () -> { if (currentDelegate != null) { currentDelegate.onOffsetChange(0); + currentDelegate.onHide(this); } + ViewCompat.setClipBounds(parentLayout, null); layout.onExitTransitionEnd(); layout.onHide(); containerLayout.removeView(parentLayout); @@ -229,6 +260,7 @@ public final 
class Bulletin { if (currentDelegate != null) { currentDelegate.onOffsetChange(0); + currentDelegate.onHide(this); } layout.onExitTransitionStart(); layout.onExitTransitionEnd(); @@ -252,6 +284,17 @@ public final class Bulletin { return MessagesController.getGlobalMainSettings().getBoolean("view_animations", true) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2; } + @Retention(SOURCE) + @IntDef(value = {ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT}) + private @interface WidthDef { + } + + @Retention(SOURCE) + @SuppressLint("RtlHardcoded") + @IntDef(value = {Gravity.LEFT, Gravity.RIGHT, Gravity.CENTER_HORIZONTAL, Gravity.NO_GRAVITY}) + private @interface GravityDef { + } + private static abstract class ParentLayout extends FrameLayout { private final Layout layout; @@ -260,7 +303,9 @@ public final class Bulletin { private boolean pressed; private float translationX; - private SpringAnimation springAnimation; + private boolean hideAnimationRunning; + private boolean needLeftAlphaAnimation; + private boolean needRightAlphaAnimation; public ParentLayout(Layout layout) { super(layout.getContext()); @@ -269,30 +314,64 @@ public final class Bulletin { @Override public boolean onDown(MotionEvent e) { - return springAnimation == null; + if (!hideAnimationRunning) { + needLeftAlphaAnimation = layout.isNeedSwipeAlphaAnimation(true); + needRightAlphaAnimation = layout.isNeedSwipeAlphaAnimation(false); + return true; + } + return false; } @Override public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) { layout.setTranslationX(translationX -= distanceX); + if (translationX == 0 || (translationX < 0f && needLeftAlphaAnimation) || (translationX > 0f && needRightAlphaAnimation)) { + layout.setAlpha(1f - Math.abs(translationX) / layout.getWidth()); + } return true; } @Override public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { if (Math.abs(velocityX) > 2000f) { - 
springAnimation = new SpringAnimation(layout, DynamicAnimation.TRANSLATION_X, Math.signum(velocityX) * layout.getWidth() * 2f); + final boolean needAlphaAnimation = (velocityX < 0f && needLeftAlphaAnimation) || (velocityX > 0f && needRightAlphaAnimation); + + final SpringAnimation springAnimation = new SpringAnimation(layout, DynamicAnimation.TRANSLATION_X, Math.signum(velocityX) * layout.getWidth() * 2f); + if (!needAlphaAnimation) { + springAnimation.addEndListener((animation, canceled, value, velocity) -> onHide()); + springAnimation.addUpdateListener(((animation, value, velocity) -> { + if (Math.abs(value) > layout.getWidth()) { + animation.cancel(); + } + })); + } springAnimation.getSpring().setDampingRatio(SpringForce.DAMPING_RATIO_NO_BOUNCY); springAnimation.getSpring().setStiffness(100f); springAnimation.setStartVelocity(velocityX); springAnimation.start(); + + if (needAlphaAnimation) { + final SpringAnimation springAnimation2 = new SpringAnimation(layout, DynamicAnimation.ALPHA, 0f); + springAnimation2.addEndListener((animation, canceled, value, velocity) -> onHide()); + springAnimation2.addUpdateListener(((animation, value, velocity) -> { + if (value <= 0f) { + animation.cancel(); + } + })); + springAnimation.getSpring().setDampingRatio(SpringForce.DAMPING_RATIO_NO_BOUNCY); + springAnimation.getSpring().setStiffness(10f); + springAnimation.setStartVelocity(velocityX); + springAnimation2.start(); + } + + hideAnimationRunning = true; return true; } return false; } }); gestureDetector.setIsLongpressEnabled(false); - addView(layout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM)); + addView(layout); } @Override @@ -301,18 +380,24 @@ public final class Bulletin { gestureDetector.onTouchEvent(event); final int actionMasked = event.getActionMasked(); if (actionMasked == MotionEvent.ACTION_DOWN) { - if (!pressed && springAnimation == null) { + if (!pressed && !hideAnimationRunning) { layout.animate().cancel(); 
translationX = layout.getTranslationX(); onPressedStateChanged(pressed = true); } } else if (actionMasked == MotionEvent.ACTION_UP || actionMasked == MotionEvent.ACTION_CANCEL) { if (pressed) { - if (springAnimation == null) { - if (Math.abs(translationX) > layout.getWidth() / 2) { - layout.animate().translationX(Math.signum(translationX) * layout.getWidth()).setDuration(200).setInterpolator(AndroidUtilities.accelerateInterpolator).start(); + if (!hideAnimationRunning) { + if (Math.abs(translationX) > layout.getWidth() / 3f) { + final float tx = Math.signum(translationX) * layout.getWidth(); + final boolean needAlphaAnimation = (translationX < 0f && needLeftAlphaAnimation) || (translationX > 0f && needRightAlphaAnimation); + layout.animate().translationX(tx).alpha(needAlphaAnimation ? 0f : 1f).setDuration(200).setInterpolator(AndroidUtilities.accelerateInterpolator).withEndAction(() -> { + if (layout.getTranslationX() == tx) { + onHide(); + } + }).start(); } else { - layout.animate().translationX(0).setDuration(200).start(); + layout.animate().translationX(0).alpha(1f).setDuration(200).start(); } } onPressedStateChanged(pressed = false); @@ -329,6 +414,7 @@ public final class Bulletin { } protected abstract void onPressedStateChanged(boolean pressed); + protected abstract void onHide(); } //region Offset Providers @@ -362,6 +448,12 @@ public final class Bulletin { default void onOffsetChange(float offset) { } + + default void onShow(Bulletin bulletin) { + } + + default void onHide(Bulletin bulletin) { + } } //endregion @@ -372,6 +464,11 @@ public final class Bulletin { protected Bulletin bulletin; + @WidthDef + private int wideScreenWidth = ViewGroup.LayoutParams.WRAP_CONTENT; + @GravityDef + private int wideScreenGravity = Gravity.CENTER_HORIZONTAL; + public Layout(@NonNull Context context) { this(context, Theme.getColor(Theme.key_undo_background)); } @@ -390,10 +487,45 @@ public final class Bulletin { } private void updateSize() { - final boolean isPortrait = 
AndroidUtilities.displaySize.x < AndroidUtilities.displaySize.y; - final boolean matchParentWidth = !AndroidUtilities.isTablet() && isPortrait; - setMinimumWidth(matchParentWidth ? 0 : AndroidUtilities.dp(344)); - setLayoutParams(LayoutHelper.createFrame(matchParentWidth ? LayoutHelper.MATCH_PARENT : LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL)); + final boolean isWideScreen = isWideScreen(); + setLayoutParams(LayoutHelper.createFrame(isWideScreen ? wideScreenWidth : LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, isWideScreen ? Gravity.BOTTOM | wideScreenGravity : Gravity.BOTTOM)); + } + + private boolean isWideScreen() { + return AndroidUtilities.isTablet() || AndroidUtilities.displaySize.x >= AndroidUtilities.displaySize.y; + } + + private void setWideScreenParams(@WidthDef int width, @GravityDef int gravity) { + boolean changed = false; + + if (wideScreenWidth != width) { + wideScreenWidth = width; + changed = true; + } + + if (wideScreenGravity != gravity) { + wideScreenGravity = gravity; + changed = true; + } + + if (isWideScreen() && changed) { + updateSize(); + } + } + + @SuppressLint("RtlHardcoded") + private boolean isNeedSwipeAlphaAnimation(boolean swipeLeft) { + if (!isWideScreen() || wideScreenWidth == ViewGroup.LayoutParams.MATCH_PARENT) { + return false; + } + if (wideScreenGravity == Gravity.CENTER_HORIZONTAL) { + return true; + } + if (swipeLeft) { + return wideScreenGravity == Gravity.RIGHT; + } else { + return wideScreenGravity != Gravity.RIGHT; + } } public Bulletin getBulletin() { @@ -569,7 +701,9 @@ public final class Bulletin { public void animateEnter(@NonNull Layout layout, @Nullable Runnable startAction, @Nullable Runnable endAction, @Nullable Consumer onUpdate, int bottomOffset) { final int translationY = layout.getHeight() - bottomOffset; layout.setTranslationY(translationY); - onUpdate.accept((float) translationY); + if (onUpdate != null) { + onUpdate.accept((float) 
translationY); + } final SpringAnimation springAnimation = new SpringAnimation(layout, SpringAnimation.TRANSLATION_Y, -bottomOffset); springAnimation.getSpring().setDampingRatio(DAMPING_RATIO); springAnimation.getSpring().setStiffness(STIFFNESS); @@ -741,13 +875,14 @@ public final class Bulletin { this.textColor = textColor; imageView = new RLottieImageView(context); - addView(imageView, LayoutHelper.createFrameRelatively(28, 28, Gravity.START | Gravity.CENTER_VERTICAL, 14, 10, 14, 10)); + imageView.setScaleType(ImageView.ScaleType.CENTER); + addView(imageView, LayoutHelper.createFrameRelatively(56, 48, Gravity.START | Gravity.CENTER_VERTICAL)); final int undoInfoColor = Theme.getColor(Theme.key_undo_infoColor); final LinearLayout linearLayout = new LinearLayout(context); linearLayout.setOrientation(LinearLayout.VERTICAL); - addView(linearLayout, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 54, 8, 12, 8)); + addView(linearLayout, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 56, 8, 12, 8)); titleTextView = new TextView(context); titleTextView.setSingleLine(); @@ -770,9 +905,13 @@ public final class Bulletin { } public void setAnimation(int resId, String... layers) { - imageView.setAnimation(resId, 28, 28); - for (int i = 0; i < layers.length; i++) { - imageView.setLayerColor(layers[i] + ".**", textColor); + setAnimation(resId, 32, 32, layers); + } + + public void setAnimation(int resId, int w, int h, String... 
layers) { + imageView.setAnimation(resId, w, h); + for (String layer : layers) { + imageView.setLayerColor(layer + ".**", textColor); } } } @@ -793,13 +932,15 @@ public final class Bulletin { this.textColor = textColor; imageView = new RLottieImageView(context); - addView(imageView, LayoutHelper.createFrameRelatively(28, 28, Gravity.START | Gravity.CENTER_VERTICAL, 14, 10, 14, 10)); + imageView.setScaleType(ImageView.ScaleType.CENTER); + addView(imageView, LayoutHelper.createFrameRelatively(56, 48, Gravity.START | Gravity.CENTER_VERTICAL)); textView = new TextView(context); textView.setSingleLine(); textView.setTextColor(textColor); textView.setTypeface(Typeface.SANS_SERIF); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + textView.setEllipsize(TextUtils.TruncateAt.END); addView(textView, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 56, 0, 16, 0)); } @@ -810,11 +951,19 @@ public final class Bulletin { } public void setAnimation(int resId, String... layers) { - imageView.setAnimation(resId, 28, 28); - for (int i = 0; i < layers.length; i++) { - imageView.setLayerColor(layers[i] + ".**", textColor); + setAnimation(resId, 32, 32, layers); + } + + public void setAnimation(int resId, int w, int h, String... 
layers) { + imageView.setAnimation(resId, w, h); + for (String layer : layers) { + imageView.setLayerColor(layer + ".**", textColor); } } + + public void setIconPaddingBottom(int paddingBottom) { + imageView.setLayoutParams(LayoutHelper.createFrameRelatively(56, 48 - paddingBottom, Gravity.START | Gravity.CENTER_VERTICAL, 0, 0, 0, paddingBottom)); + } } //endregion @@ -859,6 +1008,7 @@ public final class Bulletin { } } + @SuppressLint("ViewConstructor") public static final class UndoButton extends Button { private Runnable undoAction; @@ -875,14 +1025,16 @@ public final class Bulletin { if (text) { TextView undoTextView = new TextView(context); undoTextView.setOnClickListener(v -> undo()); - undoTextView.setBackground(Theme.createSelectorDrawable((undoCancelColor & 0x00ffffff) | 0x19000000, 7)); + final int leftInset = LocaleController.isRTL ? AndroidUtilities.dp(16) : 0; + final int rightInset = LocaleController.isRTL ? 0 : AndroidUtilities.dp(16); + undoTextView.setBackground(Theme.createCircleSelectorDrawable((undoCancelColor & 0x00ffffff) | 0x19000000, leftInset, rightInset)); undoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); undoTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); undoTextView.setTextColor(undoCancelColor); undoTextView.setText(LocaleController.getString("Undo", R.string.Undo)); undoTextView.setGravity(Gravity.CENTER_VERTICAL); ViewHelper.setPaddingRelative(undoTextView, 16, 0, 16, 0); - addView(undoTextView, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, 48, Gravity.CENTER_VERTICAL, 0, 0, 0, 0)); + addView(undoTextView, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, 48, Gravity.CENTER_VERTICAL, 8, 0, 0, 0)); } else { final ImageView undoImageView = new ImageView(getContext()); undoImageView.setOnClickListener(v -> undo()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java index 05d34a448..14c1ea04c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java @@ -3,7 +3,7 @@ package org.telegram.ui.Components; import android.content.Context; import android.widget.FrameLayout; -import androidx.core.util.Preconditions; +import androidx.annotation.CheckResult; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; @@ -14,12 +14,10 @@ import org.telegram.ui.ActionBar.BaseFragment; public final class BulletinFactory { public static BulletinFactory of(BaseFragment fragment) { - Preconditions.checkNotNull(fragment); return new BulletinFactory(fragment); } public static BulletinFactory of(FrameLayout containerLayout) { - Preconditions.checkNotNull(containerLayout); return new BulletinFactory(containerLayout); } @@ -28,34 +26,41 @@ public final class BulletinFactory { } public enum FileType { - PHOTO("PhotoSavedHint", R.string.PhotoSavedHint), - PHOTO_TO_DOWNLOADS("PhotoSavedToDownloadsHint", R.string.PhotoSavedToDownloadsHint), - PHOTOS("PhotosSavedHint"), - VIDEO("VideoSavedHint", R.string.VideoSavedHint), - VIDEO_TO_DOWNLOADS("VideoSavedToDownloadsHint", R.string.VideoSavedToDownloadsHint), - VIDEOS("VideosSavedHint"), + PHOTO("PhotoSavedHint", R.string.PhotoSavedHint, Icon.SAVED_TO_GALLERY), + PHOTOS("PhotosSavedHint", Icon.SAVED_TO_GALLERY), - AUDIO("AudioSavedHint", R.string.AudioSavedHint), - AUDIOS("AudiosSavedHint"), - GIF("GifSavedToDownloadsHint", R.string.GifSavedToDownloadsHint), - MEDIA("MediaSavedHint"), + VIDEO("VideoSavedHint", R.string.VideoSavedHint, Icon.SAVED_TO_GALLERY), + VIDEOS("VideosSavedHint", Icon.SAVED_TO_GALLERY), - UNKNOWN("FileSavedHint", R.string.FileSavedHint), - UNKNOWNS("FilesSavedHint"); + MEDIA("MediaSavedHint", Icon.SAVED_TO_GALLERY), + + 
PHOTO_TO_DOWNLOADS("PhotoSavedToDownloadsHint", Icon.SAVED_TO_DOWNLOADS), + VIDEO_TO_DOWNLOADS("VideoSavedToDownloadsHint", R.string.VideoSavedToDownloadsHint, Icon.SAVED_TO_DOWNLOADS), + + GIF("GifSavedToDownloadsHint", Icon.SAVED_TO_DOWNLOADS), + + AUDIO("AudioSavedHint", Icon.SAVED_TO_MUSIC), + AUDIOS("AudiosSavedHint", Icon.SAVED_TO_MUSIC), + + UNKNOWN("FileSavedHint", Icon.SAVED_TO_DOWNLOADS), + UNKNOWNS("FilesSavedHint", Icon.SAVED_TO_DOWNLOADS); private final String localeKey; private final int localeRes; private final boolean plural; + private final Icon icon; - FileType(String localeKey, int localeRes) { + FileType(String localeKey, int localeRes, Icon icon) { this.localeKey = localeKey; this.localeRes = localeRes; + this.icon = icon; this.plural = false; } - FileType(String localeKey) { + FileType(String localeKey, Icon icon) { this.localeKey = localeKey; + this.icon = icon; this.localeRes = 0; this.plural = true; } @@ -71,6 +76,23 @@ public final class BulletinFactory { return LocaleController.getString(localeKey, localeRes); } } + + private enum Icon { + + SAVED_TO_DOWNLOADS(R.raw.ic_download, 2, "Box", "Arrow"), + SAVED_TO_GALLERY(R.raw.ic_save_to_gallery, 0, "Box", "Arrow", "Mask", "Arrow 2", "Splash"), + SAVED_TO_MUSIC(R.raw.ic_save_to_music, 2, "Box", "Arrow"); + + private final int resId; + private final String[] layers; + private final int paddingBottom; + + Icon(int resId, int paddingBottom, String... 
layers) { + this.resId = resId; + this.paddingBottom = paddingBottom; + this.layers = layers; + } + } } private final BaseFragment fragment; @@ -86,14 +108,17 @@ public final class BulletinFactory { this.fragment = null; } + @CheckResult public Bulletin createDownloadBulletin(FileType fileType) { return createDownloadBulletin(fileType, 1); } + @CheckResult public Bulletin createDownloadBulletin(FileType fileType, int filesAmount) { return createDownloadBulletin(fileType, filesAmount, 0, 0); } + @CheckResult public Bulletin createDownloadBulletin(FileType fileType, int filesAmount, int backgroundColor, int textColor) { final Bulletin.LottieLayout layout; if (backgroundColor != 0 && textColor != 0) { @@ -101,11 +126,35 @@ public final class BulletinFactory { } else { layout = new Bulletin.LottieLayout(getContext()); } - layout.setAnimation(R.raw.ic_download, "Box", "Arrow", "Mask", "Arrow 2", "Splash"); + layout.setAnimation(fileType.icon.resId, fileType.icon.layers); layout.textView.setText(fileType.getText(filesAmount)); + if (fileType.icon.paddingBottom != 0) { + layout.setIconPaddingBottom(fileType.icon.paddingBottom); + } return create(layout, Bulletin.DURATION_SHORT); } + @CheckResult + public Bulletin createCopyLinkBulletin() { + return createCopyLinkBulletin(false); + } + + @CheckResult + public Bulletin createCopyLinkBulletin(boolean isPrivate) { + if (isPrivate) { + final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(getContext()); + layout.setAnimation(R.raw.voip_invite, 36, 36, "Wibe", "Circle"); + layout.titleTextView.setText(LocaleController.getString("LinkCopied", R.string.LinkCopied)); + layout.subtitleTextView.setText(LocaleController.getString("LinkCopiedPrivateInfo", R.string.LinkCopiedPrivateInfo)); + return create(layout, Bulletin.DURATION_LONG); + } else { + final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(getContext()); + layout.setAnimation(R.raw.voip_invite, 36, 36, "Wibe", "Circle"); + 
layout.textView.setText(LocaleController.getString("LinkCopied", R.string.LinkCopied)); + return create(layout, Bulletin.DURATION_SHORT); + } + } + private Bulletin create(Bulletin.Layout layout, int duration) { if (fragment != null) { return Bulletin.make(fragment, layout, duration); @@ -119,8 +168,9 @@ public final class BulletinFactory { } //region Static Factory + + @CheckResult public static Bulletin createMuteBulletin(BaseFragment fragment, int setting) { - Preconditions.checkArgument(canShowBulletin(fragment)); final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(fragment.getParentActivity()); final String text; @@ -161,30 +211,31 @@ public final class BulletinFactory { return Bulletin.make(fragment, layout, Bulletin.DURATION_SHORT); } + @CheckResult public static Bulletin createMuteBulletin(BaseFragment fragment, boolean muted) { return createMuteBulletin(fragment, muted ? NotificationsController.SETTING_MUTE_FOREVER : NotificationsController.SETTING_MUTE_UNMUTE); } + @CheckResult public static Bulletin createDeleteMessagesBulletin(BaseFragment fragment, int count) { - Preconditions.checkArgument(canShowBulletin(fragment)); final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(fragment.getParentActivity()); layout.setAnimation(R.raw.ic_delete, "Envelope", "Cover", "Bucket"); layout.textView.setText(LocaleController.formatPluralString("MessagesDeletedHint", count)); return Bulletin.make(fragment, layout, Bulletin.DURATION_SHORT); } + @CheckResult public static Bulletin createUnpinAllMessagesBulletin(BaseFragment fragment, int count, boolean hide, Runnable undoAction, Runnable delayedAction) { - Preconditions.checkArgument(canShowBulletin(fragment)); Bulletin.ButtonLayout buttonLayout; if (hide) { final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(fragment.getParentActivity()); - layout.setAnimation(R.raw.ic_unpin, "Pin", "Line"); + layout.setAnimation(R.raw.ic_unpin, 28, 28, "Pin", "Line"); 
layout.titleTextView.setText(LocaleController.getString("PinnedMessagesHidden", R.string.PinnedMessagesHidden)); layout.subtitleTextView.setText(LocaleController.getString("PinnedMessagesHiddenInfo", R.string.PinnedMessagesHiddenInfo)); buttonLayout = layout; } else { final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(fragment.getParentActivity()); - layout.setAnimation(R.raw.ic_unpin, "Pin", "Line"); + layout.setAnimation(R.raw.ic_unpin, 28, 28, "Pin", "Line"); layout.textView.setText(LocaleController.formatPluralString("MessagesUnpinned", count)); buttonLayout = layout; } @@ -192,34 +243,66 @@ public final class BulletinFactory { return Bulletin.make(fragment, buttonLayout, 5000); } + @CheckResult public static Bulletin createSaveToGalleryBulletin(BaseFragment fragment, boolean video) { return of(fragment).createDownloadBulletin(video ? FileType.VIDEO : FileType.PHOTO); } + @CheckResult public static Bulletin createSaveToGalleryBulletin(FrameLayout containerLayout, boolean video, int backgroundColor, int textColor) { return of(containerLayout).createDownloadBulletin(video ? 
FileType.VIDEO : FileType.PHOTO, 1, backgroundColor, textColor); } + @CheckResult public static Bulletin createPromoteToAdminBulletin(BaseFragment fragment, String userFirstName) { - Preconditions.checkArgument(canShowBulletin(fragment)); final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(fragment.getParentActivity()); layout.setAnimation(R.raw.ic_admin, "Shield"); layout.textView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("UserSetAsAdminHint", R.string.UserSetAsAdminHint, userFirstName))); return Bulletin.make(fragment, layout, Bulletin.DURATION_SHORT); } + @CheckResult + public static Bulletin createRemoveFromChatBulletin(BaseFragment fragment, String userFirstName, String chatName) { + final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(fragment.getParentActivity()); + layout.setAnimation(R.raw.ic_ban, "Hand"); + layout.textView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("UserRemovedFromChatHint", R.string.UserRemovedFromChatHint, userFirstName, chatName))); + return Bulletin.make(fragment, layout, Bulletin.DURATION_SHORT); + } + + @CheckResult + public static Bulletin createBanBulletin(BaseFragment fragment, boolean banned) { + final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(fragment.getParentActivity()); + final String text; + if (banned) { + layout.setAnimation(R.raw.ic_ban, "Hand"); + text = LocaleController.getString("UserBlocked", R.string.UserBlocked); + } else { + layout.setAnimation(R.raw.ic_unban, "Main", "Finger 1", "Finger 2", "Finger 3", "Finger 4"); + text = LocaleController.getString("UserUnblocked", R.string.UserUnblocked); + } + layout.textView.setText(AndroidUtilities.replaceTags(text)); + return Bulletin.make(fragment, layout, Bulletin.DURATION_SHORT); + } + + @CheckResult + public static Bulletin createCopyLinkBulletin(BaseFragment fragment) { + return of(fragment).createCopyLinkBulletin(); + } + + @CheckResult public static Bulletin 
createPinMessageBulletin(BaseFragment fragment) { return createPinMessageBulletin(fragment, true, null, null); } + @CheckResult public static Bulletin createUnpinMessageBulletin(BaseFragment fragment, Runnable undoAction, Runnable delayedAction) { return createPinMessageBulletin(fragment, false, undoAction, delayedAction); } + @CheckResult private static Bulletin createPinMessageBulletin(BaseFragment fragment, boolean pinned, Runnable undoAction, Runnable delayedAction) { - Preconditions.checkArgument(canShowBulletin(fragment)); final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(fragment.getParentActivity()); - layout.setAnimation(pinned ? R.raw.ic_pin : R.raw.ic_unpin, "Pin", "Line"); + layout.setAnimation(pinned ? R.raw.ic_pin : R.raw.ic_unpin, 28, 28, "Pin", "Line"); layout.textView.setText(LocaleController.getString(pinned ? "MessagePinnedHint" : "MessageUnpinnedHint", pinned ? R.string.MessagePinnedHint : R.string.MessageUnpinnedHint)); if (!pinned) { layout.setButton(new Bulletin.UndoButton(fragment.getParentActivity(), true).setUndoAction(undoAction).setDelayedAction(delayedAction)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java index 7b9506118..bc88335a6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java @@ -38,8 +38,8 @@ import android.os.Build; import android.os.Bundle; import android.os.PowerManager; import android.os.SystemClock; +import android.os.Vibrator; import android.text.Editable; -import android.text.InputFilter; import android.text.Layout; import android.text.Spannable; import android.text.SpannableStringBuilder; @@ -65,8 +65,6 @@ import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityManager; import 
android.view.accessibility.AccessibilityNodeInfo; import android.view.animation.DecelerateInterpolator; -import android.view.animation.Interpolator; -import android.view.animation.LinearInterpolator; import android.view.inputmethod.EditorInfo; import android.view.inputmethod.InputConnection; import android.widget.FrameLayout; @@ -131,17 +129,6 @@ import java.util.Locale; public class ChatActivityEnterView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, SizeNotifierFrameLayout.SizeNotifierFrameLayoutDelegate, StickersAlert.StickersAlertDelegate { - private boolean clearBotButtonsOnKeyboardOpen; - private boolean expandStickersWithKeyboard; - - public int getHeightWithTopView() { - int h = getMeasuredHeight(); - if (topView != null && topView.getVisibility() == View.VISIBLE) { - h -= (1f - topViewEnterProgress) * topView.getLayoutParams().height; - } - return h; - } - public interface ChatActivityEnterViewDelegate { void onMessageSend(CharSequence message, boolean notify, int scheduleDate); @@ -228,6 +215,9 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe private Runnable showTopViewRunnable; private Runnable setTextFieldRunnable; public boolean preventInput; + private NumberTextView captionLimitView; + private int currentLimit = -1; + private int codePointCount; private class SeekBarWaveformView extends View { @@ -360,13 +350,12 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe private Runnable focusRunnable; protected float topViewEnterProgress; protected int animatedTop; - private ValueAnimator currentTopViewAnimation; + public ValueAnimator currentTopViewAnimation; private ReplaceableIconDrawable botButtonDrawablel; private boolean destroyed; private MessageObject editingMessageObject; - private int editingMessageReqId; private boolean editingCaption; private TLRPC.ChatFull info; @@ -447,6 +436,14 @@ public class ChatActivityEnterView extends FrameLayout implements 
NotificationCe private boolean wasSendTyping; protected boolean shouldAnimateEditTextWithBounds; private int animatingContentType = -1; + + private boolean clearBotButtonsOnKeyboardOpen; + private boolean expandStickersWithKeyboard; + private float doneButtonEnabledProgress = 1f; + private final Drawable doneCheckDrawable; + boolean doneButtonEnabled = true; + private ValueAnimator doneButtonColorAnimator; + private Runnable openKeyboardRunnable = new Runnable() { @Override public void run() { @@ -714,8 +711,6 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); - private Paint paintRecordWaveBig = new Paint(); - private Paint paintRecordWaveTin = new Paint(); private Drawable micOutline; private Drawable cameraOutline; private Drawable micDrawable; @@ -727,34 +722,8 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe private Drawable lockShadowDrawable; - private final static float MAX_AMPLITUDE = 1800f; - private class RecordCircle extends View { - private final static float ROTATION_SPEED = 0.36f * 0.1f; - private final static float SINE_WAVE_SPEED = 0.81f; - private final static float SMALL_WAVE_RADIUS = 0.55f; - private final static float SMALL_WAVE_SCALE = 0.40f; - private final static float SMALL_WAVE_SCALE_SPEED = 0.60f; - private final static float FLING_DISTANCE = 0.50f; - private final static float WAVE_ANGLE = 0.03f; - private final static float RANDOM_RADIUS_SIZE = 0.3f; - private final static float ANIMATION_SPEED_WAVE_HUGE = 0.65f; - private final static float ANIMATION_SPEED_WAVE_SMALL = 0.45f; - private final static float ANIMATION_SPEED_CIRCLE = 0.45f; - private final static float CIRCLE_ALPHA_1 = 0.30f; - private final static float CIRCLE_ALPHA_2 = 0.15f; - - private final static float IDLE_ROTATION_SPEED = 0.2f; - private final static float IDLE_WAVE_ANGLE = 0.5f; - private final static float IDLE_SCALE_SPEED = 0.3f; - private 
final static float IDLE_RADIUS = 0.56f; - private final static float IDLE_ROTATE_DIF = 0.1f * IDLE_ROTATION_SPEED; - - float animationSpeed = 1f - ANIMATION_SPEED_WAVE_HUGE; - float animationSpeedTiny = 1f - ANIMATION_SPEED_WAVE_SMALL; - float animationSpeedCircle = 1f - ANIMATION_SPEED_CIRCLE; - private float scale; private float amplitude; private float animateToAmplitude; @@ -797,7 +766,6 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe float idleProgress; boolean incIdle; - private Interpolator linearInterpolator = new LinearInterpolator(); private VirtualViewHelper virtualViewHelper; private int paintAlpha; @@ -835,16 +803,18 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe virtualViewHelper = new VirtualViewHelper(this); ViewCompat.setAccessibilityDelegate(this, virtualViewHelper); - bigWaveDrawable = new WaveDrawable(12, 0.03f, AndroidUtilities.dp(40), true); + tinyWaveDrawable = new WaveDrawable(this, 12, 0.03f, AndroidUtilities.dp(35), null); + bigWaveDrawable = new WaveDrawable(this, 12, 0.03f, AndroidUtilities.dp(40), tinyWaveDrawable); + tinyWaveDrawable.setCircleRadius(circleRadius); + bigWaveDrawable.setCircleRadius(circleRadius); bigWaveDrawable.rotation = 30f; - tinyWaveDrawable = new WaveDrawable(12, 0.03f, AndroidUtilities.dp(35), false); - bigWaveDrawable.amplitudeWaveDif = 0.02f * SINE_WAVE_SPEED; - tinyWaveDrawable.amplitudeWaveDif = 0.026f * SINE_WAVE_SPEED; - tinyWaveDrawable.amplitudeRadius = AndroidUtilities.dp(20) + AndroidUtilities.dp(20) * SMALL_WAVE_RADIUS; - tinyWaveDrawable.maxScale = 0.3f * SMALL_WAVE_SCALE; - tinyWaveDrawable.scaleSpeed = 0.001f * SMALL_WAVE_SCALE_SPEED; - tinyWaveDrawable.fling = FLING_DISTANCE; + bigWaveDrawable.amplitudeWaveDif = 0.02f * WaveDrawable.SINE_WAVE_SPEED; + tinyWaveDrawable.amplitudeWaveDif = 0.026f * WaveDrawable.SINE_WAVE_SPEED; + tinyWaveDrawable.amplitudeRadius = AndroidUtilities.dp(20) + AndroidUtilities.dp(20) * 
WaveDrawable.SMALL_WAVE_RADIUS; + tinyWaveDrawable.maxScale = 0.3f * WaveDrawable.SMALL_WAVE_SCALE; + tinyWaveDrawable.scaleSpeed = 0.001f * WaveDrawable.SMALL_WAVE_SCALE_SPEED; + tinyWaveDrawable.fling = WaveDrawable.FLING_DISTANCE; lockOutlinePaint.setStyle(Paint.Style.STROKE); lockOutlinePaint.setStrokeCap(Paint.Cap.ROUND); @@ -863,18 +833,14 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe touchSlop = vc.getScaledTouchSlop(); touchSlop *= touchSlop; - if (Build.VERSION.SDK_INT >= 26) { - paintRecordWaveBig.setAntiAlias(true); - paintRecordWaveTin.setAntiAlias(true); - } updateColors(); } public void setAmplitude(double value) { - bigWaveDrawable.setValue((float) (Math.min(MAX_AMPLITUDE, value) / MAX_AMPLITUDE)); - tinyWaveDrawable.setValue((float) (Math.min(MAX_AMPLITUDE, value) / MAX_AMPLITUDE)); - animateToAmplitude = (float) (Math.min(MAX_AMPLITUDE, value) / MAX_AMPLITUDE); - animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100 + 500.0f * animationSpeedCircle); + bigWaveDrawable.setValue((float) (Math.min(WaveDrawable.MAX_AMPLITUDE, value) / WaveDrawable.MAX_AMPLITUDE)); + tinyWaveDrawable.setValue((float) (Math.min(WaveDrawable.MAX_AMPLITUDE, value) / WaveDrawable.MAX_AMPLITUDE)); + animateToAmplitude = (float) (Math.min(WaveDrawable.MAX_AMPLITUDE, value) / WaveDrawable.MAX_AMPLITUDE); + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100 + 500.0f * WaveDrawable.animationSpeedCircle); invalidate(); } @@ -1456,10 +1422,8 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe public void updateColors() { paint.setColor(Theme.getColor(Theme.key_chat_messagePanelVoiceBackground)); - paintRecordWaveBig.setColor(Theme.getColor(Theme.key_chat_messagePanelVoiceBackground)); - paintRecordWaveTin.setColor(Theme.getColor(Theme.key_chat_messagePanelVoiceBackground)); - paintRecordWaveBig.setAlpha((int) (255 * CIRCLE_ALPHA_1)); - paintRecordWaveTin.setAlpha((int) (255 * 
CIRCLE_ALPHA_2)); + tinyWaveDrawable.setColor(Theme.getColor(Theme.key_chat_messagePanelVoiceBackground), (int) (255 * WaveDrawable.CIRCLE_ALPHA_2)); + bigWaveDrawable.setColor(Theme.getColor(Theme.key_chat_messagePanelVoiceBackground), (int) (255 * WaveDrawable.CIRCLE_ALPHA_1)); tooltipPaint.setColor(Theme.getColor(Theme.key_chat_gifSaveHintText)); tooltipBackground = Theme.createRoundRectDrawable(AndroidUtilities.dp(5), Theme.getColor(Theme.key_chat_gifSaveHintBackground)); tooltipBackgroundArrow.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_gifSaveHintBackground), PorterDuff.Mode.MULTIPLY)); @@ -1557,302 +1521,6 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe return true; } } - - private class WaveDrawable { - - public float fling; - private float animateToAmplitude; - private float amplitude; - private float slowAmplitude; - private float animateAmplitudeDiff; - private float animateAmplitudeSlowDiff; - float lastRadius; - float radiusDiff; - float waveDif; - double waveAngle; - private boolean incRandomAdditionals; - - float rotation; - float idleRotation; - - private float amplitudeWaveDif; - private final CircleBezierDrawable circleBezierDrawable; - private float amplitudeRadius; - private float idleRadius = 0; - private float idleRadiusK = 0.15f * IDLE_WAVE_ANGLE; - private boolean expandIdleRadius; - private boolean expandScale; - - private boolean isBig; - - private boolean isIdle = true; - private float scaleIdleDif; - private float scaleDif; - public float scaleSpeed = 0.00008f; - public float scaleSpeedIdle = 0.0002f * IDLE_SCALE_SPEED; - private float maxScale; - - private float flingRadius; - private Animator flingAnimator; - - private ValueAnimator animator; - - float randomAdditions = AndroidUtilities.dp(8) * RANDOM_RADIUS_SIZE; - - private final ValueAnimator.AnimatorUpdateListener flingUpdateListener = animation -> flingRadius = (float) animation.getAnimatedValue(); - private float 
idleGlobalRadius = AndroidUtilities.dp(10f) * IDLE_RADIUS; - - private float sineAngleMax; - - public WaveDrawable(int n, - float rotateDif, - float amplitudeRadius, - boolean isFrequncy) { - circleBezierDrawable = new CircleBezierDrawable(n); - this.amplitudeRadius = amplitudeRadius; - this.isBig = isFrequncy; - expandIdleRadius = isBig; - radiusDiff = AndroidUtilities.dp(34) * 0.0012f; - } - - - public void setValue(float value) { - animateToAmplitude = value; - - if (isBig) { - if (animateToAmplitude > amplitude) { - animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100f + 300f * animationSpeed); - } else { - animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100 + 500f * animationSpeed); - } - animateAmplitudeSlowDiff = (animateToAmplitude - slowAmplitude) / (100f + 500 * animationSpeed); - } else { - if (animateToAmplitude > amplitude) { - animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100f + 400f * animationSpeedTiny); - } else { - animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100f + 500f * animationSpeedTiny); - } - animateAmplitudeSlowDiff = (animateToAmplitude - slowAmplitude) / (100f + 500 * animationSpeedTiny); - } - - boolean idle = value < 0.1f; - if (isIdle != idle && idle && isBig) { - float bRotation = rotation; - int k = 60; - float animateToBRotation = Math.round(rotation / k) * k + k / 2; - float tRotation = tinyWaveDrawable.rotation; - float animateToTRotation = Math.round(tRotation / k) * k; - - float bWaveDif = waveDif; - float tWaveDif = tinyWaveDrawable.waveDif; - animator = ValueAnimator.ofFloat(1f, 0f); - animator.addUpdateListener(animation -> { - float v = (float) animation.getAnimatedValue(); - rotation = animateToBRotation + (bRotation - animateToBRotation) * v; - tinyWaveDrawable.rotation = animateToTRotation + (tRotation - animateToTRotation) * v; - waveDif = 1f + (bWaveDif - 1f) * v; - tinyWaveDrawable.waveDif = 1 + (tWaveDif - 1f) * v; - - waveAngle = (float) Math.acos(waveDif); - 
tinyWaveDrawable.waveAngle = (float) Math.acos(-tinyWaveDrawable.waveDif); - }); - animator.setDuration(1200); - animator.start(); - } - - isIdle = idle; - - if (!isIdle && animator != null) { - animator.cancel(); - animator = null; - } - } - - private void startFling(float delta) { - if (flingAnimator != null) { - flingAnimator.cancel(); - } - float fling = this.fling * 2; - float flingDistance = delta * amplitudeRadius * (isBig ? 8 : 20) * 16 * fling; - ValueAnimator valueAnimator = ValueAnimator.ofFloat(flingRadius, flingDistance); - valueAnimator.addUpdateListener(flingUpdateListener); - - valueAnimator.setDuration((long) ((isBig ? 200 : 350) * fling)); - valueAnimator.setInterpolator(linearInterpolator); - ValueAnimator valueAnimator1 = ValueAnimator.ofFloat(flingDistance, 0); - valueAnimator1.addUpdateListener(flingUpdateListener); - - valueAnimator1.setInterpolator(linearInterpolator); - valueAnimator1.setDuration((long) ((isBig ? 220 : 380) * fling)); - - AnimatorSet animatorSet = new AnimatorSet(); - flingAnimator = animatorSet; - animatorSet.playSequentially(valueAnimator, valueAnimator1); - animatorSet.start(); - - } - - boolean wasFling; - - public void tick(float circleRadius) { - long dt = System.currentTimeMillis() - lastUpdateTime; - - if (animateToAmplitude != amplitude) { - amplitude += animateAmplitudeDiff * dt; - if (animateAmplitudeDiff > 0) { - if (amplitude > animateToAmplitude) { - amplitude = animateToAmplitude; - } - } else { - if (amplitude < animateToAmplitude) { - amplitude = animateToAmplitude; - } - } - - if (Math.abs(amplitude - animateToAmplitude) * amplitudeRadius < AndroidUtilities.dp(4)) { - if (!wasFling) { - startFling(animateAmplitudeDiff); - wasFling = true; - } - } else { - wasFling = false; - } - } - - if (animateToAmplitude != slowAmplitude) { - slowAmplitude += animateAmplitudeSlowDiff * dt; - if (Math.abs(slowAmplitude - amplitude) > 0.2f) { - slowAmplitude = amplitude + (slowAmplitude > amplitude ? 
- 0.2f : -0.2f); - } - if (animateAmplitudeSlowDiff > 0) { - if (slowAmplitude > animateToAmplitude) { - slowAmplitude = animateToAmplitude; - } - } else { - if (slowAmplitude < animateToAmplitude) { - slowAmplitude = animateToAmplitude; - } - } - } - - - idleRadius = circleRadius * idleRadiusK; - if (expandIdleRadius) { - scaleIdleDif += scaleSpeedIdle * dt; - if (scaleIdleDif >= 0.05f) { - scaleIdleDif = 0.05f; - expandIdleRadius = false; - } - } else { - scaleIdleDif -= scaleSpeedIdle * dt; - if (scaleIdleDif < 0f) { - scaleIdleDif = 0f; - expandIdleRadius = true; - } - } - - if (maxScale > 0) { - if (expandScale) { - scaleDif += scaleSpeed * dt; - if (scaleDif >= maxScale) { - scaleDif = maxScale; - expandScale = false; - } - } else { - scaleDif -= scaleSpeed * dt; - if (scaleDif < 0f) { - scaleDif = 0f; - expandScale = true; - } - } - } - - - if (sineAngleMax > animateToAmplitude) { - sineAngleMax -= 0.25f; - if (sineAngleMax < animateToAmplitude) { - sineAngleMax = animateToAmplitude; - } - } else if (sineAngleMax < animateToAmplitude) { - sineAngleMax += 0.25f; - if (sineAngleMax > animateToAmplitude) { - sineAngleMax = animateToAmplitude; - } - } - - if (!isIdle) { - rotation += (ROTATION_SPEED * 0.5f + ROTATION_SPEED * 4f * (amplitude > 0.5f ? 
1 : amplitude / 0.5f)) * dt; - if (rotation > 360) rotation %= 360; - } else { - idleRotation += IDLE_ROTATE_DIF * dt; - if (idleRotation > 360) idleRotation %= 360; - } - - if (lastRadius < circleRadius) { - lastRadius = circleRadius; - } else { - lastRadius -= radiusDiff * dt; - if (lastRadius < circleRadius) { - lastRadius = circleRadius; - } - } - - lastRadius = circleRadius; - - if (!isIdle) { - waveAngle += (amplitudeWaveDif * sineAngleMax) * dt; - if (isBig) { - waveDif = (float) Math.cos(waveAngle); - } else { - waveDif = -(float) Math.cos(waveAngle); - } - - if (waveDif > 0f && incRandomAdditionals) { - circleBezierDrawable.calculateRandomAdditionals(); - incRandomAdditionals = false; - } else if (waveDif < 0f && !incRandomAdditionals) { - circleBezierDrawable.calculateRandomAdditionals(); - incRandomAdditionals = true; - } - } - - invalidate(); - } - - public void draw(float cx, float cy, float scale, Canvas canvas) { - float waveAmplitude = amplitude < 0.3f ? amplitude / 0.3f : 1f; - float radiusDiff = AndroidUtilities.dp(10) + AndroidUtilities.dp(50) * WAVE_ANGLE * animateToAmplitude; - - - circleBezierDrawable.idleStateDiff = idleRadius * (1f - waveAmplitude); - - float kDiff = 0.35f * waveAmplitude * waveDif; - circleBezierDrawable.radiusDiff = radiusDiff * kDiff; - circleBezierDrawable.cubicBezierK = 1f + Math.abs(kDiff) * waveAmplitude + (1f - waveAmplitude) * idleRadiusK; - - - circleBezierDrawable.radius = (lastRadius + amplitudeRadius * amplitude) + idleGlobalRadius + (flingRadius * waveAmplitude); - - if (circleBezierDrawable.radius + circleBezierDrawable.radiusDiff < circleRadius) { - circleBezierDrawable.radiusDiff = circleRadius - circleBezierDrawable.radius; - } - - if (isBig) { - circleBezierDrawable.globalRotate = rotation + idleRotation; - } else { - circleBezierDrawable.globalRotate = -rotation + idleRotation; - } - - canvas.save(); - float s = scale + scaleIdleDif * (1f - waveAmplitude) + scaleDif * waveAmplitude; - canvas.scale(s, s, 
cx, cy); - circleBezierDrawable.setRandomAdditions(waveAmplitude * waveDif * randomAdditions); - - circleBezierDrawable.draw(cx, cy, canvas, isBig ? paintRecordWaveBig : paintRecordWaveTin); - canvas.restore(); - } - } } @SuppressLint("ClickableViewAccessibility") @@ -1983,6 +1651,14 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } setEmojiButtonImage(false, false); + captionLimitView = new NumberTextView(context); + captionLimitView.setVisibility(View.GONE); + captionLimitView.setTextSize(15); + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + captionLimitView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + captionLimitView.setCenterAlign(true); + addView(captionLimitView, LayoutHelper.createFrame(48, 20, Gravity.BOTTOM | Gravity.RIGHT, 3, 0, 0, 48)); + messageEditText = new EditTextCaption(context) { private void send(InputContentInfoCompat inputContentInfo, boolean notify, int scheduleDate) { @@ -2186,7 +1862,6 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe @Override public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { - } @Override @@ -2227,20 +1902,66 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe @Override public void afterTextChanged(Editable editable) { - if (innerTextChange != 0) { - return; - } - if (nextChangeIsSend) { - sendMessage(); - nextChangeIsSend = false; - } - if (processChange) { - ImageSpan[] spans = editable.getSpans(0, editable.length(), ImageSpan.class); - for (int i = 0; i < spans.length; i++) { - editable.removeSpan(spans[i]); + if (innerTextChange == 0) { + if (nextChangeIsSend) { + sendMessage(); + nextChangeIsSend = false; } - Emoji.replaceEmoji(editable, messageEditText.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false); - processChange = false; + if (processChange) { + ImageSpan[] spans = editable.getSpans(0, 
editable.length(), ImageSpan.class); + for (int i = 0; i < spans.length; i++) { + editable.removeSpan(spans[i]); + } + Emoji.replaceEmoji(editable, messageEditText.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false); + processChange = false; + } + } + + int beforeLimit; + codePointCount = Character.codePointCount(editable, 0, editable.length()); + boolean doneButtonEnabledLocal = true; + if (currentLimit > 0 && (beforeLimit = currentLimit - codePointCount) <= 100) { + if (beforeLimit < -9999) { + beforeLimit = -9999; + } + captionLimitView.setNumber(beforeLimit, captionLimitView.getVisibility() == View.VISIBLE); + if (captionLimitView.getVisibility() != View.VISIBLE) { + captionLimitView.setVisibility(View.VISIBLE); + captionLimitView.setAlpha(0); + captionLimitView.setScaleX(0.5f); + captionLimitView.setScaleY(0.5f); + } + captionLimitView.animate().setListener(null).cancel(); + captionLimitView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(100).start(); + if (beforeLimit < 0) { + doneButtonEnabledLocal = false; + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText)); + } else { + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + } + } else { + captionLimitView.animate().alpha(0).scaleX(0.5f).scaleY(0.5f).setDuration(100).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + captionLimitView.setVisibility(View.GONE); + } + }); + } + + if (doneButtonEnabled != doneButtonEnabledLocal) { + doneButtonEnabled = doneButtonEnabledLocal; + if (doneButtonColorAnimator != null) { + doneButtonColorAnimator.cancel(); + } + doneButtonColorAnimator = ValueAnimator.ofFloat(doneButtonEnabled ? 0 : 1f, doneButtonEnabled ? 
1f : 0); + doneButtonColorAnimator.addUpdateListener(valueAnimator -> { + int color = Theme.getColor(Theme.key_chat_messagePanelVoicePressed); + int defaultAlpha = Color.alpha(color); + doneButtonEnabledProgress = (float) valueAnimator.getAnimatedValue(); + doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * doneButtonEnabledProgress))), PorterDuff.Mode.MULTIPLY)); + doneButtonImage.invalidate(); + }); + doneButtonColorAnimator.setDuration(150).start(); } } }); @@ -2913,10 +2634,10 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe textFieldContainer.addView(doneButtonContainer, LayoutHelper.createLinear(48, 48, Gravity.BOTTOM)); doneButtonContainer.setOnClickListener(view -> doneEditingMessage()); - Drawable drawable = Theme.createCircleDrawable(AndroidUtilities.dp(16), Theme.getColor(Theme.key_chat_messagePanelSend)); - Drawable checkDrawable = context.getResources().getDrawable(R.drawable.input_done).mutate(); - checkDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.MULTIPLY)); - CombinedDrawable combinedDrawable = new CombinedDrawable(drawable, checkDrawable, 0, AndroidUtilities.dp(1)); + Drawable doneCircleDrawable = Theme.createCircleDrawable(AndroidUtilities.dp(16), Theme.getColor(Theme.key_chat_messagePanelSend)); + doneCheckDrawable = context.getResources().getDrawable(R.drawable.input_done).mutate(); + doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.MULTIPLY)); + CombinedDrawable combinedDrawable = new CombinedDrawable(doneCircleDrawable, doneCheckDrawable, 0, AndroidUtilities.dp(1)); combinedDrawable.setCustomSize(AndroidUtilities.dp(32), AndroidUtilities.dp(32)); doneButtonImage = new ImageView(context); @@ -3052,7 +2773,7 @@ public class ChatActivityEnterView extends FrameLayout 
implements NotificationCe continue; } int num = a; - ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getContext()); + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getContext(), a == 0, a == 1); if (num == 0) { if (UserObject.isUserSelf(user)) { cell.setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.msg_schedule); @@ -3639,16 +3360,25 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } } + private Runnable hideKeyboardRunnable; + public void onPause() { isPaused = true; if (keyboardVisible) { showKeyboardOnResume = true; } - closeKeyboard(); + AndroidUtilities.runOnUIThread(hideKeyboardRunnable = () -> { + closeKeyboard(); + hideKeyboardRunnable = null; + }, 500); } public void onResume() { isPaused = false; + if (hideKeyboardRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(hideKeyboardRunnable); + hideKeyboardRunnable = null; + } int visibility = getVisibility(); if (showKeyboardOnResume) { showKeyboardOnResume = false; @@ -4066,16 +3796,26 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } public void doneEditingMessage() { - if (editingMessageObject != null) { - delegate.onMessageEditEnd(true); - showEditDoneProgress(true, true); - CharSequence[] message = new CharSequence[]{messageEditText.getText()}; - ArrayList entities = MediaDataController.getInstance(currentAccount).getEntities(message, supportsSendingNewEntities()); - editingMessageReqId = SendMessagesHelper.getInstance(currentAccount).editMessage(editingMessageObject, message[0].toString(), messageWebPageSearch, parentFragment, entities, editingMessageObject.scheduled ? 
editingMessageObject.messageOwner.date : 0, () -> { - editingMessageReqId = 0; - setEditingMessageObject(null, false); - }); + if (editingMessageObject == null) { + return; } + if (currentLimit - codePointCount < 0) { + AndroidUtilities.shakeView(captionLimitView, 2, 0); + Vibrator v = (Vibrator) captionLimitView.getContext().getSystemService(Context.VIBRATOR_SERVICE); + if (v != null) { + v.vibrate(200); + } + return; + } + CharSequence[] message = new CharSequence[]{messageEditText.getText()}; + ArrayList entities = MediaDataController.getInstance(currentAccount).getEntities(message, supportsSendingNewEntities()); + if (!TextUtils.equals(message[0], editingMessageObject.messageText) || entities != null && !entities.isEmpty() || editingMessageObject.messageOwner.media instanceof TLRPC.TL_messageMediaWebPage) { + editingMessageObject.editingMessage = message[0]; + editingMessageObject.editingMessageEntities = entities; + editingMessageObject.editingMessageSearchWebPage = messageWebPageSearch; + SendMessagesHelper.getInstance(currentAccount).editMessage(editingMessageObject, null, null, null, null, null, false, null); + } + setEditingMessageObject(null, false); } public boolean processSendingText(CharSequence text, boolean notify, int scheduleDate) { @@ -4086,12 +3826,48 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe if (delegate != null && parentFragment != null && (scheduleDate != 0) == parentFragment.isInScheduleMode()) { delegate.prepareMessageSending(); } - int count = (int) Math.ceil(text.length() / (float) maxLength); - for (int a = 0; a < count; a++) { - CharSequence[] message = new CharSequence[]{text.subSequence(a * maxLength, Math.min((a + 1) * maxLength, text.length()))}; + int end; + int start = 0; + do { + int whitespaceIndex = -1; + int dotIndex = -1; + int tabIndex = -1; + int enterIndex = -1; + if (text.length() > start + maxLength) { + int i = start + maxLength - 1; + int k = 0; + while (i > start && k < 300) { 
+ char c = text.charAt(i); + char c2 = i > 0 ? text.charAt(i - 1) : ' '; + if (c == '\n' && c2 == '\n') { + tabIndex = i; + break; + } else if (c == '\n') { + enterIndex = i; + } else if (dotIndex < 0 && Character.isWhitespace(c) && c2 == '.') { + dotIndex = i; + } else if (whitespaceIndex < 0 && Character.isWhitespace(c)) { + whitespaceIndex = i; + } + i--; + k++; + } + } + end = Math.min(start + maxLength, text.length()); + if (tabIndex > 0) { + end = tabIndex; + } else if (enterIndex > 0) { + end = enterIndex; + } else if (dotIndex > 0) { + end = dotIndex; + } else if (whitespaceIndex > 0) { + end = whitespaceIndex; + } + CharSequence[] message = new CharSequence[]{AndroidUtilities.getTrimmedString(text.subSequence(start, end))}; ArrayList entities = MediaDataController.getInstance(currentAccount).getEntities(message, supportsNewEntities); SendMessagesHelper.getInstance(currentAccount).sendMessage(message[0].toString(), dialog_id, replyingMessageObject, getThreadMessage(), messageWebPage, messageWebPageSearch, entities, null, null, notify, scheduleDate); - } + start = end + 1; + } while (end != text.length()); return true; } return false; @@ -5503,10 +5279,6 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe if (audioToSend != null || videoToSendMessageObject != null || editingMessageObject == messageObject) { return; } - if (editingMessageReqId != 0) { - ConnectionsManager.getInstance(currentAccount).cancelRequest(editingMessageReqId, true); - editingMessageReqId = 0; - } editingMessageObject = messageObject; editingCaption = caption; CharSequence textToSetWithKeyboard; @@ -5516,15 +5288,17 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe doneButtonAnimation = null; } doneButtonContainer.setVisibility(View.VISIBLE); - showEditDoneProgress(true, false); + doneButtonImage.setScaleX(0.1f); + doneButtonImage.setScaleY(0.1f); + doneButtonImage.setAlpha(0.0f); + 
doneButtonImage.animate().alpha(1f).scaleX(1).scaleY(1).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); - InputFilter[] inputFilters = new InputFilter[1]; CharSequence editingText; if (caption) { - inputFilters[0] = new InputFilter.LengthFilter(accountInstance.getMessagesController().maxCaptionLength); + currentLimit = accountInstance.getMessagesController().maxCaptionLength; editingText = editingMessageObject.caption; } else { - inputFilters[0] = new InputFilter.LengthFilter(accountInstance.getMessagesController().maxMessageLength); + currentLimit = accountInstance.getMessagesController().maxMessageLength; editingText = editingMessageObject.messageText; } if (editingText != null) { @@ -5598,7 +5372,6 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } setFieldText(textToSetWithKeyboard); } - messageEditText.setFilters(inputFilters); openKeyboard(); FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) messageEditText.getLayoutParams(); layoutParams.rightMargin = AndroidUtilities.dp(4); @@ -5618,7 +5391,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe setTextFieldRunnable = null; } doneButtonContainer.setVisibility(View.GONE); - messageEditText.setFilters(new InputFilter[0]); + currentLimit = -1; delegate.onMessageEditEnd(false); sendButtonContainer.setVisibility(VISIBLE); cancelBotButton.setScaleX(0.1f); @@ -5725,6 +5498,17 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe slideText.updateColors(); recordTimerView.updateColors(); videoTimelineView.updateColors(); + + if (captionLimitView != null && messageEditText != null) { + if (codePointCount - currentLimit < 0) { + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText)); + } else { + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + } + } + int color = 
Theme.getColor(Theme.key_chat_messagePanelVoicePressed); + int defaultAlpha = Color.alpha(color); + doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * doneButtonEnabledProgress))), PorterDuff.Mode.MULTIPLY)); } private void updateRecordedDeleteIconColors() { @@ -5946,11 +5730,11 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe botButtonDrawablel.setIcon(R.drawable.input_keyboard, true); botButton.setContentDescription(LocaleController.getString("AccDescrShowKeyboard", R.string.AccDescrShowKeyboard)); } else { - botButtonDrawablel.setIcon(R.drawable.input_bot2, true); + botButtonDrawablel.setIcon(R.drawable.input_bot2, true); botButton.setContentDescription(LocaleController.getString("AccDescrBotKeyboard", R.string.AccDescrBotKeyboard)); } } else { - botButtonDrawablel.setIcon(R.drawable.input_bot1, true); + botButtonDrawablel.setIcon(R.drawable.input_bot1, true); botButton.setContentDescription(LocaleController.getString("AccDescrBotCommands", R.string.AccDescrBotCommands)); } } else { @@ -6200,7 +5984,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } @Override - public void onStickerSelected(View view, TLRPC.Document sticker, Object parent, boolean notify, int scheduleDate) { + public void onStickerSelected(View view, TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { if (trendingStickersAlert != null) { trendingStickersAlert.dismiss(); trendingStickersAlert = null; @@ -6219,7 +6003,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } setStickersExpanded(false, true, false); } - ChatActivityEnterView.this.onStickerSelected(sticker, parent, false, notify, scheduleDate); + ChatActivityEnterView.this.onStickerSelected(sticker, query, parent, false, notify, scheduleDate); if ((int) dialog_id == 0 && MessageObject.isGifDocument(sticker)) { 
accountInstance.getMessagesController().saveGif(parent, sticker); } @@ -6233,9 +6017,9 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } @Override - public void onGifSelected(View view, Object gif, Object parent, boolean notify, int scheduleDate) { + public void onGifSelected(View view, Object gif, String query, Object parent, boolean notify, int scheduleDate) { if (isInScheduleMode() && scheduleDate == 0) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (n, s) -> onGifSelected(view, gif, parent, n, s)); + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (n, s) -> onGifSelected(view, gif, query, parent, n, s)); } else { if (slowModeTimer > 0 && !isInScheduleMode()) { if (delegate != null) { @@ -6251,7 +6035,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } if (gif instanceof TLRPC.Document) { TLRPC.Document document = (TLRPC.Document) gif; - SendMessagesHelper.getInstance(currentAccount).sendSticker(document, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate); + SendMessagesHelper.getInstance(currentAccount).sendSticker(document, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate); MediaDataController.getInstance(currentAccount).addRecentGif(document, (int) (System.currentTimeMillis() / 1000)); if ((int) dialog_id == 0) { accountInstance.getMessagesController().saveGif(parent, document); @@ -6352,8 +6136,8 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe // expandStickersWithKeyboard = true; // if (expandStickersWithKeyboard) { // expandStickersWithKeyboard = false; - setStickersExpanded(true, true, false); - // } + setStickersExpanded(true, true, false); + // } } if (emojiTabOpen && searchingType == 2) { checkStickresExpandHeight(); @@ -6483,9 +6267,9 @@ public class ChatActivityEnterView extends 
FrameLayout implements NotificationCe } @Override - public void onStickerSelected(TLRPC.Document sticker, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { + public void onStickerSelected(TLRPC.Document sticker, String query, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { if (isInScheduleMode() && scheduleDate == 0) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (n, s) -> onStickerSelected(sticker, parent, clearsInputField, n, s)); + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (n, s) -> onStickerSelected(sticker, query, parent, clearsInputField, n, s)); } else { if (slowModeTimer > 0 && !isInScheduleMode()) { if (delegate != null) { @@ -6499,7 +6283,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe emojiView.hideSearchKeyboard(); } setStickersExpanded(false, true, false); - SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate); + SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate); if (delegate != null) { delegate.onMessageSend(null, true, scheduleDate); } @@ -6592,7 +6376,8 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe int currentHeight = AndroidUtilities.displaySize.x > AndroidUtilities.displaySize.y ? 
keyboardHeightLand : keyboardHeight; /*if (!samePannelWasVisible && !anotherPanelWasVisible) { currentHeight = 0; - } else */if (contentType == 1) { + } else */ + if (contentType == 1) { currentHeight = Math.min(botKeyboardView.getKeyboardHeight(), currentHeight); } if (botKeyboardView != null) { @@ -6693,7 +6478,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } if (botKeyboardView != null) { if (show != 2 || AndroidUtilities.usingHardwareInput || AndroidUtilities.isInMultiwindow) { - if (smoothKeyboard && !keyboardVisible) { + if (smoothKeyboard && !keyboardVisible) { if (botKeyboardViewVisible) { animatingContentType = 1; } @@ -6848,7 +6633,9 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } if (byBackButton && searchingType != 0) { searchingType = 0; - emojiView.closeSearch(true); + if (emojiView != null) { + emojiView.closeSearch(true); + } messageEditText.requestFocus(); setStickersExpanded(false, true, false); if (emojiTabOpen) { @@ -6986,7 +6773,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe emojiPadding = layoutParams.height; sizeNotifierLayout.requestLayout(); onWindowSizeChanged(); - if (smoothKeyboard && !keyboardVisible && oldHeight != emojiPadding) { + if (smoothKeyboard && !keyboardVisible && oldHeight != emojiPadding && pannelAnimationEnabled()) { panelAnimation = new AnimatorSet(); panelAnimation.playTogether(ObjectAnimator.ofFloat(currentView, View.TRANSLATION_Y, emojiPadding - oldHeight, 0)); panelAnimation.setInterpolator(AdjustPanLayoutHelper.keyboardInterpolator); @@ -7469,6 +7256,13 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe return true; } + public int getHeightWithTopView() { + int h = getMeasuredHeight(); + if (topView != null && topView.getVisibility() == View.VISIBLE) { + h -= (1f - topViewEnterProgress) * topView.getLayoutParams().height; + } + return h; + } public void 
setAdjustPanLayoutHelper(AdjustPanLayoutHelper adjustPanLayoutHelper) { this.adjustPanLayoutHelper = adjustPanLayoutHelper; @@ -8002,4 +7796,8 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe stoppedInternal = false; } } + + protected boolean pannelAnimationEnabled() { + return true; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java index d72c41a25..fd77876fc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java @@ -28,9 +28,11 @@ import android.graphics.Rect; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.os.Build; -import android.text.InputFilter; +import android.os.Vibrator; +import android.text.Editable; import android.text.TextPaint; import android.text.TextUtils; +import android.text.TextWatcher; import android.util.Property; import android.util.TypedValue; import android.view.Gravity; @@ -92,6 +94,15 @@ import androidx.recyclerview.widget.RecyclerView; public class ChatAttachAlert extends BottomSheet implements NotificationCenter.NotificationCenterDelegate, BottomSheet.BottomSheetDelegateInterface { + private final NumberTextView captionLimitView; + private final int currentLimit; + private int codepointCount; + + public float getClipLayoutBottom() { + float alphaOffset = (frameLayout2.getMeasuredHeight()- AndroidUtilities.dp(84)) * (1f -frameLayout2.getAlpha()); + return frameLayout2.getMeasuredHeight() - alphaOffset; + } + public interface ChatAttachViewDelegate { void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate); View getRevealView(); @@ -320,6 +331,10 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N protected boolean avatarSearch; protected boolean typeButtonsAvailable; + 
boolean sendButtonEnabled = true; + private float sendButtonEnabledProgress = 1f; + private ValueAnimator sendButtonColorAnimator; + private int selectedId; protected float cornerRadius = 1.0f; @@ -730,12 +745,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N child.measure(MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(child.getLayoutParams().height, MeasureSpec.EXACTLY)); } } else { -// if (child == currentAttachLayout || child == nextAttachLayout) { -// measureChildWithMargins(child, widthMeasureSpec, 0, MeasureSpec.makeMeasureSpec(getMeasuredHeight(), MeasureSpec.EXACTLY), 0); -// } else { - measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0); -// } - + measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0); } } } @@ -1322,6 +1332,16 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N containerView.addView(frameLayout2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM)); frameLayout2.setOnTouchListener((v, event) -> true); + captionLimitView = new NumberTextView(context); + captionLimitView.setVisibility(View.GONE); + captionLimitView.setTextSize(15); + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + captionLimitView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + captionLimitView.setCenterAlign(true); + frameLayout2.addView(captionLimitView, LayoutHelper.createFrame(56, 20, Gravity.BOTTOM | Gravity.RIGHT, 3, 0, 14, 78)); + + currentLimit = MessagesController.getInstance(UserConfig.selectedAccount).maxCaptionLength; + commentTextView = new EditTextEmoji(context, sizeNotifierFrameLayout, null, EditTextEmoji.STYLE_DIALOG) { private boolean shouldAnimateEditTextWithBounds; @@ -1338,7 +1358,6 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N } else { 
makeFocusable(commentTextView.getEditText(), false); } - } return super.onInterceptTouchEvent(ev); } @@ -1390,11 +1409,71 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N updateLayout(currentAttachLayout,true, 0); } }; - InputFilter[] inputFilters = new InputFilter[1]; - inputFilters[0] = new InputFilter.LengthFilter(MessagesController.getInstance(UserConfig.selectedAccount).maxCaptionLength); - commentTextView.setFilters(inputFilters); commentTextView.setHint(LocaleController.getString("AddCaption", R.string.AddCaption)); commentTextView.onResume(); + commentTextView.getEditText().addTextChangedListener(new TextWatcher() { + + @Override + public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { + + } + + @Override + public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { + + } + + @Override + public void afterTextChanged(Editable editable) { + int beforeLimit; + codepointCount = Character.codePointCount(editable, 0, editable.length()); + boolean sendButtonEnabledLocal = true; + if (currentLimit > 0 && (beforeLimit = currentLimit - codepointCount) <= 100) { + if (beforeLimit < -9999) { + beforeLimit = -9999; + } + captionLimitView.setNumber(beforeLimit, captionLimitView.getVisibility() == View.VISIBLE); + if (captionLimitView.getVisibility() != View.VISIBLE) { + captionLimitView.setVisibility(View.VISIBLE); + captionLimitView.setAlpha(0); + captionLimitView.setScaleX(0.5f); + captionLimitView.setScaleY(0.5f); + } + captionLimitView.animate().setListener(null).cancel(); + captionLimitView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(100).start(); + if (beforeLimit < 0) { + sendButtonEnabledLocal = false; + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText)); + } else { + captionLimitView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + } + } else { + 
captionLimitView.animate().alpha(0).scaleX(0.5f).scaleY(0.5f).setDuration(100).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + captionLimitView.setVisibility(View.GONE); + } + }); + } + + if (sendButtonEnabled != sendButtonEnabledLocal) { + sendButtonEnabled = sendButtonEnabledLocal; + if (sendButtonColorAnimator != null) { + sendButtonColorAnimator.cancel(); + } + sendButtonColorAnimator = ValueAnimator.ofFloat(sendButtonEnabled ? 0 : 1f, sendButtonEnabled ? 1f : 0); + sendButtonColorAnimator.addUpdateListener(valueAnimator -> { + sendButtonEnabledProgress = (float) valueAnimator.getAnimatedValue(); + int color = Theme.getColor(Theme.key_dialogFloatingIcon); + int defaultAlpha = Color.alpha(color); + writeButton.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * sendButtonEnabledProgress))), PorterDuff.Mode.MULTIPLY)); + selectedCountView.invalidate(); + + }); + sendButtonColorAnimator.setDuration(150).start(); + } + } + }); frameLayout2.addView(commentTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 0, 0, 84, 0)); frameLayout2.setClipChildren(false); commentTextView.setClipChildren(false); @@ -1448,6 +1527,14 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N } writeButtonContainer.addView(writeButton, LayoutHelper.createFrame(Build.VERSION.SDK_INT >= 21 ? 56 : 60, Build.VERSION.SDK_INT >= 21 ? 56 : 60, Gravity.LEFT | Gravity.TOP, Build.VERSION.SDK_INT >= 21 ? 
2 : 0, 0, 0, 0)); writeButton.setOnClickListener(v -> { + if (currentLimit - codepointCount < 0) { + AndroidUtilities.shakeView(captionLimitView, 2, 0); + Vibrator vibrator = (Vibrator) captionLimitView.getContext().getSystemService(Context.VIBRATOR_SERVICE); + if (vibrator != null) { + vibrator.vibrate(200); + } + return; + } if (editingMessageObject == null && baseFragment instanceof ChatActivity && ((ChatActivity) baseFragment).isInScheduleMode()) { AlertsCreator.createScheduleDatePickerDialog(getContext(), ((ChatActivity) baseFragment).getDialogId(), (notify, scheduleDate) -> { if (currentAttachLayout == photoLayout) { @@ -1467,7 +1554,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N } }); writeButton.setOnLongClickListener(view -> { - if (!(baseFragment instanceof ChatActivity) || editingMessageObject != null) { + if (!(baseFragment instanceof ChatActivity) || editingMessageObject != null || currentLimit - codepointCount < 0) { return false; } ChatActivity chatActivity = (ChatActivity) baseFragment; @@ -1514,7 +1601,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N continue; } int num = a; - itemCells[a] = new ActionBarMenuSubItem(getContext()); + itemCells[a] = new ActionBarMenuSubItem(getContext(), a == 0, a == 1); if (num == 0) { if (UserObject.isUserSelf(user)) { itemCells[a].setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.msg_schedule); @@ -1583,9 +1670,9 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N int textSize = (int) Math.ceil(textPaint.measureText(text)); int size = Math.max(AndroidUtilities.dp(16) + textSize, AndroidUtilities.dp(24)); int cx = getMeasuredWidth() / 2; - int cy = getMeasuredHeight() / 2; - textPaint.setColor(Theme.getColor(Theme.key_dialogRoundCheckBoxCheck)); + int color = Theme.getColor(Theme.key_dialogRoundCheckBoxCheck); + textPaint.setColor(ColorUtils.setAlphaComponent(color, 
(int) (Color.alpha(color) * (0.58 + 0.42 * sendButtonEnabledProgress)))); paint.setColor(Theme.getColor(Theme.key_dialogBackground)); rect.set(cx - size / 2, 0, cx + size / 2, getMeasuredHeight()); canvas.drawRoundRect(rect, AndroidUtilities.dp(12), AndroidUtilities.dp(12), paint); @@ -2419,7 +2506,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N enterCommentEventSent = false; setFocusable(false); ChatAttachAlert.AttachAlertLayout layoutToSet; - if (editingMessageObject != null && editingMessageObject.hasValidGroupId() && (editingMessageObject.isMusic() || editingMessageObject.isDocument())) { + if (editingMessageObject != null && (editingMessageObject.isMusic() || (editingMessageObject.isDocument() && !editingMessageObject.isGif()))) { if (editingMessageObject.isMusic()) { openAudioLayout(false); layoutToSet = audioLayout; @@ -2429,18 +2516,14 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N layoutToSet = documentLayout; selectedId = 4; } - typeButtonsAvailable = false; - buttonsRecyclerView.setVisibility(View.GONE); - shadow.setVisibility(View.INVISIBLE); + typeButtonsAvailable = !editingMessageObject.hasValidGroupId(); } else { layoutToSet = photoLayout; typeButtonsAvailable = avatarPicker == 0; selectedId = 1; - if (typeButtonsAvailable) { - buttonsRecyclerView.setVisibility(View.VISIBLE); - shadow.setVisibility(View.VISIBLE); - } } + buttonsRecyclerView.setVisibility(typeButtonsAvailable ? View.VISIBLE : View.GONE); + shadow.setVisibility(typeButtonsAvailable ? View.VISIBLE : View.INVISIBLE); if (currentAttachLayout != layoutToSet) { if (actionBar.isSearchFieldVisible()) { actionBar.closeSearchField(); @@ -2461,6 +2544,9 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N actionBar.setVisibility(layoutToSet.needsActionBar() != 0 ? 
View.VISIBLE : View.INVISIBLE); actionBarShadow.setVisibility(actionBar.getVisibility()); } + if (currentAttachLayout != photoLayout) { + photoLayout.setCheckCameraWhenShown(true); + } updateCountButton(0); buttonsAdapter.notifyDataSetChanged(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertLocationLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertLocationLayout.java index 9e861addb..f24d56f8e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertLocationLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertLocationLayout.java @@ -1286,7 +1286,7 @@ public class ChatAttachAlertLocationLayout extends ChatAttachAlert.AttachAlertLa if (loadingMapView.getTag() == null) { loadingMapView.animate().alpha(0.0f).setDuration(180).start(); } - }, 2000); + }, 200); positionMarker(myLocation = getLastLocation()); if (checkGpsEnabled && getParentActivity() != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java index c1cc3a92a..685852f1d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java @@ -149,6 +149,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou private static ArrayList selectedPhotosOrder = new ArrayList<>(); private static int lastImageId = -1; private boolean cancelTakingPhotos; + private boolean checkCameraWhenShown; private boolean mediaEnabled; @@ -290,6 +291,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou object.imageReceiver = cell.getImageView().getImageReceiver(); object.thumb = object.imageReceiver.getBitmapSafe(); object.scale = cell.getScale(); + object.clipBottomAddition = 
(int) parentAlert.getClipLayoutBottom(); cell.showCheck(false); return object; } @@ -2513,6 +2515,14 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } } + if (checkCameraWhenShown) { + checkCameraWhenShown = false; + checkCamera(true); + } + } + + public void setCheckCameraWhenShown(boolean checkCameraWhenShown) { + this.checkCameraWhenShown = checkCameraWhenShown; } @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java index e40d3b056..ef019acc4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java @@ -13,6 +13,7 @@ import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; import android.content.Context; +import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; @@ -25,7 +26,9 @@ import android.widget.ImageView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.FileLog; +import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.ImageReceiver; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessagesController; @@ -132,6 +135,16 @@ public class ChatAvatarContainer extends FrameLayout implements NotificationCent } TLRPC.User user = parentFragment.getCurrentUser(); TLRPC.Chat chat = parentFragment.getCurrentChat(); + ImageReceiver imageReceiver = avatarImageView.getImageReceiver(); + String key = imageReceiver.getImageKey(); + ImageLoader imageLoader = ImageLoader.getInstance(); + if (key != null && 
!imageLoader.isInMemCache(key, false)) { + Drawable drawable = imageReceiver.getDrawable(); + if (drawable instanceof BitmapDrawable) { + imageLoader.putImageToCache((BitmapDrawable) drawable, key); + } + } + if (user != null) { Bundle args = new Bundle(); if (UserObject.isUserSelf(user)) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java index afd864fd9..43f419244 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java @@ -1,6 +1,7 @@ package org.telegram.ui.Components; import android.content.Context; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.widget.LinearLayout; @@ -35,12 +36,12 @@ public class ChatGreetingsView extends LinearLayout { setOrientation(VERTICAL); titleView = new TextView(context); - titleView.setTextSize(14); + titleView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); titleView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); titleView.setGravity(Gravity.CENTER_HORIZONTAL); descriptionView = new TextView(context); - descriptionView.setTextSize(14); + descriptionView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); descriptionView.setGravity(Gravity.CENTER_HORIZONTAL); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxSquare.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxSquare.java index 99735caa6..a8758e1b7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxSquare.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxSquare.java @@ -37,17 +37,32 @@ public class CheckBoxSquare extends View { private final static float progressBounceDiff = 0.2f; + private String key1; + private String key2; + private String key3; + public CheckBoxSquare(Context 
context, boolean alert) { super(context); if (Theme.checkboxSquare_backgroundPaint == null) { Theme.createCommonResources(context); } + + key1 = isAlert ? Theme.key_dialogCheckboxSquareUnchecked : Theme.key_checkboxSquareUnchecked; + key2 = isAlert ? Theme.key_dialogCheckboxSquareBackground : Theme.key_checkboxSquareBackground; + key3 = isAlert ? Theme.key_dialogCheckboxSquareCheck : Theme.key_checkboxSquareCheck; + rectF = new RectF(); drawBitmap = Bitmap.createBitmap(AndroidUtilities.dp(18), AndroidUtilities.dp(18), Bitmap.Config.ARGB_4444); drawCanvas = new Canvas(drawBitmap); isAlert = alert; } + public void setColors(String unchecked, String checked, String check) { + key1 = unchecked; + key2 = checked; + key3 = check; + } + @Keep public void setProgress(float value) { if (progress == value) { @@ -121,8 +136,8 @@ public class CheckBoxSquare extends View { float checkProgress; float bounceProgress; - int uncheckedColor = Theme.getColor(isAlert ? Theme.key_dialogCheckboxSquareUnchecked : Theme.key_checkboxSquareUnchecked); - int color = Theme.getColor(isAlert ? Theme.key_dialogCheckboxSquareBackground : Theme.key_checkboxSquareBackground); + int uncheckedColor = Theme.getColor(key1); + int color = Theme.getColor(key2); if (progress <= 0.5f) { bounceProgress = checkProgress = progress / 0.5f; int rD = (int) ((Color.red(color) - Color.red(uncheckedColor)) * checkProgress); @@ -151,7 +166,7 @@ public class CheckBoxSquare extends View { } if (progress > 0.5f) { - Theme.checkboxSquare_checkPaint.setColor(Theme.getColor(isAlert ? 
Theme.key_dialogCheckboxSquareCheck : Theme.key_checkboxSquareCheck)); + Theme.checkboxSquare_checkPaint.setColor(Theme.getColor(key3)); int endX = (int) (AndroidUtilities.dp(7) - AndroidUtilities.dp(3) * (1.0f - bounceProgress)); int endY = (int) (AndroidUtilities.dpf2(13) - AndroidUtilities.dp(3) * (1.0f - bounceProgress)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CodepointsLengthInputFilter.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CodepointsLengthInputFilter.java new file mode 100644 index 000000000..2027bc8e4 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CodepointsLengthInputFilter.java @@ -0,0 +1,36 @@ +package org.telegram.ui.Components; + +import android.text.InputFilter; +import android.text.Spanned; + +public class CodepointsLengthInputFilter implements InputFilter { + private final int mMax; + + public CodepointsLengthInputFilter(int max) { + mMax = max; + } + + public CharSequence filter(CharSequence source, int start, int end, Spanned dest, + int dstart, int dend) { + int destAfter = Character.codePointCount(dest, 0, dest.length()) - Character.codePointCount(dest, dstart, dend); + int keep = mMax - destAfter; + if (keep <= 0) { + return ""; + } else if (keep >= Character.codePointCount(source, start, end)) { + return null; // keep original + } else { + keep += start; + if (Character.isHighSurrogate(source.charAt(keep - 1))) { + --keep; + if (keep == start) { + return ""; + } + } + return source.subSequence(start, keep); + } + } + + public int getMax() { + return mMax; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ContactsEmptyView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ContactsEmptyView.java index b40975679..469246a3d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ContactsEmptyView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ContactsEmptyView.java @@ -41,6 +41,7 @@ public class 
ContactsEmptyView extends LinearLayout implements NotificationCente private static final String stickerSetName = "tg_placeholders"; + public static final String svg = "m418 282.6c13.4-21.1 20.2-44.9 20.2-70.8 0-88.3-79.8-175.3-178.9-175.3-100.1 0-178.9 88-178.9 175.3 0 46.6 16.9 73.1 29.1 86.1-19.3 23.4-30.9 52.3-34.6 86.1-2.5 22.7 3.2 41.4 17.4 57.3 14.3 16 51.7 35 148.1 35 41.2 0 119.9-5.3 156.7-18.3 49.5-17.4 59.2-41.1 59.2-76.2 0-41.5-12.9-74.8-38.3-99.2z"; public ContactsEmptyView(Context context) { super(context); @@ -48,7 +49,7 @@ public class ContactsEmptyView extends LinearLayout implements NotificationCente setOrientation(LinearLayout.VERTICAL); stickerView = new BackupImageView(context); - drawable = new LoadingStickerDrawable(stickerView, "m418 282.6c13.4-21.1 20.2-44.9 20.2-70.8 0-88.3-79.8-175.3-178.9-175.3-100.1 0-178.9 88-178.9 175.3 0 46.6 16.9 73.1 29.1 86.1-19.3 23.4-30.9 52.3-34.6 86.1-2.5 22.7 3.2 41.4 17.4 57.3 14.3 16 51.7 35 148.1 35 41.2 0 119.9-5.3 156.7-18.3 49.5-17.4 59.2-41.1 59.2-76.2 0-41.5-12.9-74.8-38.3-99.2z", AndroidUtilities.dp(130), AndroidUtilities.dp(130)); + drawable = new LoadingStickerDrawable(stickerView, svg, AndroidUtilities.dp(130), AndroidUtilities.dp(130)); stickerView.setImageDrawable(drawable); addView(stickerView, LayoutHelper.createLinear(130, 130, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 2, 0, 0)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Crop/CropView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Crop/CropView.java index 2efe5aaa3..7d9a227cc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Crop/CropView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Crop/CropView.java @@ -777,7 +777,9 @@ public class CropView extends FrameLayout implements CropAreaView.AreaViewListen float x = previousAreaRect.centerX() - areaView.getCropCenterX(); float y = previousAreaRect.centerY() - areaView.getCropCenterY(); - state.translate(x, y); + if 
(state != null) { + state.translate(x, y); + } updateMatrix(); areaView.getCropRect(previousAreaRect); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java index dfa7644e9..51bf044dc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java @@ -19,9 +19,12 @@ import android.graphics.Paint; import android.graphics.drawable.Drawable; import android.graphics.drawable.GradientDrawable; import android.graphics.Rect; +import android.graphics.drawable.ShapeDrawable; +import android.graphics.drawable.shapes.RectShape; import android.os.Build; import android.os.SystemClock; import androidx.annotation.Keep; +import androidx.annotation.Nullable; import androidx.core.view.accessibility.AccessibilityNodeInfoCompat; import android.text.Layout; @@ -53,6 +56,7 @@ public class EditTextBoldCursor extends EditText { private static Field mEditor; private static Field mShowCursorField; private static Field mScrollYField; + private static boolean mScrollYGet; private static Method getVerticalOffsetMethod; private static Class editorClass; private static Field mCursorDrawableResField; @@ -94,6 +98,8 @@ public class EditTextBoldCursor extends EditText { private float cursorWidth = 2.0f; private boolean supportRtlHint; + private boolean cursorDrawn; + private int lineColor; private int activeLineColor; private int errorLineColor; @@ -158,6 +164,20 @@ public class EditTextBoldCursor extends EditText { init(); } + @Nullable + @Override + public Drawable getTextCursorDrawable() { + ShapeDrawable shapeDrawable = new ShapeDrawable(new RectShape()) { + @Override + public void draw(Canvas canvas) { + super.draw(canvas); + cursorDrawn = true; + } + }; + shapeDrawable.getPaint().setColor(0); + return shapeDrawable; + } + @TargetApi(Build.VERSION_CODES.O) @Override 
public int getAutofillType() { @@ -174,7 +194,8 @@ public class EditTextBoldCursor extends EditText { } try { - if (mScrollYField == null) { + if (!mScrollYGet && mScrollYField == null) { + mScrollYGet = true; mScrollYField = View.class.getDeclaredField("mScrollY"); mScrollYField.setAccessible(true); } @@ -186,12 +207,14 @@ public class EditTextBoldCursor extends EditText { mEditor = TextView.class.getDeclaredField("mEditor"); mEditor.setAccessible(true); editorClass = Class.forName("android.widget.Editor"); - mShowCursorField = editorClass.getDeclaredField("mShowCursor"); - mShowCursorField.setAccessible(true); + try { + mShowCursorField = editorClass.getDeclaredField("mShowCursor"); + mShowCursorField.setAccessible(true); + } catch (Exception ignore) { + + } getVerticalOffsetMethod = TextView.class.getDeclaredMethod("getVerticalOffset", boolean.class); getVerticalOffsetMethod.setAccessible(true); - mShowCursorField = editorClass.getDeclaredField("mShowCursor"); - mShowCursorField.setAccessible(true); } } catch (Throwable e) { FileLog.e(e); @@ -452,8 +475,12 @@ public class EditTextBoldCursor extends EditText { int topPadding = getExtendedPaddingTop(); scrollY = Integer.MAX_VALUE; try { - scrollY = mScrollYField.getInt(this); - mScrollYField.set(this, 0); + if (mScrollYField != null) { + scrollY = mScrollYField.getInt(this); + mScrollYField.set(this, 0); + } else { + scrollY = getScrollX(); + } } catch (Exception e) { // } @@ -539,40 +566,44 @@ public class EditTextBoldCursor extends EditText { canvas.restore(); } try { - if (allowDrawCursor && mShowCursorField != null) { + boolean showCursor; + if (mShowCursorField != null) { long mShowCursor = mShowCursorField.getLong(editor); - boolean showCursor = (SystemClock.uptimeMillis() - mShowCursor) % (2 * 500) < 500 && isFocused(); - if (showCursor) { - canvas.save(); - int voffsetCursor = 0; - if (getVerticalOffsetMethod != null) { - if ((getGravity() & Gravity.VERTICAL_GRAVITY_MASK) != Gravity.TOP) { - voffsetCursor 
= (int) getVerticalOffsetMethod.invoke(this, true); - } - } else { - if ((getGravity() & Gravity.VERTICAL_GRAVITY_MASK) != Gravity.TOP) { - voffsetCursor = getTotalPaddingTop() - getExtendedPaddingTop(); - } + showCursor = (SystemClock.uptimeMillis() - mShowCursor) % (2 * 500) < 500 && isFocused(); + } else { + showCursor = cursorDrawn; + cursorDrawn = false; + } + if (allowDrawCursor && showCursor) { + canvas.save(); + int voffsetCursor = 0; + if (getVerticalOffsetMethod != null) { + if ((getGravity() & Gravity.VERTICAL_GRAVITY_MASK) != Gravity.TOP) { + voffsetCursor = (int) getVerticalOffsetMethod.invoke(this, true); } - canvas.translate(getPaddingLeft(), getExtendedPaddingTop() + voffsetCursor); - Layout layout = getLayout(); - int line = layout.getLineForOffset(getSelectionStart()); - int lineCount = layout.getLineCount(); - updateCursorPosition(); - Rect bounds = gradientDrawable.getBounds(); - rect.left = bounds.left; - rect.right = bounds.left + AndroidUtilities.dp(cursorWidth); - rect.bottom = bounds.bottom; - rect.top = bounds.top; - if (lineSpacingExtra != 0 && line < lineCount - 1) { - rect.bottom -= lineSpacingExtra; + } else { + if ((getGravity() & Gravity.VERTICAL_GRAVITY_MASK) != Gravity.TOP) { + voffsetCursor = getTotalPaddingTop() - getExtendedPaddingTop(); } - rect.top = rect.centerY() - cursorSize / 2; - rect.bottom = rect.top + cursorSize; - gradientDrawable.setBounds(rect); - gradientDrawable.draw(canvas); - canvas.restore(); } + canvas.translate(getPaddingLeft(), getExtendedPaddingTop() + voffsetCursor); + Layout layout = getLayout(); + int line = layout.getLineForOffset(getSelectionStart()); + int lineCount = layout.getLineCount(); + updateCursorPosition(); + Rect bounds = gradientDrawable.getBounds(); + rect.left = bounds.left; + rect.right = bounds.left + AndroidUtilities.dp(cursorWidth); + rect.bottom = bounds.bottom; + rect.top = bounds.top; + if (lineSpacingExtra != 0 && line < lineCount - 1) { + rect.bottom -= lineSpacingExtra; + } + 
rect.top = rect.centerY() - cursorSize / 2; + rect.bottom = rect.top + cursorSize; + gradientDrawable.setBounds(rect); + gradientDrawable.draw(canvas); + canvas.restore(); } } catch (Throwable ignore) { @@ -664,7 +695,11 @@ public class EditTextBoldCursor extends EditText { @Override protected void onAttachedToWindow() { - super.onAttachedToWindow(); + try { + super.onAttachedToWindow(); + } catch (Exception e) { + FileLog.e(e); + } attachedToWindow = getRootView(); AndroidUtilities.runOnUIThread(invalidateRunnable); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EllipsizeSpanAnimator.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EllipsizeSpanAnimator.java index 3c283d5b5..8aff03107 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EllipsizeSpanAnimator.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EllipsizeSpanAnimator.java @@ -13,8 +13,8 @@ import java.util.ArrayList; public class EllipsizeSpanAnimator { - private TextAlphaSpan[] ellSpans = new TextAlphaSpan[]{new TextAlphaSpan(), new TextAlphaSpan(), new TextAlphaSpan()}; - private AnimatorSet ellAnimator; + private final TextAlphaSpan[] ellSpans = new TextAlphaSpan[]{new TextAlphaSpan(), new TextAlphaSpan(), new TextAlphaSpan()}; + private final AnimatorSet ellAnimator; boolean attachedToWindow; public ArrayList ellipsizedViews = new ArrayList<>(); @@ -33,8 +33,12 @@ public class EllipsizeSpanAnimator { private Runnable restarter = new Runnable() { @Override public void run() { - if (attachedToWindow && !ellipsizedViews.isEmpty()) { - ellAnimator.start(); + if (attachedToWindow && !ellipsizedViews.isEmpty() && !ellAnimator.isRunning()) { + try { + ellAnimator.start(); + } catch (Exception ignored) { + + } } } }; @@ -90,7 +94,9 @@ public class EllipsizeSpanAnimator { if (ellipsizedViews.isEmpty()) { ellAnimator.start(); } - ellipsizedViews.add(view); + if (!ellipsizedViews.contains(view)) { + ellipsizedViews.add(view); + } } public void 
removeView(View view) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmbedBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmbedBottomSheet.java index 02b4a4338..a5ade3e64 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmbedBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmbedBottomSheet.java @@ -47,7 +47,6 @@ import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; -import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; @@ -58,9 +57,9 @@ import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; import org.telegram.messenger.Utilities; import org.telegram.messenger.browser.Browser; -import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BottomSheet; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.LaunchActivity; import java.util.HashMap; import java.util.Locale; @@ -787,7 +786,9 @@ public class EmbedBottomSheet extends BottomSheet { } catch (Exception e) { FileLog.e(e); } - Toast.makeText(getContext(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + if (parentActivity instanceof LaunchActivity) { + ((LaunchActivity) parentActivity).showBulletin(BulletinFactory::createCopyLinkBulletin); + } dismiss(); }; @@ -980,13 +981,7 @@ public class EmbedBottomSheet extends BottomSheet { if (Build.VERSION.SDK_INT < 23 || Settings.canDrawOverlays(parentActivity)) { return true; } else { - new AlertDialog.Builder(parentActivity).setTitle(LocaleController.getString("AppName", R.string.AppName)) - .setMessage(LocaleController.getString("PermissionDrawAboveOtherApps", R.string.PermissionDrawAboveOtherApps)) - .setPositiveButton(LocaleController.getString("PermissionOpenSettings", R.string.PermissionOpenSettings), (dialog, which) 
-> { - if (parentActivity != null) { - parentActivity.startActivity(new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + parentActivity.getPackageName()))); - } - }).show(); + AlertsCreator.createDrawOverlayPermissionDialog(parentActivity, null); } return false; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java index 3d1a17330..3ffb51e68 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java @@ -68,6 +68,7 @@ import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.Emoji; import org.telegram.messenger.EmojiData; @@ -79,6 +80,7 @@ import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.FileLog; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.messenger.browser.Browser; import org.telegram.tgnet.ConnectionsManager; @@ -252,7 +254,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific } - default void onStickerSelected(View view, TLRPC.Document sticker, Object parent, boolean notify, int scheduleDate) { + default void onStickerSelected(View view, TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { } @@ -264,7 +266,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific } - default void onGifSelected(View view, Object gif, Object parent, boolean notify, int scheduleDate) { + default void onGifSelected(View view, Object gif, String query, Object parent, boolean notify, int scheduleDate) { } @@ 
-326,8 +328,8 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific private ContentPreviewViewer.ContentPreviewViewerDelegate contentPreviewViewerDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { @Override - public void sendSticker(TLRPC.Document sticker, Object parent, boolean notify, int scheduleDate) { - delegate.onStickerSelected(null, sticker, parent, notify, scheduleDate); + public void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { + delegate.onStickerSelected(null, sticker, query, parent, notify, scheduleDate); } @Override @@ -356,9 +358,9 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific @Override public void sendGif(Object gif, Object parent, boolean notify, int scheduleDate) { if (gifGridView.getAdapter() == gifAdapter) { - delegate.onGifSelected(null, gif, parent, notify, scheduleDate); + delegate.onGifSelected(null, gif, null, parent, notify, scheduleDate); } else if (gifGridView.getAdapter() == gifSearchAdapter) { - delegate.onGifSelected(null, gif, parent, notify, scheduleDate); + delegate.onGifSelected(null, gif, null, parent, notify, scheduleDate); } } @@ -371,6 +373,14 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific public long getDialogId() { return delegate.getDialogId(); } + + @Override + public String getQuery(boolean isGif) { + if (isGif) { + return gifGridView.getAdapter() == gifSearchAdapter ? gifSearchAdapter.lastSearchImageString : null; + } + return emojiGridView.getAdapter() == emojiSearchAdapter ? 
emojiSearchAdapter.lastSearchEmojiString : null; + } }; private static final Field superListenerField; @@ -1465,7 +1475,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific return; } if (position < gifAdapter.recentItemsCount) { - delegate.onGifSelected(view, recentGifs.get(position), "gif", true, 0); + delegate.onGifSelected(view, recentGifs.get(position), null, "gif", true, 0); } else { int resultPos = position; if (gifAdapter.recentItemsCount > 0) { @@ -1473,14 +1483,14 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific resultPos--; // trending section item } if (resultPos >= 0 && resultPos < gifAdapter.results.size()) { - delegate.onGifSelected(view, gifAdapter.results.get(resultPos), gifAdapter.bot, true, 0); + delegate.onGifSelected(view, gifAdapter.results.get(resultPos), null, gifAdapter.bot, true, 0); } } } else if (gifGridView.getAdapter() == gifSearchAdapter) { if (position < 0 || position >= gifSearchAdapter.results.size()) { return; } - delegate.onGifSelected(view, gifSearchAdapter.results.get(position), gifSearchAdapter.bot, true, 0); + delegate.onGifSelected(view, gifSearchAdapter.results.get(position), gifSearchAdapter.lastSearchImageString, gifSearchAdapter.bot, true, 0); updateRecentGifs(); } }; @@ -1609,7 +1619,9 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific stickersGridView.setAdapter(stickersGridAdapter = new StickersGridAdapter(context)); stickersGridView.setOnTouchListener((v, event) -> ContentPreviewViewer.getInstance().onTouch(event, stickersGridView, EmojiView.this.getMeasuredHeight(), stickersOnItemClickListener, contentPreviewViewerDelegate)); stickersOnItemClickListener = (view, position) -> { + String query = null; if (stickersGridView.getAdapter() == stickersSearchGridAdapter) { + query = stickersSearchGridAdapter.searchQuery; TLRPC.StickerSetCovered pack = stickersSearchGridAdapter.positionsToSets.get(position); if (pack != null) { 
delegate.onShowStickerSet(pack.set, null); @@ -1625,7 +1637,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific return; } cell.disable(); - delegate.onStickerSelected(cell, cell.getSticker(), cell.getParentObject(), true, 0); + delegate.onStickerSelected(cell, cell.getSticker(), query, cell.getParentObject(), true, 0); }; stickersGridView.setOnItemClickListener(stickersOnItemClickListener); stickersGridView.setGlowColor(Theme.getColor(Theme.key_chat_emojiPanelBackground)); @@ -1685,7 +1697,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific @Override public void onStickerSelected(TLRPC.Document sticker, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { - delegate.onStickerSelected(null, sticker, parent, notify, scheduleDate); + delegate.onStickerSelected(null, sticker, null, parent, notify, scheduleDate); } @Override @@ -2971,14 +2983,13 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific } } else { TLRPC.TL_messages_stickerSet stickerSet = stickerSets.get(a); - TLObject thumb; TLRPC.Document document = stickerSet.documents.get(0); - if (stickerSet.set.thumb instanceof TLRPC.TL_photoSize || stickerSet.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { - thumb = stickerSet.set.thumb; - } else { + TLObject thumb = FileLoader.getClosestPhotoSizeWithSize(stickerSet.set.thumbs, 90); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(stickerSet.set.thumbs, Theme.key_emptyListPlaceholder, 0.2f); + if (thumb == null) { thumb = document; } - stickersTab.addStickerTab(thumb, document, stickerSet).setContentDescription(stickerSet.set.title + ", " + LocaleController.getString("AccDescrStickerSet", R.string.AccDescrStickerSet)); + stickersTab.addStickerTab(thumb, svgThumb, document, stickerSet).setContentDescription(stickerSet.set.title + ", " + LocaleController.getString("AccDescrStickerSet", R.string.AccDescrStickerSet)); } } 
stickersTab.commitUpdate(); @@ -3720,7 +3731,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific View view = null; switch (viewType) { case 0: - view = new StickerEmojiCell(context) { + view = new StickerEmojiCell(context, true) { public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(82), MeasureSpec.EXACTLY)); } @@ -5208,7 +5219,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific View view = null; switch (viewType) { case 0: - view = new StickerEmojiCell(context) { + view = new StickerEmojiCell(context, true) { public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(82), MeasureSpec.EXACTLY)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastLinearLayoutManager.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastLinearLayoutManager.java index b7a2a4c44..7a535e37b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastLinearLayoutManager.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastLinearLayoutManager.java @@ -16,6 +16,8 @@ public class FillLastLinearLayoutManager extends LinearLayoutManager { private int additionalHeight; private RecyclerView listView; private boolean skipFirstItem; + private boolean bind = true; + private boolean canScrollVertically = true; public FillLastLinearLayoutManager(Context context, int h, RecyclerView recyclerView) { super(context); @@ -37,6 +39,19 @@ public class FillLastLinearLayoutManager extends LinearLayoutManager { skipFirstItem = true; } + public void setBind(boolean value) { + bind = value; + } + + public void setCanScrollVertically(boolean value) { + canScrollVertically = value; + } + + @Override + public boolean canScrollVertically() { + return canScrollVertically; + } + 
@SuppressWarnings("unchecked") private void calcLastItemHeight() { if (listHeight <= 0) { @@ -58,7 +73,9 @@ public class FillLastLinearLayoutManager extends LinearLayoutManager { holder.itemView.setLayoutParams(generateDefaultLayoutParams()); } } - adapter.onBindViewHolder(holder, a); + if (bind) { + adapter.onBindViewHolder(holder, a); + } final RecyclerView.LayoutParams lp = (RecyclerView.LayoutParams) holder.itemView.getLayoutParams(); final int widthSpec = getChildMeasureSpec(listWidth, getWidthMode(), getPaddingLeft() + getPaddingRight() + lp.leftMargin + lp.rightMargin, lp.width, canScrollHorizontally()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java index f75b6039e..14cb4a079 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java @@ -12,9 +12,9 @@ import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; +import android.animation.ValueAnimator; import android.annotation.SuppressLint; import android.content.Context; -import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.RectF; @@ -22,9 +22,11 @@ import android.graphics.drawable.Drawable; import android.graphics.drawable.GradientDrawable; import android.os.SystemClock; import android.text.Layout; +import android.text.SpannableStringBuilder; import android.text.StaticLayout; import android.text.TextPaint; import android.text.TextUtils; +import android.util.Log; import android.util.Property; import android.util.SparseIntArray; import android.view.View; @@ -32,9 +34,15 @@ import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.TextView; +import androidx.core.graphics.ColorUtils; 
+import androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.ItemTouchHelper; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.LinearSmoothScroller; +import androidx.recyclerview.widget.RecyclerView; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.Emoji; -import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.MessagesStorage; @@ -46,15 +54,10 @@ import org.telegram.ui.ActionBar.Theme; import java.util.ArrayList; -import androidx.core.graphics.ColorUtils; -import androidx.recyclerview.widget.DefaultItemAnimator; -import androidx.recyclerview.widget.ItemTouchHelper; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.LinearSmoothScroller; -import androidx.recyclerview.widget.RecyclerView; - public class FilterTabsView extends FrameLayout { + private final ItemTouchHelper itemTouchHelper; + public interface FilterTabsViewDelegate { void onPageSelected(int page, boolean forward); void onPageScrolled(float progress); @@ -112,6 +115,7 @@ public class FilterTabsView extends FrameLayout { public class TabView extends View { + public ValueAnimator changeAnimator; private Tab currentTab; private int textHeight; private int tabWidth; @@ -121,6 +125,43 @@ public class FilterTabsView extends FrameLayout { private StaticLayout textLayout; private int textOffsetX; + public boolean animateChange; + public float changeProgress; + + public boolean animateCounterChange; + + + float lastTextX; + float animateFromTextX; + boolean animateTextX; + + boolean animateTabCounter; + int lastTabCount = -1; + int animateFromTabCount; + StaticLayout inCounter; + StaticLayout outCounter; + StaticLayout stableCounter; + + StaticLayout lastTitleLayout; + String lastTitle; + private StaticLayout titleAnimateInLayout; + private 
StaticLayout titleAnimateOutLayout; + private StaticLayout titleAnimateStableLayout; + private boolean animateTextChange; + private boolean animateTextChangeOut; + private boolean animateTabWidth; + private float animateFromWidth; + private float titleXOffset; + private int lastTitleWidth; + private int animateFromTitleWidth; + private int lastCountWidth; + private float lastCounterWidth; + private float animateFromCountWidth; + private float animateFromCounterWidth; + private float lastTabWidth; + private float animateFromTabWidth; + private float lastWidth; + public TabView(Context context) { super(context); } @@ -198,25 +239,40 @@ public class FilterTabsView extends FrameLayout { } } - int counterWidth; + float counterWidth; int countWidth; String counterText; - if (currentTab.counter > 0) { - counterText = String.format("%d", currentTab.counter); + + boolean animateCounterEnter = animateFromTabCount == 0 && animateTabCounter; + boolean animateCounterRemove = animateFromTabCount > 0 && currentTab.counter == 0 && animateTabCounter; + boolean animateCounterReplace = animateFromTabCount > 0 && currentTab.counter > 0 && animateTabCounter; + + if (currentTab.counter > 0 || animateCounterRemove) { + if (animateCounterRemove) { + counterText = String.format("%d", animateFromTabCount); + } else { + counterText = String.format("%d", currentTab.counter); + } counterWidth = (int) Math.ceil(textCounterPaint.measureText(counterText)); - countWidth = Math.max(AndroidUtilities.dp(10), counterWidth) + AndroidUtilities.dp(10); + countWidth = (int) (Math.max(AndroidUtilities.dp(10), counterWidth) + AndroidUtilities.dp(10)); } else { counterText = null; counterWidth = 0; countWidth = 0; } + + if (currentTab.id != Integer.MAX_VALUE && (isEditing || editingStartAnimationProgress != 0)) { countWidth = (int) (countWidth + (AndroidUtilities.dp(20) - countWidth) * editingStartAnimationProgress); } - tabWidth = currentTab.titleWidth + (countWidth != 0 ? 
countWidth + AndroidUtilities.dp(6 * (counterText != null ? 1.0f : editingStartAnimationProgress)) : 0); - int textX = (getMeasuredWidth() - tabWidth) / 2; + tabWidth = currentTab.titleWidth + ((countWidth != 0 && !animateCounterRemove) ? countWidth + AndroidUtilities.dp(6 * (counterText != null ? 1.0f : editingStartAnimationProgress)) : 0); + float textX = (getMeasuredWidth() - tabWidth) / 2f; + if (animateTextX) { + textX = textX * changeProgress + animateFromTextX * (1f - changeProgress); + } + if (!TextUtils.equals(currentTab.title, currentText)) { currentText = currentTab.title; CharSequence text = Emoji.replaceEmoji(currentText, textPaint.getFontMetricsInt(), AndroidUtilities.dp(15), false); @@ -224,14 +280,45 @@ public class FilterTabsView extends FrameLayout { textHeight = textLayout.getHeight(); textOffsetX = (int) -textLayout.getLineLeft(0); } - if (textLayout != null) { - canvas.save(); - canvas.translate(textX + textOffsetX, (getMeasuredHeight() - textHeight) / 2 + 1); - textLayout.draw(canvas); - canvas.restore(); + + + float titleOffsetX = 0; + if (animateTextChange) { + titleOffsetX = titleXOffset * (animateTextChangeOut ? changeProgress : 1f - changeProgress); + if (titleAnimateStableLayout != null) { + canvas.save(); + canvas.translate(textX + textOffsetX + titleOffsetX, (getMeasuredHeight() - textHeight) / 2f + 1); + titleAnimateStableLayout.draw(canvas); + canvas.restore(); + } + if (titleAnimateInLayout != null) { + canvas.save(); + int alpha = textPaint.getAlpha(); + textPaint.setAlpha((int) (alpha * (animateTextChangeOut ? 1f - changeProgress : changeProgress))); + canvas.translate(textX + textOffsetX + titleOffsetX, (getMeasuredHeight() - textHeight) / 2f + 1); + titleAnimateInLayout.draw(canvas); + canvas.restore(); + textPaint.setAlpha(alpha); + } + if (titleAnimateOutLayout != null) { + canvas.save(); + int alpha = textPaint.getAlpha(); + textPaint.setAlpha((int) (alpha * (animateTextChangeOut ? 
changeProgress : 1f - changeProgress))); + canvas.translate(textX + textOffsetX + titleOffsetX, (getMeasuredHeight() - textHeight) / 2f + 1); + titleAnimateOutLayout.draw(canvas); + canvas.restore(); + textPaint.setAlpha(alpha); + } + } else { + if (textLayout != null) { + canvas.save(); + canvas.translate(textX + textOffsetX, (getMeasuredHeight() - textHeight) / 2f + 1); + textLayout.draw(canvas); + canvas.restore(); + } } - if (counterText != null || currentTab.id != Integer.MAX_VALUE && (isEditing || editingStartAnimationProgress != 0)) { + if (animateCounterEnter || counterText != null || currentTab.id != Integer.MAX_VALUE && (isEditing || editingStartAnimationProgress != 0)) { if (aBackgroundColorKey == null) { textCounterPaint.setColor(Theme.getColor(backgroundColorKey)); } else { @@ -251,7 +338,16 @@ public class FilterTabsView extends FrameLayout { counterPaint.setColor(textPaint.getColor()); } - int x = textX + currentTab.titleWidth + AndroidUtilities.dp(6); + float x; + float titleWidth = currentTab.titleWidth; + if (animateTextChange) { + titleWidth = animateFromTitleWidth * (1f - changeProgress) + currentTab.titleWidth * changeProgress; + } + if (animateTextChange && titleAnimateOutLayout == null) { + x = textX - titleXOffset + titleOffsetX + titleWidth + AndroidUtilities.dp(6); + } else { + x = textX + titleWidth + AndroidUtilities.dp(6); + } int countTop = (getMeasuredHeight() - AndroidUtilities.dp(20)) / 2; if (currentTab.id != Integer.MAX_VALUE && (isEditing || editingStartAnimationProgress != 0) && counterText == null) { @@ -260,14 +356,66 @@ public class FilterTabsView extends FrameLayout { counterPaint.setAlpha(255); } - rect.set(x, countTop, x + countWidth, countTop + AndroidUtilities.dp(20)); + + float w = (animateCounterReplace && animateFromCountWidth != countWidth) ? 
animateFromCountWidth * (1f - changeProgress) + countWidth * changeProgress : countWidth; + if (animateCounterReplace) { + counterWidth = animateFromCounterWidth * (1f - changeProgress) + counterWidth * changeProgress; + } + rect.set(x, countTop, x + w, countTop + AndroidUtilities.dp(20)); + if (animateCounterEnter || animateCounterRemove) { + canvas.save(); + float s = animateCounterEnter ? changeProgress : 1f - changeProgress; + canvas.scale(s, s, rect.centerX(), rect.centerY()); + } canvas.drawRoundRect(rect, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, counterPaint); - if (counterText != null) { - if (currentTab.id != Integer.MAX_VALUE) { - textCounterPaint.setAlpha((int) (255 * (1.0f - editingStartAnimationProgress))); + if (animateCounterReplace) { + float y = countTop; + if (inCounter != null) { + y += (AndroidUtilities.dp(20) - (inCounter.getLineBottom(0) - inCounter.getLineTop(0))) / 2f; + } else if (outCounter != null) { + y += (AndroidUtilities.dp(20) - (outCounter.getLineBottom(0) - outCounter.getLineTop(0))) / 2f; + } else if (stableCounter != null) { + y += (AndroidUtilities.dp(20) - (stableCounter.getLineBottom(0) - stableCounter.getLineTop(0))) / 2f; } - canvas.drawText(counterText, rect.left + (rect.width() - counterWidth) / 2, countTop + AndroidUtilities.dp(14.5f), textCounterPaint); + float alpha = 1f; + if (currentTab.id != Integer.MAX_VALUE) { + alpha = (1.0f - editingStartAnimationProgress); + } + if (inCounter != null) { + canvas.save(); + textCounterPaint.setAlpha((int) (255 * alpha * changeProgress)); + canvas.translate(rect.left + (rect.width() - counterWidth) / 2, (1f - changeProgress) * AndroidUtilities.dp(15) + y); + inCounter.draw(canvas); + canvas.restore(); + } + if (outCounter != null) { + canvas.save(); + textCounterPaint.setAlpha((int) (255 * alpha * (1f - changeProgress))); + canvas.translate(rect.left + (rect.width() - counterWidth) / 2, changeProgress * -AndroidUtilities.dp(15) + y); + 
outCounter.draw(canvas); + canvas.restore(); + } + + if (stableCounter != null) { + canvas.save(); + textCounterPaint.setAlpha((int) (255 * alpha)); + canvas.translate(rect.left + (rect.width() - counterWidth) / 2, y); + stableCounter.draw(canvas); + canvas.restore(); + } + textCounterPaint.setAlpha(255); + } else { + if (counterText != null) { + if (currentTab.id != Integer.MAX_VALUE) { + textCounterPaint.setAlpha((int) (255 * (1.0f - editingStartAnimationProgress))); + } + canvas.drawText(counterText, rect.left + (rect.width() - counterWidth) / 2, countTop + AndroidUtilities.dp(14.5f), textCounterPaint); + } + } + + if (animateCounterEnter || animateCounterRemove) { + canvas.restore(); } if (currentTab.id != Integer.MAX_VALUE && (isEditing || editingStartAnimationProgress != 0)) { deletePaint.setColor(textCounterPaint.getColor()); @@ -280,6 +428,127 @@ public class FilterTabsView extends FrameLayout { if (currentTab.id != Integer.MAX_VALUE && editingAnimationProgress != 0) { canvas.restore(); } + + lastTextX = textX; + lastTabCount = currentTab.counter; + lastTitleLayout = textLayout; + lastTitle = currentText; + lastTitleWidth = currentTab.titleWidth; + lastCountWidth = countWidth; + lastCounterWidth = counterWidth; + lastTabWidth = tabWidth; + lastWidth = getMeasuredWidth(); + } + + public boolean animateChange() { + boolean changed = false; + if (currentTab.counter != lastTabCount) { + animateTabCounter = true; + animateFromTabCount = lastTabCount; + animateFromCountWidth = lastCountWidth; + animateFromCounterWidth = lastCounterWidth; + if (animateFromTabCount > 0 && currentTab.counter > 0) { + String oldStr = String.valueOf(animateFromTabCount); + String newStr = String.valueOf(currentTab.counter); + + if (oldStr.length() == newStr.length()) { + SpannableStringBuilder oldSpannableStr = new SpannableStringBuilder(oldStr); + SpannableStringBuilder newSpannableStr = new SpannableStringBuilder(newStr); + SpannableStringBuilder stableStr = new 
SpannableStringBuilder(newStr); + for (int i = 0; i < oldStr.length(); i++) { + if (oldStr.charAt(i) == newStr.charAt(i)) { + oldSpannableStr.setSpan(new EmptyStubSpan(), i, i + 1, 0); + newSpannableStr.setSpan(new EmptyStubSpan(), i, i + 1, 0); + } else { + stableStr.setSpan(new EmptyStubSpan(), i, i + 1, 0); + } + } + + int countOldWidth = (int) Math.ceil(Theme.dialogs_countTextPaint.measureText(oldStr)); + outCounter = new StaticLayout(oldSpannableStr, textCounterPaint, countOldWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + stableCounter = new StaticLayout(stableStr, textCounterPaint, countOldWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + inCounter = new StaticLayout(newSpannableStr, textCounterPaint, countOldWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + } else { + int countOldWidth = (int) Math.ceil(Theme.dialogs_countTextPaint.measureText(oldStr)); + outCounter = new StaticLayout(oldStr, textCounterPaint, countOldWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + int countNewWidth = (int) Math.ceil(Theme.dialogs_countTextPaint.measureText(newStr)); + inCounter = new StaticLayout(newStr, textCounterPaint, countNewWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + } + } + changed = true; + } + + int countWidth; + String counterText = null; + if (currentTab.counter > 0) { + counterText = String.format("%d", currentTab.counter); + int counterWidth = (int) Math.ceil(textCounterPaint.measureText(counterText)); + countWidth = Math.max(AndroidUtilities.dp(10), counterWidth) + AndroidUtilities.dp(10); + } else { + countWidth = 0; + } + int tabWidth = currentTab.titleWidth + (countWidth != 0 ? countWidth + AndroidUtilities.dp(6 * (counterText != null ? 
1.0f : editingStartAnimationProgress)) : 0); + int textX = (getMeasuredWidth() - tabWidth) / 2; + + if (textX != lastTextX) { + animateTextX = true; + animateFromTextX = lastTextX; + changed = true; + } + + if (lastTitle != null && !currentTab.title.equals(lastTitle)) { + boolean animateOut; + String maxStr; + String substring; + if (lastTitle.length() > currentTab.title.length()) { + animateOut = true; + maxStr = lastTitle; + substring = currentTab.title; + } else { + animateOut = false; + maxStr = currentTab.title; + substring = lastTitle; + } + int startFrom = maxStr.indexOf(substring); + if (startFrom >= 0) { + CharSequence text = Emoji.replaceEmoji(maxStr, textPaint.getFontMetricsInt(), AndroidUtilities.dp(15), false); + SpannableStringBuilder inStr = new SpannableStringBuilder(text); + SpannableStringBuilder stabeStr = new SpannableStringBuilder(text); + if (startFrom != 0) { + stabeStr.setSpan(new EmptyStubSpan(), 0, startFrom, 0); + } + if (startFrom + substring.length() != maxStr.length()) { + stabeStr.setSpan(new EmptyStubSpan(), startFrom + substring.length(), maxStr.length(), 0); + } + inStr.setSpan(new EmptyStubSpan(), startFrom, startFrom + substring.length(), 0); + + titleAnimateInLayout = new StaticLayout(inStr, textPaint, AndroidUtilities.dp(400), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0, false); + titleAnimateStableLayout = new StaticLayout(stabeStr, textPaint, AndroidUtilities.dp(400), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0, false); + animateTextChange = true; + animateTextChangeOut = animateOut; + titleXOffset = startFrom == 0 ? 
0 : -titleAnimateStableLayout.getPrimaryHorizontal(startFrom); + animateFromTitleWidth = lastTitleWidth; + titleAnimateOutLayout = null; + changed = true; + } else { + titleAnimateInLayout = new StaticLayout(currentTab.title, textPaint, AndroidUtilities.dp(400), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0, false); + titleAnimateOutLayout = new StaticLayout(lastTitle, textPaint, AndroidUtilities.dp(400), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0, false); + titleAnimateStableLayout = null; + animateTextChange = true; + titleXOffset = 0; + animateFromTitleWidth = lastTitleWidth; + changed = true; + } + } + + if (tabWidth != lastTabWidth || getMeasuredWidth() != lastWidth) { + animateTabWidth = true; + animateFromTabWidth = lastTabWidth; + animateFromWidth = lastWidth; + changed = true; + } + + return changed; } } @@ -290,11 +559,6 @@ public class FilterTabsView extends FrameLayout { private ArrayList tabs = new ArrayList<>(); - private Bitmap crossfadeBitmap; - private Paint crossfadePaint = new Paint(); - private float crossfadeAlpha; - private boolean commitCrossfade; - private boolean isEditing; private long lastEditingAnimationTime; private boolean editingForwardAnimation; @@ -344,6 +608,7 @@ public class FilterTabsView extends FrameLayout { private CubicBezierInterpolator interpolator = CubicBezierInterpolator.EASE_OUT_QUINT; private SparseIntArray positionToId = new SparseIntArray(5); + private SparseIntArray positionToStableId = new SparseIntArray(5); private SparseIntArray idToPosition = new SparseIntArray(5); private SparseIntArray positionToWidth = new SparseIntArray(5); private SparseIntArray positionToX = new SparseIntArray(5); @@ -353,6 +618,8 @@ public class FilterTabsView extends FrameLayout { private float animationTime; private int previousPosition; private int previousId; + DefaultItemAnimator itemAnimator; + private Runnable animationRunnable = new Runnable() { @Override public void run() { @@ -442,10 +709,114 @@ public class FilterTabsView extends 
FrameLayout { return super.canHighlightChildAt(child, x, y); } }; - ((DefaultItemAnimator) listView.getItemAnimator()).setDelayAnimations(false); + listView.setClipChildren(false); + itemAnimator = new DefaultItemAnimator() { + + @Override + public void runPendingAnimations() { + boolean removalsPending = !mPendingRemovals.isEmpty(); + boolean movesPending = !mPendingMoves.isEmpty(); + boolean changesPending = !mPendingChanges.isEmpty(); + boolean additionsPending = !mPendingAdditions.isEmpty(); + if (removalsPending || movesPending || additionsPending || changesPending) { + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0.1f); + valueAnimator.addUpdateListener(valueAnimator12 -> { + listView.invalidate(); + invalidate(); + }); + valueAnimator.setDuration(getMoveDuration()); + valueAnimator.start(); + } + super.runPendingAnimations(); + } + + @Override + public boolean animateMove(RecyclerView.ViewHolder holder, ItemHolderInfo info, int fromX, int fromY, int toX, int toY) { + if (holder.itemView instanceof TabView) { + final View view = holder.itemView; + fromX += (int) holder.itemView.getTranslationX(); + fromY += (int) holder.itemView.getTranslationY(); + resetAnimation(holder); + int deltaX = toX - fromX; + int deltaY = toY - fromY; + if (deltaX != 0) { + view.setTranslationX(-deltaX); + } + if (deltaY != 0) { + view.setTranslationY(-deltaY); + } + + TabView tabView = (TabView) holder.itemView; + boolean animateChange = tabView.animateChange(); + if (animateChange) { + tabView.changeProgress = 0; + tabView.animateChange = true; + invalidate(); + } + + if (deltaX == 0 && deltaY == 0 && !animateChange) { + dispatchMoveFinished(holder); + return false; + } + + mPendingMoves.add(new MoveInfo(holder, fromX, fromY, toX, toY)); + return true; + } + return super.animateMove(holder, info, fromX, fromY, toX, toY); + } + + @Override + protected void animateMoveImpl(RecyclerView.ViewHolder holder, MoveInfo moveInfo) { + super.animateMoveImpl(holder, moveInfo); + if 
(holder.itemView instanceof TabView) { + TabView tabView = (TabView) holder.itemView; + if (tabView.animateChange) { + if (tabView.changeAnimator != null) { + tabView.changeAnimator.removeAllListeners(); + tabView.changeAnimator.cancel(); + } + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, 1f); + valueAnimator.addUpdateListener(valueAnimator1 -> { + tabView.changeProgress = (float) valueAnimator1.getAnimatedValue(); + tabView.invalidate(); + }); + valueAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + tabView.animateChange = false; + tabView.animateTabCounter = false; + tabView.animateCounterChange = false; + tabView.animateTextChange = false; + tabView.animateTextX = false; + tabView.animateTabWidth = false; + tabView.changeAnimator = null; + tabView.invalidate(); + } + }); + tabView.changeAnimator = valueAnimator; + valueAnimator.setDuration(getMoveDuration()); + valueAnimator.start(); + } + } + } + + @Override + public void onMoveFinished(RecyclerView.ViewHolder item) { + super.onMoveFinished(item); + item.itemView.setTranslationX(0); + } + }; + itemAnimator.setDelayAnimations(false); + listView.setItemAnimator(itemAnimator); listView.setSelectorType(7); listView.setSelectorDrawableColor(Theme.getColor(selectorColorKey)); listView.setLayoutManager(layoutManager = new LinearLayoutManager(context, LinearLayoutManager.HORIZONTAL, false) { + + @Override + public boolean supportsPredictiveItemAnimations() { + return true; + } + @Override public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) { LinearSmoothScroller linearSmoothScroller = new LinearSmoothScroller(recyclerView.getContext()) { @@ -478,12 +849,15 @@ public class FilterTabsView extends FrameLayout { return super.scrollHorizontallyBy(dx, recycler, state); } }); - ItemTouchHelper itemTouchHelper = new ItemTouchHelper(new TouchHelperCallback()); + itemTouchHelper = new 
ItemTouchHelper(new TouchHelperCallback()); itemTouchHelper.attachToRecyclerView(listView); listView.setPadding(AndroidUtilities.dp(7), 0, AndroidUtilities.dp(7), 0); listView.setClipToPadding(false); listView.setDrawSelectorBehind(true); - listView.setAdapter(adapter = new ListAdapter(context)); + adapter = new ListAdapter(context); + adapter.setHasStableIds(true); + listView.setAdapter(adapter); + listView.setOnItemClickListener((view, position, x, y) -> { if (!delegate.canPerformActions()) { return; @@ -596,36 +970,13 @@ public class FilterTabsView extends FrameLayout { selectedTabId = -1; } - public void beginCrossfade() { - try { - Bitmap bitmap = Bitmap.createBitmap(getMeasuredWidth(), getMeasuredHeight(), Bitmap.Config.ARGB_8888); - Canvas canvas = new Canvas(bitmap); - this.draw(canvas); - crossfadeBitmap = bitmap; - crossfadeAlpha = 1.0f; - commitCrossfade = false; - listView.invalidate(); - invalidate(); - } catch (Throwable e) { - FileLog.e(e); - } - } - - public void commitCrossfade() { - if (crossfadeBitmap == null) { - return; - } - commitCrossfade = true; - listView.invalidate(); - invalidate(); - } - - public void addTab(int id, String text) { + public void addTab(int id, int stableId, String text) { int position = tabs.size(); if (position == 0 && selectedTabId == -1) { selectedTabId = id; } positionToId.put(position, id); + positionToStableId.put(position, stableId); idToPosition.put(id, position); if (selectedTabId != -1 && selectedTabId == id) { currentPosition = position; @@ -636,7 +987,8 @@ public class FilterTabsView extends FrameLayout { tabs.add(tab); } - public void finishAddingTabs() { + public void finishAddingTabs(boolean animated) { + listView.setItemAnimator(animated ? 
itemAnimator : null); adapter.notifyDataSetChanged(); } @@ -696,8 +1048,8 @@ public class FilterTabsView extends FrameLayout { if (child == listView) { final int height = getMeasuredHeight(); selectorDrawable.setAlpha((int) (255 * listView.getAlpha())); - int indicatorX = 0; - int indicatorWidth = 0; + float indicatorX = 0; + float indicatorWidth = 0; if (animatingIndicator || manualScrollingToPosition != -1) { int position = layoutManager.findFirstVisibleItemPosition(); if (position != RecyclerListView.NO_POSITION) { @@ -729,18 +1081,15 @@ public class FilterTabsView extends FrameLayout { RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(currentPosition); if (holder != null) { TabView tabView = (TabView) holder.itemView; - indicatorWidth = Math.max(AndroidUtilities.dp(40), tabView.tabWidth); - indicatorX = (int) (tabView.getX() + (tabView.getMeasuredWidth() - indicatorWidth) / 2); + indicatorWidth = Math.max(AndroidUtilities.dp(40), tabView.animateTabWidth ? tabView.animateFromTabWidth * (1f - tabView.changeProgress) + tabView.tabWidth * tabView.changeProgress : tabView.tabWidth); + float viewWidth = tabView.animateTabWidth ? 
tabView.animateFromWidth * (1f - tabView.changeProgress) + tabView.getMeasuredWidth() * tabView.changeProgress : tabView.getMeasuredWidth(); + indicatorX = (int) (tabView.getX() + (viewWidth - indicatorWidth) / 2); } } if (indicatorWidth != 0) { - selectorDrawable.setBounds(indicatorX, height - AndroidUtilities.dpr(4), indicatorX + indicatorWidth, height); + selectorDrawable.setBounds((int) indicatorX, height - AndroidUtilities.dpr(4), (int) (indicatorX + indicatorWidth), height); selectorDrawable.draw(canvas); } - if (crossfadeBitmap != null) { - crossfadePaint.setAlpha((int) (crossfadeAlpha * 255)); - canvas.drawBitmap(crossfadeBitmap, 0, 0, crossfadePaint); - } } long newTime = SystemClock.elapsedRealtime(); long dt = Math.min(17, newTime - lastEditingAnimationTime); @@ -787,17 +1136,6 @@ public class FilterTabsView extends FrameLayout { invalidate = true; } } - if (commitCrossfade) { - crossfadeAlpha -= dt / 180.0f; - if (crossfadeAlpha < 0.0f) { - commitCrossfade = false; - if (crossfadeBitmap != null) { - crossfadeBitmap.recycle(); - crossfadeBitmap = null; - } - } - invalidate = true; - } if (invalidate) { listView.invalidateViews(); invalidate(); @@ -959,7 +1297,6 @@ public class FilterTabsView extends FrameLayout { if (oldWidth != width || invalidated) { invalidated = true; requestLayout(); - adapter.notifyDataSetChanged(); allTabsWidth = 0; tabs.get(0).setTitle(LocaleController.getString("FilterAllChats", R.string.FilterAllChats)); for (int b = 0; b < N; b++) { @@ -969,7 +1306,8 @@ public class FilterTabsView extends FrameLayout { } } if (changed) { - listView.invalidateViews(); + listView.setItemAnimator(itemAnimator); + adapter.notifyDataSetChanged(); } } @@ -988,6 +1326,7 @@ public class FilterTabsView extends FrameLayout { if (oldWidth != width || invalidated) { invalidated = true; requestLayout(); + listView.setItemAnimator(itemAnimator); adapter.notifyDataSetChanged(); allTabsWidth = 0; 
tabs.get(0).setTitle(LocaleController.getString("FilterAllChats", R.string.FilterAllChats)); @@ -1011,8 +1350,8 @@ public class FilterTabsView extends FrameLayout { } @Override - public long getItemId(int i) { - return i; + public long getItemId(int position) { + return positionToStableId.get(position); } @Override @@ -1058,6 +1397,12 @@ public class FilterTabsView extends FrameLayout { tab1.id = tab2.id; tab2.id = temp; + int fromStableId = positionToStableId.get(fromIndex); + int toStableId = positionToStableId.get(toIndex); + + positionToStableId.put(fromIndex, toStableId); + positionToStableId.put(toIndex, fromStableId); + delegate.onPageReorder(tab2.id, tab1.id); if (currentPosition == fromIndex) { @@ -1082,6 +1427,7 @@ public class FilterTabsView extends FrameLayout { updateTabsWidths(); orderChanged = true; + listView.setItemAnimator(itemAnimator); notifyItemMoved(fromIndex, toIndex); } } @@ -1110,11 +1456,6 @@ public class FilterTabsView extends FrameLayout { return true; } - @Override - public void onChildDraw(Canvas c, RecyclerView recyclerView, RecyclerView.ViewHolder viewHolder, float dX, float dY, int actionState, boolean isCurrentlyActive) { - super.onChildDraw(c, recyclerView, viewHolder, dX, dY, actionState, isCurrentlyActive); - } - @Override public void onSelectedChanged(RecyclerView.ViewHolder viewHolder, int actionState) { if (actionState != ItemTouchHelper.ACTION_STATE_IDLE) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java index 4586ea899..d4f17320b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java @@ -21,21 +21,29 @@ public class FlickerLoadingView extends View { public final static int FILES_TYPE = 3; public final static int AUDIO_TYPE = 4; public final static int LINKS_TYPE = 5; + public final 
static int USERS_TYPE = 6; - int gradientWidth; - LinearGradient gradient; - Paint paint = new Paint(); + private int gradientWidth; + private LinearGradient gradient; + private Paint paint = new Paint(); + private Paint headerPaint = new Paint(); private long lastUpdateTime; private int totalTranslation; private Matrix matrix; - RectF rectF = new RectF(); - int color0; - int color1; - private boolean showDate = true; + private RectF rectF = new RectF(); + private int color0; + private int color1; + private int skipDrawItemsCount; + private boolean showDate = true; + private boolean useHeaderOffset; private boolean isSingleCell; - int viewType; + private int viewType; + + private String colorKey1 = Theme.key_windowBackgroundWhite; + private String colorKey2 = Theme.key_windowBackgroundGray; + private String colorKey3; public void setViewType(int type) { this.viewType = type; @@ -54,6 +62,13 @@ public class FlickerLoadingView extends View { return 2; } + public void setColors(String key1, String key2, String key3) { + colorKey1 = key1; + colorKey2 = key2; + colorKey3 = key3; + invalidate(); + } + public FlickerLoadingView(Context context) { super(context); matrix = new Matrix(); @@ -62,7 +77,7 @@ public class FlickerLoadingView extends View { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (isSingleCell) { - super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(getCellHeight(), MeasureSpec.EXACTLY)); + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(getCellHeight(MeasureSpec.getSize(widthMeasureSpec)), MeasureSpec.EXACTLY)); } else { super.onMeasure(widthMeasureSpec, heightMeasureSpec); } @@ -70,8 +85,8 @@ public class FlickerLoadingView extends View { @Override protected void onDraw(Canvas canvas) { - int color0 = Theme.getColor(Theme.key_dialogBackground); - int color1 = Theme.getColor(Theme.key_windowBackgroundGray); + int color0 = Theme.getColor(colorKey1); + int color1 = Theme.getColor(colorKey2); if 
(this.color1 != color1 || this.color0 != color0) { this.color0 = color0; this.color1 = color1; @@ -82,8 +97,16 @@ public class FlickerLoadingView extends View { } paint.setShader(gradient); } + + int h = 0; + if (useHeaderOffset) { + h += AndroidUtilities.dp(32); + if (colorKey3 != null) { + headerPaint.setColor(Theme.getColor(colorKey3)); + } + canvas.drawRect(0,0, getMeasuredWidth(), AndroidUtilities.dp(32), colorKey3 != null ? headerPaint : paint); + } if (getViewType() == DIALOG_TYPE) { - int h = 0; while (h < getMeasuredHeight()) { int r = AndroidUtilities.dp(25); canvas.drawCircle(checkRtl(AndroidUtilities.dp(9) + r), h + (AndroidUtilities.dp(78) >> 1), r, paint); @@ -102,26 +125,29 @@ public class FlickerLoadingView extends View { canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); } - h += getCellHeight(); + h += getCellHeight(getMeasuredWidth()); if (isSingleCell) { break; } } } else if (getViewType() == PHOTOS_TYPE) { int photoWidth = (getMeasuredWidth() - (AndroidUtilities.dp(2) * (getColumnsCount() - 1))) / getColumnsCount(); - int h = 0; - while (h < getMeasuredHeight()) { + int k = 0; + while (h < getMeasuredHeight() || isSingleCell) { for (int i = 0; i < getColumnsCount(); i++) { + if (k == 0 && i < skipDrawItemsCount) { + continue; + } int x = i * (photoWidth + AndroidUtilities.dp(2)); canvas.drawRect(x, h, x + photoWidth, h + photoWidth, paint); } h += photoWidth + AndroidUtilities.dp(2); - if (isSingleCell) { + k++; + if (isSingleCell && k >= 2) { break; } } } else if (getViewType() == 3) { - int h = 0; while (h < getMeasuredHeight()) { rectF.set(AndroidUtilities.dp(12), h + AndroidUtilities.dp(8), AndroidUtilities.dp(52), h + AndroidUtilities.dp(48)); checkRtl(rectF); @@ -141,13 +167,12 @@ public class FlickerLoadingView extends View { canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); } - h += getCellHeight(); + h += getCellHeight(getMeasuredWidth()); if (isSingleCell) { break; } } 
} else if (getViewType() == 4) { - int h = 0; while (h < getMeasuredHeight()) { int radius = AndroidUtilities.dp(44) >> 1; canvas.drawCircle(checkRtl(AndroidUtilities.dp(12) + radius), h + AndroidUtilities.dp(6) + radius, radius, paint); @@ -166,13 +191,12 @@ public class FlickerLoadingView extends View { canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); } - h += getCellHeight(); + h += getCellHeight(getMeasuredWidth()); if (isSingleCell) { break; } } } else if (getViewType() == 5) { - int h = 0; while (h < getMeasuredHeight()) { rectF.set(AndroidUtilities.dp(10), h + AndroidUtilities.dp(11), AndroidUtilities.dp(62), h + AndroidUtilities.dp(11 + 52)); checkRtl(rectF); @@ -196,7 +220,31 @@ public class FlickerLoadingView extends View { canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); } - h += getCellHeight(); + h += getCellHeight(getMeasuredWidth()); + if (isSingleCell) { + break; + } + } + } else if (getViewType() == 6) { + while (h < getMeasuredHeight()) { + int r = AndroidUtilities.dp(23); + canvas.drawCircle(checkRtl(AndroidUtilities.dp(9) + r), h + (AndroidUtilities.dp(64) >> 1), r, paint); + + rectF.set(AndroidUtilities.dp(68), h + AndroidUtilities.dp(17), AndroidUtilities.dp(260), h + AndroidUtilities.dp(25)); + checkRtl(rectF); + canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); + + rectF.set(AndroidUtilities.dp(68), h + AndroidUtilities.dp(39), AndroidUtilities.dp(140), h + AndroidUtilities.dp(47)); + checkRtl(rectF); + canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); + + if (showDate) { + rectF.set(getMeasuredWidth() - AndroidUtilities.dp(50), h + AndroidUtilities.dp(20), getMeasuredWidth() - AndroidUtilities.dp(12), h + AndroidUtilities.dp(28)); + checkRtl(rectF); + canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); + } + + h += getCellHeight(getMeasuredWidth()); if (isSingleCell) { 
break; } @@ -240,11 +288,11 @@ public class FlickerLoadingView extends View { } } - private int getCellHeight() { + private int getCellHeight(int width) { if (getViewType() == DIALOG_TYPE) { return AndroidUtilities.dp(78) + 1; } else if (getViewType() == PHOTOS_TYPE) { - int photoWidth = (getMeasuredWidth() - (AndroidUtilities.dp(2) * (getColumnsCount() - 1))) / getColumnsCount(); + int photoWidth = (width - (AndroidUtilities.dp(2) * (getColumnsCount() - 1))) / getColumnsCount(); return photoWidth + AndroidUtilities.dp(2); } else if (getViewType() == 3) { return AndroidUtilities.dp(56) + 1; @@ -252,6 +300,8 @@ public class FlickerLoadingView extends View { return AndroidUtilities.dp(56) + 1; } else if (getViewType() == 5) { return AndroidUtilities.dp(80); + } else if (getViewType() == USERS_TYPE) { + return AndroidUtilities.dp(64); } return 0; } @@ -259,4 +309,12 @@ public class FlickerLoadingView extends View { public void showDate(boolean showDate) { this.showDate = showDate; } + + public void setUseHeaderOffset(boolean useHeaderOffset) { + this.useHeaderOffset = useHeaderOffset; + } + + public void skipDrawItemsCount(int i) { + skipDrawItemsCount = i; + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java index 2181d6421..ae9b9dd0f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java @@ -15,24 +15,34 @@ import android.animation.ObjectAnimator; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; +import android.graphics.Canvas; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Typeface; import android.os.Build; import android.os.Bundle; + import androidx.annotation.Keep; + +import 
android.os.SystemClock; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.TextUtils; import android.util.TypedValue; import android.view.Gravity; +import android.view.HapticFeedbackConstants; +import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; +import android.view.accessibility.AccessibilityNodeInfo; +import android.widget.Button; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.TextView; +import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; import org.telegram.messenger.LocaleController; import org.telegram.messenger.LocationController; import org.telegram.messenger.MediaController; @@ -43,6 +53,7 @@ import org.telegram.messenger.R; import org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; +import org.telegram.messenger.voip.VoIPBaseService; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; @@ -50,43 +61,51 @@ import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ChatActivity; +import org.telegram.ui.Components.voip.VoIPHelper; import org.telegram.ui.DialogsActivity; +import org.telegram.ui.GroupCallActivity; import org.telegram.ui.LaunchActivity; import org.telegram.ui.LocationActivity; import java.util.ArrayList; -public class FragmentContextView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate { +public class FragmentContextView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, VoIPBaseService.StateListener { private ImageView playButton; private PlayPauseDrawable playPauseDrawable; private TextView titleTextView; + private 
AudioPlayerAlert.ClippingTextViewSwitcher subtitleTextView; private AnimatorSet animatorSet; private BaseFragment fragment; private View applyingView; private FrameLayout frameLayout; + private View shadow; private View selector; + private RLottieImageView muteButton; + private RLottieDrawable muteDrawable; private ImageView closeButton; private ImageView playbackSpeedButton; private FragmentContextView additionalContextView; + private TextView joinButton; + + private boolean isMuted; private MessageObject lastMessageObject; - private float yPosition; private float topPadding; private boolean visible; private int currentStyle = -1; private String lastString; private boolean isMusic; private boolean supportsCalls = true; + private AvatarsImageView avatars; - private int account = UserConfig.selectedAccount; + private final int account = UserConfig.selectedAccount; private boolean isLocation; private FragmentContextViewDelegate delegate; private boolean firstLocationsLoaded; - private boolean loadingSharingCount; private int lastLocationSharingCount = -1; private Runnable checkLocationRunnable = new Runnable() { @Override @@ -97,6 +116,24 @@ public class FragmentContextView extends FrameLayout implements NotificationCent }; private int animationIndex = -1; + boolean checkCallAfterAnimation; + + @Override + public void onAudioSettingsChanged() { + boolean newMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); + if (isMuted != newMuted) { + isMuted = newMuted; + muteDrawable.setCustomEndFrame(isMuted ? 
15 : 29); + muteDrawable.setCurrentFrame(muteDrawable.getCustomEndFrame(), false, true); + muteButton.invalidate(); + Theme.getFragmentContextViewWavesDrawable().updateState(visible); + } + if (isMuted) { + micAmplitude = 0; + Theme.getFragmentContextViewWavesDrawable().setAmplitude(0); + } + } + public interface FragmentContextViewDelegate { void onAnimation(boolean start, boolean show); } @@ -117,14 +154,21 @@ public class FragmentContextView extends FrameLayout implements NotificationCent } setTag(1); - frameLayout = new FrameLayout(context); - frameLayout.setWillNotDraw(false); + frameLayout = new FrameLayout(context) { + @Override + public void invalidate() { + super.invalidate(); + if (avatars != null && avatars.getVisibility() == VISIBLE) { + avatars.invalidate(); + } + } + }; addView(frameLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.TOP | Gravity.LEFT, 0, 0, 0, 0)); selector = new View(context); frameLayout.addView(selector, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - View shadow = new View(context); + shadow = new View(context); shadow.setBackgroundResource(R.drawable.blockpanel_shadow); addView(shadow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 2, Gravity.LEFT | Gravity.TOP, 0, 36, 0, 0)); @@ -155,6 +199,33 @@ public class FragmentContextView extends FrameLayout implements NotificationCent titleTextView.setGravity(Gravity.CENTER_VERTICAL | Gravity.LEFT); addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | Gravity.TOP, 35, 0, 36, 0)); + subtitleTextView = new AudioPlayerAlert.ClippingTextViewSwitcher(context) { + @Override + protected TextView createTextView() { + TextView textView = new TextView(context); + textView.setMaxLines(1); + textView.setLines(1); + textView.setSingleLine(true); + textView.setEllipsize(TextUtils.TruncateAt.END); + textView.setGravity(Gravity.LEFT); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); + 
textView.setTextColor(Theme.getColor(Theme.key_inappPlayerClose)); + return textView; + } + }; + addView(subtitleTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | Gravity.TOP, 35, 10, 36, 0)); + + joinButton = new TextView(context); + joinButton.setText(LocaleController.getString("VoipChatJoin", R.string.VoipChatJoin)); + joinButton.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + joinButton.setBackground(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), Theme.getColor(Theme.key_featuredStickers_addButton), Theme.getColor(Theme.key_featuredStickers_addButtonPressed))); + joinButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + joinButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + joinButton.setGravity(Gravity.CENTER); + joinButton.setPadding(AndroidUtilities.dp(14), 0, AndroidUtilities.dp(14), 0); + addView(joinButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 28, Gravity.TOP | Gravity.RIGHT, 0, 10, 14, 0)); + joinButton.setOnClickListener(v -> FragmentContextView.this.callOnClick()); + if (!location) { playbackSpeedButton = new ImageView(context); playbackSpeedButton.setScaleType(ImageView.ScaleType.CENTER); @@ -175,6 +246,137 @@ public class FragmentContextView extends FrameLayout implements NotificationCent updatePlaybackButton(); } + avatars = new AvatarsImageView(context); + avatars.setDelegate(() -> updateAvatars(true)); + avatars.setVisibility(GONE); + addView(avatars, LayoutHelper.createFrame(108, 36, Gravity.LEFT | Gravity.TOP)); + + muteDrawable = new RLottieDrawable(R.raw.voice_muted, "" + R.raw.voice_muted, AndroidUtilities.dp(16), AndroidUtilities.dp(20), true, null); + + muteButton = new RLottieImageView(context) { + boolean scheduled; + boolean pressed; + + private final Runnable toggleMicRunnable = () -> { + if (VoIPService.getSharedInstance() == null) { + return; + } + VoIPService.getSharedInstance().setMicMute(false, true, false); + if 
(muteDrawable.setCustomEndFrame(isMuted ? 15 : 29)) { + if (isMuted) { + muteDrawable.setCurrentFrame(0); + } else { + muteDrawable.setCurrentFrame(14); + } + } + muteButton.playAnimation(); + + Theme.getFragmentContextViewWavesDrawable().updateState(true); + }; + + + private final Runnable pressRunnable = () -> { + if (!scheduled || VoIPService.getSharedInstance() == null) { + return; + } + scheduled = false; + pressed = true; + isMuted = false; + + AndroidUtilities.runOnUIThread(toggleMicRunnable, 90); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + }; + + + @Override + public boolean onTouchEvent(MotionEvent event) { + if (currentStyle == 3) { + VoIPService service = VoIPService.getSharedInstance(); + if (service == null) { + AndroidUtilities.cancelRunOnUIThread(pressRunnable); + AndroidUtilities.cancelRunOnUIThread(toggleMicRunnable); + scheduled = false; + pressed = false; + return true; + } + if (event.getAction() == MotionEvent.ACTION_DOWN && service.isMicMute()) { + AndroidUtilities.runOnUIThread(pressRunnable, 300); + scheduled = true; + } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { + AndroidUtilities.cancelRunOnUIThread(toggleMicRunnable); + if (scheduled) { + AndroidUtilities.cancelRunOnUIThread(pressRunnable); + scheduled = false; + } else if (pressed) { + isMuted = true; + if (muteDrawable.setCustomEndFrame(isMuted ? 
15 : 29)) { + if (isMuted) { + muteDrawable.setCurrentFrame(0); + } else { + muteDrawable.setCurrentFrame(14); + } + } + muteButton.playAnimation(); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setMicMute(true, true, false); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + pressed = false; + Theme.getFragmentContextViewWavesDrawable().updateState(true); + MotionEvent cancel = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); + super.onTouchEvent(cancel); + cancel.recycle(); + return true; + } + } + return super.onTouchEvent(event); + } else { + return super.onTouchEvent(event); + } + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + info.setClassName(Button.class.getName()); + info.setText(isMuted ? LocaleController.getString("VoipUnmute", R.string.VoipUnmute) : LocaleController.getString("VoipMute", R.string.VoipMute)); + } + }; + muteButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_returnToCallText), PorterDuff.Mode.MULTIPLY)); + if (Build.VERSION.SDK_INT >= 21) { + muteButton.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_inappPlayerClose) & 0x19ffffff, 1, AndroidUtilities.dp(14))); + } + muteButton.setAnimation(muteDrawable); + muteButton.setScaleType(ImageView.ScaleType.CENTER); + muteButton.setVisibility(GONE); + addView(muteButton, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP, 0, 0, 2, 0)); + muteButton.setOnClickListener(v -> { + VoIPService voIPService = VoIPService.getSharedInstance(); + if (voIPService == null) { + return; + } + ChatObject.Call call = voIPService.groupCall; + AccountInstance accountInstance = AccountInstance.getInstance(voIPService.getAccount()); + TLRPC.Chat chat = voIPService.getChat(); + TLRPC.TL_groupCallParticipant participant = 
call.participants.get(accountInstance.getUserConfig().getClientUserId()); + if (participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(chat)) { + return; + } + + isMuted = !voIPService.isMicMute(); + voIPService.setMicMute(isMuted, false, true); + if (muteDrawable.setCustomEndFrame(isMuted ? 15 : 29)) { + if (isMuted) { + muteDrawable.setCurrentFrame(0); + } else { + muteDrawable.setCurrentFrame(14); + } + } + muteButton.playAnimation(); + Theme.getFragmentContextViewWavesDrawable().updateState(true); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + }); + closeButton = new ImageView(context); closeButton.setImageResource(R.drawable.miniplayer_close); closeButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_inappPlayerClose), PorterDuff.Mode.MULTIPLY)); @@ -234,7 +436,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent dialog_id = ((ChatActivity) fragment).getDialogId(); } if (messageObject.getDialogId() == dialog_id) { - ((ChatActivity) fragment).scrollToMessageId(messageObject.getId(), 0, false, 0, true); + ((ChatActivity) fragment).scrollToMessageId(messageObject.getId(), 0, false, 0, true, 0); } else { dialog_id = messageObject.getDialogId(); Bundle args = new Bundle(); @@ -279,6 +481,23 @@ public class FragmentContextView extends FrameLayout implements NotificationCent } else { fragment.showDialog(new SharingLocationsAlert(getContext(), this::openSharingLocation)); } + } else if (currentStyle == 3) { + // long d = Theme.getFragmentContextViewWavesDrawable().getRippleFinishedDelay(); + // AndroidUtilities.runOnUIThread(() -> { + if (VoIPService.getSharedInstance() != null && getContext() instanceof LaunchActivity) { + GroupCallActivity.create((LaunchActivity) getContext(), AccountInstance.getInstance(VoIPService.getSharedInstance().getAccount())); + } + // }, d); + } else if 
(currentStyle == 4) { + if (fragment.getParentActivity() == null) { + return; + } + ChatActivity chatActivity = (ChatActivity) fragment; + ChatObject.Call call = chatActivity.getGroupCall(); + if (call == null) { + return; + } + VoIPHelper.startCall(chatActivity.getMessagesController().getChat(call.chatId), false, fragment.getParentActivity()); } }); } @@ -340,7 +559,9 @@ public class FragmentContextView extends FrameLayout implements NotificationCent show = LocationController.getInstance(fragment.getCurrentAccount()).isSharingLocation(((ChatActivity) fragment).getDialogId()); } } else { - if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING) { + if (VoIPService.getSharedInstance() != null && !VoIPService.getSharedInstance().isHangingUp() && VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING) { + show = true; + } else if (fragment instanceof ChatActivity && ((ChatActivity) fragment).getGroupCall() != null) { show = true; } else { MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); @@ -359,13 +580,10 @@ public class FragmentContextView extends FrameLayout implements NotificationCent View view = applyingView != null ? applyingView : fragment.getFragmentView(); int additionalPadding = 0; if (additionalContextView != null && additionalContextView.getVisibility() == VISIBLE && additionalContextView.getParent() != null) { - additionalPadding = AndroidUtilities.dp(36); + additionalPadding = AndroidUtilities.dp(additionalContextView.getStyleHeight()); } if (view != null && getParent() != null) { - view.setPadding(0, (int) topPadding + additionalPadding, 0, 0); - } - if (isLocation && additionalContextView != null) { - ((LayoutParams) additionalContextView.getLayoutParams()).topMargin = -AndroidUtilities.dp(36) - (int) topPadding; + view.setPadding(0, (int) (getVisibility() == View.VISIBLE ? 
topPadding : 0) + additionalPadding, 0, 0); } } } @@ -374,15 +592,39 @@ public class FragmentContextView extends FrameLayout implements NotificationCent if (currentStyle == style) { return; } + if (currentStyle == 3) { + Theme.getFragmentContextViewWavesDrawable().removeParent(this); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().unregisterStateListener(this); + } + } currentStyle = style; + + + if (avatars != null) { + avatars.setStyle(currentStyle); + avatars.setLayoutParams(LayoutHelper.createFrame(108, getStyleHeight(), Gravity.LEFT | Gravity.TOP)); + } + frameLayout.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, getStyleHeight(), Gravity.TOP | Gravity.LEFT, 0, 0, 0, 0)); + shadow.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 2, Gravity.LEFT | Gravity.TOP, 0, getStyleHeight(), 0, 0)); + + if (topPadding > 0 && topPadding != AndroidUtilities.dp2(getStyleHeight())) { + updatePaddings(); + setTopPadding(AndroidUtilities.dp2(getStyleHeight())); + } if (style == 0 || style == 2) { selector.setBackground(Theme.getSelectorDrawable(false)); frameLayout.setBackgroundColor(Theme.getColor(Theme.key_inappPlayerBackground)); frameLayout.setTag(Theme.key_inappPlayerBackground); + titleTextView.setGravity(Gravity.CENTER_VERTICAL | Gravity.LEFT); titleTextView.setTextColor(Theme.getColor(Theme.key_inappPlayerTitle)); titleTextView.setTag(Theme.key_inappPlayerTitle); + subtitleTextView.setVisibility(GONE); + joinButton.setVisibility(GONE); closeButton.setVisibility(VISIBLE); playButton.setVisibility(VISIBLE); + muteButton.setVisibility(GONE); + avatars.setVisibility(GONE); titleTextView.setTypeface(Typeface.DEFAULT); titleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); if (style == 0) { @@ -397,19 +639,66 @@ public class FragmentContextView extends FrameLayout implements NotificationCent titleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | 
Gravity.TOP, 35 + 16, 0, 36, 0)); closeButton.setContentDescription(LocaleController.getString("AccDescrStopLiveLocation", R.string.AccDescrStopLiveLocation)); } - } else if (style == 1) { + } else if (style == 4) { + selector.setBackground(Theme.getSelectorDrawable(false)); + frameLayout.setBackgroundColor(Theme.getColor(Theme.key_inappPlayerBackground)); + frameLayout.setTag(Theme.key_inappPlayerBackground); + muteButton.setVisibility(GONE); + + subtitleTextView.setVisibility(VISIBLE); + joinButton.setVisibility(VISIBLE); + + titleTextView.setTextColor(Theme.getColor(Theme.key_inappPlayerPerformer)); + titleTextView.setTag(Theme.key_inappPlayerPerformer); + titleTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + titleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + titleTextView.setPadding(0, 0, 0, 0); + titleTextView.setText(LocaleController.getString("VoipGroupVoiceChat", R.string.VoipGroupVoiceChat)); + titleTextView.setGravity(Gravity.TOP | Gravity.LEFT); + + avatars.setVisibility(VISIBLE); + updateAvatars(false); + + closeButton.setVisibility(GONE); + playButton.setVisibility(GONE); + if (playbackSpeedButton != null) { + playbackSpeedButton.setVisibility(GONE); + } + } else if (style == 1 || style == 3) { selector.setBackground(null); - frameLayout.setBackgroundColor(Theme.getColor(Theme.key_returnToCallBackground)); - frameLayout.setTag(Theme.key_returnToCallBackground); - titleTextView.setText(LocaleController.getString("ReturnToCall", R.string.ReturnToCall)); + if (style == 3) { + updateGroupCallTitle(); + muteButton.setVisibility(VISIBLE); + avatars.setVisibility(VISIBLE); + updateAvatars(false); + isMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); + muteDrawable.setCustomEndFrame(isMuted ? 
15 : 29); + muteDrawable.setCurrentFrame(muteDrawable.getCustomEndFrame() - 1, false, true); + muteButton.invalidate(); + frameLayout.setBackground(null); + Theme.getFragmentContextViewWavesDrawable().addParent(this); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().registerStateListener(this); + } + invalidate(); + } else { + frameLayout.setTag(Theme.key_returnToCallBackground); + titleTextView.setText(LocaleController.getString("ReturnToCall", R.string.ReturnToCall)); + muteButton.setVisibility(GONE); + avatars.setVisibility(GONE); + frameLayout.setBackgroundColor(Theme.getColor(Theme.key_returnToCallBackground)); + } + titleTextView.setGravity(Gravity.CENTER_VERTICAL | Gravity.LEFT); titleTextView.setTextColor(Theme.getColor(Theme.key_returnToCallText)); titleTextView.setTag(Theme.key_returnToCallText); closeButton.setVisibility(GONE); playButton.setVisibility(GONE); + subtitleTextView.setVisibility(GONE); + joinButton.setVisibility(GONE); titleTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); titleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); titleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 2)); - titleTextView.setPadding(0, 0, 0, 0); + titleTextView.setPadding(AndroidUtilities.dp(112), 0, AndroidUtilities.dp(112), 0); if (playbackSpeedButton != null) { playbackSpeedButton.setVisibility(GONE); } @@ -419,6 +708,12 @@ public class FragmentContextView extends FrameLayout implements NotificationCent @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); + if (animatorSet != null) { + animatorSet.cancel(); + animatorSet = null; + } + visible = false; + NotificationCenter.getInstance(account).onAnimationFinish(animationIndex); topPadding = 0; if (isLocation) { NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.liveLocationsChanged); @@ -428,11 +723,24 @@ public class 
FragmentContextView extends FrameLayout implements NotificationCent NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.messagePlayingDidReset); NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.messagePlayingDidStart); + NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.groupCallUpdated); + NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.groupCallTypingsUpdated); } NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.messagePlayingSpeedChanged); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didStartedCall); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didEndCall); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.webRtcSpeakerAmplitudeEvent); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.groupCallVisibilityChanged); } + + if (currentStyle == 3) { + Theme.getFragmentContextViewWavesDrawable().removeParent(this); + } + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().unregisterStateListener(this); + } + wasDraw = false; } @Override @@ -450,25 +758,55 @@ public class FragmentContextView extends FrameLayout implements NotificationCent NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.messagePlayingDidReset); NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.messagePlayingDidStart); + NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.groupCallUpdated); + NotificationCenter.getInstance(a).addObserver(this, 
NotificationCenter.groupCallTypingsUpdated); } NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.messagePlayingSpeedChanged); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didStartedCall); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didEndCall); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.webRtcSpeakerAmplitudeEvent); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.groupCallVisibilityChanged); if (additionalContextView != null) { additionalContextView.checkVisibility(); } - if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING) { + + if (VoIPService.getSharedInstance() != null && !VoIPService.getSharedInstance().isHangingUp() && VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING && !GroupCallPip.isShowing()) { + checkCall(true); + } else if (fragment instanceof ChatActivity && ((ChatActivity) fragment).getGroupCall() != null && !GroupCallPip.isShowing()) { checkCall(true); } else { checkPlayer(true); updatePlaybackButton(); } } + + if (currentStyle == 3) { + Theme.getFragmentContextViewWavesDrawable().addParent(this); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().registerStateListener(this); + } + boolean newMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); + if (isMuted != newMuted) { + isMuted = newMuted; + muteDrawable.setCustomEndFrame(isMuted ? 
15 : 29); + muteDrawable.setCurrentFrame(muteDrawable.getCustomEndFrame() - 1, false, true); + muteButton.invalidate(); + } + } + + if (visible && topPadding == 0) { + updatePaddings(); + setTopPadding(AndroidUtilities.dp2(getStyleHeight())); + } + + speakerAmplitude = 0; + micAmplitude = 0; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, AndroidUtilities.dp2(38)); + super.onMeasure(widthMeasureSpec, AndroidUtilities.dp2(getStyleHeight() + 2)); } @Override @@ -483,14 +821,75 @@ public class FragmentContextView extends FrameLayout implements NotificationCent } } } else if (id == NotificationCenter.messagePlayingDidStart || id == NotificationCenter.messagePlayingPlayStateChanged || id == NotificationCenter.messagePlayingDidReset || id == NotificationCenter.didEndCall) { + if (currentStyle == 3) { + checkCall(false); + } checkPlayer(false); - } else if (id == NotificationCenter.didStartedCall) { + } else if (id == NotificationCenter.didStartedCall || id == NotificationCenter.groupCallUpdated || id == NotificationCenter.groupCallVisibilityChanged) { checkCall(false); + if (currentStyle == 3) { + VoIPService sharedInstance = VoIPService.getSharedInstance(); + if (sharedInstance != null && sharedInstance.groupCall != null) { + if (id == NotificationCenter.didStartedCall) { + sharedInstance.registerStateListener(this); + } + int currentCallState = sharedInstance.getCallState(); + if (currentCallState == VoIPService.STATE_WAIT_INIT || currentCallState == VoIPService.STATE_WAIT_INIT_ACK || currentCallState == VoIPService.STATE_CREATING || currentCallState == VoIPService.STATE_RECONNECTING) { + + } else { + TLRPC.TL_groupCallParticipant participant = sharedInstance.groupCall.participants.get(AccountInstance.getInstance(sharedInstance.getAccount()).getUserConfig().getClientUserId()); + if (participant != null && !participant.can_self_unmute && participant.muted && 
!ChatObject.canManageCalls(sharedInstance.getChat())) { + sharedInstance.setMicMute(true, false, false); + final long now = SystemClock.uptimeMillis(); + final MotionEvent e = MotionEvent.obtain(now, now, MotionEvent.ACTION_CANCEL, 0, 0, 0); + muteButton.dispatchTouchEvent(e); + } + } + } + } + } else if (id == NotificationCenter.groupCallTypingsUpdated) { + if (visible && currentStyle == 4) { + ChatObject.Call call = ((ChatActivity) fragment).getGroupCall(); + if (call != null) { + if (call.call.participants_count == 0) { + subtitleTextView.setText(LocaleController.getString("MembersTalkingNobody", R.string.MembersTalkingNobody)); + } else { + subtitleTextView.setText(LocaleController.formatPluralString("Participants", call.call.participants_count)); + } + } + updateAvatars(true); + } } else if (id == NotificationCenter.messagePlayingSpeedChanged) { updatePlaybackButton(); + } else if (id == NotificationCenter.webRtcMicAmplitudeEvent) { + if (VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute()) { + micAmplitude = 0; + } else { + micAmplitude = (Math.min(GroupCallActivity.MAX_AMPLITUDE, ((float) args[0]) * 4000) / GroupCallActivity.MAX_AMPLITUDE); + } + if (VoIPService.getSharedInstance() != null) { + Theme.getFragmentContextViewWavesDrawable().setAmplitude(Math.max(speakerAmplitude, micAmplitude)); + } + } else if (id == NotificationCenter.webRtcSpeakerAmplitudeEvent) { + float a = (float) args[0] * 15f / 80f; + speakerAmplitude = Math.max(0, Math.min(a, 1)); + if (VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute()) { + micAmplitude = 0; + } + if (VoIPService.getSharedInstance() != null) { + Theme.getFragmentContextViewWavesDrawable().setAmplitude(Math.max(speakerAmplitude, micAmplitude)); + } + avatars.invalidate(); } } + float speakerAmplitude; + float micAmplitude; + + public int getStyleHeight() { + return currentStyle == 4 ? 
48 : 36; + } + private void checkLiveLocation(boolean create) { View fragmentView = fragment.getFragmentView(); if (!create && fragmentView != null) { @@ -538,8 +937,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent updateStyle(2); playButton.setImageDrawable(new ShareLocationDrawable(getContext(), 1)); if (create && topPadding == 0) { - setTopPadding(AndroidUtilities.dp2(36)); - yPosition = 0; + setTopPadding(AndroidUtilities.dp2(getStyleHeight())); } if (!visible) { if (!create) { @@ -548,7 +946,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent animatorSet = null; } animatorSet = new AnimatorSet(); - animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", AndroidUtilities.dp2(36))); + animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", AndroidUtilities.dp2(getStyleHeight()))); animatorSet.setDuration(200); animatorSet.addListener(new AnimatorListenerAdapter() { @Override @@ -682,6 +1080,9 @@ public class FragmentContextView extends FrameLayout implements NotificationCent } private void checkPlayer(boolean create) { + if (visible && (currentStyle == 3 || currentStyle == 4)) { + return; + } MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); View fragmentView = fragment.getFragmentView(); if (!create && fragmentView != null) { @@ -691,7 +1092,10 @@ public class FragmentContextView extends FrameLayout implements NotificationCent } if (messageObject == null || messageObject.getId() == 0 || messageObject.isVideo()) { lastMessageObject = null; - boolean callAvailable = supportsCalls && VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING; + boolean callAvailable = supportsCalls && VoIPService.getSharedInstance() != null && !VoIPService.getSharedInstance().isHangingUp() && VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING && 
!GroupCallPip.isShowing(); + if (!callAvailable && fragment instanceof ChatActivity && ((ChatActivity) fragment).getGroupCall() != null && !GroupCallPip.isShowing()) { + callAvailable = true; + } if (callAvailable) { checkCall(false); return; @@ -708,6 +1112,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent animatorSet.cancel(); animatorSet = null; } + animationIndex = NotificationCenter.getInstance(account).setAnimationInProgress(animationIndex, null); animatorSet = new AnimatorSet(); animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", 0)); animatorSet.setDuration(200); @@ -724,24 +1129,23 @@ public class FragmentContextView extends FrameLayout implements NotificationCent delegate.onAnimation(false, false); } animatorSet = null; + if (checkCallAfterAnimation) { + checkCall(false); + } } } }); animatorSet.start(); - animationIndex = NotificationCenter.getInstance(account).setAnimationInProgress(animationIndex, null); } + } else { + setVisibility(View.GONE); } } else { int prevStyle = currentStyle; updateStyle(0); if (create && topPadding == 0) { - setTopPadding(AndroidUtilities.dp2(36)); - if (additionalContextView != null && additionalContextView.getVisibility() == VISIBLE) { - ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(72); - } else { - ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(36); - } - yPosition = 0; + updatePaddings(); + setTopPadding(AndroidUtilities.dp2(getStyleHeight())); if (delegate != null) { delegate.onAnimation(true, true); delegate.onAnimation(false, true); @@ -753,16 +1157,17 @@ public class FragmentContextView extends FrameLayout implements NotificationCent animatorSet.cancel(); animatorSet = null; } + animationIndex = NotificationCenter.getInstance(account).setAnimationInProgress(animationIndex, null); animatorSet = new AnimatorSet(); if (additionalContextView != null && additionalContextView.getVisibility() == VISIBLE) { - ((LayoutParams) 
getLayoutParams()).topMargin = -AndroidUtilities.dp(72); + ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(getStyleHeight() + additionalContextView.getStyleHeight()); } else { - ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(36); + ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(getStyleHeight()); } if (delegate != null) { delegate.onAnimation(true, true); } - animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", AndroidUtilities.dp2(36))); + animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", AndroidUtilities.dp2(getStyleHeight()))); animatorSet.setDuration(200); animatorSet.addListener(new AnimatorListenerAdapter() { @Override @@ -773,11 +1178,13 @@ public class FragmentContextView extends FrameLayout implements NotificationCent delegate.onAnimation(false, true); } animatorSet = null; + if (checkCallAfterAnimation) { + checkCall(false); + } } } }); animatorSet.start(); - animationIndex = NotificationCenter.getInstance(account).setAnimationInProgress(animationIndex, null); } visible = true; setVisibility(VISIBLE); @@ -828,16 +1235,32 @@ public class FragmentContextView extends FrameLayout implements NotificationCent } } - private void checkCall(boolean create) { + public void checkCall(boolean create) { View fragmentView = fragment.getFragmentView(); if (!create && fragmentView != null) { if (fragmentView.getParent() == null || ((View) fragmentView.getParent()).getVisibility() != VISIBLE) { create = true; } } - boolean callAvailable = supportsCalls && VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING; + boolean callAvailable; + boolean groupActive; + if (GroupCallPip.isShowing()) { + callAvailable = false; + groupActive = false; + } else { + callAvailable = !GroupCallActivity.groupCallUiVisible && supportsCalls && VoIPService.getSharedInstance() != null && 
!VoIPService.getSharedInstance().isHangingUp(); + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null && VoIPService.getSharedInstance().groupCall.call instanceof TLRPC.TL_groupCallDiscarded) { + callAvailable = false; + } + groupActive = false; + if (!GroupCallActivity.groupCallUiVisible && supportsCalls && !callAvailable && fragment instanceof ChatActivity && ((ChatActivity) fragment).getGroupCall() != null) { + callAvailable = true; + groupActive = true; + } + } + if (!callAvailable) { - if (visible) { + if (visible && (create && currentStyle == -1 || currentStyle == 4 || currentStyle == 3)) { visible = false; if (create) { if (getVisibility() != GONE) { @@ -849,31 +1272,93 @@ public class FragmentContextView extends FrameLayout implements NotificationCent animatorSet.cancel(); animatorSet = null; } + final int currentAccount = account; + animationIndex = NotificationCenter.getInstance(currentAccount).setAnimationInProgress(animationIndex, null); animatorSet = new AnimatorSet(); animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", 0)); - animatorSet.setDuration(200); + animatorSet.setDuration(220); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { + NotificationCenter.getInstance(currentAccount).onAnimationFinish(animationIndex); if (animatorSet != null && animatorSet.equals(animation)) { setVisibility(GONE); animatorSet = null; + if (checkCallAfterAnimation) { + checkCall(false); + } } } }); animatorSet.start(); } + } else if (currentStyle == -1 || currentStyle == 4 || currentStyle == 3){ + visible = false; + setVisibility(GONE); } } else { - updateStyle(1); - if (create && topPadding == 0) { - setTopPadding(AndroidUtilities.dp2(36)); - if (additionalContextView != null && additionalContextView.getVisibility() == VISIBLE) { - ((LayoutParams) getLayoutParams()).topMargin = 
-AndroidUtilities.dp(72); - } else { - ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(36); + int newStyle; + if (groupActive) { + newStyle = 4; + } else if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null) { + newStyle = 3; + } else { + newStyle = 1; + } + if (newStyle != currentStyle && animatorSet != null && !create) { + checkCallAfterAnimation = true; + return; + } + if (newStyle != currentStyle && visible && !create) { + if (animatorSet != null) { + animatorSet.cancel(); + animatorSet = null; + } + final int currentAccount = account; + if (animatorSet != null) { + animatorSet.cancel(); + animatorSet = null; + } + animationIndex = NotificationCenter.getInstance(currentAccount).setAnimationInProgress(animationIndex, null); + animatorSet = new AnimatorSet(); + animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", 0)); + animatorSet.setDuration(220); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + NotificationCenter.getInstance(currentAccount).onAnimationFinish(animationIndex); + if (animatorSet != null && animatorSet.equals(animation)) { + visible = false; + animatorSet = null; + checkCall(false); + } + } + }); + animatorSet.start(); + return; + } + if (groupActive) { + boolean updateAnimated = currentStyle == 4 && visible; + updateStyle(4); + + ChatObject.Call call = ((ChatActivity) fragment).getGroupCall(); + + if (call.call.participants_count == 0) { + subtitleTextView.setText(LocaleController.getString("MembersTalkingNobody", R.string.MembersTalkingNobody)); + } else { + subtitleTextView.setText(LocaleController.formatPluralString("Participants", call.call.participants_count)); + } + + updateAvatars(avatars.wasDraw && updateAnimated); + } else { + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null) { 
+ updateAvatars(currentStyle == 3); + updateStyle(3); + } else { + updateStyle(1); } - yPosition = 0; } if (!visible) { if (!create) { @@ -883,25 +1368,229 @@ public class FragmentContextView extends FrameLayout implements NotificationCent } animatorSet = new AnimatorSet(); if (additionalContextView != null && additionalContextView.getVisibility() == VISIBLE) { - ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(72); + ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(getStyleHeight() + additionalContextView.getStyleHeight()); } else { - ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(36); + ((LayoutParams) getLayoutParams()).topMargin = -AndroidUtilities.dp(getStyleHeight()); } - animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", AndroidUtilities.dp2(36))); - animatorSet.setDuration(200); + final int currentAccount = account; + animationIndex = NotificationCenter.getInstance(currentAccount).setAnimationInProgress(animationIndex, null); + animatorSet.playTogether(ObjectAnimator.ofFloat(this, "topPadding", AndroidUtilities.dp2(getStyleHeight()))); + animatorSet.setDuration(220); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { + NotificationCenter.getInstance(currentAccount).onAnimationFinish(animationIndex); if (animatorSet != null && animatorSet.equals(animation)) { animatorSet = null; } + if (checkCallAfterAnimation) { + checkCall(false); + } } }); animatorSet.start(); + } else { + updatePaddings(); + setTopPadding(AndroidUtilities.dp2(getStyleHeight())); } visible = true; setVisibility(VISIBLE); } } } + + private void updateAvatars(boolean animated) { + if (!animated) { + if (avatars.transitionProgressAnimator != null) { + avatars.transitionProgressAnimator.cancel(); + avatars.transitionProgressAnimator = null; + } + } + ChatObject.Call call; + if 
(avatars.transitionProgressAnimator == null) { + int currentAccount; + if (currentStyle == 4) { + if (fragment instanceof ChatActivity) { + ChatActivity chatActivity = (ChatActivity) fragment; + call = chatActivity.getGroupCall(); + currentAccount = chatActivity.getCurrentAccount(); + } else { + call = null; + currentAccount = account; + } + } else { + if (VoIPService.getSharedInstance() != null) { + call = VoIPService.getSharedInstance().groupCall; + currentAccount = VoIPService.getSharedInstance().getAccount(); + } else { + call = null; + currentAccount = account; + } + } + if (call != null) { + for (int a = 0, N = call.sortedParticipants.size(); a < 3; a++) { + if (a < N) { + avatars.setObject(a, currentAccount, call.sortedParticipants.get(a)); + } else { + avatars.setObject(a, currentAccount, null); + } + } + avatars.commitTransition(animated); + } else { + for (int a = 0; a < 3; a++) { + avatars.setObject(a, currentAccount, null); + } + avatars.commitTransition(animated); + } + + if (currentStyle == 4 && call != null) { + int N = Math.min(3, call.sortedParticipants.size()); + int x = N == 0 ? 
10 : (10 + 24 * (N - 1) + 32 + 10); + if (animated) { + int leftMargin = ((LayoutParams) titleTextView.getLayoutParams()).leftMargin; + if (AndroidUtilities.dp(x) != leftMargin) { + float dx = titleTextView.getTranslationX() + leftMargin - AndroidUtilities.dp(x); + titleTextView.setTranslationX(dx); + subtitleTextView.setTranslationX(dx); + titleTextView.animate().translationX(0).setDuration(220).setInterpolator(CubicBezierInterpolator.DEFAULT); + subtitleTextView.animate().translationX(0).setDuration(220).setInterpolator(CubicBezierInterpolator.DEFAULT); + } + } else { + titleTextView.animate().cancel(); + subtitleTextView.animate().cancel(); + titleTextView.setTranslationX(0); + subtitleTextView.setTranslationX(0); + } + titleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, Gravity.LEFT | Gravity.TOP, x, 5, 36, 0)); + subtitleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, Gravity.LEFT | Gravity.TOP, x, 25, 36, 0)); + } + } else { + avatars.updateAfterTransitionEnd(); + } + } + + + boolean collapseTransition; + float extraHeight; + float collapseProgress; + boolean wasDraw; + + public void setCollapseTransition(boolean show, float extraHeight, float progress) { + collapseTransition = show; + this.extraHeight = extraHeight; + this.collapseProgress = progress; + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (drawOverlay && getVisibility() != View.VISIBLE) { + return; + } + boolean clipped = false; + if (currentStyle == 3 && drawOverlay) { + Theme.getFragmentContextViewWavesDrawable().updateState(wasDraw); + float progress = topPadding / AndroidUtilities.dp((getStyleHeight())); + + if (collapseTransition) { + Theme.getFragmentContextViewWavesDrawable().draw(0, AndroidUtilities.dp((getStyleHeight())) - topPadding + extraHeight, getMeasuredWidth(), getMeasuredHeight() - AndroidUtilities.dp(2), canvas, null, Math.min(progress, (1f - collapseProgress))); + } else { + 
Theme.getFragmentContextViewWavesDrawable().draw(0, AndroidUtilities.dp((getStyleHeight())) - topPadding, getMeasuredWidth(), getMeasuredHeight() - AndroidUtilities.dp(2), canvas, this, progress); + } + float clipTop = AndroidUtilities.dp((getStyleHeight())) - topPadding; + if (collapseTransition) { + clipTop += extraHeight; + } + if (clipTop > getMeasuredHeight()) { + return; + } + clipped = true; + canvas.save(); + canvas.clipRect(0, clipTop, getMeasuredWidth(), getMeasuredHeight()); + invalidate(); + } + super.dispatchDraw(canvas); + if (clipped) { + canvas.restore(); + } + wasDraw = true; + } + + boolean drawOverlay; + + public void setDrawOverlay(boolean drawOverlay) { + this.drawOverlay = drawOverlay; + } + + @Override + public void invalidate() { + super.invalidate(); + if (currentStyle == 3) { + if (getParent() != null) { + ((View) getParent()).invalidate(); + } + } + } + + public int getCurrentStyle() { + return currentStyle; + } + + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + updatePaddings(); + setTopPadding(topPadding); + if (visibility == View.GONE) { + wasDraw = false; + } + } + + private void updatePaddings() { + int margin = 0; + if (getVisibility() == VISIBLE) { + margin -= AndroidUtilities.dp(getStyleHeight()); + } + if (additionalContextView != null && additionalContextView.getVisibility() == VISIBLE) { + margin -= AndroidUtilities.dp(additionalContextView.getStyleHeight()); + ((LayoutParams) getLayoutParams()).topMargin = margin; + ((LayoutParams) additionalContextView.getLayoutParams()).topMargin = margin; + } else { + ((LayoutParams) getLayoutParams()).topMargin = margin; + } + } + + @Override + public void onStateChanged(int state) { + updateGroupCallTitle(); + } + + private void updateGroupCallTitle() { + VoIPService service = VoIPService.getSharedInstance(); + if (service != null && currentStyle == 3) { + int currentCallState = service.getCallState(); + if (currentCallState == 
VoIPService.STATE_WAIT_INIT || currentCallState == VoIPService.STATE_WAIT_INIT_ACK || currentCallState == VoIPService.STATE_CREATING || currentCallState == VoIPService.STATE_RECONNECTING) { + titleTextView.setText(LocaleController.getString("VoipGroupConnecting", R.string. VoipGroupConnecting)); + } else if (service.getChat() != null) { + if (fragment instanceof ChatActivity && ((ChatActivity) fragment).getCurrentChat() != null && ((ChatActivity) fragment).getCurrentChat().id == service.getChat().id) { + titleTextView.setText(LocaleController.getString("VoipGroupViewVoiceChat", R.string.VoipGroupViewVoiceChat)); + } else { + titleTextView.setText(service.getChat().title); + } + } + } + } + + public float hotspotX; + public float hotspotY; + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + if (ev.getAction() == MotionEvent.ACTION_DOWN) { + hotspotX = ev.getX(); + hotspotY = ev.getY(); + } + return super.onInterceptTouchEvent(ev); + } + } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextViewWavesDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextViewWavesDrawable.java new file mode 100644 index 000000000..e85b37c0c --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextViewWavesDrawable.java @@ -0,0 +1,434 @@ +package org.telegram.ui.Components; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffXfermode; +import android.graphics.RadialGradient; +import android.graphics.RectF; +import android.graphics.Shader; +import android.os.Build; +import android.os.SystemClock; +import android.view.View; + +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import 
org.telegram.messenger.ChatObject; +import org.telegram.messenger.Utilities; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; + +import java.util.ArrayList; + +public class FragmentContextViewWavesDrawable { + + public final static int MUTE_BUTTON_STATE_MUTE = 1; + public final static int MUTE_BUTTON_STATE_UNMUTE = 0; + public final static int MUTE_BUTTON_STATE_CONNECTING = 2; + public final static int MUTE_BUTTON_STATE_MUTED_BY_ADMIN = 3; + + WeavingState[] states = new WeavingState[4]; + WeavingState currentState; + WeavingState previousState; + WeavingState pausedState; + + private float amplitude; + private float amplitude2; + private float animateToAmplitude; + private float animateAmplitudeDiff; + private float animateAmplitudeDiff2; + private long lastUpdateTime; + float progressToState = 1f; + + ArrayList parents = new ArrayList<>(); + + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + LineBlobDrawable lineBlobDrawable = new LineBlobDrawable(5); + LineBlobDrawable lineBlobDrawable1 = new LineBlobDrawable(7); + LineBlobDrawable lineBlobDrawable2 = new LineBlobDrawable(8); + + RectF rect = new RectF(); + Path path = new Path(); + private final Paint xRefP = new Paint(Paint.ANTI_ALIAS_FLAG); + private final Paint selectedPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + public FragmentContextViewWavesDrawable() { + xRefP.setColor(0); + xRefP.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); + + + for (int i = 0; i < 4; i++) { + states[i] = new WeavingState(i); + } + } + + public void draw(float left, float top, float right, float bottom, Canvas canvas, FragmentContextView parentView, float progress) { + boolean update; + checkColors(); + if (parentView == null) { + update = false; + } else { + update = parents.size() > 0 && parentView == parents.get(0); + } + if (top > bottom) { + return; + } + long dt = 0; + boolean rippleTransition = currentState != null && previousState != 
null && ((currentState.currentState == MUTE_BUTTON_STATE_MUTE && previousState.currentState == MUTE_BUTTON_STATE_UNMUTE) || (previousState.currentState == MUTE_BUTTON_STATE_MUTE && currentState.currentState == MUTE_BUTTON_STATE_UNMUTE)); + + if (update) { + long newTime = SystemClock.elapsedRealtime(); + dt = newTime - lastUpdateTime; + lastUpdateTime = newTime; + if (dt > 20) { + dt = 17; + } + + if (animateToAmplitude != amplitude) { + amplitude += animateAmplitudeDiff * dt; + if (animateAmplitudeDiff > 0) { + if (amplitude > animateToAmplitude) { + amplitude = animateToAmplitude; + } + } else { + if (amplitude < animateToAmplitude) { + amplitude = animateToAmplitude; + } + } + parentView.invalidate(); + } + + if (animateToAmplitude != amplitude2) { + amplitude2 += animateAmplitudeDiff2 * dt; + if (animateAmplitudeDiff2 > 0) { + if (amplitude2 > animateToAmplitude) { + amplitude2 = animateToAmplitude; + } + } else { + if (amplitude2 < animateToAmplitude) { + amplitude2 = animateToAmplitude; + } + } + parentView.invalidate(); + } + + if (previousState != null) { + progressToState += dt / 250f; + if (progressToState > 1f) { + progressToState = 1f; + previousState = null; + } + parentView.invalidate(); + } + } + for (int i = 0; i < 2; i++) { + float alpha; + if (i == 0 && previousState == null) { + continue; + } + + if (i == 0) { + alpha = 1f - progressToState; + previousState.setToPaint(paint); + } else { + if (currentState == null) { + return; + } + alpha = previousState != null ? 
progressToState : 1f; + if (update) { + currentState.update((int) (bottom - top), (int) (right - left), dt, amplitude); + } + currentState.setToPaint(paint); + } + + lineBlobDrawable.minRadius = 0; + lineBlobDrawable.maxRadius = AndroidUtilities.dp(2) + AndroidUtilities.dp(2) * amplitude; + + lineBlobDrawable1.minRadius = AndroidUtilities.dp(0); + lineBlobDrawable1.maxRadius = AndroidUtilities.dp(3) + AndroidUtilities.dp(9) * amplitude; + + lineBlobDrawable2.minRadius = AndroidUtilities.dp(0); + lineBlobDrawable2.maxRadius = AndroidUtilities.dp(3) + AndroidUtilities.dp(9) * amplitude; + + lineBlobDrawable.update(amplitude, 0.3f); + lineBlobDrawable1.update(amplitude, 0.7f); + lineBlobDrawable2.update(amplitude, 0.7f); + +// if (rippleTransition) { +// paint.setAlpha(76); +// canvas.save(); +// float cx = right - AndroidUtilities.dp(18); +// float r = (right - left) * 1.1f * progressToState; +// float offset = (float) Math.sqrt(r * r - (bottom - top) * (bottom - top)); +// if (i == 0) { +// if (cx - offset > left) { +// canvas.clipRect(left, top - AndroidUtilities.dp(20), cx - offset, bottom); +// float top1 = AndroidUtilities.dp(6) * amplitude2; +// float top2 = AndroidUtilities.dp(6) * amplitude2; +// lineBlobDrawable1.draw(left, top - top1, right, bottom, canvas, paint, top, progress); +// lineBlobDrawable2.draw(left, top - top2, right, bottom, canvas, paint, top, progress); +// } +// } else { +// if (cx - offset > left) { +// canvas.clipRect(cx - offset, top - AndroidUtilities.dp(20), right, bottom); +// } +// float top1 = AndroidUtilities.dp(6) * amplitude2; +// float top2 = AndroidUtilities.dp(6) * amplitude2; +// lineBlobDrawable1.draw(left, top - top1, right, bottom, canvas, paint, top, progress); +// lineBlobDrawable2.draw(left, top - top2, right, bottom, canvas, paint, top, progress); +// } +// canvas.restore(); +// } else { + paint.setAlpha((int) (76 * alpha)); + float top1 = AndroidUtilities.dp(6) * amplitude2; + float top2 = AndroidUtilities.dp(6) * 
amplitude2; + lineBlobDrawable1.draw(left, top - top1, right, bottom, canvas, paint, top, progress); + lineBlobDrawable2.draw(left, top - top2, right, bottom, canvas, paint, top, progress); + //} + + if (i == 1 && rippleTransition) { + paint.setAlpha(255); + } else if (i == 1) { + paint.setAlpha((int) (255 * alpha)); + } else { + paint.setAlpha(255); + } + if (i == 1 && rippleTransition) { + path.reset(); + float cx = right - AndroidUtilities.dp(18); + float cy = top + (bottom - top) / 2; + float r = (right - left) * 1.1f * alpha; + path.addCircle(cx, cy, r, Path.Direction.CW); + canvas.save(); + + canvas.clipPath(path); + lineBlobDrawable.draw(left, top, right, bottom, canvas, paint, top, progress); + canvas.restore(); + } else { + lineBlobDrawable.draw(left, top, right, bottom, canvas, paint, top, progress); + } + } + +// if (Build.VERSION.SDK_INT > 21 && parentView != null && (parentView.isPressed() || pressedRemoveProgress != 0)) { +// if (parentView.isPressed()) { +// pressedRemoveProgress = 1f; +// } +// if (pressedProgress != 1f) { +// pressedProgress += 16f / 150f; +// if (pressedProgress > 1f) { +// pressedProgress = 1f; +// } +// } else if (!parentView.isPressed() && pressedRemoveProgress != 0) { +// pressedRemoveProgress -= 16f / 150f; +// if (pressedRemoveProgress < 0) { +// pressedRemoveProgress = 0; +// pressedProgress = 0; +// } +// } +// rect.set(left, top - AndroidUtilities.dp(20), right, bottom); +// canvas.saveLayerAlpha(rect, 255, Canvas.ALL_SAVE_FLAG); +// Theme.getColor(Theme.key_listSelector); +// selectedPaint.setColor(ColorUtils.setAlphaComponent(Color.BLACK, (int) (16 * pressedRemoveProgress))); +// +// float hotspotX = left + parentView.hotspotX; +// float rad = Math.max(right - hotspotX, hotspotX - left) * 0.8f; +// canvas.drawCircle(hotspotX, top + parentView.hotspotY, rad * 1.3f * CubicBezierInterpolator.DEFAULT.getInterpolation(pressedProgress), selectedPaint); +// +// lineBlobDrawable.path.toggleInverseFillType(); +// 
canvas.drawPath(lineBlobDrawable.path, xRefP); +// lineBlobDrawable.path.toggleInverseFillType(); +// canvas.restore(); +// } + } + + float pressedProgress; + float pressedRemoveProgress; + + private void checkColors() { + for (int i = 0; i < states.length; i++) { + states[i].checkColor(); + } + } + + private void setState(int state, boolean animated) { + if (currentState != null && currentState.currentState == state) { + return; + } + if (VoIPService.getSharedInstance() == null && currentState == null) { + currentState = pausedState; + } else { + previousState = animated ? currentState : null; + currentState = states[state]; + if (previousState != null) { + progressToState = 0; + } else { + progressToState = 1; + } + } + } + + public void setAmplitude(float value) { + animateToAmplitude = value; + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (250); + animateAmplitudeDiff2 = (animateToAmplitude - amplitude) / (120); + } + + public void addParent(View parent) { + if (!parents.contains(parent)) { + parents.add(parent); + } + } + public void removeParent(View parent) { + parents.remove(parent); + if (parents.isEmpty()) { + pausedState = currentState; + currentState = null; + previousState = null; + } + } + + public void updateState(boolean animated) { + if (VoIPService.getSharedInstance() != null) { + int currentCallState = VoIPService.getSharedInstance().getCallState(); + if (currentCallState == VoIPService.STATE_WAIT_INIT || currentCallState == VoIPService.STATE_WAIT_INIT_ACK || currentCallState == VoIPService.STATE_CREATING || currentCallState == VoIPService.STATE_RECONNECTING) { + setState(FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_CONNECTING, animated); + } else { + TLRPC.TL_groupCallParticipant participant = VoIPService.getSharedInstance().groupCall.participants.get(AccountInstance.getInstance(VoIPService.getSharedInstance().getAccount()).getUserConfig().getClientUserId()); + if (participant != null && !participant.can_self_unmute && 
participant.muted && !ChatObject.canManageCalls(VoIPService.getSharedInstance().getChat())) { + VoIPService.getSharedInstance().setMicMute(true, false, false); + setState(FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_MUTED_BY_ADMIN, animated); + } else { + boolean isMuted = VoIPService.getSharedInstance().isMicMute(); + setState(isMuted ? FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_MUTE : FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_UNMUTE, animated); + } + } + } + } + + public long getRippleFinishedDelay() { + if (pressedProgress != 0 && pressedProgress != 1) { + return (long) ((1f - pressedProgress) * 150); + } + return 0; + } + + public static class WeavingState { + + private float targetX = -1f; + private float targetY = -1f; + private float startX; + private float startY; + private float duration; + private float time; + + public Shader shader; + private final Matrix matrix = new Matrix(); + private final int currentState; + + int color1; + int color2; + int color3; + public WeavingState(int state) { + currentState = state; + createGradients(); + } + + String greenKey1 = Theme.key_voipgroup_topPanelGreen1; + String greenKey2 = Theme.key_voipgroup_topPanelGreen2; + String blueKey1 = Theme.key_voipgroup_topPanelBlue1; + String blueKey2 = Theme.key_voipgroup_topPanelBlue2; + String mutedByAdmin = Theme.key_voipgroup_mutedByAdminGradient; + String mutedByAdmin2 = Theme.key_voipgroup_mutedByAdminGradient2; + String mutedByAdmin3 = Theme.key_voipgroup_mutedByAdminGradient3; + + private void createGradients() { + if (currentState == MUTE_BUTTON_STATE_UNMUTE) { + shader = new RadialGradient(200, 200, 200, new int[]{color1 = Theme.getColor(greenKey1), color2 = Theme.getColor(greenKey2)}, null, Shader.TileMode.CLAMP); + } else if (currentState == MUTE_BUTTON_STATE_MUTE){ + shader = new RadialGradient(200, 200, 200, new int[]{color1 = Theme.getColor(blueKey1), color2 = Theme.getColor(blueKey2)}, null, Shader.TileMode.CLAMP); + } else if (currentState == 
MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + shader = new RadialGradient(200, 200, 200, new int[]{color1 = Theme.getColor(mutedByAdmin), color3 = Theme.getColor(mutedByAdmin3), color2 = Theme.getColor(mutedByAdmin2)}, new float[]{0f, 0.6f, 1f}, Shader.TileMode.CLAMP); + } + } + + public void update(int height, int width, long dt, float amplitude) { + if (currentState == MUTE_BUTTON_STATE_CONNECTING) { + return; + } + if (duration == 0 || time >= duration) { + duration = Utilities.random.nextInt(700) + 500; + time = 0; + if (targetX == -1f) { + if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + targetX = -0.3f + 0.05f * Utilities.random.nextInt(100) / 100f; + targetY = 0.7f + 0.05f * Utilities.random.nextInt(100) / 100f; + } else if (currentState == MUTE_BUTTON_STATE_UNMUTE) { + targetX = -0.3f + 0.2f * Utilities.random.nextInt(100) / 100f; + targetY = 0.7f + 0.3f * Utilities.random.nextInt(100) / 100f; + } else { + targetX = 1.1f + 0.2f * (Utilities.random.nextInt(100) / 100f); + targetY = 4f * Utilities.random.nextInt(100) / 100f; + } + } + startX = targetX; + startY = targetY; + if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + targetX = -0.3f + 0.05f * Utilities.random.nextInt(100) / 100f; + targetY = 0.7f + 0.05f * Utilities.random.nextInt(100) / 100f; + } else if (currentState == MUTE_BUTTON_STATE_UNMUTE) { + targetX = -0.3f + 0.2f * Utilities.random.nextInt(100) / 100f; + targetY = 0.7f + 0.3f * Utilities.random.nextInt(100) / 100f; + } else { + targetX = 1.1f + 0.2f * (Utilities.random.nextInt(100) / 100f); + targetY = 4f * Utilities.random.nextInt(100) / 100f; + } + } + time += dt * (0.5f + BlobDrawable.GRADIENT_SPEED_MIN) + dt * (BlobDrawable.GRADIENT_SPEED_MAX * 2) * amplitude; + if (time > duration) { + time = duration; + } + float interpolation = CubicBezierInterpolator.EASE_OUT.getInterpolation(time / duration); + float x = width * (startX + (targetX - startX) * interpolation) - 200; + float y = height * (startY + (targetY - startY) * 
interpolation) - 200; + + float scale = width / 400.0f * ((currentState == MUTE_BUTTON_STATE_UNMUTE || currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN)? 3f : 1.5f); + matrix.reset(); + matrix.postTranslate(x, y); + matrix.postScale(scale, scale, x + 200, y + 200); + + shader.setLocalMatrix(matrix); + } + + public void checkColor() { + if (currentState == MUTE_BUTTON_STATE_UNMUTE) { + if (color1 != Theme.getColor(greenKey1) || color2 != Theme.getColor(greenKey2)) { + createGradients(); + } + } else if (currentState == MUTE_BUTTON_STATE_MUTE) { + if (color1 != Theme.getColor(blueKey1) || color2 != Theme.getColor(blueKey2)) { + createGradients(); + } + } else if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + if (color1 != Theme.getColor(mutedByAdmin) || color2 != Theme.getColor(mutedByAdmin2)) { + createGradients(); + } + } + } + + public void setToPaint(Paint paint) { + if (currentState == MUTE_BUTTON_STATE_UNMUTE || currentState == MUTE_BUTTON_STATE_MUTE || currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + paint.setShader(shader); + } else { + paint.setShader(null); + paint.setColor(Theme.getColor(Theme.key_voipgroup_topPanelGray)); + } + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPip.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPip.java new file mode 100644 index 000000000..6740759f0 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPip.java @@ -0,0 +1,1037 @@ +package org.telegram.ui.Components; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.AnimatorSet; +import android.animation.ObjectAnimator; +import android.animation.ValueAnimator; +import android.content.Context; +import android.content.SharedPreferences; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.PixelFormat; +import android.os.Build; +import 
android.os.SystemClock; +import android.provider.Settings; +import android.view.Gravity; +import android.view.HapticFeedbackConstants; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewConfiguration; +import android.view.ViewParent; +import android.view.ViewTreeObserver; +import android.view.WindowManager; +import android.view.animation.OvershootInterpolator; +import android.widget.FrameLayout; +import android.widget.ImageView; + +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.GroupCallActivity; + +import static android.view.WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE; + +public class GroupCallPip implements NotificationCenter.NotificationCenterDelegate { + + private static GroupCallPip instance; + private static boolean forceRemoved = true; + + FrameLayout windowView; + FrameLayout windowRemoveTooltipView; + View removeTooltipView; + FrameLayout windowRemoveTooltipOverlayView; + FrameLayout alertContainer; + GroupCallPipAlertView pipAlertView; + int currentAccount; + WindowManager windowManager; + WindowManager.LayoutParams windowLayoutParams; + AvatarsImageView avatarsImageView; + RLottieDrawable deleteIcon; + boolean showAlert; + + + boolean animateToShowRemoveTooltip; + boolean animateToPrepareRemove; + float prepareToRemoveProgress = 0; + + boolean removed; + + int[] location = new int[2]; + float[] point = new float[2]; + + int lastScreenX; + int lastScreenY; + float xRelative = -1; + float yRelative = -1; + + int windowTop; + int windowLeft; + + float windowX; + float windowY; + float windowOffsetLeft; + float windowOffsetTop; + + + private 
ValueAnimator.AnimatorUpdateListener updateXlistener = new ValueAnimator.AnimatorUpdateListener() { + @Override + public void onAnimationUpdate(ValueAnimator valueAnimator) { + float x = (float) valueAnimator.getAnimatedValue(); + windowLayoutParams.x = (int) x; + updateAvatarsPosition(); + if (windowView.getParent() != null) { + windowManager.updateViewLayout(windowView, windowLayoutParams); + } + } + }; + + private ValueAnimator.AnimatorUpdateListener updateYlistener = new ValueAnimator.AnimatorUpdateListener() { + @Override + public void onAnimationUpdate(ValueAnimator valueAnimator) { + float y = (float) valueAnimator.getAnimatedValue(); + windowLayoutParams.y = (int) y; + if (windowView.getParent() != null) { + windowManager.updateViewLayout(windowView, windowLayoutParams); + } + } + }; + + private final GroupCallPipButton button; + private final RLottieImageView iconView; + boolean moving; + + public GroupCallPip(Context context, int account) { + currentAccount = account; + float touchSlop = ViewConfiguration.get(context).getScaledTouchSlop(); + windowView = new FrameLayout(context) { + float startX; + float startY; + long startTime; + boolean pressed; + AnimatorSet moveToBoundsAnimator; + + Runnable pressedRunnable = new Runnable() { + @Override + public void run() { + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute()) { + ChatObject.Call call = VoIPService.getSharedInstance().groupCall; + TLRPC.TL_groupCallParticipant participant = call.participants.get(UserConfig.getInstance(currentAccount).getClientUserId()); + if (participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(VoIPService.getSharedInstance().getChat())) { + return; + } + AndroidUtilities.runOnUIThread(micRunnable, 90); + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + pressed = true; + } + } + }; + + Runnable micRunnable = () -> { + if 
(VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute()) { + VoIPService.getSharedInstance().setMicMute(false, true, false); + } + }; + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + if (AndroidUtilities.displaySize.x != lastScreenX || lastScreenY != AndroidUtilities.displaySize.y) { + + lastScreenX = AndroidUtilities.displaySize.x; + lastScreenY = AndroidUtilities.displaySize.y; + + if (xRelative < 0) { + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("groupcallpipconfig", Context.MODE_PRIVATE); + xRelative = preferences.getFloat("relativeX", 1f); + yRelative = preferences.getFloat("relativeY", 0.4f); + } + + if (instance != null) { + instance.setPosition(xRelative, yRelative); + } + } + } + + @Override + public boolean onTouchEvent(MotionEvent event) { + if (instance == null) { + return false; + } + float x = event.getRawX(); + float y = event.getRawY(); + ViewParent parent = getParent(); + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + + getLocationOnScreen(location); + windowOffsetLeft = location[0] - windowLayoutParams.x; + windowOffsetTop = location[1] - windowLayoutParams.y; + + startX = x; + startY = y; + startTime = System.currentTimeMillis(); + AndroidUtilities.runOnUIThread(pressedRunnable, 300); + windowX = windowLayoutParams.x; + windowY = windowLayoutParams.y; + pressedState = true; + checkButtonAlpha(); + break; + case MotionEvent.ACTION_MOVE: + float dx = x - startX; + float dy = y - startY; + if (!moving && (dx * dx + dy * dy) > touchSlop * touchSlop) { + if (parent != null) { + parent.requestDisallowInterceptTouchEvent(true); + } + AndroidUtilities.cancelRunOnUIThread(pressedRunnable); + moving = true; + showRemoveTooltip(true); + showAlert(false); + startX = x; + startY = y; + dx = 0; + dy = 0; + } + if (moving) { + windowX += dx; + windowY += dy; + startX = x; 
+ startY = y; + + updateButtonPosition(); + + float cx = windowX + getMeasuredWidth() / 2f; + float cy = windowY + getMeasuredHeight() / 2f; + + float cxRemove = windowLeft - windowOffsetLeft + windowRemoveTooltipView.getMeasuredWidth() / 2f; + float cyRemove = windowTop - windowOffsetTop + windowRemoveTooltipView.getMeasuredHeight() / 2f; + float distanceToRemove = (cx - cxRemove) * (cx - cxRemove) + (cy - cyRemove) * (cy - cyRemove); + boolean prepareToRemove = false; + boolean pinnedToCenter = false; + if (distanceToRemove < AndroidUtilities.dp(80) * AndroidUtilities.dp(80)) { + prepareToRemove = true; + double angle = Math.toDegrees(Math.atan((cx - cxRemove) / (cy - cyRemove))); + if ((cx > cxRemove && cy < cyRemove) || (cx < cxRemove && cy < cyRemove)) { + angle = 270 - angle; + } else { + angle = 90 - angle; + } + button.setRemoveAngle(angle); + if (distanceToRemove < AndroidUtilities.dp(50) * AndroidUtilities.dp(50)) { + pinnedToCenter = true; + } + } + pinnedToCenter(pinnedToCenter); + prepareToRemove(prepareToRemove); + } + break; + case MotionEvent.ACTION_CANCEL: + case MotionEvent.ACTION_UP: + AndroidUtilities.cancelRunOnUIThread(micRunnable); + AndroidUtilities.cancelRunOnUIThread(pressedRunnable); + if (animateToPrepareRemove) { + pressed = false; + remove(); + return false; + } + pressedState = false; + checkButtonAlpha(); + if (pressed) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setMicMute(true, false, false); + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + pressed = false; + } else if (event.getAction() == MotionEvent.ACTION_UP && !moving) { + onTap(); + return false; + } + if (parent != null && moving) { + parent.requestDisallowInterceptTouchEvent(false); + + int parentWidth = AndroidUtilities.displaySize.x; + int parentHeight = AndroidUtilities.displaySize.y; + + float left = windowLayoutParams.x; + float right = left + 
getMeasuredWidth(); + float top = windowLayoutParams.y; + float bottom = top + getMeasuredHeight(); + + moveToBoundsAnimator = new AnimatorSet(); + + float finallyX = left; + float finallyY = top; + + float paddingHorizontal = -AndroidUtilities.dp(36); + + if (left < paddingHorizontal) { + ValueAnimator animator = ValueAnimator.ofFloat(windowLayoutParams.x, finallyX = paddingHorizontal); + animator.addUpdateListener(updateXlistener); + moveToBoundsAnimator.playTogether(animator); + } else if (right > parentWidth - paddingHorizontal) { + ValueAnimator animator = ValueAnimator.ofFloat(windowLayoutParams.x, finallyX = (parentWidth - getMeasuredWidth() - paddingHorizontal)); + animator.addUpdateListener(updateXlistener); + moveToBoundsAnimator.playTogether(animator); + } + + int maxBottom = parentHeight + AndroidUtilities.dp(36); + if (top < AndroidUtilities.statusBarHeight - AndroidUtilities.dp(36)) { + ValueAnimator animator = ValueAnimator.ofFloat(windowLayoutParams.y, finallyY = AndroidUtilities.statusBarHeight - AndroidUtilities.dp(36)); + animator.addUpdateListener(updateYlistener); + moveToBoundsAnimator.playTogether(animator); + } else if (bottom > maxBottom) { + ValueAnimator animator = ValueAnimator.ofFloat(windowLayoutParams.y, finallyY = maxBottom - getMeasuredHeight()); + animator.addUpdateListener(updateYlistener); + moveToBoundsAnimator.playTogether(animator); + } + moveToBoundsAnimator.setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT); + moveToBoundsAnimator.start(); + + if (xRelative >= 0) { + getRelativePosition(finallyX, finallyY, point); + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("groupcallpipconfig", Context.MODE_PRIVATE); + preferences.edit() + .putFloat("relativeX", xRelative = point[0]) + .putFloat("relativeY", yRelative = point[1]) + .apply(); + } + } + moving = false; + showRemoveTooltip(false); + break; + } + return true; + } + + private void onTap() { + if 
(VoIPService.getSharedInstance() != null) { + showAlert(!showAlert); + //performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING);x + } + } + }; + windowView.setAlpha(0.7f); + + button = new GroupCallPipButton(context, currentAccount, false); + windowView.addView(button, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER)); + + avatarsImageView = new AvatarsImageView(context); + avatarsImageView.setStyle(5); + avatarsImageView.setCentered(true); + avatarsImageView.setVisibility(View.GONE); + avatarsImageView.setDelegate(() -> updateAvatars(true)); + updateAvatars(false); + windowView.addView(avatarsImageView, LayoutHelper.createFrame(108, 36, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); + + windowRemoveTooltipView = new FrameLayout(context) { + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + windowRemoveTooltipView.getLocationOnScreen(location); + windowLeft = location[0]; + windowTop = location[1] - AndroidUtilities.dp(25); + } + + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + windowRemoveTooltipOverlayView.setVisibility(visibility); + } + }; + removeTooltipView = new View(context) { + + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + @Override + protected void onDraw(Canvas canvas) { + if (animateToPrepareRemove && prepareToRemoveProgress != 1f) { + prepareToRemoveProgress += 16 / 250f; + if (prepareToRemoveProgress > 1f) { + prepareToRemoveProgress = 1f; + } + invalidate(); + } else if (!animateToPrepareRemove && prepareToRemoveProgress != 0) { + prepareToRemoveProgress -= 16 / 250f; + if (prepareToRemoveProgress < 0) { + prepareToRemoveProgress = 0; + } + invalidate(); + } + + paint.setColor(ColorUtils.blendARGB(0x66050D15, 0x66350C12, prepareToRemoveProgress)); + float r = AndroidUtilities.dp(35) + 
AndroidUtilities.dp(5) * prepareToRemoveProgress; + canvas.drawCircle(getMeasuredWidth() / 2f, getMeasuredHeight() / 2f - AndroidUtilities.dp(25), r, paint); + } + + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + windowRemoveTooltipOverlayView.setAlpha(alpha); + } + + @Override + public void setScaleX(float scaleX) { + super.setScaleX(scaleX); + windowRemoveTooltipOverlayView.setScaleX(scaleX); + } + + @Override + public void setScaleY(float scaleY) { + super.setScaleY(scaleY); + windowRemoveTooltipOverlayView.setScaleY(scaleY); + } + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + windowRemoveTooltipOverlayView.setTranslationY(translationY); + } + }; + windowRemoveTooltipView.addView(removeTooltipView); + + windowRemoveTooltipOverlayView = new FrameLayout(context); + iconView = new RLottieImageView(context); + iconView.setScaleType(ImageView.ScaleType.CENTER); + deleteIcon = new RLottieDrawable(R.raw.group_pip_delete_icon, "" + R.raw.group_pip_delete_icon, AndroidUtilities.dp(40), AndroidUtilities.dp(40), true, null); + deleteIcon.setPlayInDirectionOfCustomEndFrame(true); + iconView.setAnimation(deleteIcon); + iconView.setColorFilter(Color.WHITE); + windowRemoveTooltipOverlayView.addView(iconView, LayoutHelper.createFrame(40, 40, Gravity.CENTER, 0, 0, 0, 25)); + + + alertContainer = new FrameLayout(context) { + int lastSize = -1; + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + int size = AndroidUtilities.displaySize.x + AndroidUtilities.displaySize.y; + if (lastSize > 0 && lastSize != size) { + setVisibility(View.GONE); + showAlert = false; + checkButtonAlpha(); + } + lastSize = size; + } + + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + if (visibility == View.GONE) { + lastSize = -1; + } + } + }; + 
alertContainer.setOnClickListener(view -> showAlert(false)); + alertContainer.setClipChildren(false); + alertContainer.addView(pipAlertView = new GroupCallPipAlertView(context, currentAccount), LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); + } + + public static boolean isShowing() { + if (instance != null) { + return true; + } + if (!checkInlinePermissions()) { + return false; + } + VoIPService service = VoIPService.getSharedInstance(); + boolean groupCall = false; + if (service != null && service.groupCall != null && !service.isHangingUp()) { + groupCall = true; + } + return groupCall && !forceRemoved && (ApplicationLoader.mainInterfaceStopped || !GroupCallActivity.groupCallUiVisible); + } + + public static boolean onBackPressed() { + if (instance != null && instance.showAlert) { + instance.showAlert(false); + return true; + } + return false; + } + + private void showAlert(boolean b) { + if (b != showAlert) { + showAlert = b; + alertContainer.animate().setListener(null).cancel(); + if (showAlert) { + + if (alertContainer.getVisibility() != View.VISIBLE) { + alertContainer.setVisibility(View.VISIBLE); + alertContainer.setAlpha(0); + pipAlertView.setScaleX(0.7f); + pipAlertView.setScaleY(0.7f); + } + alertContainer.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + alertContainer.getViewTreeObserver().removeOnPreDrawListener(this); + alertContainer.getLocationOnScreen(location); + + float cx = windowLayoutParams.x + windowOffsetLeft + button.getMeasuredWidth() / 2f - location[0]; + float cy = windowLayoutParams.y + windowOffsetTop + button.getMeasuredWidth() / 2f - location[1]; + + + boolean canHorizontal = cy - AndroidUtilities.dp(45 + 16) > 0 && cy + AndroidUtilities.dp(45 + 16) < alertContainer.getMeasuredHeight(); + if (cx + AndroidUtilities.dp(45 + 16) + pipAlertView.getMeasuredWidth() < alertContainer.getMeasuredWidth() - AndroidUtilities.dp(16) 
&& canHorizontal) { + pipAlertView.setTranslationX(cx + AndroidUtilities.dp(45 + 16)); + float yOffset = cy / (float) (alertContainer.getMeasuredHeight()); + float maxOffset = AndroidUtilities.dp(40) / (float) pipAlertView.getMeasuredHeight(); + + yOffset = Math.max(maxOffset, Math.min(yOffset, 1f - maxOffset)); + pipAlertView.setTranslationY((int) (cy - pipAlertView.getMeasuredHeight() * yOffset)); + pipAlertView.setPosition(GroupCallPipAlertView.POSITION_LEFT, cx, cy); + } else if (cx - AndroidUtilities.dp(45 + 16) - pipAlertView.getMeasuredWidth() > AndroidUtilities.dp(16) && canHorizontal) { + float yOffset = cy / (float) alertContainer.getMeasuredHeight(); + float maxOffset = AndroidUtilities.dp(40) / (float) pipAlertView.getMeasuredHeight(); + yOffset = Math.max(maxOffset, Math.min(yOffset, 1f - maxOffset)); + + pipAlertView.setTranslationX((int) (cx - AndroidUtilities.dp(45 + 16) - pipAlertView.getMeasuredWidth())); + pipAlertView.setTranslationY((int) (cy - pipAlertView.getMeasuredHeight() * yOffset)); + pipAlertView.setPosition(GroupCallPipAlertView.POSITION_RIGHT, cx, cy); + } else if (cy > alertContainer.getMeasuredHeight() * 0.3f) { + float xOffset = cx / (float) alertContainer.getMeasuredWidth(); + float maxOffset = AndroidUtilities.dp(40) / (float) pipAlertView.getMeasuredWidth(); + xOffset = Math.max(maxOffset, Math.min(xOffset, 1f - maxOffset)); + pipAlertView.setTranslationX((int) (cx - pipAlertView.getMeasuredWidth() * xOffset)); + pipAlertView.setTranslationY((int) (cy - pipAlertView.getMeasuredHeight() - AndroidUtilities.dp(45 + 16))); + pipAlertView.setPosition(GroupCallPipAlertView.POSITION_TOP, cx, cy); + } else { + float xOffset = cx / (float) alertContainer.getMeasuredWidth(); + float maxOffset = AndroidUtilities.dp(40) / (float) pipAlertView.getMeasuredWidth(); + xOffset = Math.max(maxOffset, Math.min(xOffset, 1f - maxOffset)); + pipAlertView.setTranslationX((int) (cx - pipAlertView.getMeasuredWidth() * xOffset)); + 
pipAlertView.setTranslationY((int) (cy + AndroidUtilities.dp(45 + 16))); + pipAlertView.setPosition(GroupCallPipAlertView.POSITION_BOTTOM, cx, cy); + } + return false; + } + }); + alertContainer.animate().alpha(1f).setDuration(150).start(); + pipAlertView.animate().scaleX(1f).scaleY(1f).setDuration(150).start(); + + } else { + pipAlertView.animate().scaleX(0.7f).scaleY(0.7f).setDuration(150).start(); + alertContainer.animate().alpha(0f).setDuration(150).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + alertContainer.setVisibility(View.GONE); + } + }).start(); + } + } + checkButtonAlpha(); + } + + boolean pressedState; + boolean buttonInAlpha; + + private void checkButtonAlpha() { + boolean alpha = pressedState || showAlert; + if (buttonInAlpha != alpha) { + buttonInAlpha = alpha; + if (buttonInAlpha) { + windowView.animate().alpha(1f).start(); + } else { + windowView.animate().alpha(0.7f).start(); + } + button.setPressedState(alpha); + } + } + + public static GroupCallPip getInstance() { + return instance; + } + + private void remove() { + if (instance == null) { + return; + } + removed = true; + forceRemoved = true; + button.removed = true; + + instance.showAlert(false); + + float cx = windowLayoutParams.x + windowView.getMeasuredWidth() / 2f; + float cy = windowLayoutParams.y + windowView.getMeasuredHeight() / 2f; + + float cxRemove = windowLeft - windowOffsetLeft + windowRemoveTooltipView.getMeasuredWidth() / 2f; + float cyRemove = windowTop - windowOffsetTop + windowRemoveTooltipView.getMeasuredHeight() / 2f; + + float dx = cxRemove - cx; + float dy = cyRemove - cy; + + WindowManager windowManager = instance.windowManager; + View windowView = instance.windowView; + View windowRemoveTooltipView = instance.windowRemoveTooltipView; + View windowRemoveTooltipOverlayView = instance.windowRemoveTooltipOverlayView; + View alert = instance.alertContainer; + + onDestroy(); + + instance = null; + AnimatorSet 
animatorSet = new AnimatorSet(); + + long moveDuration = 350; + long additionalDuration = 0; + if (deleteIcon.getCurrentFrame() < 33) { + additionalDuration = (long) ((1f - deleteIcon.getCurrentFrame() / 33f) * deleteIcon.getDuration() / 2); + } + + ValueAnimator animator = ValueAnimator.ofFloat(windowLayoutParams.x, windowLayoutParams.x + dx); + animator.addUpdateListener(updateXlistener); + animator.setDuration(250).setInterpolator(CubicBezierInterpolator.DEFAULT); + animatorSet.playTogether(animator); + + animator = ValueAnimator.ofFloat(windowLayoutParams.y, windowLayoutParams.y + dy - AndroidUtilities.dp(30), windowLayoutParams.y + dy); + animator.addUpdateListener(updateYlistener); + animator.setDuration(250).setInterpolator(CubicBezierInterpolator.DEFAULT); + + animatorSet.playTogether(animator); + animatorSet.playTogether(ObjectAnimator.ofFloat(windowView, View.SCALE_X, windowView.getScaleX(), 0.1f).setDuration(180)); + animatorSet.playTogether(ObjectAnimator.ofFloat(windowView, View.SCALE_Y, windowView.getScaleY(), 0.1f).setDuration(180)); + + ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(windowView, View.ALPHA, 1f, 0f); + alphaAnimator.setStartDelay((long) (moveDuration * 0.7f)); + alphaAnimator.setDuration((long) (moveDuration * 0.3f)); + animatorSet.playTogether(alphaAnimator); + + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.groupCallVisibilityChanged), moveDuration + 20); + + moveDuration += 180 + additionalDuration; + + ObjectAnimator o = ObjectAnimator.ofFloat(removeTooltipView, View.SCALE_X, 1f, 1.05f); + o.setDuration(moveDuration); + o.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + animatorSet.playTogether(o); + + o = ObjectAnimator.ofFloat(removeTooltipView, View.SCALE_Y, 1f, 1.05f); + o.setDuration(moveDuration); + o.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + animatorSet.playTogether(o); + + o = ObjectAnimator.ofFloat(removeTooltipView, 
View.SCALE_X, 1f, 0.3f); + o.setStartDelay(moveDuration); + o.setDuration(350); + o.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + animatorSet.playTogether(o); + + o = ObjectAnimator.ofFloat(removeTooltipView, View.SCALE_Y, 1f, 0.3f); + o.setStartDelay(moveDuration); + o.setDuration(350); + o.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + animatorSet.playTogether(o); + + o = ObjectAnimator.ofFloat(removeTooltipView, View.TRANSLATION_Y, 0, AndroidUtilities.dp(60)); + o.setStartDelay(moveDuration); + o.setDuration(350); + o.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + animatorSet.playTogether(o); + + o = ObjectAnimator.ofFloat(removeTooltipView, View.ALPHA, 1f, 0); + o.setStartDelay(moveDuration); + o.setDuration(350); + o.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + animatorSet.playTogether(o); + + + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + NotificationCenter.getInstance(currentAccount).doOnIdle(() -> { + windowView.setVisibility(View.GONE); + windowRemoveTooltipView.setVisibility(View.GONE); + windowManager.removeView(windowView); + windowManager.removeView(windowRemoveTooltipView); + windowManager.removeView(windowRemoveTooltipOverlayView); + windowManager.removeView(alert); + }); + } + }); + animatorSet.start(); + deleteIcon.setCustomEndFrame(66); + iconView.stopAnimation(); + iconView.playAnimation(); + } + + private void updateAvatars(boolean animated) { + if (avatarsImageView.transitionProgressAnimator == null) { + ChatObject.Call call; + + if (VoIPService.getSharedInstance() != null) { + call = VoIPService.getSharedInstance().groupCall; + } else { + call = null; + } + if (call != null) { + for (int a = 0, N = call.sortedParticipants.size(), k = 0; k < 2; a++) { + if (a < N) { + TLRPC.TL_groupCallParticipant participant = call.sortedParticipants.get(a); + if (participant.user_id == 
UserConfig.getInstance(currentAccount).clientUserId || (SystemClock.uptimeMillis() - participant.lastSpeakTime > 500)) { + continue; + } + avatarsImageView.setObject(k, currentAccount, participant); + k++; + } else { + avatarsImageView.setObject(k, currentAccount, null); + k++; + } + } + avatarsImageView.setObject(2, currentAccount, null); + avatarsImageView.commitTransition(animated); + } else { + for (int a = 0; a < 3; a++) { + avatarsImageView.setObject(a, currentAccount, null); + } + avatarsImageView.commitTransition(animated); + } + } else { + avatarsImageView.updateAfterTransitionEnd(); + } + } + + public static void show(Context context, int account) { + if (instance != null) { + return; + } + instance = new GroupCallPip(context, account); + WindowManager wm = (WindowManager) ApplicationLoader.applicationContext.getSystemService(Context.WINDOW_SERVICE); + instance.windowManager = wm; + + + WindowManager.LayoutParams windowLayoutParams = createWindowLayoutParams(context); + windowLayoutParams.width = WindowManager.LayoutParams.MATCH_PARENT; + windowLayoutParams.height = WindowManager.LayoutParams.MATCH_PARENT; + windowLayoutParams.dimAmount = 0.25f; + windowLayoutParams.flags = FLAG_NOT_FOCUSABLE | + WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS | + WindowManager.LayoutParams.FLAG_DIM_BEHIND; + + wm.addView(instance.alertContainer, windowLayoutParams); + instance.alertContainer.setVisibility(View.GONE); + + windowLayoutParams = createWindowLayoutParams(context); + windowLayoutParams.gravity = Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL; + windowLayoutParams.width = AndroidUtilities.dp(100); + windowLayoutParams.height = AndroidUtilities.dp(150); + wm.addView(instance.windowRemoveTooltipView, windowLayoutParams); + + windowLayoutParams = createWindowLayoutParams(context); + instance.windowLayoutParams = windowLayoutParams; + wm.addView(instance.windowView, windowLayoutParams); + + windowLayoutParams = createWindowLayoutParams(context); + 
windowLayoutParams.gravity = Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL; + windowLayoutParams.width = AndroidUtilities.dp(100); + windowLayoutParams.height = AndroidUtilities.dp(150); + wm.addView(instance.windowRemoveTooltipOverlayView, windowLayoutParams); + + instance.windowRemoveTooltipView.setVisibility(View.GONE); + + instance.windowView.setScaleX(0.5f); + instance.windowView.setScaleY(0.5f); + instance.windowView.setAlpha(0f); + instance.windowView.animate().alpha(0.7f).scaleY(1f).scaleX(1f).setDuration(350).setInterpolator(new OvershootInterpolator()).start(); + + NotificationCenter.getInstance(instance.currentAccount).addObserver(instance, NotificationCenter.groupCallUpdated); + NotificationCenter.getGlobalInstance().addObserver(instance, NotificationCenter.webRtcSpeakerAmplitudeEvent); + NotificationCenter.getGlobalInstance().addObserver(instance, NotificationCenter.didEndCall); + } + + private void onDestroy() { + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.groupCallUpdated); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.webRtcSpeakerAmplitudeEvent); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.groupCallVisibilityChanged); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didEndCall); + } + + private void setPosition(float xRelative, float yRelative) { + float paddingHorizontal = -AndroidUtilities.dp(36); + float w = AndroidUtilities.displaySize.x - paddingHorizontal * 2; + + windowLayoutParams.x = (int) (paddingHorizontal + (w - AndroidUtilities.dp(105)) * xRelative); + windowLayoutParams.y = (int) ((AndroidUtilities.displaySize.y - AndroidUtilities.dp(105)) * yRelative); + updateAvatarsPosition(); + if (windowView.getParent() != null) { + windowManager.updateViewLayout(windowView, windowLayoutParams); + } + } + + + public static void finish() { + if (instance != null) { + instance.showAlert(false); + 
WindowManager windowManager = instance.windowManager; + View windowView = instance.windowView; + View windowRemoveTooltipView = instance.windowRemoveTooltipView; + View windowRemoveTooltipOverlayView = instance.windowRemoveTooltipOverlayView; + View alert = instance.alertContainer; + instance.windowView.animate().scaleX(0.5f).scaleY(0.5f).alpha(0).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (windowView.getParent() != null) { + windowView.setVisibility(View.GONE); + windowRemoveTooltipView.setVisibility(View.GONE); + windowRemoveTooltipOverlayView.setVisibility(View.GONE); + windowManager.removeView(windowView); + windowManager.removeView(windowRemoveTooltipView); + windowManager.removeView(windowRemoveTooltipOverlayView); + windowManager.removeView(alert); + } + } + }).start(); + + instance.onDestroy(); + instance = null; + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.groupCallVisibilityChanged); + } + } + + private static WindowManager.LayoutParams createWindowLayoutParams(Context context) { + WindowManager.LayoutParams windowLayoutParams = new WindowManager.LayoutParams(); + + windowLayoutParams.height = AndroidUtilities.dp(105); + windowLayoutParams.width = AndroidUtilities.dp(105); + + windowLayoutParams.gravity = Gravity.TOP | Gravity.LEFT; + windowLayoutParams.format = PixelFormat.TRANSLUCENT; + + if (AndroidUtilities.checkInlinePermissions(context)) { + if (Build.VERSION.SDK_INT >= 26) { + windowLayoutParams.type = WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY; + } else { + windowLayoutParams.type = WindowManager.LayoutParams.TYPE_SYSTEM_ALERT; + } + } else { + windowLayoutParams.type = WindowManager.LayoutParams.LAST_APPLICATION_WINDOW; + } + + windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | + WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS; + + return windowLayoutParams; + } + + + AnimatorSet showRemoveAnimator; + + void 
showRemoveTooltip(boolean show) { + if (animateToShowRemoveTooltip != show) { + animateToShowRemoveTooltip = show; + if (showRemoveAnimator != null) { + showRemoveAnimator.removeAllListeners(); + showRemoveAnimator.cancel(); + } + if (show) { + if (windowRemoveTooltipView.getVisibility() != View.VISIBLE) { + windowRemoveTooltipView.setVisibility(View.VISIBLE); + removeTooltipView.setAlpha(0); + removeTooltipView.setScaleX(0.5f); + removeTooltipView.setScaleY(0.5f); + deleteIcon.setCurrentFrame(0); + } + showRemoveAnimator = new AnimatorSet(); + showRemoveAnimator.playTogether( + ObjectAnimator.ofFloat(removeTooltipView, View.ALPHA, removeTooltipView.getAlpha(), 1f), + ObjectAnimator.ofFloat(removeTooltipView, View.SCALE_X, removeTooltipView.getScaleX(), 1f), + ObjectAnimator.ofFloat(removeTooltipView, View.SCALE_Y, removeTooltipView.getScaleY(), 1f) + ); + showRemoveAnimator.setDuration(150).start(); + } else { + showRemoveAnimator = new AnimatorSet(); + showRemoveAnimator.playTogether( + ObjectAnimator.ofFloat(removeTooltipView, View.ALPHA, removeTooltipView.getAlpha(), 0f), + ObjectAnimator.ofFloat(removeTooltipView, View.SCALE_X, removeTooltipView.getScaleX(), 0.5f), + ObjectAnimator.ofFloat(removeTooltipView, View.SCALE_Y, removeTooltipView.getScaleY(), 0.5f) + ); + showRemoveAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + windowRemoveTooltipView.setVisibility(View.GONE); + animateToPrepareRemove = false; + prepareToRemoveProgress = 0f; + } + }); + showRemoveAnimator.setDuration(150); + showRemoveAnimator.start(); + } + } + } + + void prepareToRemove(boolean prepare) { + if (animateToPrepareRemove != prepare) { + animateToPrepareRemove = prepare; + removeTooltipView.invalidate(); + + if (!removed) { + deleteIcon.setCustomEndFrame(prepare ? 
33 : 0); + iconView.playAnimation(); + } + if (prepare) { + button.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + } + button.prepareToRemove(prepare); + } + + boolean animateToPinnedToCenter = false; + float pinnedProgress = 0f; + ValueAnimator pinAnimator; + + void pinnedToCenter(boolean pinned) { + if (removed) { + return; + } + if (animateToPinnedToCenter != pinned) { + animateToPinnedToCenter = pinned; + if (pinAnimator != null) { + pinAnimator.removeAllListeners(); + pinAnimator.cancel(); + } + pinAnimator = ValueAnimator.ofFloat(pinnedProgress, pinned ? 1f : 0); + pinAnimator.addUpdateListener(valueAnimator -> { + if (removed) { + return; + } + pinnedProgress = (float) valueAnimator.getAnimatedValue(); + button.setPinnedProgress(pinnedProgress); + windowView.setScaleX(1f - 0.6f * pinnedProgress); + windowView.setScaleY(1f - 0.6f * pinnedProgress); + if (moving) { + updateButtonPosition(); + } + }); + pinAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (removed) { + return; + } + pinnedProgress = pinned ? 
1f : 0f; + button.setPinnedProgress(pinnedProgress); + windowView.setScaleX(1f - 0.6f * pinnedProgress); + windowView.setScaleY(1f - 0.6f * pinnedProgress); + if (moving) { + updateButtonPosition(); + } + } + }); + pinAnimator.setDuration(250); + pinAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + pinAnimator.start(); + } + } + + private void updateButtonPosition() { + float cxRemove = windowLeft - windowOffsetLeft + windowRemoveTooltipView.getMeasuredWidth() / 2f - windowView.getMeasuredWidth() / 2f; + float cyRemove = windowTop - windowOffsetTop + windowRemoveTooltipView.getMeasuredHeight() / 2f - windowView.getMeasuredHeight() / 2f - AndroidUtilities.dp(25); + + windowLayoutParams.x = (int) (windowX * (1f - pinnedProgress) + cxRemove * pinnedProgress); + windowLayoutParams.y = (int) (windowY * (1f - pinnedProgress) + cyRemove * pinnedProgress); + + updateAvatarsPosition(); + if (windowView.getParent() != null) { + windowManager.updateViewLayout(windowView, windowLayoutParams); + } + } + + private void updateAvatarsPosition() { + float x = Math.max(windowLayoutParams.x, -AndroidUtilities.dp(36)); + int parentWidth = AndroidUtilities.displaySize.x; + x = Math.min(x, (parentWidth - windowView.getMeasuredWidth() + AndroidUtilities.dp(36))); + if (x < 0) { + avatarsImageView.setTranslationX(Math.abs(x) / 3f); + } else if (x > parentWidth - windowView.getMeasuredWidth()) { + avatarsImageView.setTranslationX(- Math.abs(x - (parentWidth - windowView.getMeasuredWidth())) / 3f); + } else { + avatarsImageView.setTranslationX(0); + } + } + + @Override + public void didReceivedNotification(int id, int account, Object... 
args) { + if (id == NotificationCenter.groupCallUpdated || id == NotificationCenter.webRtcSpeakerAmplitudeEvent) { + updateAvatars(true); + } else if (id == NotificationCenter.didEndCall) { + updateVisibility(ApplicationLoader.applicationContext); + } + } + + private void getRelativePosition(float x, float y, float[] point) { + float width = AndroidUtilities.displaySize.x; + float height = AndroidUtilities.displaySize.y; + + float paddingHorizontal = -AndroidUtilities.dp(36); + + point[0] = (x - paddingHorizontal) / (width - 2 * paddingHorizontal - AndroidUtilities.dp(105)); + point[1] = y / (height - AndroidUtilities.dp(105)); + point[0] = Math.min(1f, Math.max(0, point[0])); + point[1] = Math.min(1f, Math.max(0, point[1])); + } + + public static void updateVisibility(Context context) { + VoIPService service = VoIPService.getSharedInstance(); + + boolean groupCall = false; + if (service != null && service.groupCall != null && !service.isHangingUp()) { + groupCall = true; + } + boolean visible; + if (!AndroidUtilities.checkInlinePermissions(ApplicationLoader.applicationContext)) { + visible = false; + } else { + visible = groupCall && !forceRemoved && (ApplicationLoader.mainInterfaceStopped || !GroupCallActivity.groupCallUiVisible); + } + if (visible) { + show(context, service.getAccount()); + instance.showAvatars(true); + } else { + finish(); + } + } + + private void showAvatars(boolean show) { + boolean isShowing = avatarsImageView.getTag() != null; + if (show != isShowing) { + avatarsImageView.animate().setListener(null).cancel(); + if (show) { + if (avatarsImageView.getVisibility() != View.VISIBLE) { + avatarsImageView.setVisibility(View.VISIBLE); + avatarsImageView.setAlpha(0f); + avatarsImageView.setScaleX(0.5f); + avatarsImageView.setScaleY(0.5f); + } + avatarsImageView.animate().alpha(1f).scaleX(1).scaleY(1f).setDuration(150).start(); + } else { + avatarsImageView.animate().alpha(0).scaleX(0.5f).scaleY(0.5f).setDuration(150).setListener(new 
AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + avatarsImageView.setVisibility(View.GONE); + } + }).start(); + } + avatarsImageView.setTag(show ? 1 : null); + } + } + + public static void clearForce() { + forceRemoved = false; + } + + public static boolean checkInlinePermissions() { + if (Build.VERSION.SDK_INT < 23 || ApplicationLoader.canDrawOverlays) { + return true; + } + return false; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipAlertView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipAlertView.java new file mode 100644 index 000000000..b7a5ae41c --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipAlertView.java @@ -0,0 +1,392 @@ +package org.telegram.ui.Components; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.content.Intent; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Paint; +import android.graphics.RectF; +import android.graphics.Shader; +import android.os.Build; +import android.os.Vibrator; +import android.provider.Settings; +import android.text.TextUtils; +import android.view.Gravity; +import android.view.accessibility.AccessibilityNodeInfo; +import android.widget.FrameLayout; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.FileLog; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.voip.VoIPBaseService; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; 
+import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.voip.VoIPButtonsLayout; +import org.telegram.ui.Components.voip.VoIPToggleButton; +import org.telegram.ui.GroupCallActivity; +import org.telegram.ui.LaunchActivity; + +public class GroupCallPipAlertView extends LinearLayout implements VoIPBaseService.StateListener, NotificationCenter.NotificationCenterDelegate { + + public static final int POSITION_LEFT = 0; + public static final int POSITION_RIGHT = 1; + public static final int POSITION_BOTTOM = 2; + public static final int POSITION_TOP = 3; + + FrameLayout groupInfoContainer; + TextView titleView; + TextView subtitleView; + + VoIPToggleButton soundButton; + VoIPToggleButton muteButton; + VoIPToggleButton leaveButton; + + BackupImageView avatarImageView; + + RectF rectF = new RectF(); + + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + LinearGradient linearGradient; + private int position; + + float cx; + float cy; + private boolean invalidateGradient = true; + int currentAccount; + private boolean mutedByAdmin; + + public GroupCallPipAlertView(Context context, int account) { + super(context); + setOrientation(VERTICAL); + currentAccount = account; + + paint.setAlpha((int) (255 * 0.92f)); + + groupInfoContainer = new FrameLayout(context) { + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, LocaleController.getString("VoipGroupOpenVoiceChat", R.string.VoipGroupOpenVoiceChat))); + } + } + }; + groupInfoContainer.setPadding(AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8)); + + avatarImageView = new BackupImageView(context); + avatarImageView.setRoundRadius(AndroidUtilities.dp(22)); + groupInfoContainer.addView(avatarImageView, 
LayoutHelper.createFrame(44, 44)); + groupInfoContainer.setBackground(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(6), 0, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.3f)))); + groupInfoContainer.setOnClickListener(view -> { + if (VoIPService.getSharedInstance() != null) { + Intent intent = new Intent(getContext(), LaunchActivity.class).setAction("voip_chat"); + intent.putExtra("currentAccount", VoIPService.getSharedInstance().getAccount()); + getContext().startActivity(intent); + } + }); + + LinearLayout linearLayout = new LinearLayout(context); + linearLayout.setOrientation(LinearLayout.VERTICAL); + + titleView = new TextView(context); + titleView.setTextColor(Color.WHITE); + titleView.setTextSize(15); + titleView.setMaxLines(2); + titleView.setEllipsize(TextUtils.TruncateAt.END); + titleView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + linearLayout.addView(titleView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + subtitleView = new TextView(context); + subtitleView.setTextSize(12); + subtitleView.setTextColor(ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.6f))); + + linearLayout.addView(subtitleView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + groupInfoContainer.addView(linearLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 55, 0, 0, 0)); + + addView(groupInfoContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 10, 10 ,10 ,10)); + + soundButton = new VoIPToggleButton(context, 44f); + soundButton.setTextSize(12); + soundButton.setOnClickListener(v -> { + if (VoIPService.getSharedInstance() == null) { + return; + } + VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(getContext(), Build.VERSION.SDK_INT < 23 || Settings.canDrawOverlays(context)); + }); + soundButton.setCheckable(true); + 
soundButton.setBackgroundColor(ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.15f)), ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.3f))); + + muteButton = new VoIPToggleButton(context, 44f); + muteButton.setTextSize(12); + muteButton.setOnClickListener(v -> { + if (VoIPService.getSharedInstance() != null) { + if (VoIPService.getSharedInstance().mutedByAdmin()) { + muteButton.shakeView(); + try { + Vibrator vibrator = (Vibrator) context.getSystemService(Context.VIBRATOR_SERVICE); + if (vibrator != null) { + vibrator.vibrate(200); + } + } catch (Exception e) { + FileLog.e(e); + } + } else { + VoIPService.getSharedInstance().setMicMute(!VoIPService.getSharedInstance().isMicMute(), false, true); + } + } + }); + + leaveButton = new VoIPToggleButton(context, 44f); + leaveButton.setTextSize(12); + leaveButton.setData(R.drawable.calls_decline, 0xffffffff, 0xFFCE4A4A, 0.3f, false, LocaleController.getString("VoipGroupLeave", R.string.VoipGroupLeave), false, false); + leaveButton.setOnClickListener(v -> GroupCallActivity.onLeaveClick(getContext(), () -> GroupCallPip.updateVisibility(context), Build.VERSION.SDK_INT < 23 || Settings.canDrawOverlays(context))); + + + VoIPButtonsLayout buttonsContainer = new VoIPButtonsLayout(context); + buttonsContainer.setChildSize(68); + buttonsContainer.setUseStartPadding(false); + buttonsContainer.addView(soundButton, LayoutHelper.createFrame(68, 63)); + buttonsContainer.addView(muteButton, LayoutHelper.createFrame(68, 63)); + buttonsContainer.addView(leaveButton, LayoutHelper.createFrame(68, 63)); + setWillNotDraw(false); + + addView(buttonsContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 6, 0, 6, 0)); + } + + float muteProgress; + float mutedByAdminProgress; + + @SuppressLint("DrawAllocation") + @Override + protected void onDraw(Canvas canvas) { + boolean isMute = VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute() || mutedByAdmin; + if 
(isMute && muteProgress != 1f) { + muteProgress += 16f / 150f; + if (muteProgress >= 1f) { + muteProgress = 1f; + } + invalidateGradient = true; + invalidate(); + } else if (!isMute && muteProgress != 0){ + muteProgress -= 16f / 150f; + if (muteProgress < 0f) { + muteProgress = 0f; + } + invalidateGradient = true; + invalidate(); + } + + if (mutedByAdmin && mutedByAdminProgress != 1f) { + mutedByAdminProgress += 16f / 150f; + if (mutedByAdminProgress >= 1f) { + mutedByAdminProgress = 1f; + } + invalidateGradient = true; + invalidate(); + } else if (!mutedByAdmin && mutedByAdminProgress != 0){ + mutedByAdminProgress -= 16f / 150f; + if (mutedByAdminProgress < 0f) { + mutedByAdminProgress = 0f; + } + invalidateGradient = true; + invalidate(); + } + if (invalidateGradient) { + int color1 = ColorUtils.blendARGB(Theme.getColor(Theme.key_voipgroup_overlayAlertGradientMuted), Theme.getColor(Theme.key_voipgroup_overlayAlertGradientUnmuted), (1f - muteProgress)); + int color2 = ColorUtils.blendARGB(Theme.getColor(Theme.key_voipgroup_overlayAlertGradientMuted2), Theme.getColor(Theme.key_voipgroup_overlayAlertGradientUnmuted2), (1f - muteProgress)); + + color1 = ColorUtils.blendARGB(color1, Theme.getColor(Theme.key_voipgroup_overlayAlertMutedByAdmin), mutedByAdminProgress); + color2 = ColorUtils.blendARGB(color2, Theme.getColor(Theme.key_voipgroup_overlayAlertMutedByAdmin2), mutedByAdminProgress); + + invalidateGradient = false; + if (position == POSITION_LEFT) { + linearGradient = new LinearGradient(-AndroidUtilities.dp(60), cy - getTranslationY(), getMeasuredWidth(), getMeasuredHeight() / 2f, new int[]{color1, color2}, null, Shader.TileMode.CLAMP); + } else if (position == POSITION_RIGHT) { + linearGradient = new LinearGradient(0, getMeasuredHeight() / 2f, getMeasuredWidth() + AndroidUtilities.dp(60), cy - getTranslationY(), new int[]{color2, color1}, null, Shader.TileMode.CLAMP); + } else if (position == POSITION_BOTTOM) { + linearGradient = new LinearGradient(cx - 
getTranslationX(), -AndroidUtilities.dp(60), getMeasuredWidth() / 2f, getMeasuredHeight(), new int[]{color1, color2}, null, Shader.TileMode.CLAMP); + } else { + linearGradient = new LinearGradient(getMeasuredWidth() / 2f, 0, cx - getTranslationX(), getMeasuredHeight() + + AndroidUtilities.dp(60), new int[]{color2, color1}, null, Shader.TileMode.CLAMP); + } + } + rectF.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + paint.setShader(linearGradient); + canvas.drawRoundRect(rectF, AndroidUtilities.dp(10), AndroidUtilities.dp(10), paint); + float x, y; + if (position == POSITION_LEFT) { + y = cy - getTranslationY(); + x = 0; + } else if (position == POSITION_RIGHT) { + y = cy - getTranslationY(); + x = getMeasuredWidth(); + } else if (position == POSITION_BOTTOM) { + x = cx - getTranslationX(); + y = 0; + } else { + x = cx - getTranslationX(); + y = getMeasuredHeight(); + } + setPivotX(x); + setPivotY(y); + + canvas.save(); + if (position == POSITION_LEFT) { + canvas.clipRect(x - AndroidUtilities.dp(15), y - AndroidUtilities.dp(15), x, y + AndroidUtilities.dp(15)); + canvas.translate(AndroidUtilities.dp(3), 0); + canvas.rotate(45, x, y); + } else if (position == POSITION_RIGHT) { + canvas.clipRect(x, y - AndroidUtilities.dp(15), x + AndroidUtilities.dp(15), y + AndroidUtilities.dp(15)); + canvas.translate(-AndroidUtilities.dp(3), 0); + canvas.rotate(45, x, y); + } else if (position == POSITION_BOTTOM) { + canvas.clipRect(x - AndroidUtilities.dp(15) , y - AndroidUtilities.dp(15), x + AndroidUtilities.dp(15), y); + canvas.rotate(45, x, y); + canvas.translate(0, AndroidUtilities.dp(3)); + } else { + canvas.clipRect(x - AndroidUtilities.dp(15) , y, x + AndroidUtilities.dp(15), y + AndroidUtilities.dp(15)); + canvas.rotate(45, x, y); + canvas.translate(0, -AndroidUtilities.dp(3)); + } + + rectF.set(x - AndroidUtilities.dp(10), y - AndroidUtilities.dp(10), x + AndroidUtilities.dp(10), y + AndroidUtilities.dp(10)); + + canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), 
AndroidUtilities.dp(4), paint); + canvas.restore(); + + super.onDraw(canvas); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(230), MeasureSpec.EXACTLY), heightMeasureSpec); + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + VoIPService service = VoIPService.getSharedInstance(); + if (service != null && service.groupCall != null) { + int color2 = AvatarDrawable.getColorForId(service.getChat().id); + AvatarDrawable avatarDrawable = new AvatarDrawable(); + avatarDrawable.setColor(color2); + avatarDrawable.setInfo(service.getChat()); + avatarImageView.setImage(ImageLocation.getForLocal(service.getChat().photo.photo_small), "50_50", avatarDrawable, null); + + titleView.setText(service.getChat().title); + updateMembersCount(); + service.registerStateListener(this); + + if (VoIPService.getSharedInstance() != null) { + mutedByAdmin = VoIPService.getSharedInstance().mutedByAdmin(); + } + mutedByAdminProgress = mutedByAdmin ? 1f : 0; + boolean isMute = VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute() || mutedByAdmin; + muteProgress = isMute ? 
1f : 0f; + } + NotificationCenter.getInstance(currentAccount).addObserver(this,NotificationCenter.groupCallUpdated); + updateButtons(false); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + VoIPService service = VoIPService.getSharedInstance(); + if (service != null) { + service.unregisterStateListener(this); + } + + NotificationCenter.getInstance(currentAccount).removeObserver(this,NotificationCenter.groupCallUpdated); + } + + private void updateMembersCount() { + VoIPService service = VoIPService.getSharedInstance(); + if (service != null && service.groupCall != null) { + int currentCallState = service.getCallState(); + if (currentCallState == VoIPService.STATE_WAIT_INIT || currentCallState == VoIPService.STATE_WAIT_INIT_ACK || currentCallState == VoIPService.STATE_CREATING || currentCallState == VoIPService.STATE_RECONNECTING) { + subtitleView.setText(LocaleController.getString("VoipGroupConnecting", R.string. VoipGroupConnecting)); + } else { + subtitleView.setText(LocaleController.formatPluralString("Participants", service.groupCall.call.participants_count)); + } + } + } + + private void updateButtons(boolean animated) { + if (soundButton == null || muteButton == null) { + return; + } + VoIPService service = VoIPService.getSharedInstance(); + if (service == null) { + return; + } + + boolean bluetooth = service.isBluetoothOn(); + boolean checked = !bluetooth && service.isSpeakerphoneOn(); + soundButton.setChecked(checked, animated); + + + + if (bluetooth) { + soundButton.setData(R.drawable.calls_bluetooth, Color.WHITE, 0, 0.1f, true, LocaleController.getString("VoipAudioRoutingBluetooth", R.string.VoipAudioRoutingBluetooth), false, animated); + } else if (checked) { + soundButton.setData(R.drawable.calls_speaker, Color.WHITE, 0, 0.3f, true, LocaleController.getString("VoipSpeaker", R.string.VoipSpeaker), false, animated); + } else { + if (service.isHeadsetPlugged()) { + 
soundButton.setData(R.drawable.calls_headphones, Color.WHITE, 0, 0.1f, true, LocaleController.getString("VoipAudioRoutingHeadset", R.string.VoipAudioRoutingHeadset), false, animated); + } else { + soundButton.setData(R.drawable.calls_speaker, Color.WHITE, 0, 0.1f, true, LocaleController.getString("VoipSpeaker", R.string.VoipSpeaker), false, animated); + } + } + + if (service.mutedByAdmin()) { + muteButton.setData(R.drawable.calls_unmute, Color.WHITE, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.3f)), 0.1f, true, LocaleController.getString("VoipMutedByAdminShort", R.string.VoipMutedByAdminShort), true, animated); + } else { + muteButton.setData(R.drawable.calls_unmute, Color.WHITE, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * (service.isMicMute() ? 0.3f : 0.15f))), 0.1f, true, service.isMicMute() ? LocaleController.getString("VoipUnmute", R.string.VoipUnmute) : LocaleController.getString("VoipMute", R.string.VoipMute), service.isMicMute(), animated); + } + invalidate(); + } + + @Override + public void onAudioSettingsChanged() { + updateButtons(true); + } + + @Override + public void onStateChanged(int state) { + updateMembersCount(); + } + + public void setPosition(int position, float cx, float cy) { + this.position = position; + this.cx = cx; + this.cy = cy; + invalidate(); + invalidateGradient = true; + } + + @Override + public void didReceivedNotification(int id, int account, Object... 
args) { + if (id == NotificationCenter.groupCallUpdated) { + updateMembersCount(); + if (VoIPService.getSharedInstance() != null) { + boolean mutedByAdminNew = VoIPService.getSharedInstance().mutedByAdmin(); + if (mutedByAdminNew != mutedByAdmin) { + mutedByAdmin = mutedByAdminNew; + invalidate(); + } + } + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipButton.java new file mode 100644 index 000000000..8921e78cf --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipButton.java @@ -0,0 +1,531 @@ +package org.telegram.ui.Components; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.RadialGradient; +import android.graphics.Shader; +import android.os.Build; +import android.os.SystemClock; +import android.view.MotionEvent; +import android.view.View; +import android.view.accessibility.AccessibilityNodeInfo; +import android.view.animation.OvershootInterpolator; +import android.widget.FrameLayout; +import android.widget.ImageView; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.Utilities; +import org.telegram.messenger.voip.VoIPBaseService; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; + +import java.util.Random; + +public class GroupCallPipButton extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, VoIPBaseService.StateListener { + + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + 
BlobDrawable blobDrawable = new BlobDrawable(8); + BlobDrawable blobDrawable2 = new BlobDrawable(9); + + float amplitude; + float animateToAmplitude; + float animateAmplitudeDiff; + + WeavingState currentState; + WeavingState previousState; + + float progressToState = 1f; + + boolean prepareToRemove; + float progressToPrepareRemove; + private final LinearGradient prepareToRemoveShader; + Matrix matrix = new Matrix(); + + float wavesEnter = 0f; + private final int currentAccount; + + public final static int MUTE_BUTTON_STATE_MUTE = 1; + public final static int MUTE_BUTTON_STATE_UNMUTE = 0; + public final static int MUTE_BUTTON_STATE_RECONNECT = 2; + public final static int MUTE_BUTTON_STATE_MUTED_BY_ADMIN = 3; + + private RLottieImageView muteButton; + private RLottieDrawable bigMicDrawable; + long lastStubUpdateAmplitude; + + private boolean stub; + Random random = new Random(); + public boolean removed; + + public GroupCallPipButton(Context context, int currentAccount, boolean stub) { + super(context); + this.stub = stub; + this.currentAccount = currentAccount; + + for (int i = 0; i < 4; i++) { + states[i] = new WeavingState(i); + } + + blobDrawable.maxRadius = AndroidUtilities.dp(37); + blobDrawable.minRadius = AndroidUtilities.dp(32); + blobDrawable2.maxRadius = AndroidUtilities.dp(37); + blobDrawable2.minRadius = AndroidUtilities.dp(32); + blobDrawable.generateBlob(); + blobDrawable2.generateBlob(); + + bigMicDrawable = new RLottieDrawable(R.raw.voice_outlined, "" + R.raw.voice_outlined, AndroidUtilities.dp(22), AndroidUtilities.dp(30), true, null); + setWillNotDraw(false); + + muteButton = new RLottieImageView(context); + muteButton.setAnimation(bigMicDrawable); + muteButton.setScaleType(ImageView.ScaleType.CENTER); + addView(muteButton); + + prepareToRemoveShader = new LinearGradient(0, 0, AndroidUtilities.dp(100 + 250), 0, new int[] {0xFFD54141, 0xFFF76E7E, Color.TRANSPARENT}, new float[] {0, 0.4f, 1f}, Shader.TileMode.CLAMP); + + if (stub) { + 
setState(MUTE_BUTTON_STATE_UNMUTE); + } + } + + WeavingState[] states = new WeavingState[4]; + boolean pressedState; + float pressedProgress; + float pinnedProgress; + + public void setPressedState(boolean pressedState) { + this.pressedState = pressedState; + } + + public void setPinnedProgress(float pinnedProgress) { + this.pinnedProgress = pinnedProgress; + } + + + public static class WeavingState { + + private float targetX = -1f; + private float targetY = -1f; + private float startX; + private float startY; + private float duration; + private float time; + + public Shader shader; + private final Matrix matrix = new Matrix(); + private final int currentState; + int color1; + int color2; + int color3; + + public WeavingState(int state) { + currentState = state; + } + + public void update(long dt, float amplitude) { + if (currentState == MUTE_BUTTON_STATE_UNMUTE) { + if (color1 != Theme.getColor(Theme.key_voipgroup_overlayGreen1) || color2 != Theme.getColor(Theme.key_voipgroup_overlayGreen2)) { + shader = new RadialGradient(200, 200, 200, new int[]{color1 = Theme.getColor(Theme.key_voipgroup_overlayGreen1), color2 = Theme.getColor(Theme.key_voipgroup_overlayGreen2)}, null, Shader.TileMode.CLAMP); + } + } else if (currentState == MUTE_BUTTON_STATE_MUTE) { + if (color1 != Theme.getColor(Theme.key_voipgroup_overlayBlue1) || color2 != Theme.getColor(Theme.key_voipgroup_overlayBlue2)) { + shader = new RadialGradient(200, 200, 200, new int[]{color1 = Theme.getColor(Theme.key_voipgroup_overlayBlue1), color2 = Theme.getColor(Theme.key_voipgroup_overlayBlue2)}, null, Shader.TileMode.CLAMP); + } + } else if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN){ + if (color1 != Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient) || color2 != Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient2) || color3 != Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient3)) { + shader = new RadialGradient(200, 200, 200, new int[]{color2 = 
Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient2), color3 = Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient3), color1 = Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient)}, null, Shader.TileMode.CLAMP); + } + } else { + return; + } + int width = AndroidUtilities.dp(130); + int height = width; + if (duration == 0 || time >= duration) { + duration = Utilities.random.nextInt(700) + 500; + time = 0; + if (targetX == -1f) { + updateTargets(); + } + startX = targetX; + startY = targetY; + updateTargets(); + } + time += dt * (0.5f + BlobDrawable.GRADIENT_SPEED_MIN) + dt * (BlobDrawable.GRADIENT_SPEED_MAX * 2) * amplitude; + if (time > duration) { + time = duration; + } + float interpolation = CubicBezierInterpolator.EASE_OUT.getInterpolation(time / duration); + float x = width * (startX + (targetX - startX) * interpolation) - 200; + float y = height * (startY + (targetY - startY) * interpolation) - 200; + + float s; + if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + s = 2f; + } else if (currentState == MUTE_BUTTON_STATE_UNMUTE) { + s = 1.5f; + } else { + s = 1.5f; + } + float scale = width / 400.0f * s; + matrix.reset(); + matrix.postTranslate(x, y); + matrix.postScale(scale, scale, x + 200, y + 200); + + shader.setLocalMatrix(matrix); + } + + private void updateTargets() { + if (currentState == MUTE_BUTTON_STATE_UNMUTE) { + targetX = 0.2f + 0.1f * Utilities.random.nextInt(100) / 100f; + targetY = 0.7f + 0.1f * Utilities.random.nextInt(100) / 100f; + } else if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + targetX = 0.6f + 0.1f * Utilities.random.nextInt(100) / 100f; + targetY = 0.1f * Utilities.random.nextInt(100) / 100f; + } else { + targetX = 0.8f + 0.2f * (Utilities.random.nextInt(100) / 100f); + targetY = Utilities.random.nextInt(100) / 100f; + } + } + + public void setToPaint(Paint paint) { + if (currentState == MUTE_BUTTON_STATE_RECONNECT) { + paint.setShader(null); + 
paint.setColor(Theme.getColor(Theme.key_voipgroup_topPanelGray)); + } else { + paint.setShader(shader); + } + } + } + + OvershootInterpolator overshootInterpolator = new OvershootInterpolator(); + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + if (getAlpha() == 0) { + return; + } + float cx = getMeasuredWidth() >> 1; + float cy = getMeasuredHeight() >> 1; + + if (pressedState && pressedProgress != 1f) { + pressedProgress += 16f / 150f; + if (pressedProgress > 1f) { + pressedProgress = 1f; + } + } else if (!pressedState && pressedProgress != 0) { + pressedProgress -= 16f / 150f; + if (pressedProgress < 0f) { + pressedProgress = 0f; + } + } + + float pressedProgress = CubicBezierInterpolator.DEFAULT.getInterpolation(this.pressedProgress); + muteButton.setScaleY(1f + 0.1f * pressedProgress); + muteButton.setScaleX(1f + 0.1f * pressedProgress); + + if (stub) { + long currentTime = System.currentTimeMillis(); + if (currentTime - lastStubUpdateAmplitude > 1000) { + lastStubUpdateAmplitude = currentTime; + animateToAmplitude = 0.5f + 0.5f * Math.abs(random.nextInt() % 100) / 100f; + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100 + 1500.0f * BlobDrawable.AMPLITUDE_SPEED); + } + } + + if (animateToAmplitude != amplitude) { + amplitude += animateAmplitudeDiff * 16; + if (animateAmplitudeDiff > 0) { + if (amplitude > animateToAmplitude) { + amplitude = animateToAmplitude; + } + } else { + if (amplitude < animateToAmplitude) { + amplitude = animateToAmplitude; + } + } + } + + if (previousState != null) { + progressToState += 16 / 250f; + if (progressToState > 1f) { + progressToState = 1f; + previousState = null; + } + } + + if (prepareToRemove && progressToPrepareRemove != 1f) { + progressToPrepareRemove += 16f / 350f; + if (progressToPrepareRemove > 1f) { + progressToPrepareRemove = 1f; + } + if (removed) { + invalidate(); + } + } else if (!prepareToRemove && progressToPrepareRemove != 0) { + progressToPrepareRemove -= 16f / 
350f; + if (progressToPrepareRemove < 0f) { + progressToPrepareRemove = 0f; + } + } + + boolean showWaves = true; + if (currentState.currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN || currentState.currentState == MUTE_BUTTON_STATE_RECONNECT) { + showWaves = false; + } + + if (showWaves && wavesEnter != 1f) { + wavesEnter += 16f / 350f; + if (wavesEnter > 1f) { + wavesEnter = 1f; + } + } else if (!showWaves && wavesEnter != 0f) { + wavesEnter -= 16f / 350f; + if (wavesEnter < 0f) { + wavesEnter = 0f; + } + } + + float wavesEnter = 0.65f + 0.35f * overshootInterpolator.getInterpolation(this.wavesEnter); + + blobDrawable.update(amplitude, stub ? 0.1f : 0.8f); + blobDrawable2.update(amplitude, stub ? 0.1f : 0.8f); + + + for (int i = 0; i < 3; i++) { + float alpha; + if (i == 0 && previousState == null) { + continue; + } + + if (i == 0) { + if (progressToPrepareRemove == 1f) { + continue; + } + alpha = 1f - progressToState; + previousState.update(16, amplitude); + previousState.setToPaint(paint); + } else if (i == 1) { + if (currentState == null) { + return; + } + if (progressToPrepareRemove == 1f) { + continue; + } + alpha = previousState != null ? 
progressToState : 1f; + currentState.update(16, amplitude); + currentState.setToPaint(paint); + } else { + if (progressToPrepareRemove == 0) { + continue; + } + alpha = 1f; + paint.setColor(Color.RED); + matrix.reset(); + matrix.postTranslate(-AndroidUtilities.dp(250) * (1f - progressToPrepareRemove), 0); + matrix.postRotate(removeAngle, cx, cy); + prepareToRemoveShader.setLocalMatrix(matrix); + paint.setShader(prepareToRemoveShader); + } + + blobDrawable.maxRadius = AndroidUtilities.dp(40); + blobDrawable.minRadius = AndroidUtilities.dp(32); + + blobDrawable2.maxRadius = AndroidUtilities.dp(38); + blobDrawable2.minRadius = AndroidUtilities.dp(33); + + + if (i != 2) { + paint.setAlpha((int) (76 * alpha * (1f - progressToPrepareRemove))); + } else { + paint.setAlpha((int) (76 * alpha * progressToPrepareRemove)); + } + + if (this.wavesEnter != 0) { + float scale = (1f + 0.3f * amplitude + 0.1f * pressedProgress) * (1f - pinnedProgress); + scale = Math.min(scale, 1.3f) * wavesEnter; + canvas.save(); + canvas.scale(scale, scale, cx, cy); + blobDrawable.draw(cx, cy, canvas, paint); + canvas.restore(); + + scale = (1f + 0.26f * amplitude + 0.1f * pressedProgress) * (1f - pinnedProgress); + scale = Math.min(scale, 1.3f) * wavesEnter; + canvas.save(); + canvas.scale(scale, scale, cx, cy); + blobDrawable2.draw(cx, cy, canvas, paint); + canvas.restore(); + } + + if (i == 2) { + paint.setAlpha((int) (255 * progressToPrepareRemove)); + } else if (i == 1) { + paint.setAlpha((int) (255 * alpha)); + } else { + paint.setAlpha(255); + } + canvas.save(); + canvas.scale(1f + 0.1f * pressedProgress,1f + 0.1f * pressedProgress, cx, cy); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(32), paint); + canvas.restore(); + } + + if (!removed && this.wavesEnter > 0) { + invalidate(); + } + } + + + public final static float MAX_AMPLITUDE = 8_500f; + + private void setAmplitude(double value) { + animateToAmplitude = (float) (Math.min(MAX_AMPLITUDE, value) / MAX_AMPLITUDE); + 
animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100 + 500.0f * BlobDrawable.AMPLITUDE_SPEED); + } + + public void setState(int state) { + if (currentState != null && currentState.currentState == state) { + return; + } + previousState = currentState; + currentState = states[state]; + if (previousState != null) { + progressToState = 0; + } else { + progressToState = 1; + boolean showWaves = true; + if (currentState.currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN || currentState.currentState == MUTE_BUTTON_STATE_RECONNECT) { + showWaves = false; + } + wavesEnter = showWaves ? 1f : 0f; + } + String contentDescription = LocaleController.getString("VoipGroupVoiceChat", R.string.VoipGroupVoiceChat); + if (state == MUTE_BUTTON_STATE_UNMUTE) { + contentDescription += ", " + LocaleController.getString("VoipTapToMute", R.string.VoipTapToMute); + } else if (state == MUTE_BUTTON_STATE_RECONNECT) { + contentDescription += ", " + LocaleController.getString("Connecting", R.string.Connecting); + } else if (state == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + contentDescription += ", " + LocaleController.getString("VoipMutedByAdmin", R.string.VoipMutedByAdmin); + } + setContentDescription(contentDescription); + invalidate(); + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && GroupCallPip.getInstance() != null) { + final String label = GroupCallPip.getInstance().showAlert ? 
LocaleController.getString("AccDescrCloseMenu", R.string.AccDescrCloseMenu) : LocaleController.getString("AccDescrOpenMenu2", R.string.AccDescrOpenMenu2); + info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, label)); + } + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (!stub) { + setAmplitude(0); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.groupCallUpdated); + + boolean isMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().registerStateListener(this); + } + bigMicDrawable.setCustomEndFrame(isMuted ? 13 : 24); + bigMicDrawable.setCurrentFrame(bigMicDrawable.getCustomEndFrame() - 1, false, true); + updateButtonState(); + } + } + + private void updateButtonState() { + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null) { + int currentCallState = VoIPService.getSharedInstance().getCallState(); + if (currentCallState == VoIPService.STATE_WAIT_INIT || currentCallState == VoIPService.STATE_WAIT_INIT_ACK || currentCallState == VoIPService.STATE_CREATING || currentCallState == VoIPService.STATE_RECONNECTING) { + setState(FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_CONNECTING); + } else { + TLRPC.TL_groupCallParticipant participant = VoIPService.getSharedInstance().groupCall.participants.get(AccountInstance.getInstance(VoIPService.getSharedInstance().getAccount()).getUserConfig().getClientUserId()); + if (participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(VoIPService.getSharedInstance().getChat())) { + if (!VoIPService.getSharedInstance().isMicMute()) { + VoIPService.getSharedInstance().setMicMute(true, false, 
false); + } + setState(FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_MUTED_BY_ADMIN); + final long now = SystemClock.uptimeMillis(); + final MotionEvent e = MotionEvent.obtain(now, now, MotionEvent.ACTION_CANCEL, 0, 0, 0); + if (getParent() != null) { + View parentView = (View) getParent(); + parentView.dispatchTouchEvent(e); + } + } else { + boolean isMuted = VoIPService.getSharedInstance().isMicMute(); + setState(isMuted ? FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_MUTE : FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_UNMUTE); + } + } + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + if (!stub) { + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.groupCallUpdated); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().unregisterStateListener(this); + } + } + } + + @Override + public void didReceivedNotification(int id, int account, Object... args) { + if (id == NotificationCenter.webRtcMicAmplitudeEvent) { + float amplitude = (float) args[0]; + setAmplitude(amplitude * 4000.0f); + } else if (id == NotificationCenter.groupCallUpdated) { + updateButtonState(); + } + } + + @Override + public void onAudioSettingsChanged() { + boolean isMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); + boolean changed = bigMicDrawable.setCustomEndFrame(isMuted ? 
13 : 24); + if (changed) { + if (isMuted) { + bigMicDrawable.setCurrentFrame(0); + } else { + bigMicDrawable.setCurrentFrame(12); + } + } + muteButton.playAnimation(); + updateButtonState(); + } + + @Override + public void onStateChanged(int state) { + updateButtonState(); + } + + float removeAngle; + + public void setRemoveAngle(double angle) { + removeAngle = (float) angle; + } + public void prepareToRemove(boolean prepare) { + if (this.prepareToRemove != prepare) { + invalidate(); + } + this.prepareToRemove = prepare; + + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java new file mode 100644 index 000000000..a5b8d4e26 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java @@ -0,0 +1,1367 @@ +/* + * This is the source code of Telegram for Android v. 5.x.x. + * It is licensed under GNU GPL v. 2 or later. + * You should have received a copy of the license in this archive (see LICENSE). + * + * Copyright Nikolai Kudashov, 2013-2018. 
+ */ + +package org.telegram.ui.Components; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.AnimatorSet; +import android.animation.ObjectAnimator; +import android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.RectF; +import android.graphics.drawable.Drawable; +import android.os.Build; +import android.text.Editable; +import android.text.SpannableStringBuilder; +import android.text.Spanned; +import android.text.TextUtils; +import android.text.TextWatcher; +import android.text.style.ForegroundColorSpan; +import android.util.Property; +import android.util.SparseArray; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.KeyEvent; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.ViewTreeObserver; +import android.view.inputmethod.EditorInfo; +import android.widget.FrameLayout; +import android.widget.ImageView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.FileLog; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; +import org.telegram.messenger.Utilities; +import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.TLObject; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.BottomSheet; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Adapters.SearchAdapterHelper; +import org.telegram.ui.Cells.GraySectionCell; +import org.telegram.ui.Cells.GroupCallTextCell; +import org.telegram.ui.Cells.GroupCallUserCell; 
+import org.telegram.ui.Cells.ManageChatTextCell; +import org.telegram.ui.Cells.ManageChatUserCell; +import org.telegram.ui.ChatUsersActivity; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; + +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + +public class GroupVoipInviteAlert extends BottomSheet { + + private FrameLayout frameLayout; + private RecyclerListView listView; + private SearchAdapter searchListViewAdapter; + private ListAdapter listViewAdapter; + private Drawable shadowDrawable; + private View shadow; + private AnimatorSet shadowAnimation; + private StickerEmptyView emptyView; + private FlickerLoadingView flickerLoadingView; + private SearchField searchView; + + private RectF rect = new RectF(); + private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + private String linkToCopy; + + private int scrollOffsetY; + + private int delayResults; + + private float colorProgress; + private int backgroundColor; + + private TLRPC.Chat currentChat; + private TLRPC.ChatFull info; + + private ArrayList participants = new ArrayList<>(); + private ArrayList contacts = new ArrayList<>(); + private boolean contactsEndReached; + private SparseArray participantsMap = new SparseArray<>(); + private SparseArray contactsMap = new SparseArray<>(); + private boolean loadingUsers; + private boolean firstLoaded; + + private SparseArray ignoredUsers; + private HashSet invitedUsers; + + private GroupVoipInviteAlertDelegate delegate; + + private int emptyRow; + private int addNewRow; + private int lastRow; + private int participantsStartRow; + private int participantsEndRow; + private int contactsHeaderRow; + private int contactsStartRow; + private int contactsEndRow; + private int membersHeaderRow; + private int flickerProgressRow; + private int rowCount; + + public interface GroupVoipInviteAlertDelegate { + void copyInviteLink(); + void inviteUser(int id); + void 
needOpenSearch(MotionEvent ev, EditTextBoldCursor editText); + } + + @SuppressWarnings("FieldCanBeLocal") + private class SearchField extends FrameLayout { + + private View searchBackground; + private ImageView searchIconImageView; + private ImageView clearSearchImageView; + private CloseProgressDrawable2 progressDrawable; + private EditTextBoldCursor searchEditText; + private View backgroundView; + + public SearchField(Context context) { + super(context); + + searchBackground = new View(context); + searchBackground.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(18), Theme.getColor(Theme.key_voipgroup_searchBackground))); + addView(searchBackground, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | Gravity.TOP, 14, 11, 14, 0)); + + searchIconImageView = new ImageView(context); + searchIconImageView.setScaleType(ImageView.ScaleType.CENTER); + searchIconImageView.setImageResource(R.drawable.smiles_inputsearch); + searchIconImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_voipgroup_searchPlaceholder), PorterDuff.Mode.MULTIPLY)); + addView(searchIconImageView, LayoutHelper.createFrame(36, 36, Gravity.LEFT | Gravity.TOP, 16, 11, 0, 0)); + + clearSearchImageView = new ImageView(context); + clearSearchImageView.setScaleType(ImageView.ScaleType.CENTER); + clearSearchImageView.setImageDrawable(progressDrawable = new CloseProgressDrawable2()); + progressDrawable.setSide(AndroidUtilities.dp(7)); + clearSearchImageView.setScaleX(0.1f); + clearSearchImageView.setScaleY(0.1f); + clearSearchImageView.setAlpha(0.0f); + clearSearchImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_voipgroup_searchPlaceholder), PorterDuff.Mode.MULTIPLY)); + addView(clearSearchImageView, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP, 14, 11, 14, 0)); + clearSearchImageView.setOnClickListener(v -> { + searchEditText.setText(""); + AndroidUtilities.showKeyboard(searchEditText); + }); + + 
searchEditText = new EditTextBoldCursor(context) { + @Override + public boolean dispatchTouchEvent(MotionEvent event) { + MotionEvent e = MotionEvent.obtain(event); + e.setLocation(e.getRawX(), e.getRawY() - containerView.getTranslationY()); + if (e.getAction() == MotionEvent.ACTION_UP) { + e.setAction(MotionEvent.ACTION_CANCEL); + } + listView.dispatchTouchEvent(e); + e.recycle(); + return super.dispatchTouchEvent(event); + } + }; + searchEditText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + searchEditText.setHintTextColor(Theme.getColor(Theme.key_voipgroup_searchPlaceholder)); + searchEditText.setTextColor(Theme.getColor(Theme.key_voipgroup_searchText)); + searchEditText.setBackgroundDrawable(null); + searchEditText.setPadding(0, 0, 0, 0); + searchEditText.setMaxLines(1); + searchEditText.setLines(1); + searchEditText.setSingleLine(true); + searchEditText.setImeOptions(EditorInfo.IME_ACTION_SEARCH | EditorInfo.IME_FLAG_NO_EXTRACT_UI); + searchEditText.setHint(LocaleController.getString("VoipGroupSearchMembers", R.string.VoipGroupSearchMembers)); + searchEditText.setCursorColor(Theme.getColor(Theme.key_voipgroup_searchText)); + searchEditText.setCursorSize(AndroidUtilities.dp(20)); + searchEditText.setCursorWidth(1.5f); + addView(searchEditText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 40, Gravity.LEFT | Gravity.TOP, 16 + 38, 9, 16 + 30, 0)); + searchEditText.addTextChangedListener(new TextWatcher() { + @Override + public void beforeTextChanged(CharSequence s, int start, int count, int after) { + + } + + @Override + public void onTextChanged(CharSequence s, int start, int before, int count) { + + } + + @Override + public void afterTextChanged(Editable s) { + boolean show = searchEditText.length() > 0; + boolean showed = clearSearchImageView.getAlpha() != 0; + if (show != showed) { + clearSearchImageView.animate() + .alpha(show ? 1.0f : 0.0f) + .setDuration(150) + .scaleX(show ? 1.0f : 0.1f) + .scaleY(show ? 
1.0f : 0.1f) + .start(); + } + String text = searchEditText.getText().toString(); + int oldItemsCount = listView.getAdapter() == null ? 0 : listView.getAdapter().getItemCount(); + searchListViewAdapter.searchUsers(text); + if (TextUtils.isEmpty(text) && listView != null && listView.getAdapter() != listViewAdapter) { + listView.setAnimateEmptyView(false, 0); + listView.setAdapter(listViewAdapter); + listView.setAnimateEmptyView(true, 0); + if (oldItemsCount == 0) { + showItemsAnimated(0); + } + } + flickerLoadingView.setVisibility(View.VISIBLE); + } + }); + searchEditText.setOnEditorActionListener((v, actionId, event) -> { + if (event != null && (event.getAction() == KeyEvent.ACTION_UP && event.getKeyCode() == KeyEvent.KEYCODE_SEARCH || event.getAction() == KeyEvent.ACTION_DOWN && event.getKeyCode() == KeyEvent.KEYCODE_ENTER)) { + AndroidUtilities.hideKeyboard(searchEditText); + } + return false; + }); + } + + public void hideKeyboard() { + AndroidUtilities.hideKeyboard(searchEditText); + } + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + delegate.needOpenSearch(ev, searchEditText); + return super.onInterceptTouchEvent(ev); + } + } + + public static final Property COLOR_PROGRESS = new AnimationProperties.FloatProperty("colorProgress") { + @Override + public void setValue(GroupVoipInviteAlert object, float value) { + object.setColorProgress(value); + } + + @Override + public Float get(GroupVoipInviteAlert object) { + return object.getColorProgress(); + } + }; + + public GroupVoipInviteAlert(final Context context, int account, TLRPC.Chat chat, TLRPC.ChatFull chatFull, SparseArray participants, HashSet invited) { + super(context, false); + + setDimBehindAlpha(75); + + currentAccount = account; + currentChat = chat; + info = chatFull; + ignoredUsers = participants; + invitedUsers = invited; + + shadowDrawable = context.getResources().getDrawable(R.drawable.sheet_shadow_round).mutate(); + + containerView = new FrameLayout(context) { + + private 
boolean ignoreLayout = false; + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int totalHeight = MeasureSpec.getSize(heightMeasureSpec); + + if (Build.VERSION.SDK_INT >= 21) { + ignoreLayout = true; + setPadding(backgroundPaddingLeft, AndroidUtilities.statusBarHeight, backgroundPaddingLeft, 0); + ignoreLayout = false; + } + int availableHeight = totalHeight - getPaddingTop(); + int padding; + if (keyboardVisible) { + padding = AndroidUtilities.dp(8); + setAllowNestedScroll(false); + } else { + padding = availableHeight - (availableHeight / 5 * 3) + AndroidUtilities.dp(8); + setAllowNestedScroll(true); + } + if (listView.getPaddingTop() != padding) { + ignoreLayout = true; + listView.setPadding(0, padding, 0, 0); + ignoreLayout = false; + } + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(totalHeight, MeasureSpec.EXACTLY)); + } + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); + updateLayout(); + } + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getY() < scrollOffsetY) { + dismiss(); + return true; + } + return super.onInterceptTouchEvent(ev); + } + + @Override + public boolean onTouchEvent(MotionEvent e) { + return !isDismissed() && super.onTouchEvent(e); + } + + @Override + public void requestLayout() { + if (ignoreLayout) { + return; + } + super.requestLayout(); + } + + @Override + protected void onDraw(Canvas canvas) { + canvas.save(); + int y = scrollOffsetY - backgroundPaddingTop + AndroidUtilities.dp(6); + int top = scrollOffsetY - backgroundPaddingTop - AndroidUtilities.dp(13); + int height = getMeasuredHeight() + AndroidUtilities.dp(30) + backgroundPaddingTop; + int statusBarHeight = 0; + float radProgress = 1.0f; + if (Build.VERSION.SDK_INT >= 21) { + top += AndroidUtilities.statusBarHeight; + y += AndroidUtilities.statusBarHeight; + height -= 
AndroidUtilities.statusBarHeight; + + if (top + backgroundPaddingTop < AndroidUtilities.statusBarHeight * 2) { + int diff = Math.min(AndroidUtilities.statusBarHeight, AndroidUtilities.statusBarHeight * 2 - top - backgroundPaddingTop); + top -= diff; + height += diff; + radProgress = 1.0f - Math.min(1.0f, (diff * 2) / (float) AndroidUtilities.statusBarHeight); + } + if (top + backgroundPaddingTop < AndroidUtilities.statusBarHeight) { + statusBarHeight = Math.min(AndroidUtilities.statusBarHeight, AndroidUtilities.statusBarHeight - top - backgroundPaddingTop); + } + } + + shadowDrawable.setBounds(0, top, getMeasuredWidth(), height); + shadowDrawable.draw(canvas); + + if (radProgress != 1.0f) { + Theme.dialogs_onlineCirclePaint.setColor(backgroundColor); + rect.set(backgroundPaddingLeft, backgroundPaddingTop + top, getMeasuredWidth() - backgroundPaddingLeft, backgroundPaddingTop + top + AndroidUtilities.dp(24)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(12) * radProgress, AndroidUtilities.dp(12) * radProgress, Theme.dialogs_onlineCirclePaint); + } + + int w = AndroidUtilities.dp(36); + rect.set((getMeasuredWidth() - w) / 2, y, (getMeasuredWidth() + w) / 2, y + AndroidUtilities.dp(4)); + Theme.dialogs_onlineCirclePaint.setColor(Theme.getColor(Theme.key_voipgroup_scrollUp)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(2), AndroidUtilities.dp(2), Theme.dialogs_onlineCirclePaint); + + if (statusBarHeight > 0) { + int finalColor = Color.argb(0xff, (int) (Color.red(backgroundColor) * 0.8f), (int) (Color.green(backgroundColor) * 0.8f), (int) (Color.blue(backgroundColor) * 0.8f)); + Theme.dialogs_onlineCirclePaint.setColor(finalColor); + canvas.drawRect(backgroundPaddingLeft, AndroidUtilities.statusBarHeight - statusBarHeight, getMeasuredWidth() - backgroundPaddingLeft, AndroidUtilities.statusBarHeight, Theme.dialogs_onlineCirclePaint); + } + canvas.restore(); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + canvas.save(); + canvas.clipRect(0, 
getPaddingTop(), getMeasuredWidth(), getMeasuredHeight()); + super.dispatchDraw(canvas); + canvas.restore(); + } + }; + containerView.setWillNotDraw(false); + containerView.setClipChildren(false); + containerView.setPadding(backgroundPaddingLeft, 0, backgroundPaddingLeft, 0); + + frameLayout = new FrameLayout(context); + + searchView = new SearchField(context); + frameLayout.addView(searchView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); + + flickerLoadingView = new FlickerLoadingView(context); + flickerLoadingView.setViewType(FlickerLoadingView.USERS_TYPE); + flickerLoadingView.showDate(false); + flickerLoadingView.setUseHeaderOffset(true); + flickerLoadingView.setColors(Theme.key_voipgroup_inviteMembersBackground, Theme.key_voipgroup_searchBackground, Theme.key_voipgroup_actionBarUnscrolled); + + emptyView = new StickerEmptyView(context, flickerLoadingView, StickerEmptyView.STICKER_TYPE_SEARCH); + emptyView.addView(flickerLoadingView, 0, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0,0,2,0,0)); + emptyView.title.setText(LocaleController.getString("NoResult", R.string.NoResult)); + emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitle2", R.string.SearchEmptyViewFilteredSubtitle2)); + emptyView.setVisibility(View.GONE); + emptyView.setAnimateLayoutChange(true); + emptyView.showProgress(true, false); + emptyView.setColors(Theme.key_voipgroup_nameText, Theme.key_voipgroup_lastSeenText, Theme.key_voipgroup_inviteMembersBackground, Theme.key_voipgroup_searchBackground); + containerView.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 58 + 4, 0, 0)); + + searchListViewAdapter = new SearchAdapter(context); + + listView = new RecyclerListView(context) { + @Override + protected boolean allowSelectChildAtPosition(float x, float y) { + return y >= scrollOffsetY + 
AndroidUtilities.dp(48) + (Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight : 0); + } + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + int[] ii = new int[2]; + getLocationInWindow(ii); + } + + @Override + protected boolean emptyViewIsVisible() { + if (getAdapter() == null) { + return false; + } + return getAdapter().getItemCount() <= 2; + } + }; + listView.setTag(13); + listView.setPadding(0, 0, 0, AndroidUtilities.dp(48)); + listView.setClipToPadding(false); + listView.setHideIfEmpty(false); + listView.setSelectorDrawableColor(Theme.getColor(Theme.key_voipgroup_listSelector)); + FillLastLinearLayoutManager layoutManager = new FillLastLinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false, AndroidUtilities.dp(8), listView); + layoutManager.setBind(false); + listView.setLayoutManager(layoutManager); + listView.setHorizontalScrollBarEnabled(false); + listView.setVerticalScrollBarEnabled(false); + containerView.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT, 0, 0, 0, 0)); + listView.setAdapter(listViewAdapter = new ListAdapter(context)); + listView.setOnItemClickListener((view, position) -> { + if (position == addNewRow) { + delegate.copyInviteLink(); + dismiss(); + } else if (view instanceof ManageChatUserCell) { + ManageChatUserCell cell = (ManageChatUserCell) view; + if (invitedUsers.contains(cell.getUserId())) { + return; + } + delegate.inviteUser(cell.getUserId()); + } + }); + listView.setOnScrollListener(new RecyclerView.OnScrollListener() { + @Override + public void onScrolled(RecyclerView recyclerView, int dx, int dy) { + updateLayout(); + } + + @Override + public void onScrollStateChanged(RecyclerView recyclerView, int newState) { + if (newState == RecyclerView.SCROLL_STATE_IDLE) { + if (scrollOffsetY + backgroundPaddingTop + AndroidUtilities.dp(13) < AndroidUtilities.statusBarHeight * 2 && 
listView.canScrollVertically(1)) { + View child = listView.getChildAt(0); + RecyclerListView.Holder holder = (RecyclerListView.Holder) listView.findViewHolderForAdapterPosition(0); + if (holder != null && holder.itemView.getTop() > 0) { + listView.smoothScrollBy(0, holder.itemView.getTop()); + } + } + } + } + }); + + FrameLayout.LayoutParams frameLayoutParams = new FrameLayout.LayoutParams(LayoutHelper.MATCH_PARENT, AndroidUtilities.getShadowHeight(), Gravity.TOP | Gravity.LEFT); + frameLayoutParams.topMargin = AndroidUtilities.dp(58); + shadow = new View(context); + shadow.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); + shadow.setAlpha(0.0f); + shadow.setTag(1); + containerView.addView(shadow, frameLayoutParams); + + containerView.addView(frameLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 58, Gravity.LEFT | Gravity.TOP)); + + setColorProgress(0.0f); + + loadChatParticipants(0, 200); + updateRows(); + + listView.setEmptyView(emptyView); + listView.setAnimateEmptyView(true, 0); + } + + private float getColorProgress() { + return colorProgress; + } + + private void setColorProgress(float progress) { + colorProgress = progress; + backgroundColor = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_inviteMembersBackground), Theme.getColor(Theme.key_voipgroup_listViewBackground), progress, 1.0f); + shadowDrawable.setColorFilter(new PorterDuffColorFilter(backgroundColor, PorterDuff.Mode.MULTIPLY)); + frameLayout.setBackgroundColor(backgroundColor); + navBarColor = backgroundColor; + listView.setGlowColor(backgroundColor); + + int color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_lastSeenText), progress, 1.0f); + int color2 = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_mutedIconUnscrolled), Theme.getColor(Theme.key_voipgroup_mutedIcon), progress, 1.0f);// + for (int a = 0, N = listView.getChildCount(); a < N; a++) { + 
View child = listView.getChildAt(a); + if (child instanceof GroupCallTextCell) { + GroupCallTextCell cell = (GroupCallTextCell) child; + cell.setColors(color, color); + } else if (child instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) child; + cell.setGrayIconColor(shadow.getTag() != null ? Theme.key_voipgroup_mutedIcon : Theme.key_voipgroup_mutedIconUnscrolled, color2); + } + } + containerView.invalidate(); + listView.invalidate(); + container.invalidate(); + } + + public void setDelegate(GroupVoipInviteAlertDelegate groupVoipInviteAlertDelegate) { + delegate = groupVoipInviteAlertDelegate; + } + + private int getCurrentTop() { + if (listView.getChildCount() != 0) { + View child = listView.getChildAt(0); + RecyclerListView.Holder holder = (RecyclerListView.Holder) listView.findContainingViewHolder(child); + if (holder != null) { + return listView.getPaddingTop() - (holder.getAdapterPosition() == 0 && child.getTop() >= 0 ? child.getTop() : 0); + } + } + return -1000; + } + + private void updateRows() { + addNewRow = -1; + emptyRow = -1; + participantsStartRow = -1; + participantsEndRow = -1; + contactsHeaderRow = -1; + contactsStartRow = -1; + contactsEndRow = -1; + membersHeaderRow = -1; + lastRow = -1; + + rowCount = 0; + emptyRow = rowCount++; + if (!TextUtils.isEmpty(currentChat.username) || ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_INVITE)) { + addNewRow = rowCount++; + } + if (!loadingUsers || firstLoaded) { + boolean hasAnyOther = false; + if (!contacts.isEmpty()) { + contactsHeaderRow = rowCount++; + contactsStartRow = rowCount; + rowCount += contacts.size(); + contactsEndRow = rowCount; + hasAnyOther = true; + } + if (!participants.isEmpty()) { + if (hasAnyOther) { + membersHeaderRow = rowCount++; + } + participantsStartRow = rowCount; + rowCount += participants.size(); + participantsEndRow = rowCount; + } + } + if (loadingUsers) { + flickerProgressRow = rowCount++; + } + lastRow = rowCount++; + } + + 
@Override + protected boolean canDismissWithSwipe() { + return false; + } + + @Override + public void dismiss() { + AndroidUtilities.hideKeyboard(searchView.searchEditText); + super.dismiss(); + } + + @SuppressLint("NewApi") + private void updateLayout() { + if (listView.getChildCount() <= 0) { + return; + } + View child = listView.getChildAt(0); + RecyclerListView.Holder holder = (RecyclerListView.Holder) listView.findContainingViewHolder(child); + int top = child.getTop() - AndroidUtilities.dp(8); + int newOffset = top > 0 && holder != null && holder.getAdapterPosition() == 0 ? top : 0; + if (top >= 0 && holder != null && holder.getAdapterPosition() == 0) { + newOffset = top; + runShadowAnimation(false); + } else { + runShadowAnimation(true); + } + if (scrollOffsetY != newOffset) { + listView.setTopGlowOffset(scrollOffsetY = (int) (newOffset)); + frameLayout.setTranslationY(scrollOffsetY); + emptyView.setTranslationY(scrollOffsetY); + containerView.invalidate(); + } + } + + private void runShadowAnimation(final boolean show) { + if (show && shadow.getTag() != null || !show && shadow.getTag() == null) { + shadow.setTag(show ? null : 1); + if (show) { + shadow.setVisibility(View.VISIBLE); + } + if (shadowAnimation != null) { + shadowAnimation.cancel(); + } + shadowAnimation = new AnimatorSet(); + shadowAnimation.playTogether(ObjectAnimator.ofFloat(shadow, View.ALPHA, show ? 
1.0f : 0.0f)); + shadowAnimation.setDuration(150); + shadowAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (shadowAnimation != null && shadowAnimation.equals(animation)) { + if (!show) { + shadow.setVisibility(View.INVISIBLE); + } + shadowAnimation = null; + } + } + + @Override + public void onAnimationCancel(Animator animation) { + if (shadowAnimation != null && shadowAnimation.equals(animation)) { + shadowAnimation = null; + } + } + }); + shadowAnimation.start(); + } + } + + private void showItemsAnimated(int from) { + if (!isShowing()) { + return; + } + listView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + int n = listView.getChildCount(); + AnimatorSet animatorSet = new AnimatorSet(); + for (int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + int position = listView.getChildAdapterPosition(child); + if (position < from) { + continue; + } + if (position == 1 && listView.getAdapter() == searchListViewAdapter && child instanceof GraySectionCell) { + child = ((GraySectionCell) child).getTextView(); + } + child.setAlpha(0); + int s = Math.min(listView.getMeasuredHeight(), Math.max(0, child.getTop())); + int delay = (int) ((s / (float) listView.getMeasuredHeight()) * 100); + ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); + a.setStartDelay(delay); + a.setDuration(200); + animatorSet.playTogether(a); + } + animatorSet.start(); + return true; + } + }); + } + + private void loadChatParticipants(int offset, int count) { + if (loadingUsers) { + return; + } + contactsEndReached = false; + loadChatParticipants(offset, count, true); + } + + private void loadChatParticipants(int offset, int count, boolean reset) { + if (!ChatObject.isChannel(currentChat)) { + loadingUsers = false; + participants.clear(); + 
contacts.clear(); + participantsMap.clear(); + contactsMap.clear(); + if (info != null) { + int selfUserId = UserConfig.getInstance(currentAccount).clientUserId; + for (int a = 0, size = info.participants.participants.size(); a < size; a++) { + TLRPC.ChatParticipant participant = info.participants.participants.get(a); + if (participant.user_id == selfUserId) { + continue; + } + if (ignoredUsers != null && ignoredUsers.indexOfKey(participant.user_id) >= 0) { + continue; + } + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(participant.user_id); + if (user == null || !user.bot) { + participants.add(participant); + participantsMap.put(participant.user_id, participant); + } + } + } + updateRows(); + if (listViewAdapter != null) { + listViewAdapter.notifyDataSetChanged(); + } + } else { + loadingUsers = true; + if (emptyView != null) { + emptyView.showProgress(true, false); + } + if (listViewAdapter != null) { + listViewAdapter.notifyDataSetChanged(); + } + TLRPC.TL_channels_getParticipants req = new TLRPC.TL_channels_getParticipants(); + req.channel = MessagesController.getInputChannel(currentChat); + if (info != null && info.participants_count <= 200) { + req.filter = new TLRPC.TL_channelParticipantsRecent(); + } else { + if (!contactsEndReached) { + delayResults = 2; + req.filter = new TLRPC.TL_channelParticipantsContacts(); + contactsEndReached = true; + loadChatParticipants(0, 200, false); + } else { + req.filter = new TLRPC.TL_channelParticipantsRecent(); + } + } + req.filter.q = ""; + req.offset = offset; + req.limit = count; + int reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (error == null) { + TLRPC.TL_channels_channelParticipants res = (TLRPC.TL_channels_channelParticipants) response; + MessagesController.getInstance(currentAccount).putUsers(res.users, false); + int selfId = UserConfig.getInstance(currentAccount).getClientUserId(); + for (int a 
= 0; a < res.participants.size(); a++) { + if (res.participants.get(a).user_id == selfId) { + res.participants.remove(a); + break; + } + } + ArrayList objects; + SparseArray map; + delayResults--; + if (req.filter instanceof TLRPC.TL_channelParticipantsContacts) { + objects = contacts; + map = contactsMap; + } else { + objects = participants; + map = participantsMap; + } + objects.clear(); + objects.addAll(res.participants); + for (int a = 0, size = res.participants.size(); a < size; a++) { + TLRPC.ChannelParticipant participant = res.participants.get(a); + map.put(participant.user_id, participant); + } + for (int a = 0, N = participants.size(); a < N; a++) { + TLRPC.ChannelParticipant participant = (TLRPC.ChannelParticipant) participants.get(a); + boolean remove = false; + if (contactsMap.get(participant.user_id) != null) { + remove = true; + } else if (ignoredUsers != null && ignoredUsers.indexOfKey(participant.user_id) >= 0) { + remove = true; + } + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(participant.user_id); + if (user != null && user.bot) { + remove = true; + } + if (remove) { + participants.remove(a); + participantsMap.remove(participant.user_id); + a--; + N--; + } + } + try { + if (info.participants_count <= 200) { + int currentTime = ConnectionsManager.getInstance(currentAccount).getCurrentTime(); + Collections.sort(objects, (lhs, rhs) -> { + TLRPC.ChannelParticipant p1 = (TLRPC.ChannelParticipant) lhs; + TLRPC.ChannelParticipant p2 = (TLRPC.ChannelParticipant) rhs; + TLRPC.User user1 = MessagesController.getInstance(currentAccount).getUser(p1.user_id); + TLRPC.User user2 = MessagesController.getInstance(currentAccount).getUser(p2.user_id); + int status1 = 0; + int status2 = 0; + if (user1 != null && user1.status != null) { + if (user1.self) { + status1 = currentTime + 50000; + } else { + status1 = user1.status.expires; + } + } + if (user2 != null && user2.status != null) { + if (user2.self) { + status2 = currentTime + 
50000; + } else { + status2 = user2.status.expires; + } + } + if (status1 > 0 && status2 > 0) { + if (status1 > status2) { + return 1; + } else if (status1 < status2) { + return -1; + } + return 0; + } else if (status1 < 0 && status2 < 0) { + if (status1 > status2) { + return 1; + } else if (status1 < status2) { + return -1; + } + return 0; + } else if (status1 < 0 && status2 > 0 || status1 == 0 && status2 != 0) { + return -1; + } else if (status2 < 0 && status1 > 0 || status2 == 0 && status1 != 0) { + return 1; + } + return 0; + }); + } + } catch (Exception e) { + FileLog.e(e); + } + } + if (delayResults <= 0) { + loadingUsers = false; + firstLoaded = true; + showItemsAnimated(listViewAdapter != null ? listViewAdapter.getItemCount() - 1 : 0); + } + updateRows(); + if (listViewAdapter != null) { + listViewAdapter.notifyDataSetChanged(); + if (emptyView != null && listViewAdapter.getItemCount() == 0 && firstLoaded) { + emptyView.showProgress(false, true); + } + } + })); + } + } + + private class SearchAdapter extends RecyclerListView.SelectionAdapter { + + private Context mContext; + private SearchAdapterHelper searchAdapterHelper; + private Runnable searchRunnable; + private int totalCount; + + private boolean searchInProgress; + + private int lastSearchId; + + private int emptyRow; + private int lastRow; + private int groupStartRow; + private int globalStartRow; + + public SearchAdapter(Context context) { + mContext = context; + searchAdapterHelper = new SearchAdapterHelper(true); + searchAdapterHelper.setDelegate(new SearchAdapterHelper.SearchAdapterHelperDelegate() { + @Override + public void onDataSetChanged(int searchId) { + if (searchId < 0 || searchId != lastSearchId || searchInProgress) { + return; + } + int oldItemCount = getItemCount() - 1; + boolean emptyViewWasVisible = emptyView.getVisibility() == View.VISIBLE; + notifyDataSetChanged(); + if (getItemCount() > oldItemCount) { + showItemsAnimated(oldItemCount); + } + if 
(!searchAdapterHelper.isSearchInProgress()) { + if (listView.emptyViewIsVisible()) { + emptyView.showProgress(false, emptyViewWasVisible); + } + } + } + + @Override + public SparseArray getExcludeCallParticipants() { + return ignoredUsers; + } + }); + } + + public void searchUsers(final String query) { + if (searchRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(searchRunnable); + searchRunnable = null; + } + searchAdapterHelper.mergeResults(null); + searchAdapterHelper.queryServerSearch(null, true, false, true, false, false, currentChat.id, false, ChatUsersActivity.TYPE_USERS, -1); + + if (!TextUtils.isEmpty(query)) { + emptyView.showProgress(true, true); + listView.setAnimateEmptyView(false, 0); + notifyDataSetChanged(); + listView.setAnimateEmptyView(true, 0); + searchInProgress = true; + int searchId = ++lastSearchId; + AndroidUtilities.runOnUIThread(searchRunnable = () -> { + if (searchRunnable == null) { + return; + } + searchRunnable = null; + processSearch(query, searchId); + }, 300); + + if (listView.getAdapter() != searchListViewAdapter) { + listView.setAdapter(searchListViewAdapter); + } + } else { + lastSearchId = -1; + } + } + + private void processSearch(final String query, int searchId) { + AndroidUtilities.runOnUIThread(() -> { + searchRunnable = null; + + final ArrayList participantsCopy = !ChatObject.isChannel(currentChat) && info != null ? new ArrayList<>(info.participants.participants) : null; + + if (participantsCopy != null) { + Utilities.searchQueue.postRunnable(() -> { + String search1 = query.trim().toLowerCase(); + if (search1.length() == 0) { + updateSearchResults(new ArrayList<>(), searchId); + return; + } + String search2 = LocaleController.getInstance().getTranslitString(search1); + if (search1.equals(search2) || search2.length() == 0) { + search2 = null; + } + String[] search = new String[1 + (search2 != null ? 
1 : 0)]; + search[0] = search1; + if (search2 != null) { + search[1] = search2; + } + ArrayList resultArray2 = new ArrayList<>(); + + if (participantsCopy != null) { + for (int a = 0, N = participantsCopy.size(); a < N; a++) { + int userId; + TLObject o = participantsCopy.get(a); + if (o instanceof TLRPC.ChatParticipant) { + userId = ((TLRPC.ChatParticipant) o).user_id; + } else if (o instanceof TLRPC.ChannelParticipant) { + userId = ((TLRPC.ChannelParticipant) o).user_id; + } else { + continue; + } + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(userId); + if (UserObject.isUserSelf(user)) { + continue; + } + + String name = UserObject.getUserName(user).toLowerCase(); + String tName = LocaleController.getInstance().getTranslitString(name); + if (name.equals(tName)) { + tName = null; + } + + int found = 0; + for (String q : search) { + if (name.startsWith(q) || name.contains(" " + q) || tName != null && (tName.startsWith(q) || tName.contains(" " + q))) { + found = 1; + } else if (user.username != null && user.username.startsWith(q)) { + found = 2; + } + + if (found != 0) { + resultArray2.add(o); + break; + } + } + } + } + updateSearchResults(resultArray2, searchId); + }); + } else { + searchInProgress = false; + } + searchAdapterHelper.queryServerSearch(query, ChatObject.canAddUsers(currentChat), false, true, false, false, ChatObject.isChannel(currentChat) ? 
currentChat.id : 0, false, ChatUsersActivity.TYPE_USERS, searchId); + }); + } + + private void updateSearchResults(final ArrayList participants, int searchId) { + AndroidUtilities.runOnUIThread(() -> { + if (searchId != lastSearchId) { + return; + } + searchInProgress = false; + if (!ChatObject.isChannel(currentChat)) { + searchAdapterHelper.addGroupMembers(participants); + } + int oldItemCount = getItemCount() - 1; + boolean emptyViewWasVisible = emptyView.getVisibility() == View.VISIBLE; + notifyDataSetChanged(); + if (getItemCount() > oldItemCount) { + showItemsAnimated(oldItemCount); + } + if (!searchInProgress && !searchAdapterHelper.isSearchInProgress()) { + if (listView.emptyViewIsVisible()) { + emptyView.showProgress(false, emptyViewWasVisible); + } + } + }); + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof ManageChatUserCell) { + ManageChatUserCell cell = (ManageChatUserCell) holder.itemView; + if (invitedUsers.contains(cell.getUserId())) { + return false; + } + } + return holder.getItemViewType() == 0; + } + + @Override + public int getItemCount() { + return totalCount; + } + + @Override + public void notifyDataSetChanged() { + totalCount = 0; + emptyRow = totalCount++; + int count = searchAdapterHelper.getGroupSearch().size(); + if (count != 0) { + groupStartRow = totalCount; + totalCount += count + 1; + } else { + groupStartRow = -1; + } + count = searchAdapterHelper.getGlobalSearch().size(); + if (count != 0) { + globalStartRow = totalCount; + totalCount += count + 1; + } else { + globalStartRow = -1; + } + lastRow = totalCount++; + super.notifyDataSetChanged(); + } + + public TLObject getItem(int i) { + if (groupStartRow >= 0 && i > groupStartRow && i < groupStartRow + 1 + searchAdapterHelper.getGroupSearch().size()) { + return searchAdapterHelper.getGroupSearch().get(i - groupStartRow - 1); + } + if (globalStartRow >= 0 && i > globalStartRow && i < globalStartRow + 1 + 
searchAdapterHelper.getGlobalSearch().size()) { + return searchAdapterHelper.getGlobalSearch().get(i - globalStartRow - 1); + } + return null; + } + + @Override + public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View view; + switch (viewType) { + case 0: + ManageChatUserCell manageChatUserCell = new ManageChatUserCell(mContext, 2, 2, false); + manageChatUserCell.setCustomRightImage(R.drawable.msg_invited); + manageChatUserCell.setNameColor(Theme.getColor(Theme.key_voipgroup_nameText)); + manageChatUserCell.setStatusColors(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_listeningText)); + manageChatUserCell.setDividerColor(Theme.key_voipgroup_listViewBackground); + view = manageChatUserCell; + break; + case 1: + GraySectionCell cell = new GraySectionCell(mContext); + cell.setBackgroundColor(Theme.getColor(Theme.key_voipgroup_actionBarUnscrolled)); + cell.setTextColor(Theme.key_voipgroup_searchPlaceholder); + view = cell; + break; + case 2: + view = new View(mContext); + view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, AndroidUtilities.dp(56))); + break; + case 3: + default: + view = new View(mContext); + break; + } + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { + switch (holder.getItemViewType()) { + case 0: { + TLObject object = getItem(position); + TLRPC.User user; + if (object instanceof TLRPC.User) { + user = (TLRPC.User) object; + } else if (object instanceof TLRPC.ChannelParticipant) { + user = MessagesController.getInstance(currentAccount).getUser(((TLRPC.ChannelParticipant) object).user_id); + } else if (object instanceof TLRPC.ChatParticipant) { + user = MessagesController.getInstance(currentAccount).getUser(((TLRPC.ChatParticipant) object).user_id); + } else { + return; + } + + String un = user.username; + CharSequence username = null; + 
SpannableStringBuilder name = null; + + int count = searchAdapterHelper.getGroupSearch().size(); + boolean ok = false; + String nameSearch = null; + if (count != 0) { + if (count + 1 > position) { + nameSearch = searchAdapterHelper.getLastFoundChannel(); + ok = true; + } else { + position -= count + 1; + } + } + if (!ok && un != null) { + count = searchAdapterHelper.getGlobalSearch().size(); + if (count != 0) { + if (count + 1 > position) { + String foundUserName = searchAdapterHelper.getLastFoundUsername(); + if (foundUserName.startsWith("@")) { + foundUserName = foundUserName.substring(1); + } + try { + int index; + SpannableStringBuilder spannableStringBuilder = new SpannableStringBuilder(); + spannableStringBuilder.append("@"); + spannableStringBuilder.append(un); + if ((index = AndroidUtilities.indexOfIgnoreCase(un, foundUserName)) != -1) { + int len = foundUserName.length(); + if (index == 0) { + len++; + } else { + index++; + } + spannableStringBuilder.setSpan(new ForegroundColorSpan(Theme.getColor(Theme.key_voipgroup_listeningText)), index, index + len, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + } + username = spannableStringBuilder; + } catch (Exception e) { + username = un; + FileLog.e(e); + } + } + } + } + + if (nameSearch != null) { + String u = UserObject.getUserName(user); + name = new SpannableStringBuilder(u); + int idx = AndroidUtilities.indexOfIgnoreCase(u, nameSearch); + if (idx != -1) { + name.setSpan(new ForegroundColorSpan(Theme.getColor(Theme.key_voipgroup_listeningText)), idx, idx + nameSearch.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + } + } + + ManageChatUserCell userCell = (ManageChatUserCell) holder.itemView; + userCell.setTag(position); + userCell.setCustomImageVisible(invitedUsers.contains(user.id)); + userCell.setData(user, name, username, false); + + break; + } + case 1: { + GraySectionCell sectionCell = (GraySectionCell) holder.itemView; + if (position == groupStartRow) { + 
sectionCell.setText(LocaleController.getString("ChannelMembers", R.string.ChannelMembers)); + } else if (position == globalStartRow) { + sectionCell.setText(LocaleController.getString("GlobalSearch", R.string.GlobalSearch)); + } + break; + } + } + } + + @Override + public void onViewRecycled(RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof ManageChatUserCell) { + ((ManageChatUserCell) holder.itemView).recycle(); + } + } + + @Override + public int getItemViewType(int i) { + if (i == emptyRow) { + return 2; + } else if (i == lastRow) { + return 3; + } + if (i == globalStartRow || i == groupStartRow) { + return 1; + } + return 0; + } + } + + private class ListAdapter extends RecyclerListView.SelectionAdapter { + + private Context mContext; + + public ListAdapter(Context context) { + mContext = context; + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof ManageChatUserCell) { + ManageChatUserCell cell = (ManageChatUserCell) holder.itemView; + if (invitedUsers.contains(cell.getUserId())) { + return false; + } + } + int viewType = holder.getItemViewType(); + return viewType == 0 || viewType == 1; + } + + @Override + public int getItemCount() { + return rowCount; + } + + @Override + public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View view; + switch (viewType) { + case 0: + ManageChatUserCell manageChatUserCell = new ManageChatUserCell(mContext, 6, 2, false); + manageChatUserCell.setCustomRightImage(R.drawable.msg_invited); + manageChatUserCell.setNameColor(Theme.getColor(Theme.key_voipgroup_nameText)); + manageChatUserCell.setStatusColors(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_listeningText)); + manageChatUserCell.setDividerColor(Theme.key_voipgroup_actionBar); + view = manageChatUserCell; + break; + case 1: + ManageChatTextCell manageChatTextCell = new ManageChatTextCell(mContext); + 
manageChatTextCell.setColors(Theme.key_voipgroup_listeningText, Theme.key_voipgroup_listeningText); + manageChatTextCell.setDividerColor(Theme.key_voipgroup_actionBar); + view = manageChatTextCell; + break; + case 2: + GraySectionCell cell = new GraySectionCell(mContext); + cell.setBackgroundColor(Theme.getColor(Theme.key_voipgroup_actionBarUnscrolled)); + cell.setTextColor(Theme.key_voipgroup_searchPlaceholder); + view = cell; + break; + case 3: + view = new View(mContext); + view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, AndroidUtilities.dp(56))); + break; + case 5: + FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext); + flickerLoadingView.setViewType(FlickerLoadingView.USERS_TYPE); + flickerLoadingView.setIsSingleCell(true); + flickerLoadingView.setColors(Theme.key_voipgroup_inviteMembersBackground, Theme.key_voipgroup_searchBackground, Theme.key_voipgroup_actionBarUnscrolled); + view = flickerLoadingView; + break; + case 4: + default: + view = new View(mContext); + break; + } + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { + switch (holder.getItemViewType()) { + case 0: + ManageChatUserCell userCell = (ManageChatUserCell) holder.itemView; + userCell.setTag(position); + TLObject item = getItem(position); + int lastRow; + + if (position >= participantsStartRow && position < participantsEndRow) { + lastRow = participantsEndRow; + } else { + lastRow = contactsEndRow; + } + + int userId; + if (item instanceof TLRPC.ChannelParticipant) { + TLRPC.ChannelParticipant participant = (TLRPC.ChannelParticipant) item; + userId = participant.user_id; + } else { + TLRPC.ChatParticipant participant = (TLRPC.ChatParticipant) item; + userId = participant.user_id; + } + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(userId); + if (user != null) { + 
userCell.setCustomImageVisible(invitedUsers.contains(user.id)); + userCell.setData(user, null, null, position != lastRow - 1); + } + break; + case 1: + ManageChatTextCell actionCell = (ManageChatTextCell) holder.itemView; + if (position == addNewRow) { + boolean showDivider = !(loadingUsers && !firstLoaded) && membersHeaderRow == -1 && !participants.isEmpty(); + actionCell.setText(LocaleController.getString("VoipGroupCopyInviteLink", R.string.VoipGroupCopyInviteLink), null, R.drawable.msg_link, 7, showDivider); + } + break; + case 2: + GraySectionCell sectionCell = (GraySectionCell) holder.itemView; + if (position == membersHeaderRow) { + sectionCell.setText(LocaleController.getString("ChannelOtherMembers", R.string.ChannelOtherMembers)); + } else if (position == contactsHeaderRow) { + sectionCell.setText(LocaleController.getString("GroupContacts", R.string.GroupContacts)); + } + break; + } + } + + @Override + public void onViewRecycled(RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof ManageChatUserCell) { + ((ManageChatUserCell) holder.itemView).recycle(); + } + } + + @Override + public int getItemViewType(int position) { + if (position >= participantsStartRow && position < participantsEndRow || + position >= contactsStartRow && position < contactsEndRow) { + return 0; + } else if (position == addNewRow) { + return 1; + } else if (position == membersHeaderRow || position == contactsHeaderRow) { + return 2; + } else if (position == emptyRow) { + return 3; + } else if (position == lastRow) { + return 4; + } else if (position == flickerProgressRow) { + return 5; + } + return 0; + } + + public TLObject getItem(int position) { + if (position >= participantsStartRow && position < participantsEndRow) { + return participants.get(position - participantsStartRow); + } else if (position >= contactsStartRow && position < contactsEndRow) { + return contacts.get(position - contactsStartRow); + } + return null; + } + } + +} diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java index ceddb0d22..86aa0a8cb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java @@ -160,38 +160,38 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter private float[] moldSTMatrix = new float[16]; private static final String VERTEX_SHADER = "uniform mat4 uMVPMatrix;\n" + - "uniform mat4 uSTMatrix;\n" + - "attribute vec4 aPosition;\n" + - "attribute vec4 aTextureCoord;\n" + - "varying vec2 vTextureCoord;\n" + - "void main() {\n" + - " gl_Position = uMVPMatrix * aPosition;\n" + - " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + - "}\n"; + "uniform mat4 uSTMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec4 aTextureCoord;\n" + + "varying vec2 vTextureCoord;\n" + + "void main() {\n" + + " gl_Position = uMVPMatrix * aPosition;\n" + + " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + + "}\n"; private static final String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n" + - "precision highp float;\n" + - "varying vec2 vTextureCoord;\n" + - "uniform float scaleX;\n" + - "uniform float scaleY;\n" + - "uniform float alpha;\n" + - "uniform samplerExternalOES sTexture;\n" + - "void main() {\n" + - " vec2 coord = vec2((vTextureCoord.x - 0.5) * scaleX, (vTextureCoord.y - 0.5) * scaleY);\n" + - " float coef = ceil(clamp(0.2601 - dot(coord, coord), 0.0, 1.0));\n" + - " vec3 color = texture2D(sTexture, vTextureCoord).rgb * coef + (1.0 - step(0.001, coef));\n" + - " gl_FragColor = vec4(color * alpha, alpha);\n" + - "}\n"; + "precision highp float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform float scaleX;\n" + + "uniform float scaleY;\n" + + "uniform float alpha;\n" + + "uniform samplerExternalOES sTexture;\n" + + "void 
main() {\n" + + " vec2 coord = vec2((vTextureCoord.x - 0.5) * scaleX, (vTextureCoord.y - 0.5) * scaleY);\n" + + " float coef = ceil(clamp(0.2601 - dot(coord, coord), 0.0, 1.0));\n" + + " vec3 color = texture2D(sTexture, vTextureCoord).rgb * coef + (1.0 - step(0.001, coef));\n" + + " gl_FragColor = vec4(color * alpha, alpha);\n" + + "}\n"; private static final String FRAGMENT_SCREEN_SHADER = "#extension GL_OES_EGL_image_external : require\n" + - "precision lowp float;\n" + - "varying vec2 vTextureCoord;\n" + - "uniform samplerExternalOES sTexture;\n" + - "void main() {\n" + - " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + - "}\n"; + "precision lowp float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + + "}\n"; private FloatBuffer vertexBuffer; private FloatBuffer textureBuffer; @@ -584,7 +584,7 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter animatorSet = new AnimatorSet(); float toX = 0; if (!open) { - toX = recordedTime > 300 ? AndroidUtilities.dp(24) - getMeasuredWidth() / 2f : 0; + toX = recordedTime > 300 ? AndroidUtilities.dp(24) - getMeasuredWidth() / 2f : 0; } ValueAnimator translationYAnimator = ValueAnimator.ofFloat(open ? 1f : 0f, open ? 
0 : 1f); translationYAnimator.addUpdateListener(animation -> { @@ -1090,7 +1090,7 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter return false; } - int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; + int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE}; eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list); if (eglContext == null) { if (BuildVars.LOGS_ENABLED) { @@ -1429,14 +1429,14 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter } } - private static class AudioBufferInfo { - final static int MAX_SAMPLES = 10; - ByteBuffer[] buffer = new ByteBuffer[MAX_SAMPLES]; - long[] offset = new long[MAX_SAMPLES]; - int[] read = new int[MAX_SAMPLES]; - int results; - int lastWroteBuffer; - boolean last; + public static class AudioBufferInfo { + public final static int MAX_SAMPLES = 10; + public ByteBuffer[] buffer = new ByteBuffer[MAX_SAMPLES]; + public long[] offset = new long[MAX_SAMPLES]; + public int[] read = new int[MAX_SAMPLES]; + public int results; + public int lastWroteBuffer; + public boolean last; public AudioBufferInfo() { for (int i = 0; i < MAX_SAMPLES; i++) { @@ -1921,7 +1921,7 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter public void run() { final TextureView textureView = InstantCameraView.this.textureView; if (textureView != null) { - final Bitmap bitmap = textureView.getBitmap(AndroidUtilities.dp(56), AndroidUtilities.dp(56)); + final Bitmap bitmap = textureView.getBitmap(AndroidUtilities.dp(56), AndroidUtilities.dp(56)); AndroidUtilities.runOnUIThread(() -> { if ((bitmap == null || bitmap.getPixel(0, 0) == 0) && keyframeThumbs.size() > 1) { keyframeThumbs.add(keyframeThumbs.get(keyframeThumbs.size() - 1)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/LayoutHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/LayoutHelper.java index 
c8537c412..dc97dfb69 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/LayoutHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/LayoutHelper.java @@ -20,7 +20,7 @@ import androidx.core.view.ViewCompat; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; -@SuppressWarnings({"unused", "WeakerAccess"}) +@SuppressWarnings({"WeakerAccess"}) public class LayoutHelper { public static final int MATCH_PARENT = -1; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/LineBlobDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/LineBlobDrawable.java new file mode 100644 index 000000000..7b4a67415 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/LineBlobDrawable.java @@ -0,0 +1,106 @@ +package org.telegram.ui.Components; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.Path; + +import org.telegram.messenger.AndroidUtilities; + +import java.util.Random; + +public class LineBlobDrawable { + + public float minRadius; + public float maxRadius; + + public Path path = new Path(); + public Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + private float[] radius; + private float[] radiusNext; + private float[] progress; + private float[] speed; + + final Random random = new Random(); + + private final float N; + + public LineBlobDrawable(int n) { + N = n; + radius = new float[n + 1]; + + radiusNext = new float[n + 1]; + progress = new float[n + 1]; + speed = new float[n + 1]; + + for (int i = 0; i <= N; i++) { + generateBlob(radius, i); + generateBlob(radiusNext, i); + progress[i] = 0; + } + } + + private void generateBlob(float[] radius, int i) { + float radDif = maxRadius - minRadius; + radius[i] = minRadius + Math.abs(((random.nextInt() % 100f) / 100f)) * radDif; + speed[i] = (float) (0.017 + 0.003 * (Math.abs(random.nextInt() % 
100f) / 100f)); + } + + public void update(float amplitude, float speedScale) { + for (int i = 0; i <= N; i++) { + progress[i] += (speed[i] * BlobDrawable.MIN_SPEED) + amplitude * speed[i] * BlobDrawable.MAX_SPEED * speedScale; + if (progress[i] >= 1f) { + progress[i] = 0; + radius[i] = radiusNext[i]; + generateBlob(radiusNext, i); + } + } + } + + public void draw(float left, float top, float right, float bottom, Canvas canvas, Paint paint, float pinnedTop, float progressToPinned) { + path.reset(); + + path.moveTo(right, bottom); + path.lineTo(left, bottom); + + for (int i = 0; i <= N; i++) { + if (i == 0) { + float progress = this.progress[i]; + float r1 = radius[i] * (1f - progress) + radiusNext[i] * progress; + float y = (top - r1) * progressToPinned + pinnedTop * (1f - progressToPinned); + path.lineTo(left, y); + } else { + float progress = this.progress[i - 1]; + float r1 = radius[i - 1] * (1f - progress) + radiusNext[i - 1] * progress; + float progressNext = this.progress[i]; + float r2 = radius[i] * (1f - progressNext) + radiusNext[i] * progressNext; + float x1 = (right - left) / N * (i - 1); + float x2 = (right - left) / N * i; + float cx = x1 + (x2 - x1) / 2; + + float y1 = (top - r1) * progressToPinned + pinnedTop * (1f - progressToPinned); + float y2 = (top - r2) * progressToPinned + pinnedTop * (1f - progressToPinned); + path.cubicTo( + cx, y1, + cx, y2, + x2, y2 + ); + if (i == N) { + path.lineTo(right, bottom); + } + } + } + + canvas.drawPath(path, paint); + } + + public void generateBlob() { + for (int i = 0; i < N; i++) { + generateBlob(radius, i); + generateBlob(radiusNext, i); + progress[i] = 0; + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/LoadingStickerDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/LoadingStickerDrawable.java index 277ecf662..0b2fa719d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/LoadingStickerDrawable.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/LoadingStickerDrawable.java @@ -16,6 +16,7 @@ import android.os.SystemClock; import android.view.View; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.SvgHelper; import org.telegram.ui.ActionBar.Theme; public class LoadingStickerDrawable extends Drawable { @@ -32,14 +33,17 @@ public class LoadingStickerDrawable extends Drawable { public LoadingStickerDrawable(View parent, String svg, int w, int h) { bitmap = SvgHelper.getBitmapByPathOnly(svg,512,512, w, h); parentView = parent; - int color0 = Theme.getColor(Theme.key_dialogBackground); - int color1 = Theme.getColor(Theme.key_dialogBackgroundGray); + placeholderMatrix = new Matrix(); + setColors(Theme.key_dialogBackground, Theme.key_dialogBackgroundGray); + } + + public void setColors(String key1, String key2) { + int color0 = Theme.getColor(key1); + int color1 = Theme.getColor(key2); color0 = AndroidUtilities.getAverageColor(color1, color0); placeholderPaint.setColor(color1); placeholderGradient = new LinearGradient(0, 0, gradientWidth = AndroidUtilities.dp(500), 0, new int[]{color1, color0, color1}, new float[]{0.0f, 0.18f, 0.36f}, Shader.TileMode.REPEAT); - placeholderMatrix = new Matrix(); placeholderGradient.setLocalMatrix(placeholderMatrix); - Shader shaderB = new BitmapShader(bitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); placeholderPaint.setShader(new ComposeShader(placeholderGradient, shaderB, PorterDuff.Mode.MULTIPLY)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/NumberTextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/NumberTextView.java index 39e47a777..15105be3a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/NumberTextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/NumberTextView.java @@ -36,6 +36,9 @@ public class NumberTextView extends View { private float progress = 0.0f; private int currentNumber = 1; private boolean 
addNumber; + private boolean center; + private float textWidth; + private float oldTextWidth; public NumberTextView(Context context) { super(context); @@ -82,15 +85,27 @@ public class NumberTextView extends View { text = String.format(Locale.US, "%d", number); forwardAnimation = number > currentNumber; } + boolean replace = false; + if (center) { + textWidth = textPaint.measureText(text); + oldTextWidth = textPaint.measureText(oldText); + if (textWidth != oldTextWidth) { + replace = true; + } + } + currentNumber = number; progress = 0; for (int a = 0; a < text.length(); a++) { String ch = text.substring(a, a + 1); String oldCh = !oldLetters.isEmpty() && a < oldText.length() ? oldText.substring(a, a + 1) : null; - if (oldCh != null && oldCh.equals(ch)) { + if (!replace && oldCh != null && oldCh.equals(ch)) { letters.add(oldLetters.get(a)); oldLetters.set(a, null); } else { + if (replace && oldCh == null) { + oldLetters.add(new StaticLayout("", textPaint, 0, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false)); + } StaticLayout layout = new StaticLayout(ch, textPaint, (int) Math.ceil(textPaint.measureText(ch)), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); letters.add(layout); } @@ -129,6 +144,10 @@ public class NumberTextView extends View { setNumber(currentNumber, false); } + public void setCenterAlign(boolean center) { + this.center = center; + } + @Override protected void onDraw(Canvas canvas) { if (letters.isEmpty()) { @@ -136,8 +155,15 @@ public class NumberTextView extends View { } float height = letters.get(0).getHeight(); float translationHeight = addNumber ? 
AndroidUtilities.dp(4) : height; + + float x = 0; + float oldDx = 0; + if (center) { + x = (getMeasuredWidth() - textWidth) / 2f; + oldDx = (getMeasuredWidth() - oldTextWidth) / 2f - x; + } canvas.save(); - canvas.translate(getPaddingLeft(), (getMeasuredHeight() - height) / 2); + canvas.translate(getPaddingLeft() + x, (getMeasuredHeight() - height) / 2); int count = Math.max(letters.size(), oldLetters.size()); for (int a = 0; a < count; a++) { canvas.save(); @@ -147,7 +173,7 @@ public class NumberTextView extends View { if (old != null) { textPaint.setAlpha((int) (255 * progress)); canvas.save(); - canvas.translate(0, (progress - 1.0f) * translationHeight); + canvas.translate(oldDx, (progress - 1.0f) * translationHeight); old.draw(canvas); canvas.restore(); if (layout != null) { @@ -161,7 +187,7 @@ public class NumberTextView extends View { if (old != null) { textPaint.setAlpha((int) (255 * -progress)); canvas.save(); - canvas.translate(0, (1.0f + progress) * translationHeight); + canvas.translate(oldDx, (1.0f + progress) * translationHeight); old.draw(canvas); canvas.restore(); } @@ -181,6 +207,9 @@ public class NumberTextView extends View { } canvas.restore(); canvas.translate(layout != null ? 
layout.getLineWidth(0) : old.getLineWidth(0) + AndroidUtilities.dp(1), 0); + if (layout != null && old != null) { + oldDx += old.getLineWidth(0) - layout.getLineWidth(0); + } } canvas.restore(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/ColorPicker.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/ColorPicker.java index 631b5ccbc..1b6b87983 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/ColorPicker.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/ColorPicker.java @@ -5,6 +5,7 @@ import android.animation.ObjectAnimator; import android.annotation.SuppressLint; import android.app.Activity; import android.content.Context; +import android.content.SharedPreferences; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.LinearGradient; @@ -108,7 +109,9 @@ public class ColorPicker extends FrameLayout { } }); - location = context.getSharedPreferences("paint", Activity.MODE_PRIVATE).getFloat("last_color_location", 1.0f); + SharedPreferences preferences = context.getSharedPreferences("paint", Activity.MODE_PRIVATE); + location = preferences.getFloat("last_color_location", 1.0f); + setWeight(preferences.getFloat("last_color_weight", 0.016773745f)); setLocation(location); } @@ -225,7 +228,10 @@ public class ColorPicker extends FrameLayout { if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_POINTER_UP) { if (interacting && delegate != null) { delegate.onFinishedColorPicking(); - getContext().getSharedPreferences("paint", Activity.MODE_PRIVATE).edit().putFloat("last_color_location", location).commit(); + SharedPreferences.Editor editor = getContext().getSharedPreferences("paint", Activity.MODE_PRIVATE).edit(); + editor.putFloat("last_color_location", location); + editor.putFloat("last_color_weight", weight); + editor.commit(); } interacting = false; wasChangingWeight 
= changingWeight; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java index 5f7573c4a..4e0aeca48 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java @@ -539,14 +539,23 @@ public class PhonebookShareAlert extends BottomSheet { android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", item.getValue(false)); clipboard.setPrimaryClip(clip); - if (item.type == 0) { - Toast.makeText(this.parentFragment.getParentActivity(), LocaleController.getString("PhoneCopied", R.string.PhoneCopied), Toast.LENGTH_SHORT).show(); - } else if (item.type == 1) { - Toast.makeText(this.parentFragment.getParentActivity(), LocaleController.getString("EmailCopied", R.string.EmailCopied), Toast.LENGTH_SHORT).show(); - } else if (item.type == 3) { - Toast.makeText(this.parentFragment.getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); - } else { - Toast.makeText(this.parentFragment.getParentActivity(), LocaleController.getString("TextCopied", R.string.TextCopied), Toast.LENGTH_SHORT).show(); + if (BulletinFactory.canShowBulletin(parentFragment)) { + if (item.type == 3) { + BulletinFactory.of((FrameLayout) containerView).createCopyLinkBulletin().show(); + } else { + final Bulletin.SimpleLayout layout = new Bulletin.SimpleLayout(context); + if (item.type == 0) { + layout.textView.setText(LocaleController.getString("PhoneCopied", R.string.PhoneCopied)); + layout.imageView.setImageResource(R.drawable.menu_calls); + } else if (item.type == 1) { + layout.textView.setText(LocaleController.getString("EmailCopied", 
R.string.EmailCopied)); + layout.imageView.setImageResource(R.drawable.menu_mail); + } else { + layout.textView.setText(LocaleController.getString("TextCopied", R.string.TextCopied)); + layout.imageView.setImageResource(R.drawable.menu_info); + } + Bulletin.make((FrameLayout) containerView, layout, Bulletin.DURATION_SHORT).show(); + } } return true; }); @@ -905,6 +914,23 @@ public class PhonebookShareAlert extends BottomSheet { }); } + @Override + protected void onStart() { + super.onStart(); + Bulletin.addDelegate((FrameLayout) containerView, new Bulletin.Delegate() { + @Override + public int getBottomOffset() { + return AndroidUtilities.dp(74); + } + }); + } + + @Override + protected void onStop() { + super.onStop(); + Bulletin.removeDelegate((FrameLayout) containerView); + } + public void setDelegate(ChatAttachAlertContactsLayout.PhonebookShareAlertDelegate phonebookShareAlertDelegate) { delegate = phonebookShareAlertDelegate; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java index 19081c1ab..568a15b48 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java @@ -10,8 +10,6 @@ package org.telegram.ui.Components; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; -import android.animation.AnimatorSet; -import android.animation.ObjectAnimator; import android.animation.ValueAnimator; import android.content.Context; import android.graphics.Canvas; @@ -21,6 +19,7 @@ import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; import android.graphics.drawable.Drawable; +import android.os.Vibrator; import android.text.Editable; import android.text.InputFilter; import android.text.SpannableStringBuilder; @@ 
-40,6 +39,8 @@ import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; +import androidx.core.graphics.ColorUtils; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.Emoji; import org.telegram.messenger.LocaleController; @@ -55,6 +56,12 @@ import org.telegram.ui.ActionBar.Theme; public class PhotoViewerCaptionEnterView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, SizeNotifierFrameLayoutPhoto.SizeNotifierFrameLayoutPhotoDelegate { + private final ImageView doneButton; + + public int getCaptionLimitOffset() { + return captionMaxLength - codePointCount; + } + public interface PhotoViewerCaptionEnterViewDelegate { void onCaptionEnter(); void onTextChanged(CharSequence text); @@ -68,8 +75,9 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica private ReplaceableIconDrawable emojiIconDrawable; private EmojiView emojiView; private SizeNotifierFrameLayoutPhoto sizeNotifierLayout; - private Drawable drawable; + private Drawable doneDrawable; private Drawable checkDrawable; + private NumberTextView captionLimitView; private int lineCount; private boolean isInitLineCount; private boolean shouldAnimateEditTextWithBounds; @@ -77,12 +85,6 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica private int messageEditTextPredrawScrollY; private float chatActivityEnterViewAnimateFromTop; - private AnimatorSet runningAnimation; - private AnimatorSet runningAnimation2; - private ObjectAnimator runningAnimationAudio; - private int runningAnimationType; - private int audioInterfaceState; - private int lastSizeChangeValue1; private boolean lastSizeChangeValue2; @@ -97,9 +99,14 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica private boolean popupAnimating; private int captionMaxLength = 1024; + private int codePointCount; private PhotoViewerCaptionEnterViewDelegate delegate; + boolean 
sendButtonEnabled = true; + private float sendButtonEnabledProgress = 1f; + private ValueAnimator sendButtonColorAnimator; + private View windowView; private TextPaint lengthTextPaint; @@ -206,9 +213,6 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica messageEditText.setTextColor(0xffffffff); messageEditText.setHighlightColor(0x4fffffff); messageEditText.setHintTextColor(0xb2ffffff); - InputFilter[] inputFilters = new InputFilter[1]; - inputFilters[0] = new InputFilter.LengthFilter(captionMaxLength); - messageEditText.setFilters(inputFilters); frameLayout.addView(messageEditText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM, 52, 0, 6, 0)); messageEditText.setOnKeyListener((view, i, keyEvent) -> { if (i == KeyEvent.KEYCODE_BACK) { @@ -233,7 +237,6 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica @Override public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { - } @Override @@ -266,31 +269,94 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica lengthText = null; } PhotoViewerCaptionEnterView.this.invalidate(); - if (innerTextChange) { - return; - } - if (processChange) { - ImageSpan[] spans = editable.getSpans(0, editable.length(), ImageSpan.class); - for (int i = 0; i < spans.length; i++) { - editable.removeSpan(spans[i]); + if (!innerTextChange) { + if (processChange) { + ImageSpan[] spans = editable.getSpans(0, editable.length(), ImageSpan.class); + for (int i = 0; i < spans.length; i++) { + editable.removeSpan(spans[i]); + } + Emoji.replaceEmoji(editable, messageEditText.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false); + processChange = false; } - Emoji.replaceEmoji(editable, messageEditText.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false); - processChange = false; + } + + int beforeLimit; + codePointCount = Character.codePointCount(editable, 
0, editable.length()); + boolean sendButtonEnabledLocal = true; + if (captionMaxLength > 0 && (beforeLimit = captionMaxLength - codePointCount) <= 100) { + if (beforeLimit < -9999) { + beforeLimit = -9999; + } + captionLimitView.setNumber(beforeLimit, captionLimitView.getVisibility() == View.VISIBLE); + if (captionLimitView.getVisibility() != View.VISIBLE) { + captionLimitView.setVisibility(View.VISIBLE); + captionLimitView.setAlpha(0); + captionLimitView.setScaleX(0.5f); + captionLimitView.setScaleY(0.5f); + } + captionLimitView.animate().setListener(null).cancel(); + captionLimitView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(100).start(); + if (beforeLimit < 0) { + sendButtonEnabledLocal = false; + captionLimitView.setTextColor(0xffEC7777); + } else { + captionLimitView.setTextColor(0xffffffff); + } + } else { + captionLimitView.animate().alpha(0).scaleX(0.5f).scaleY(0.5f).setDuration(100).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + captionLimitView.setVisibility(View.GONE); + } + }); + } + if (sendButtonEnabled != sendButtonEnabledLocal) { + sendButtonEnabled = sendButtonEnabledLocal; + if (sendButtonColorAnimator != null) { + sendButtonColorAnimator.cancel(); + } + sendButtonColorAnimator = ValueAnimator.ofFloat(sendButtonEnabled ? 0 : 1f, sendButtonEnabled ? 
1f : 0); + sendButtonColorAnimator.addUpdateListener(valueAnimator -> { + sendButtonEnabledProgress = (float) valueAnimator.getAnimatedValue(); + int color = Theme.getColor(Theme.key_dialogFloatingIcon); + int alpha = Color.alpha(color); + Theme.setDrawableColor(checkDrawable, ColorUtils.setAlphaComponent(color, (int) (alpha * (0.58f + 0.42f * sendButtonEnabledProgress)))); + doneButton.invalidate(); + }); + sendButtonColorAnimator.setDuration(150).start(); } } }); - drawable = Theme.createCircleDrawable(AndroidUtilities.dp(16), 0xff66bffa); + doneDrawable = Theme.createCircleDrawable(AndroidUtilities.dp(16), 0xff66bffa); checkDrawable = context.getResources().getDrawable(R.drawable.input_done).mutate(); - CombinedDrawable combinedDrawable = new CombinedDrawable(drawable, checkDrawable, 0, AndroidUtilities.dp(1)); + CombinedDrawable combinedDrawable = new CombinedDrawable(doneDrawable, checkDrawable, 0, AndroidUtilities.dp(1)); combinedDrawable.setCustomSize(AndroidUtilities.dp(32), AndroidUtilities.dp(32)); - ImageView doneButton = new ImageView(context); + doneButton = new ImageView(context); doneButton.setScaleType(ImageView.ScaleType.CENTER); doneButton.setImageDrawable(combinedDrawable); textFieldContainer.addView(doneButton, LayoutHelper.createLinear(48, 48, Gravity.BOTTOM)); - doneButton.setOnClickListener(view -> delegate.onCaptionEnter()); + doneButton.setOnClickListener(view -> { + if (captionMaxLength - codePointCount < 0) { + AndroidUtilities.shakeView(captionLimitView, 2, 0); + Vibrator v = (Vibrator) captionLimitView.getContext().getSystemService(Context.VIBRATOR_SERVICE); + if (v != null) { + v.vibrate(200); + } + return; + } + delegate.onCaptionEnter(); + }); doneButton.setContentDescription(LocaleController.getString("Done", R.string.Done)); + + captionLimitView = new NumberTextView(context); + captionLimitView.setVisibility(View.GONE); + captionLimitView.setTextSize(15); + captionLimitView.setTextColor(0xffffffff); + 
captionLimitView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + captionLimitView.setCenterAlign(true); + addView(captionLimitView, LayoutHelper.createFrame(48, 20, Gravity.BOTTOM | Gravity.RIGHT, 3, 0, 3, 48)); } private void onLineCountChanged(int lineCountOld, int lineCountNew) { @@ -356,22 +422,22 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica chatActivityEnterViewAnimateFromTop = 0; } - if (lengthText != null && getMeasuredHeight() > AndroidUtilities.dp(48)) { - int width = (int) Math.ceil(lengthTextPaint.measureText(lengthText)); - int x = (AndroidUtilities.dp(56) - width) / 2; - canvas.drawText(lengthText, x, getMeasuredHeight() - AndroidUtilities.dp(48), lengthTextPaint); - if (animationProgress < 1.0f) { - animationProgress += 17.0f / 120.0f; - invalidate(); - if (animationProgress >= 1.0f) { - animationProgress = 1.0f; - } - lengthTextPaint.setAlpha((int) (255 * animationProgress)); - } - } else { - lengthTextPaint.setAlpha(0); - animationProgress = 0.0f; - } +// if (lengthText != null && getMeasuredHeight() > AndroidUtilities.dp(48)) { +// int width = (int) Math.ceil(lengthTextPaint.measureText(lengthText)); +// int x = (AndroidUtilities.dp(56) - width) / 2; +// canvas.drawText(lengthText, x, getMeasuredHeight() - AndroidUtilities.dp(48), lengthTextPaint); +// if (animationProgress < 1.0f) { +// animationProgress += 17.0f / 120.0f; +// invalidate(); +// if (animationProgress >= 1.0f) { +// animationProgress = 1.0f; +// } +// lengthTextPaint.setAlpha((int) (255 * animationProgress)); +// } +// } else { +// lengthTextPaint.setAlpha(0); +// animationProgress = 0.0f; +// } } public void setForceFloatingEmoji(boolean value) { @@ -379,8 +445,10 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica } public void updateColors() { - Theme.setDrawableColor(drawable, Theme.getColor(Theme.key_dialogFloatingButton)); - Theme.setDrawableColor(checkDrawable, 
Theme.getColor(Theme.key_dialogFloatingIcon)); + Theme.setDrawableColor(doneDrawable, Theme.getColor(Theme.key_dialogFloatingButton)); + int color = Theme.getColor(Theme.key_dialogFloatingIcon); + int alpha = Color.alpha(color); + Theme.setDrawableColor(checkDrawable, ColorUtils.setAlphaComponent(color, (int) (alpha * (0.58f + 0.42f * sendButtonEnabledProgress)))); if (emojiView != null) { emojiView.updateColors(); } @@ -443,13 +511,7 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica if (delegate != null) { delegate.onTextChanged(messageEditText.getText()); } - int old = captionMaxLength; captionMaxLength = MessagesController.getInstance(UserConfig.selectedAccount).maxCaptionLength; - if (old != captionMaxLength) { - InputFilter[] inputFilters = new InputFilter[1]; - inputFilters[0] = new InputFilter.LengthFilter(captionMaxLength); - messageEditText.setFilters(inputFilters); - } } public int getSelectionLength() { @@ -488,9 +550,6 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica @Override public void onEmojiSelected(String symbol) { - if (messageEditText.length() + symbol.length() > captionMaxLength) { - return; - } int i = messageEditText.getSelectionEnd(); if (i < 0) { i = 0; @@ -760,4 +819,8 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica public void setAllowTextEntitiesIntersection(boolean value) { messageEditText.setAllowTextEntitiesIntersection(value); } + + public EditTextCaption getMessageEditText() { + return messageEditText; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoView.java index 23ca619e6..c9003660e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoView.java @@ -60,6 +60,8 @@ public class PipVideoView { private SharedPreferences 
preferences; private DecelerateInterpolator decelerateInterpolator; + private AnimatorSet animatorSet; + private class MiniControlsView extends FrameLayout { private Paint progressPaint; @@ -352,8 +354,12 @@ public class PipVideoView { } @Override - public boolean onInterceptTouchEvent(MotionEvent event) { - return super.onInterceptTouchEvent(event); + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + if (animatorSet != null) { + animatorSet.cancel(); + animatorSet = null; + } } }; @@ -562,7 +568,7 @@ public class PipVideoView { if (decelerateInterpolator == null) { decelerateInterpolator = new DecelerateInterpolator(); } - AnimatorSet animatorSet = new AnimatorSet(); + animatorSet = new AnimatorSet(); animatorSet.setInterpolator(decelerateInterpolator); animatorSet.setDuration(150); if (slideOut) { @@ -570,6 +576,7 @@ public class PipVideoView { animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { + animatorSet = null; if (parentSheet != null) { parentSheet.destroy(); } else if (photoViewer != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java index fc106b11d..cfb750afe 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java @@ -273,7 +273,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio boolean result = false; if (isScrollingListView) { - result |= parentListView.onTouchEvent(ev); + result = parentListView.onTouchEvent(ev); } if (isSwipingViewPager) { @@ -309,7 +309,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio if (imageLocation == null || thumbLocation == null || settingMainPhoto != 0) { return false; } - if (prevImageLocation == null && imageLocation != 
null || prevImageLocation != null && prevImageLocation.location.local_id != imageLocation.location.local_id) { + if (prevImageLocation == null || prevImageLocation.location.local_id != imageLocation.location.local_id) { imagesLocations.clear(); MessagesController.getInstance(currentAccount).loadDialogPhotos((int) dialogId, 80, 0, true, parentClassGuid); } @@ -748,7 +748,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } else { filter = null; } - setImageMedia(videoLocations.get(realPosition), filter, imagesLocations.get(realPosition), null, parentAvatarImageView.getImageReceiver().getBitmap(), imagesLocationsSizes.get(realPosition), 1, null); + setImageMedia(videoLocations.get(realPosition), filter, imagesLocations.get(realPosition), null, parentAvatarImageView.getImageReceiver().getBitmap(), imagesLocationsSizes.get(realPosition), 1, "avatar_" + dialogId); } } else { final ImageLocation videoLocation = videoLocations.get(realPosition); @@ -756,7 +756,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio needProgress = true; ImageLocation location = thumbsLocations.get(realPosition); String filter = location.photoSize instanceof TLRPC.TL_photoStrippedSize ? 
"b" : null; - setImageMedia(videoLocation, null, imagesLocations.get(realPosition), null, thumbsLocations.get(realPosition), filter, null, 0, 1, imagesLocationsSizes.get(realPosition)); + setImageMedia(videoLocation, null, imagesLocations.get(realPosition), null, thumbsLocations.get(realPosition), filter, null, imagesLocationsSizes.get(realPosition), 1, "avatar_" + dialogId); } if (needProgress) { radialProgress = radialProgresses.get(realPosition); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java index 55a10d749..5833ed9d2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java @@ -307,7 +307,7 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { } else { if (customEndFrame >= 0 && playInDirectionOfCustomEndFrame) { if (currentFrame > customEndFrame) { - if (currentFrame - framesPerUpdates > customEndFrame) { + if (currentFrame - framesPerUpdates >= customEndFrame) { currentFrame -= framesPerUpdates; nextFrameIsLast = false; } else { @@ -543,6 +543,10 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { return currentFrame; } + public int getCustomEndFrame() { + return customEndFrame; + } + public long getDuration() { return (long) (metaData[0] / (float) metaData[1] * 1000); } @@ -551,11 +555,12 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { playInDirectionOfCustomEndFrame = value; } - public void setCustomEndFrame(int frame) { - if (customEndFrame > metaData[0]) { - return; + public boolean setCustomEndFrame(int frame) { + if (customEndFrame == frame || customEndFrame > metaData[0]) { + return false; } customEndFrame = frame; + return true; } public void addParentView(View view) { @@ -776,7 +781,7 @@ public class RLottieDrawable extends BitmapDrawable 
implements Animatable { } public void setCurrentFrame(int frame, boolean async) { - setCurrentFrame(frame, true, false); + setCurrentFrame(frame, async, false); } public void setCurrentFrame(int frame, boolean async, boolean resetFrame) { @@ -845,13 +850,17 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { return true; } for (int a = 0, N = parentViews.size(); a < N; a++) { - if (parentViews.get(a).get() == null) { + View view = parentViews.get(a).get(); + if (view == null) { parentViews.remove(a); N--; a--; continue; } - return parentViews.get(a).get() == currentParentView; + if (!view.isShown()) { + continue; + } + return view == currentParentView; } return true; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgressView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgressView.java index add4ec027..eea62c934 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgressView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgressView.java @@ -30,6 +30,7 @@ public class RadialProgressView extends View { private float currentProgressTime; private RectF cicleRect = new RectF(); private boolean useSelfAlpha; + private float drawingCircleLenght; private int progressColor; @@ -44,6 +45,8 @@ public class RadialProgressView extends View { private float progressAnimationStart; private int progressTime; private float animatedProgress; + private boolean toCircle; + private float toCircleProgress; private boolean noProgress = true; @@ -90,7 +93,6 @@ public class RadialProgressView extends View { animatedProgress = value; } progressAnimationStart = animatedProgress; - currentProgress = value; progressTime = 0; } @@ -106,23 +108,56 @@ public class RadialProgressView extends View { int count = (int) (radOffset / 360); radOffset -= count * 360; + if (toCircle && toCircleProgress != 1f) { + toCircleProgress += 16 / 220f; + if (toCircleProgress > 1f) { + 
toCircleProgress = 1f; + } + } else if (!toCircle && toCircleProgress != 0f) { + toCircleProgress -= 16 / 400f; + if (toCircleProgress < 0) { + toCircleProgress = 0f; + } + } + if (noProgress) { - currentProgressTime += dt; - if (currentProgressTime >= risingTime) { - currentProgressTime = risingTime; - } - if (risingCircleLength) { - currentCircleLength = 4 + 266 * accelerateInterpolator.getInterpolation(currentProgressTime / risingTime); - } else { - currentCircleLength = 4 - 270 * (1.0f - decelerateInterpolator.getInterpolation(currentProgressTime / risingTime)); - } - if (currentProgressTime == risingTime) { - if (risingCircleLength) { - radOffset += 270; - currentCircleLength = -266; + if (toCircleProgress == 0) { + currentProgressTime += dt; + if (currentProgressTime >= risingTime) { + currentProgressTime = risingTime; + } + if (risingCircleLength) { + currentCircleLength = 4 + 266 * accelerateInterpolator.getInterpolation(currentProgressTime / risingTime); + } else { + currentCircleLength = 4 - 270 * (1.0f - decelerateInterpolator.getInterpolation(currentProgressTime / risingTime)); + } + + if (currentProgressTime == risingTime) { + if (risingCircleLength) { + radOffset += 270; + currentCircleLength = -266; + } + risingCircleLength = !risingCircleLength; + currentProgressTime = 0; + } + } else { + if (risingCircleLength) { + float old = currentCircleLength; + currentCircleLength = 4 + 266 * accelerateInterpolator.getInterpolation(currentProgressTime / risingTime); + currentCircleLength += 360 * toCircleProgress; + float dx = old - currentCircleLength; + if (dx > 0) { + radOffset += old - currentCircleLength; + } + } else { + float old = currentCircleLength; + currentCircleLength = 4 - 270 * (1.0f - decelerateInterpolator.getInterpolation(currentProgressTime / risingTime)); + currentCircleLength -= 364 * toCircleProgress; + float dx = old - currentCircleLength; + if (dx > 0) { + radOffset += old - currentCircleLength; + } } - risingCircleLength = 
!risingCircleLength; - currentProgressTime = 0; } } else { float progressDiff = currentProgress - progressAnimationStart; @@ -154,12 +189,29 @@ public class RadialProgressView extends View { progressPaint.setColor(progressColor); } + public void toCircle(boolean toCircle, boolean animated) { + this.toCircle = toCircle; + if (!animated) { + toCircleProgress = toCircle ? 1f : 0f; + } + } + @Override protected void onDraw(Canvas canvas) { int x = (getMeasuredWidth() - size) / 2; int y = (getMeasuredHeight() - size) / 2; cicleRect.set(x, y, x + size, y + size); - canvas.drawArc(cicleRect, radOffset, currentCircleLength, false, progressPaint); + canvas.drawArc(cicleRect, radOffset, drawingCircleLenght = currentCircleLength, false, progressPaint); updateAnimation(); } + + public void draw(Canvas canvas, float cx, float cy) { + cicleRect.set(cx - size / 2f, cy - size / 2f, cx + size / 2f, cy + size / 2f); + canvas.drawArc(cicleRect, radOffset, drawingCircleLenght = currentCircleLength, false, progressPaint); + updateAnimation(); + } + + public boolean isCircle() { + return Math.abs(drawingCircleLenght) >= 360; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java index 50ddcf73a..f0a83e31c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java @@ -123,6 +123,7 @@ public class RecyclerListView extends RecyclerView { private boolean animateEmptyView; private int emptyViewAnimationType; private int selectorRadius; + private int topBottomSelectorRadius; public interface OnItemClickListener { void onItemClick(View view, int position); @@ -974,6 +975,10 @@ public class RecyclerListView extends RecyclerView { selectorRadius = radius; } + public void setTopBottomSelectorRadius(int radius) { + topBottomSelectorRadius = radius; + } + public void 
setDrawSelectorBehind(boolean value) { drawSelectorBehind = value; } @@ -982,7 +987,9 @@ public class RecyclerListView extends RecyclerView { if (selectorDrawable != null) { selectorDrawable.setCallback(null); } - if (selectorRadius > 0) { + if (topBottomSelectorRadius > 0) { + selectorDrawable = Theme.createRadSelectorDrawable(color, topBottomSelectorRadius, topBottomSelectorRadius); + } else if (selectorRadius > 0) { selectorDrawable = Theme.createSimpleSelectorRoundRectDrawable(selectorRadius, 0, color, 0xff000000); } else if (selectorType == 2) { selectorDrawable = Theme.getSelectorDrawable(color, false); @@ -1516,7 +1523,9 @@ public class RecyclerListView extends RecyclerView { if (position != NO_POSITION) { selectorPosition = position; } - + if (topBottomSelectorRadius > 0 && getAdapter() != null) { + Theme.setMaskDrawableRad(selectorDrawable, position == 0 ? topBottomSelectorRadius : 0, position == getAdapter().getItemCount() - 2 ? topBottomSelectorRadius : 0); + } selectorRect.set(sel.getLeft(), sel.getTop(), sel.getRight(), sel.getBottom() - bottomPadding); final boolean enabled = sel.isEnabled(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java index 78f9f9bb2..9c3585cb2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java @@ -39,6 +39,7 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; @@ -293,7 +294,7 @@ public class ScrollSlidingTabStrip extends HorizontalScrollView { tabTypes.put(key, tab); } - public View addStickerTab(TLObject 
thumb, TLRPC.Document sticker, TLRPC.TL_messages_stickerSet parentObject) { + public View addStickerTab(TLObject thumb, SvgHelper.SvgDrawable svgThumb, TLRPC.Document sticker, TLRPC.TL_messages_stickerSet parentObject) { String key = "set" + parentObject.set.id; final int position = tabCount++; @@ -315,6 +316,7 @@ public class ScrollSlidingTabStrip extends HorizontalScrollView { tab.setTag(R.id.index_tag, position); tab.setTag(R.id.parent_tag, parentObject); tab.setTag(R.id.object_tag, sticker); + tab.setTag(R.id.svg_tag, svgThumb); tab.setSelected(position == currentPosition); tabTypes.put(key, tab); @@ -367,6 +369,7 @@ public class ScrollSlidingTabStrip extends HorizontalScrollView { View child = tabsContainer.getChildAt(a); Object object = child.getTag(); Object parentObject = child.getTag(R.id.parent_tag); + SvgHelper.SvgDrawable svgThumb = (SvgHelper.SvgDrawable) child.getTag(R.id.svg_tag); TLRPC.Document sticker = (TLRPC.Document) child.getTag(R.id.object_tag); ImageLocation imageLocation; @@ -384,11 +387,15 @@ public class ScrollSlidingTabStrip extends HorizontalScrollView { } BackupImageView imageView = (BackupImageView) ((FrameLayout) child).getChildAt(0); if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { - imageView.setImage(ImageLocation.getForDocument(sticker), "30_30", imageLocation, null, 0, parentObject); + if (svgThumb != null) { + imageView.setImage(ImageLocation.getForDocument(sticker), "30_30", svgThumb, 0, parentObject); + } else { + imageView.setImage(ImageLocation.getForDocument(sticker), "30_30", imageLocation, null, 0, parentObject); + } } else if (imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE) { - imageView.setImage(imageLocation, "30_30", "tgs", null, parentObject); + imageView.setImage(imageLocation, "30_30", "tgs", svgThumb, parentObject); } else { - imageView.setImage(imageLocation, null, "webp", null, parentObject); + imageView.setImage(imageLocation, null, "webp", svgThumb, 
parentObject); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTextTabStrip.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTextTabStrip.java index 4b1d29c43..5df96abd1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTextTabStrip.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTextTabStrip.java @@ -8,6 +8,7 @@ package org.telegram.ui.Components; +import android.animation.ValueAnimator; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; @@ -17,10 +18,12 @@ import android.os.SystemClock; import androidx.annotation.Keep; import android.text.Layout; +import android.util.SparseArray; import android.util.SparseIntArray; import android.util.TypedValue; import android.view.Gravity; import android.view.View; +import android.view.ViewGroup; import android.widget.HorizontalScrollView; import android.widget.LinearLayout; import android.widget.TextView; @@ -79,6 +82,13 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { private long lastAnimationTime; private float animationTime; private int previousPosition; + + private int animateFromIndicaxtorX; + private int animateFromIndicatorWidth; + + private float indicatorXAnimationDx; + private float indicatorWidthAnimationDx; + private Runnable animationRunnable = new Runnable() { @Override public void run() { @@ -193,7 +203,7 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { return selectorDrawable; } - public View getTabsContainer() { + public ViewGroup getTabsContainer() { return tabsContainer; } @@ -206,13 +216,14 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { return positionToId.get(currentPosition + (forward ? 
1 : -1), -1); } - public void removeTabs() { + public SparseArray removeTabs() { positionToId.clear(); idToPosition.clear(); positionToWidth.clear(); tabsContainer.removeAllViews(); allTextWidth = 0; tabCount = 0; + return null; } public int getTabsCount() { @@ -224,6 +235,9 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { } public void addTextTab(final int id, CharSequence text) { + addTextTab(id, text, null); + } + public void addTextTab(final int id, CharSequence text, SparseArray viewsCache) { int position = tabCount++; if (position == 0 && selectedTabId == -1) { selectedTabId = id; @@ -234,56 +248,63 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { currentPosition = position; prevLayoutWidth = 0; } - TextView tab = new TextView(getContext()); - tab.setWillNotDraw(false); - tab.setGravity(Gravity.CENTER); + TextView tab = null; + if (viewsCache != null) { + tab = (TextView) viewsCache.get(id); + viewsCache.delete(id); + } + if (tab == null) { + tab = new TextView(getContext()); + tab.setWillNotDraw(false); + tab.setGravity(Gravity.CENTER); + tab.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(selectorColorKey), 3)); + tab.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + tab.setSingleLine(true); + tab.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + tab.setPadding(AndroidUtilities.dp(16), 0, AndroidUtilities.dp(16), 0); + tab.setOnClickListener(v -> { + int position1 = tabsContainer.indexOfChild(v); + if (position1 < 0) { + return; + } + if (position1 == currentPosition && delegate != null) { + delegate.onSamePageSelected(); + return; + } + boolean scrollingForward = currentPosition < position1; + scrollingToChild = -1; + previousPosition = currentPosition; + currentPosition = position1; + selectedTabId = id; + + if (animatingIndicator) { + AndroidUtilities.cancelRunOnUIThread(animationRunnable); + animatingIndicator = false; + } + + animationTime = 0; + animatingIndicator = true; 
+ animateIndicatorStartX = indicatorX; + animateIndicatorStartWidth = indicatorWidth; + + TextView nextChild = (TextView) v; + animateIndicatorToWidth = getChildWidth(nextChild); + animateIndicatorToX = nextChild.getLeft() + (nextChild.getMeasuredWidth() - animateIndicatorToWidth) / 2; + setEnabled(false); + + AndroidUtilities.runOnUIThread(animationRunnable, 16); + + if (delegate != null) { + delegate.onPageSelected(id, scrollingForward); + } + scrollToChild(position1); + }); + } tab.setText(text); - tab.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(selectorColorKey), 3)); - tab.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); - tab.setSingleLine(true); - tab.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - tab.setPadding(AndroidUtilities.dp(16), 0, AndroidUtilities.dp(16), 0); - tab.setOnClickListener(v -> { - int position1 = tabsContainer.indexOfChild(v); - if (position1 < 0) { - return; - } - if (position1 == currentPosition && delegate != null) { - delegate.onSamePageSelected(); - return; - } - boolean scrollingForward = currentPosition < position1; - scrollingToChild = -1; - previousPosition = currentPosition; - currentPosition = position1; - selectedTabId = id; - - if (animatingIndicator) { - AndroidUtilities.cancelRunOnUIThread(animationRunnable); - animatingIndicator = false; - } - - animationTime = 0; - animatingIndicator = true; - animateIndicatorStartX = indicatorX; - animateIndicatorStartWidth = indicatorWidth; - - TextView nextChild = (TextView) v; - animateIndicatorToWidth = getChildWidth(nextChild); - animateIndicatorToX = nextChild.getLeft() + (nextChild.getMeasuredWidth() - animateIndicatorToWidth) / 2; - setEnabled(false); - - AndroidUtilities.runOnUIThread(animationRunnable, 16); - - if (delegate != null) { - delegate.onPageSelected(id, scrollingForward); - } - scrollToChild(position1); - }); int tabWidth = (int) Math.ceil(tab.getPaint().measureText(text, 0, text.length())) + tab.getPaddingLeft() + 
tab.getPaddingRight(); + tabsContainer.addView(tab, LayoutHelper.createLinear(0, LayoutHelper.MATCH_PARENT)); allTextWidth += tabWidth; positionToWidth.put(position, tabWidth); - tabsContainer.addView(tab, LayoutHelper.createLinear(0, LayoutHelper.MATCH_PARENT)); } public void finishAddingTabs() { @@ -336,7 +357,9 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { if (child == tabsContainer) { final int height = getMeasuredHeight(); selectorDrawable.setAlpha((int) (255 * tabsContainer.getAlpha())); - selectorDrawable.setBounds(indicatorX, height - AndroidUtilities.dpr(4), indicatorX + indicatorWidth, height); + float x = indicatorX + indicatorXAnimationDx; + float w = x + indicatorWidth + indicatorWidthAnimationDx; + selectorDrawable.setBounds((int) x, height - AndroidUtilities.dpr(4), (int) w, height); selectorDrawable.draw(canvas); } return result; @@ -412,6 +435,26 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { if (child != null) { indicatorWidth = getChildWidth(child); indicatorX = child.getLeft() + (child.getMeasuredWidth() - indicatorWidth) / 2; + + if (animateFromIndicaxtorX > 0 && animateFromIndicatorWidth > 0) { + if (animateFromIndicaxtorX != indicatorX || animateFromIndicatorWidth != indicatorWidth) { + int dX = animateFromIndicaxtorX - indicatorX; + int dW = animateFromIndicatorWidth - indicatorWidth; + ValueAnimator valueAnimator = ValueAnimator.ofFloat(1f, 0); + valueAnimator.addUpdateListener(valueAnimator1 -> { + float v = (float) valueAnimator1.getAnimatedValue(); + indicatorXAnimationDx = dX * v; + indicatorWidthAnimationDx = dW * v; + tabsContainer.invalidate(); + invalidate(); + }); + valueAnimator.setDuration(200); + valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + valueAnimator.start(); + } + animateFromIndicaxtorX = 0; + animateFromIndicatorWidth = 0; + } } } } @@ -487,4 +530,9 @@ public class ScrollSlidingTextTabStrip extends HorizontalScrollView { } invalidate(); } + + public 
void recordIndicatorParams() { + animateFromIndicaxtorX = indicatorX; + animateFromIndicatorWidth = indicatorWidth; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java index 0670a25d5..4ef64bac6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java @@ -7,6 +7,7 @@ import android.animation.ValueAnimator; import android.content.Context; import android.graphics.Canvas; import android.os.Bundle; +import android.text.TextUtils; import android.view.View; import android.view.ViewTreeObserver; import android.widget.FrameLayout; @@ -40,7 +41,6 @@ import org.telegram.ui.Cells.SharedPhotoVideoCell; import org.telegram.ui.ChatActivity; import org.telegram.ui.DialogsActivity; import org.telegram.ui.FilteredSearchView; -import org.telegram.ui.ViewPagerFixed; import java.util.ArrayList; import java.util.HashMap; @@ -162,7 +162,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie FlickerLoadingView loadingView = new FlickerLoadingView(context); loadingView.setViewType(1); - emptyView = new StickerEmptyView(context, loadingView) { + emptyView = new StickerEmptyView(context, loadingView, StickerEmptyView.STICKER_TYPE_SEARCH) { @Override public void setVisibility(int visibility) { if (noMediaFiltersSearchView.getTag() != null) { @@ -227,7 +227,11 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie public void onTextChanged(String text) { lastSearchString = text; View view = getCurrentView(); - search(view, getCurrentPosition(), text, false); + boolean reset = false; + if (!attached) { + reset = true; + } + search(view, getCurrentPosition(), text, reset); } private void search(View view, int position, String query, boolean reset) { @@ -258,6 +262,8 @@ public class SearchViewPager extends 
ViewPagerFixed implements FilteredSearchVie if (reset) { emptyView.showProgress(!dialogsSearchAdapter.isSearching(), false); emptyView.showProgress(dialogsSearchAdapter.isSearching(), false); + } else { + emptyView.showProgress(dialogsSearchAdapter.isSearching(), true); } if (reset) { noMediaFiltersSearchView.setVisibility(View.GONE); @@ -764,6 +770,20 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie } } + boolean attached; + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + attached = true; + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attached = false; + } + public interface ChatPreviewDelegate { void startChatPreview(DialogCell cell); void move(float dy); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java index 46161d9e9..a422c6625 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java @@ -38,7 +38,7 @@ public class SeekBarView extends FrameLayout { private int thumbDX; private float progressToSet; private boolean pressed; - private SeekBarViewDelegate delegate; + public SeekBarViewDelegate delegate; private boolean reportChanges; private float bufferedProgress; private Drawable hoverDrawable; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java index 8e614a170..edb6adfaa 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java @@ -14,6 +14,7 @@ import android.animation.AnimatorSet; import android.animation.ObjectAnimator; import android.animation.ValueAnimator; import android.annotation.SuppressLint; +import android.app.Activity; 
import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; @@ -50,7 +51,6 @@ import android.widget.Toast; import org.telegram.SQLite.SQLiteCursor; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; -import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; import org.telegram.messenger.FileLog; @@ -75,6 +75,7 @@ import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Cells.ShareDialogCell; import org.telegram.ui.ChatActivity; import org.telegram.ui.DialogsActivity; +import org.telegram.ui.LaunchActivity; import org.telegram.ui.MessageStatisticActivity; import java.util.ArrayList; @@ -109,6 +110,7 @@ public class ShareAlert extends BottomSheet implements NotificationCenter.Notifi private LongSparseArray selectedDialogs = new LongSparseArray<>(); private ChatActivity parentFragment; + private Activity parentActivity; private RectF rect = new RectF(); private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); @@ -136,7 +138,13 @@ public class ShareAlert extends BottomSheet implements NotificationCenter.Notifi private int topOffset; public interface ShareAlertDelegate { - void didShare(); + default void didShare() { + + } + + default boolean didCopy() { + return false; + } } @SuppressWarnings("FieldCanBeLocal") @@ -279,6 +287,10 @@ public class ShareAlert extends BottomSheet implements NotificationCenter.Notifi public ShareAlert(final Context context, ChatActivity fragment, ArrayList messages, final String text, boolean channel, final String copyLink, boolean fullScreen) { super(context, true); + if (context instanceof Activity) { + parentActivity = (Activity) context; + } + parentFragment = fragment; shadowDrawable = context.getResources().getDrawable(R.drawable.sheet_shadow_round).mutate(); shadowDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_dialogBackground), 
PorterDuff.Mode.MULTIPLY)); @@ -1178,7 +1190,7 @@ public class ShareAlert extends BottomSheet implements NotificationCenter.Notifi previousScrollOffsetY = scrollOffsetY; gridView.setTopGlowOffset(scrollOffsetY = (int) (newOffset + currentPanTranslationY)); frameLayout.setTranslationY(scrollOffsetY + currentPanTranslationY); - searchEmptyView.setTranslationY(scrollOffsetY + currentPanTranslationY) ; + searchEmptyView.setTranslationY(scrollOffsetY + currentPanTranslationY); containerView.invalidate(); } } @@ -1225,10 +1237,9 @@ public class ShareAlert extends BottomSheet implements NotificationCenter.Notifi android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", linkToCopy != null ? linkToCopy : exportedMessageLink.link); clipboard.setPrimaryClip(clip); - if (exportedMessageLink != null && exportedMessageLink.link.contains("/c/")) { - Toast.makeText(ApplicationLoader.applicationContext, LocaleController.getString("LinkCopiedPrivate", R.string.LinkCopiedPrivate), Toast.LENGTH_SHORT).show(); - } else { - Toast.makeText(ApplicationLoader.applicationContext, LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + if ((delegate == null || !delegate.didCopy()) && parentActivity instanceof LaunchActivity) { + final boolean isPrivate = exportedMessageLink != null && exportedMessageLink.link.contains("/c/"); + ((LaunchActivity) parentActivity).showBulletin(factory -> factory.createCopyLinkBulletin(isPrivate)); } } catch (Exception e) { FileLog.e(e); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java index 67e5bd900..b18ae7887 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java @@ -18,6 +18,11 @@ import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.TextUtils; import android.text.style.ForegroundColorSpan; +import android.transition.ChangeBounds; +import android.transition.TransitionManager; +import android.transition.TransitionSet; +import android.transition.TransitionValues; +import android.transition.Visibility; import android.util.SparseArray; import android.util.TypedValue; import android.view.Gravity; @@ -95,10 +100,8 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter private static class MediaPage extends FrameLayout { private RecyclerListView listView; private FlickerLoadingView progressView; - private TextView emptyTextView; + private StickerEmptyView emptyView; private ExtendedGridLayoutManager layoutManager; - private ImageView emptyImageView; - private LinearLayout emptyView; private ClippingImageView animatingImageView; private RecyclerAnimationScrollHelper scrollHelper; private int selectedType; @@ -193,6 +196,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter private long mergeDialogId; private BaseFragment parentFragment; private ArrayList delegates = new ArrayList<>(); + private boolean mediaWasLoaded; public SharedMediaPreloader(BaseFragment fragment) { parentFragment = fragment; @@ -279,6 +283,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter lastLoadMediaCount[a] = mediaCount[a]; } } + mediaWasLoaded = true; for (int a = 0, N = delegates.size(); a < N; a++) { delegates.get(a).mediaCountUpdated(); } @@ -465,6 +470,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter parentFragment.getMediaDataController().getMediaCounts(mergeDialogId, parentFragment.getClassGuid()); } } + + public boolean isMediaWasLoaded() { + return mediaWasLoaded; + } } private 
PhotoViewer.PhotoViewerProvider provider = new PhotoViewer.EmptyPhotoViewerProvider() { @@ -736,67 +745,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter if (scrollSlidingTextTabStrip != null) { initialTab = scrollSlidingTextTabStrip.getCurrentTabId(); } - scrollSlidingTextTabStrip = new ScrollSlidingTextTabStrip(context); - if (initialTab != -1) { - scrollSlidingTextTabStrip.setInitialTabId(initialTab); - initialTab = -1; - } - scrollSlidingTextTabStrip.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - scrollSlidingTextTabStrip.setColors(Theme.key_profile_tabSelectedLine, Theme.key_profile_tabSelectedText, Theme.key_profile_tabText, Theme.key_profile_tabSelector); - scrollSlidingTextTabStrip.setDelegate(new ScrollSlidingTextTabStrip.ScrollSlidingTabStripDelegate() { - @Override - public void onPageSelected(int id, boolean forward) { - if (mediaPages[0].selectedType == id) { - return; - } - mediaPages[1].selectedType = id; - mediaPages[1].setVisibility(View.VISIBLE); - hideFloatingDateView(true); - switchToCurrentSelectedMode(true); - animatingForward = forward; - onSelectedTabChanged(); - } - - @Override - public void onSamePageSelected() { - scrollToTop(); - } - - @Override - public void onPageScrolled(float progress) { - if (progress == 1 && mediaPages[1].getVisibility() != View.VISIBLE) { - return; - } - if (animatingForward) { - mediaPages[0].setTranslationX(-progress * mediaPages[0].getMeasuredWidth()); - mediaPages[1].setTranslationX(mediaPages[0].getMeasuredWidth() - progress * mediaPages[0].getMeasuredWidth()); - } else { - mediaPages[0].setTranslationX(progress * mediaPages[0].getMeasuredWidth()); - mediaPages[1].setTranslationX(progress * mediaPages[0].getMeasuredWidth() - mediaPages[0].getMeasuredWidth()); - } - if (canShowSearchItem()) { - if (searchItemState == 1) { - searchItem.setAlpha(progress); - } else if (searchItemState == 2) { - searchItem.setAlpha(1.0f - progress); - } - } else { - 
searchItem.setVisibility(INVISIBLE); - searchItem.setAlpha(0.0f); - } - if (progress == 1) { - MediaPage tempPage = mediaPages[0]; - mediaPages[0] = mediaPages[1]; - mediaPages[1] = tempPage; - mediaPages[1].setVisibility(View.GONE); - if (searchItemState == 2) { - searchItem.setVisibility(View.INVISIBLE); - } - searchItemState = 0; - startStopVisibleGifs(); - } - } - }); + scrollSlidingTextTabStrip = createScrollingTextTabStrip(context); for (int a = 1; a >= 0; a--) { selectedFiles[a].clear(); @@ -816,10 +765,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter public void onSearchCollapse() { searching = false; searchWas = false; - documentsSearchAdapter.search(null); - linksSearchAdapter.search(null); - audioSearchAdapter.search(null); - groupUsersSearchAdapter.search(null); + documentsSearchAdapter.search(null, true); + linksSearchAdapter.search(null, true); + audioSearchAdapter.search(null, true); + groupUsersSearchAdapter.search(null, true); onSearchStateChanged(false); if (ignoreSearchCollapse) { ignoreSearchCollapse = false; @@ -835,30 +784,28 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter searchWas = true; } else { searchWas = false; - if (!ignoreSearchCollapse) { - switchToCurrentSelectedMode(false); - } } + switchToCurrentSelectedMode(false); if (mediaPages[0].selectedType == 1) { if (documentsSearchAdapter == null) { return; } - documentsSearchAdapter.search(text); + documentsSearchAdapter.search(text, true); } else if (mediaPages[0].selectedType == 3) { if (linksSearchAdapter == null) { return; } - linksSearchAdapter.search(text); + linksSearchAdapter.search(text, true); } else if (mediaPages[0].selectedType == 4) { if (audioSearchAdapter == null) { return; } - audioSearchAdapter.search(text); + audioSearchAdapter.search(text, true); } else if (mediaPages[0].selectedType == 7) { if (groupUsersSearchAdapter == null) { return; } - groupUsersSearchAdapter.search(text); + 
groupUsersSearchAdapter.search(text, true); } } @@ -1009,7 +956,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter protected Size getSizeForItem(int i) { TLRPC.Document document; - if (mediaPage.listView.getAdapter() == gifAdapter) { + if (mediaPage.listView.getAdapter() == gifAdapter && !sharedMediaData[5].messages.isEmpty()) { document = sharedMediaData[5].messages.get(i).getDocument(); } else { document = null; @@ -1048,15 +995,46 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter if (mediaPage.listView.getAdapter() != gifAdapter) { return mediaPage.layoutManager.getSpanCount(); } + if (mediaPage.listView.getAdapter() == gifAdapter && sharedMediaData[5].messages.isEmpty()) { + return mediaPage.layoutManager.getSpanCount(); + } return mediaPage.layoutManager.getSpanSizeForItem(position); } }); mediaPages[a].listView = new RecyclerListView(context) { + @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { super.onLayout(changed, l, t, r, b); checkLoadMoreScroll(mediaPage, mediaPage.listView, layoutManager); } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (getAdapter() == photoVideoAdapter) { + for (int i = 0; i < getChildCount(); i++) { + if (getChildViewHolder(getChildAt(i)).getItemViewType() == 1) { + canvas.save(); + canvas.translate(getChildAt(i).getX(), getChildAt(i).getY() - getChildAt(i).getMeasuredHeight() + AndroidUtilities.dp(2)); + getChildAt(i).draw(canvas); + canvas.restore(); + invalidate(); + } + } + } + super.dispatchDraw(canvas); + } + + @Override + public boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (getAdapter() == photoVideoAdapter) { + if (getChildViewHolder(child).getItemViewType() == 1) { + return true; + } + } + return super.drawChild(canvas, child, drawingTime); + } + }; mediaPages[a].listView.setScrollingTouchSlop(RecyclerView.TOUCH_SLOP_PAGING); 
mediaPages[a].listView.setPinnedSectionOffsetY(-AndroidUtilities.dp(2)); @@ -1189,24 +1167,6 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter mediaPages[a].animatingImageView.setVisibility(View.GONE); mediaPages[a].listView.addOverlayView(mediaPages[a].animatingImageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - mediaPages[a].emptyView = new LinearLayout(context); - mediaPages[a].emptyView.setWillNotDraw(false); - mediaPages[a].emptyView.setOrientation(LinearLayout.VERTICAL); - mediaPages[a].emptyView.setGravity(Gravity.CENTER); - mediaPages[a].emptyView.setVisibility(View.GONE); - mediaPages[a].addView(mediaPages[a].emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - mediaPages[a].emptyView.setOnTouchListener((v, event) -> true); - - mediaPages[a].emptyImageView = new ImageView(context); - mediaPages[a].emptyView.addView(mediaPages[a].emptyImageView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); - - mediaPages[a].emptyTextView = new TextView(context); - mediaPages[a].emptyTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText2)); - mediaPages[a].emptyTextView.setGravity(Gravity.CENTER); - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); - mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); - mediaPages[a].emptyView.addView(mediaPages[a].emptyTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 24, 0, 0)); - mediaPages[a].progressView = new FlickerLoadingView(context) { @Override @@ -1216,6 +1176,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getViewType() { + setIsSingleCell(false); if (mediaPage.selectedType == 0 || mediaPage.selectedType == 5) { return 2; } else if 
(mediaPage.selectedType == 1) { @@ -1224,6 +1185,13 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter return 4; } else if (mediaPage.selectedType == 3) { return 5; + } else if (mediaPage.selectedType == 7) { + return FlickerLoadingView.USERS_TYPE; + } else if (mediaPage.selectedType == 6) { + if (scrollSlidingTextTabStrip.getTabsCount() == 1) { + setIsSingleCell(true); + } + return 1; } return 1; } @@ -1236,12 +1204,23 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } }; mediaPages[a].progressView.showDate(false); - mediaPages[a].progressView.setVisibility(View.GONE); - mediaPages[a].addView(mediaPages[a].progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); if (a != 0) { mediaPages[a].setVisibility(View.GONE); } + mediaPages[a].emptyView = new StickerEmptyView(context, mediaPages[a].progressView, StickerEmptyView.STICKER_TYPE_SEARCH); + mediaPages[a].emptyView.setVisibility(View.GONE); + mediaPages[a].emptyView.setAnimateLayoutChange(true); + mediaPages[a].addView(mediaPages[a].emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + mediaPages[a].emptyView.setOnTouchListener((v, event) -> true); + mediaPages[a].emptyView.showProgress(true, false); + mediaPages[a].emptyView.title.setText(LocaleController.getString("NoResult", R.string.NoResult)); + mediaPages[a].emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitle2", R.string.SearchEmptyViewFilteredSubtitle2)); + mediaPages[a].emptyView.addView(mediaPages[a].progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); + mediaPages[a].listView.setAnimateEmptyView(true, 0); + mediaPages[a].scrollHelper = new RecyclerAnimationScrollHelper(mediaPages[a].listView, mediaPages[a].layoutManager); } @@ -1253,20 +1232,9 @@ public class 
SharedMediaLayout extends FrameLayout implements NotificationCenter addView(floatingDateView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 48 + 4, 0, 0)); addView(fragmentContextView = new FragmentContextView(context, parent, this, false), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, 48, 0, 0)); - fragmentContextView.setSupportsCalls(false); fragmentContextView.setDelegate((start, show) -> { - if (show && !start) { - for (int a = 0; a < mediaPages.length; a++) { - int translation = (int) mediaPages[a].getTranslationY(); - mediaPages[a].setTranslationY(0); - mediaPages[a].setPadding(0, translation, 0, 0); - } - } else if (!show && start) { - for (int a = 0; a < mediaPages.length; a++) { - int translation = mediaPages[a].getPaddingTop(); - mediaPages[a].setTranslationY(translation); - mediaPages[a].setPadding(0, 0, 0, 0); - } + if (!start) { + requestLayout(); } }); @@ -1279,10 +1247,88 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter layoutParams.topMargin = AndroidUtilities.dp(48) - 1; addView(shadowLine, layoutParams); - updateTabs(); + updateTabs(false); switchToCurrentSelectedMode(false); } + @Override + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + if (fragmentContextView != null && fragmentContextView.getCurrentStyle() == 3) { + canvas.save(); + canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); + fragmentContextView.setDrawOverlay(true); + fragmentContextView.draw(canvas); + fragmentContextView.setDrawOverlay(false); + canvas.restore(); + } + } + + private ScrollSlidingTextTabStrip createScrollingTextTabStrip(Context context) { + ScrollSlidingTextTabStrip scrollSlidingTextTabStrip = new ScrollSlidingTextTabStrip(context); + if (initialTab != -1) { + scrollSlidingTextTabStrip.setInitialTabId(initialTab); + initialTab = -1; + } + 
scrollSlidingTextTabStrip.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + scrollSlidingTextTabStrip.setColors(Theme.key_profile_tabSelectedLine, Theme.key_profile_tabSelectedText, Theme.key_profile_tabText, Theme.key_profile_tabSelector); + scrollSlidingTextTabStrip.setDelegate(new ScrollSlidingTextTabStrip.ScrollSlidingTabStripDelegate() { + @Override + public void onPageSelected(int id, boolean forward) { + if (mediaPages[0].selectedType == id) { + return; + } + mediaPages[1].selectedType = id; + mediaPages[1].setVisibility(View.VISIBLE); + hideFloatingDateView(true); + switchToCurrentSelectedMode(true); + animatingForward = forward; + onSelectedTabChanged(); + } + + @Override + public void onSamePageSelected() { + scrollToTop(); + } + + @Override + public void onPageScrolled(float progress) { + if (progress == 1 && mediaPages[1].getVisibility() != View.VISIBLE) { + return; + } + if (animatingForward) { + mediaPages[0].setTranslationX(-progress * mediaPages[0].getMeasuredWidth()); + mediaPages[1].setTranslationX(mediaPages[0].getMeasuredWidth() - progress * mediaPages[0].getMeasuredWidth()); + } else { + mediaPages[0].setTranslationX(progress * mediaPages[0].getMeasuredWidth()); + mediaPages[1].setTranslationX(progress * mediaPages[0].getMeasuredWidth() - mediaPages[0].getMeasuredWidth()); + } + if (canShowSearchItem()) { + if (searchItemState == 1) { + searchItem.setAlpha(progress); + } else if (searchItemState == 2) { + searchItem.setAlpha(1.0f - progress); + } + } else { + searchItem.setVisibility(INVISIBLE); + searchItem.setAlpha(0.0f); + } + if (progress == 1) { + MediaPage tempPage = mediaPages[0]; + mediaPages[0] = mediaPages[1]; + mediaPages[1] = tempPage; + mediaPages[1].setVisibility(View.GONE); + if (searchItemState == 2) { + searchItem.setVisibility(View.INVISIBLE); + } + searchItemState = 0; + startStopVisibleGifs(); + } + } + }); + return scrollSlidingTextTabStrip; + } + private boolean fillMediaData(int type) { 
SharedMediaData[] mediaData = sharedMediaPreloader.getSharedMediaData(); if (mediaData == null) { @@ -1468,6 +1514,17 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter return scrollSlidingTextTabStrip.getCurrentTabId(); } + public int getClosestTab() { + if (mediaPages[1] != null && mediaPages[1].getVisibility() == View.VISIBLE) { + if (tabsAnimationInProgress && !backAnimation) { + return mediaPages[1].selectedType; + } else if (Math.abs(mediaPages[1].getTranslationX()) < mediaPages[1].getMeasuredWidth() / 2f) { + return mediaPages[1].selectedType; + } + } + return scrollSlidingTextTabStrip.getCurrentTabId(); + } + protected void onSelectedTabChanged() { } @@ -1513,7 +1570,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } System.arraycopy(mediaCounts, 0, hasMedia, 0, 6); - updateTabs(); + updateTabs(true); if (!hadMedia && scrollSlidingTextTabStrip.getCurrentTabId() == 6) { scrollSlidingTextTabStrip.resetTab(); } @@ -1522,7 +1579,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter public void setCommonGroupsCount(int count) { hasMedia[6] = count; - updateTabs(); + updateTabs(true); checkCurrentTabValid(); } @@ -1609,12 +1666,13 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter }); profileActivity.presentFragment(fragment); } else if (id == gotochat) { - if (selectedFiles[0].size() != 1) { + if (selectedFiles[0].size() + selectedFiles[1].size() != 1) { return; } + MessageObject messageObject = selectedFiles[selectedFiles[0].size() == 1 ? 
0 : 1].valueAt(0); Bundle args = new Bundle(); - int lower_part = (int) dialog_id; - int high_id = (int) (dialog_id >> 32); + int lower_part = (int) messageObject.getDialogId(); + int high_id = (int) (messageObject.getDialogId() >> 32); if (lower_part != 0) { if (lower_part > 0) { args.putInt("user_id", lower_part); @@ -1629,7 +1687,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } else { args.putInt("enc_id", high_id); } - args.putInt("message_id", selectedFiles[0].keyAt(0)); + args.putInt("message_id", messageObject.getId()); profileActivity.getNotificationCenter().removeObserver(profileActivity, NotificationCenter.closeChats); profileActivity.getNotificationCenter().postNotificationName(NotificationCenter.closeChats); profileActivity.presentFragment(new ChatActivity(args), true); @@ -1679,10 +1737,14 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter super.forceHasOverlappingRendering(hasOverlappingRendering); } + int topPadding; + int lastMeasuredTopPadding; + @Override public void setPadding(int left, int top, int right, int bottom) { + topPadding = top; for (int a = 0; a < mediaPages.length; a++) { - mediaPages[a].setTranslationY(top); + mediaPages[a].setTranslationY(topPadding - lastMeasuredTopPadding); } fragmentContextView.setTranslationY(AndroidUtilities.dp(48) + top); additionalFloatingTranslation = top; @@ -1707,6 +1769,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } if (child instanceof MediaPage) { measureChildWithMargins(child, widthMeasureSpec, 0, MeasureSpec.makeMeasureSpec(heightSize, MeasureSpec.EXACTLY), 0); + ((MediaPage) child).listView.setPadding(0, 0 ,0, topPadding); } else { measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0); } @@ -1802,6 +1865,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter searchItem.setAlpha(0.0f); } 
scrollSlidingTextTabStrip.selectTabWithId(mediaPages[1].selectedType, scrollProgress); + onSelectedTabChanged(); } } else if (ev == null || ev.getPointerId(0) == startedTrackingPointerId && (ev.getAction() == MotionEvent.ACTION_CANCEL || ev.getAction() == MotionEvent.ACTION_UP || ev.getAction() == MotionEvent.ACTION_POINTER_UP)) { velocityTracker.computeCurrentVelocity(1000, maximumVelocity); @@ -1910,6 +1974,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter tabsAnimation.start(); tabsAnimationInProgress = true; startedTracking = false; + onSelectedTabChanged(); } else { maybeStartTracking = false; actionBar.setEnabled(true); @@ -1942,7 +2007,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter public void setVisibleHeight(int height) { height = Math.max(height, AndroidUtilities.dp(120)); for (int a = 0; a < mediaPages.length; a++) { - mediaPages[a].emptyView.setTranslationY(-(getMeasuredHeight() - height) / 2); + float t = -(getMeasuredHeight() - height) / 2f; + mediaPages[a].emptyView.setTranslationY(t); + mediaPages[a].progressView.setTranslationY(-t); } } @@ -1988,7 +2055,6 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter int guid = (Integer) args[3]; int type = (Integer) args[4]; if (guid == profileActivity.getClassGuid()) { - sharedMediaData[type].loading = false; sharedMediaData[type].totalCount = (Integer) args[1]; ArrayList arr = (ArrayList) args[2]; boolean enc = ((int) dialog_id) == 0; @@ -2018,6 +2084,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } else { oldItemCount = 0; } + sharedMediaData[type].loading = false; for (int a = 0; a < arr.size(); a++) { MessageObject message = arr.get(a); sharedMediaData[type].addMessage(message, loadIndex, false, enc); @@ -2036,99 +2103,19 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } int newItemCount = adapter.getItemCount(); - if 
(type == 0) { - if (oldItemCount > 1) { - adapter.notifyItemRangeChanged(oldItemCount - 2, 2); + if (sharedMediaData[type].messages.isEmpty() && !sharedMediaData[type].loading) { + adapter.notifyDataSetChanged(); + if (listView != null) { + animateItemsEnter(listView, oldItemCount); } } else { - if (oldItemCount > 1) { - adapter.notifyItemChanged(oldItemCount - 2); + adapter.notifyDataSetChanged(); + if (listView != null && newItemCount >= oldItemCount) { + animateItemsEnter(listView, oldItemCount); } } - if (newItemCount > oldItemCount) { - adapter.notifyItemRangeInserted(oldItemCount, newItemCount); - } else if (newItemCount < oldItemCount) { - adapter.notifyItemRangeRemoved(newItemCount, (oldItemCount - newItemCount)); - } - if (listView != null && newItemCount > oldItemCount) { - RecyclerListView finalListView = listView; - int n = finalListView.getChildCount(); - AnimatorSet animatorSet = new AnimatorSet(); - View progressView = null; - for (int i = 0; i < n; i++) { - View child = finalListView.getChildAt(i); - if (child instanceof FlickerLoadingView) { - progressView = child; - } - } - final View finalProgressView = progressView; - if (progressView != null) { - finalListView.removeView(progressView); - } - getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - getViewTreeObserver().removeOnPreDrawListener(this); - int n = finalListView.getChildCount(); - AnimatorSet animatorSet = new AnimatorSet(); - for (int i = 0; i < n; i++) { - View child = finalListView.getChildAt(i); - if (finalListView.getChildAdapterPosition(child) >= oldItemCount - 1) { - child.setAlpha(0); - int s = Math.min(finalListView.getMeasuredHeight(), Math.max(0, child.getTop())); - int delay = (int) ((s / (float) finalListView.getMeasuredHeight()) * 100); - ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); - a.setStartDelay(delay); - a.setDuration(200); - animatorSet.playTogether(a); - } - if 
(finalProgressView != null && finalProgressView.getParent() == null) { - finalListView.addView(finalProgressView); - RecyclerView.LayoutManager layoutManager = finalListView.getLayoutManager(); - if (layoutManager != null) { - layoutManager.ignoreView(finalProgressView); - Animator animator = ObjectAnimator.ofFloat(finalProgressView, ALPHA, finalProgressView.getAlpha(), 0); - animator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - finalProgressView.setAlpha(1f); - layoutManager.stopIgnoringView(finalProgressView); - finalListView.removeView(finalProgressView); - } - }); - animator.start(); - } - } - } - - animatorSet.start(); - return true; - } - }); - } } scrolling = true; - for (int a = 0; a < mediaPages.length; a++) { - if (mediaPages[a].selectedType == type) { - if (!sharedMediaData[type].loading) { - if (mediaPages[a].progressView != null) { - View v = mediaPages[a].progressView; - v.animate().alpha(0).setListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - v.setAlpha(1f); - v.setVisibility(View.GONE); - } - }); - } - if (mediaPages[a].listView != null) { - if (mediaPages[a].listView.getEmptyView() == null) { - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - } - } - } - } - } } else if (sharedMediaPreloader != null && sharedMediaData[type].messages.isEmpty()) { if (fillMediaData(type)) { RecyclerListView.Adapter adapter = null; @@ -2154,20 +2141,6 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter adapter.notifyDataSetChanged(); } scrolling = true; - for (int a = 0; a < mediaPages.length; a++) { - if (mediaPages[a].selectedType == type) { - if (!sharedMediaData[type].loading) { - if (mediaPages[a].progressView != null) { - mediaPages[a].progressView.setVisibility(View.GONE); - } - if (mediaPages[a].listView != null) { - if (mediaPages[a].listView.getEmptyView() == null) { - 
mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - } - } - } - } - } } } } else if (id == NotificationCenter.messagesDeleted) { @@ -2273,7 +2246,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter gifAdapter.notifyDataSetChanged(); } } - updateTabs(); + updateTabs(true); } } } else if (id == NotificationCenter.messageReceivedByServer) { @@ -2323,6 +2296,61 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } + private void animateItemsEnter(final RecyclerListView finalListView, int oldItemCount) { + int n = finalListView.getChildCount(); + View progressView = null; + for (int i = 0; i < n; i++) { + View child = finalListView.getChildAt(i); + if (child instanceof FlickerLoadingView) { + progressView = child; + } + } + final View finalProgressView = progressView; + if (progressView != null) { + finalListView.removeView(progressView); + } + getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + getViewTreeObserver().removeOnPreDrawListener(this); + int n = finalListView.getChildCount(); + AnimatorSet animatorSet = new AnimatorSet(); + for (int i = 0; i < n; i++) { + View child = finalListView.getChildAt(i); + if (child != finalProgressView && finalListView.getChildAdapterPosition(child) >= oldItemCount - 1) { + child.setAlpha(0); + int s = Math.min(finalListView.getMeasuredHeight(), Math.max(0, child.getTop())); + int delay = (int) ((s / (float) finalListView.getMeasuredHeight()) * 100); + ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); + a.setStartDelay(delay); + a.setDuration(200); + animatorSet.playTogether(a); + } + if (finalProgressView != null && finalProgressView.getParent() == null) { + finalListView.addView(finalProgressView); + RecyclerView.LayoutManager layoutManager = finalListView.getLayoutManager(); + if (layoutManager != null) { + layoutManager.ignoreView(finalProgressView); + 
Animator animator = ObjectAnimator.ofFloat(finalProgressView, ALPHA, finalProgressView.getAlpha(), 0); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + finalProgressView.setAlpha(1f); + layoutManager.stopIgnoringView(finalProgressView); + finalListView.removeView(finalProgressView); + } + }); + animator.start(); + } + } + } + + animatorSet.start(); + return true; + } + }); + } + public void onResume() { scrolling = true; if (photoVideoAdapter != null) { @@ -2371,7 +2399,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter public void setChatUsers(ArrayList sortedUsers, TLRPC.ChatFull chatInfo) { chatUsersAdapter.chatInfo = chatInfo; chatUsersAdapter.sortedUsers = sortedUsers; - updateTabs(); + updateTabs(true); for (int a = 0; a < mediaPages.length; a++) { if (mediaPages[a].selectedType == 7) { mediaPages[a].listView.getAdapter().notifyDataSetChanged(); @@ -2426,10 +2454,13 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter mergeDialogId = did; } - private void updateTabs() { + private void updateTabs(boolean animated) { if (scrollSlidingTextTabStrip == null) { return; } + if (!profileActivity.isFragmentOpened) { + animated = false; + } boolean changed = false; if ((chatUsersAdapter.chatInfo == null) == scrollSlidingTextTabStrip.hasTab(7)) { changed = true; @@ -2465,35 +2496,68 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter changed = true; } if (changed) { - scrollSlidingTextTabStrip.removeTabs(); + if (animated && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { + final TransitionSet transitionSet = new TransitionSet(); + transitionSet.setOrdering(TransitionSet.ORDERING_TOGETHER); + transitionSet.addTransition(new ChangeBounds()); + transitionSet.addTransition(new Visibility() { + @Override + public Animator onAppear(ViewGroup sceneRoot, View view, TransitionValues startValues, 
TransitionValues endValues) { + AnimatorSet set = new AnimatorSet(); + set.playTogether( + ObjectAnimator.ofFloat(view, View.ALPHA, 0, 1f), + ObjectAnimator.ofFloat(view, View.SCALE_X, 0.5f, 1f), + ObjectAnimator.ofFloat(view, View.SCALE_Y, 0.5f, 1f) + ); + set.setInterpolator(CubicBezierInterpolator.DEFAULT); + return set; + } + @Override + public Animator onDisappear(ViewGroup sceneRoot, View view, TransitionValues startValues, TransitionValues endValues) { + AnimatorSet set = new AnimatorSet(); + set.playTogether( + ObjectAnimator.ofFloat(view, View.ALPHA, view.getAlpha(), 0f), + ObjectAnimator.ofFloat(view, View.SCALE_X, view.getScaleX(), 0.5f), + ObjectAnimator.ofFloat(view, View.SCALE_Y, view.getScaleX(), 0.5f) + ); + set.setInterpolator(CubicBezierInterpolator.DEFAULT); + return set; + } + }); + transitionSet.setDuration(200); + TransitionManager.beginDelayedTransition(scrollSlidingTextTabStrip.getTabsContainer(), transitionSet); + + scrollSlidingTextTabStrip.recordIndicatorParams(); + } + SparseArray idToView = scrollSlidingTextTabStrip.removeTabs(); if (chatUsersAdapter.chatInfo != null) { if (!scrollSlidingTextTabStrip.hasTab(7)) { - scrollSlidingTextTabStrip.addTextTab(7, LocaleController.getString("GroupMembers", R.string.GroupMembers)); + scrollSlidingTextTabStrip.addTextTab(7, LocaleController.getString("GroupMembers", R.string.GroupMembers), idToView); } } if (hasMedia[0] > 0) { if (!scrollSlidingTextTabStrip.hasTab(0)) { if (hasMedia[1] == 0 && hasMedia[2] == 0 && hasMedia[3] == 0 && hasMedia[4] == 0 && hasMedia[5] == 0 && hasMedia[6] == 0 && chatUsersAdapter.chatInfo == null) { - scrollSlidingTextTabStrip.addTextTab(0, LocaleController.getString("SharedMediaTabFull2", R.string.SharedMediaTabFull2)); + scrollSlidingTextTabStrip.addTextTab(0, LocaleController.getString("SharedMediaTabFull2", R.string.SharedMediaTabFull2), idToView); } else { - scrollSlidingTextTabStrip.addTextTab(0, LocaleController.getString("SharedMediaTab2", 
R.string.SharedMediaTab2)); + scrollSlidingTextTabStrip.addTextTab(0, LocaleController.getString("SharedMediaTab2", R.string.SharedMediaTab2), idToView); } } } if (hasMedia[1] > 0) { if (!scrollSlidingTextTabStrip.hasTab(1)) { - scrollSlidingTextTabStrip.addTextTab(1, LocaleController.getString("SharedFilesTab2", R.string.SharedFilesTab2)); + scrollSlidingTextTabStrip.addTextTab(1, LocaleController.getString("SharedFilesTab2", R.string.SharedFilesTab2), idToView); } } if ((int) dialog_id != 0) { if (hasMedia[3] > 0) { if (!scrollSlidingTextTabStrip.hasTab(3)) { - scrollSlidingTextTabStrip.addTextTab(3, LocaleController.getString("SharedLinksTab2", R.string.SharedLinksTab2)); + scrollSlidingTextTabStrip.addTextTab(3, LocaleController.getString("SharedLinksTab2", R.string.SharedLinksTab2), idToView); } } if (hasMedia[4] > 0) { if (!scrollSlidingTextTabStrip.hasTab(4)) { - scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2)); + scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2), idToView); } } } else { @@ -2501,24 +2565,24 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter if (currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46) { if (hasMedia[4] > 0) { if (!scrollSlidingTextTabStrip.hasTab(4)) { - scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2)); + scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2), idToView); } } } } if (hasMedia[2] > 0) { if (!scrollSlidingTextTabStrip.hasTab(2)) { - scrollSlidingTextTabStrip.addTextTab(2, LocaleController.getString("SharedVoiceTab2", R.string.SharedVoiceTab2)); + scrollSlidingTextTabStrip.addTextTab(2, LocaleController.getString("SharedVoiceTab2", R.string.SharedVoiceTab2), idToView); } } if (hasMedia[5] > 
0) { if (!scrollSlidingTextTabStrip.hasTab(5)) { - scrollSlidingTextTabStrip.addTextTab(5, LocaleController.getString("SharedGIFsTab2", R.string.SharedGIFsTab2)); + scrollSlidingTextTabStrip.addTextTab(5, LocaleController.getString("SharedGIFsTab2", R.string.SharedGIFsTab2), idToView); } } if (hasMedia[6] > 0) { if (!scrollSlidingTextTabStrip.hasTab(6)) { - scrollSlidingTextTabStrip.addTextTab(6, LocaleController.getString("SharedGroupsTab2", R.string.SharedGroupsTab2)); + scrollSlidingTextTabStrip.addTextTab(6, LocaleController.getString("SharedGroupsTab2", R.string.SharedGroupsTab2), idToView); } } } @@ -2566,7 +2630,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter String text = searchItem.getSearchField().getText().toString(); if (mediaPages[a].selectedType == 1) { if (documentsSearchAdapter != null) { - documentsSearchAdapter.search(text); + documentsSearchAdapter.search(text, false); if (currentAdapter != documentsSearchAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(documentsSearchAdapter); @@ -2574,7 +2638,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } else if (mediaPages[a].selectedType == 3) { if (linksSearchAdapter != null) { - linksSearchAdapter.search(text); + linksSearchAdapter.search(text, false); if (currentAdapter != linksSearchAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(linksSearchAdapter); @@ -2582,7 +2646,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } else if (mediaPages[a].selectedType == 4) { if (audioSearchAdapter != null) { - audioSearchAdapter.search(text); + audioSearchAdapter.search(text, false); if (currentAdapter != audioSearchAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(audioSearchAdapter); @@ -2590,19 +2654,13 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } else if 
(mediaPages[a].selectedType == 7) { if (groupUsersSearchAdapter != null) { - groupUsersSearchAdapter.search(text); + groupUsersSearchAdapter.search(text, false); if (currentAdapter != groupUsersSearchAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(groupUsersSearchAdapter); } } } - if (searchItemState != 2 && mediaPages[a].emptyTextView != null) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoResult", R.string.NoResult)); - mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(30)); - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); - mediaPages[a].emptyImageView.setVisibility(View.GONE); - } } } else { if (mediaPages[a].listView != null) { @@ -2632,102 +2690,51 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter groupUsersSearchAdapter.notifyDataSetChanged(); } } - if (searchItemState != 2 && mediaPages[a].emptyTextView != null) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoResult", R.string.NoResult)); - mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(30)); - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); - mediaPages[a].emptyImageView.setVisibility(View.GONE); - } } - } else { - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); - mediaPages[a].emptyImageView.setVisibility(View.VISIBLE); mediaPages[a].listView.setPinnedHeaderShadowDrawable(null); - if (mediaPages[a].selectedType == 0) { if (currentAdapter != photoVideoAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(photoVideoAdapter); } mediaPages[a].listView.setPinnedHeaderShadowDrawable(pinnedHeaderShadowDrawable); - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip1); - if ((int) dialog_id == 0) { - 
mediaPages[a].emptyTextView.setText(LocaleController.getString("NoMediaSecret", R.string.NoMediaSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoMedia", R.string.NoMedia)); - } } else if (mediaPages[a].selectedType == 1) { if (currentAdapter != documentsAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(documentsAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip2); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedFilesSecret", R.string.NoSharedFilesSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedFiles", R.string.NoSharedFiles)); - } } else if (mediaPages[a].selectedType == 2) { if (currentAdapter != voiceAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(voiceAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip5); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedVoiceSecret", R.string.NoSharedVoiceSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedVoice", R.string.NoSharedVoice)); - } } else if (mediaPages[a].selectedType == 3) { if (currentAdapter != linksAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(linksAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip3); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedLinksSecret", R.string.NoSharedLinksSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedLinks", R.string.NoSharedLinks)); - } } else if (mediaPages[a].selectedType == 4) { if (currentAdapter != audioAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(audioAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip4); - if ((int) dialog_id 
== 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedAudioSecret", R.string.NoSharedAudioSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedAudio", R.string.NoSharedAudio)); - } } else if (mediaPages[a].selectedType == 5) { if (currentAdapter != gifAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(gifAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip1); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedGifSecret", R.string.NoSharedGifSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoGIFs", R.string.NoGIFs)); - } } else if (mediaPages[a].selectedType == 6) { if (currentAdapter != commonGroupsAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(commonGroupsAdapter); } - mediaPages[a].emptyImageView.setImageDrawable(null); - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoGroupsInCommon", R.string.NoGroupsInCommon)); } else if (mediaPages[a].selectedType == 7) { if (currentAdapter != chatUsersAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(chatUsersAdapter); } - mediaPages[a].emptyImageView.setImageDrawable(null); - mediaPages[a].emptyTextView.setText(""); } - mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); if (mediaPages[a].selectedType == 0 || mediaPages[a].selectedType == 2 || mediaPages[a].selectedType == 5 || mediaPages[a].selectedType == 6 || mediaPages[a].selectedType == 7 && !profileActivity.canSearchMembers()) { if (animated) { searchItemState = 2; @@ -2763,30 +2770,14 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter if (!commonGroupsAdapter.loading && !commonGroupsAdapter.endReached && commonGroupsAdapter.chats.isEmpty()) { commonGroupsAdapter.getChats(0, 100); } - if 
(commonGroupsAdapter.loading && commonGroupsAdapter.chats.isEmpty()) { - mediaPages[a].progressView.setVisibility(View.VISIBLE); - mediaPages[a].listView.setEmptyView(null); - mediaPages[a].emptyView.setVisibility(View.GONE); - } else { - mediaPages[a].progressView.setVisibility(View.GONE); - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - } } else if (mediaPages[a].selectedType == 7) { - mediaPages[a].progressView.setVisibility(View.GONE); - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); + } else { if (!sharedMediaData[mediaPages[a].selectedType].loading && !sharedMediaData[mediaPages[a].selectedType].endReached[0] && sharedMediaData[mediaPages[a].selectedType].messages.isEmpty()) { sharedMediaData[mediaPages[a].selectedType].loading = true; + documentsAdapter.notifyDataSetChanged(); profileActivity.getMediaDataController().loadMedia(dialog_id, 50, 0, mediaPages[a].selectedType, 1, profileActivity.getClassGuid()); } - if (sharedMediaData[mediaPages[a].selectedType].loading && sharedMediaData[mediaPages[a].selectedType].messages.isEmpty()) { - mediaPages[a].progressView.setVisibility(View.VISIBLE); - mediaPages[a].listView.setEmptyView(null); - mediaPages[a].emptyView.setVisibility(View.GONE); - } else { - mediaPages[a].progressView.setVisibility(View.GONE); - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - } } mediaPages[a].listView.setVisibility(View.VISIBLE); } @@ -2991,14 +2982,11 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter if (AndroidUtilities.isTablet()) { columnsCount = 3; - mediaPages[num].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); } else { if (rotation == Surface.ROTATION_270 || rotation == Surface.ROTATION_90) { columnsCount = 6; - mediaPages[num].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), 0); } else { columnsCount = 3; - 
mediaPages[num].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); } } if (num == 0) { @@ -3057,16 +3045,25 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public boolean isEnabled(int section, int row) { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + return false; + } return section == 0 || row != 0; } @Override public int getSectionCount() { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + return 1; + } return sharedMediaData[3].sections.size() + (sharedMediaData[3].sections.isEmpty() || sharedMediaData[3].endReached[0] && sharedMediaData[3].endReached[1] ? 0 : 1); } @Override public int getCountForSection(int section) { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + return 1; + } if (section < sharedMediaData[3].sections.size()) { return sharedMediaData[3].sectionArrays.get(sharedMediaData[3].sections.get(section)).size() + (section != 0 ? 
1 : 0); } @@ -3102,6 +3099,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter view = new SharedLinkCell(mContext); ((SharedLinkCell) view).setDelegate(sharedLinkCellDelegate); break; + case 3: + View emptyStubView = createEmptyStubView(mContext, 3, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); case 2: default: FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext); @@ -3117,7 +3118,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public void onBindViewHolder(int section, int position, RecyclerView.ViewHolder holder) { - if (holder.getItemViewType() != 2) { + if (holder.getItemViewType() != 2 && holder.getItemViewType() != 3) { String name = sharedMediaData[3].sections.get(section); ArrayList messageObjects = sharedMediaData[3].sectionArrays.get(name); switch (holder.getItemViewType()) { @@ -3146,6 +3147,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getItemViewType(int section, int position) { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + return 3; + } if (section < sharedMediaData[3].sections.size()) { if (section != 0 && position == 0) { return 0; @@ -3179,12 +3183,18 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public boolean isEnabled(int section, int row) { + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + return false; + } return section == 0 || row != 0; } @Override public int getSectionCount() { - return sharedMediaData[currentType].sections.size() + (sharedMediaData[currentType].sections.isEmpty() || sharedMediaData[currentType].endReached[0] && sharedMediaData[currentType].endReached[1] ? 
0 : 1); + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + return 1; + } + return sharedMediaData[currentType].sections.size() + ((!sharedMediaData[currentType].sections.isEmpty() && (!sharedMediaData[currentType].endReached[0] || !sharedMediaData[currentType].endReached[1])) ? 1 : 0); } @Override @@ -3194,6 +3204,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getCountForSection(int section) { + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + return 1; + } if (section < sharedMediaData[currentType].sections.size()) { return sharedMediaData[currentType].sectionArrays.get(sharedMediaData[currentType].sections.get(section)).size() + (section != 0 ? 1 : 0); } @@ -3239,6 +3252,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter flickerLoadingView.showDate(false); flickerLoadingView.setIsSingleCell(true); break; + case 4: + View emptyStubView = createEmptyStubView(mContext, currentType, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); case 3: default: if (currentType == MediaDataController.MEDIA_MUSIC && !audioCellCache.isEmpty()) { @@ -3274,7 +3291,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public void onBindViewHolder(int section, int position, RecyclerView.ViewHolder holder) { - if (holder.getItemViewType() != 2) { + if (holder.getItemViewType() != 2 && holder.getItemViewType() != 4) { String name = sharedMediaData[currentType].sections.get(section); ArrayList messageObjects = sharedMediaData[currentType].sectionArrays.get(name); switch (holder.getItemViewType()) { @@ -3317,6 +3334,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter 
@Override public int getItemViewType(int section, int position) { + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + return 4; + } if (section < sharedMediaData[currentType].sections.size()) { if (section != 0 && position == 0) { return 0; @@ -3342,6 +3362,111 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } + public static View createEmptyStubView(Context context, int currentType, long dialog_id) { + EmptyStubView emptyStubView = new EmptyStubView(context); + if (currentType == 0) { + emptyStubView.emptyImageView.setImageResource(R.drawable.tip1); + if ((int) dialog_id == 0) { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoMediaSecret", R.string.NoMediaSecret)); + } else { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoMedia", R.string.NoMedia)); + } + } else if (currentType == 1) { + emptyStubView.emptyImageView.setImageResource(R.drawable.tip2); + if ((int) dialog_id == 0) { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedFilesSecret", R.string.NoSharedFilesSecret)); + } else { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedFiles", R.string.NoSharedFiles)); + } + } else if (currentType == 2) { + emptyStubView.emptyImageView.setImageResource(R.drawable.tip5); + if ((int) dialog_id == 0) { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedVoiceSecret", R.string.NoSharedVoiceSecret)); + } else { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedVoice", R.string.NoSharedVoice)); + } + } else if (currentType == 3) { + emptyStubView.emptyImageView.setImageResource(R.drawable.tip3); + if ((int) dialog_id == 0) { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedLinksSecret", R.string.NoSharedLinksSecret)); + } else { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedLinks", 
R.string.NoSharedLinks)); + } + } else if (currentType == 4) { + emptyStubView.emptyImageView.setImageResource(R.drawable.tip4); + if ((int) dialog_id == 0) { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedAudioSecret", R.string.NoSharedAudioSecret)); + } else { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedAudio", R.string.NoSharedAudio)); + } + } else if (currentType == 5) { + emptyStubView.emptyImageView.setImageResource(R.drawable.tip1); + if ((int) dialog_id == 0) { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoSharedGifSecret", R.string.NoSharedGifSecret)); + } else { + emptyStubView.emptyTextView.setText(LocaleController.getString("NoGIFs", R.string.NoGIFs)); + } + } else if (currentType == 6) { + emptyStubView.emptyImageView.setImageDrawable(null); + emptyStubView.emptyTextView.setText(LocaleController.getString("NoGroupsInCommon", R.string.NoGroupsInCommon)); + } else if (currentType == 7) { + emptyStubView.emptyImageView.setImageDrawable(null); + emptyStubView.emptyTextView.setText(""); + } + return emptyStubView; + } + + private static class EmptyStubView extends LinearLayout { + + final TextView emptyTextView; + final ImageView emptyImageView; + + boolean ignoreRequestLayout; + + public EmptyStubView(Context context) { + super(context); + emptyTextView = new TextView(context); + emptyImageView = new ImageView(context); + + setOrientation(LinearLayout.VERTICAL); + setGravity(Gravity.CENTER); + + addView(emptyImageView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); + + emptyTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText2)); + emptyTextView.setGravity(Gravity.CENTER); + emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); + emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); + addView(emptyTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, 
LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 24, 0, 0)); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + WindowManager manager = (WindowManager) ApplicationLoader.applicationContext.getSystemService(Activity.WINDOW_SERVICE); + int rotation = manager.getDefaultDisplay().getRotation(); + ignoreRequestLayout = true; + if (AndroidUtilities.isTablet()) { + emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); + } else { + if (rotation == Surface.ROTATION_270 || rotation == Surface.ROTATION_90) { + emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), 0); + } else { + emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); + } + } + ignoreRequestLayout = false; + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + @Override + public void requestLayout() { + if (ignoreRequestLayout) { + return; + } + super.requestLayout(); + } + } + private class SharedPhotoVideoAdapter extends RecyclerListView.SelectionAdapter { private Context mContext; @@ -3352,6 +3477,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getItemCount() { + if (sharedMediaData[0].messages.size() == 0 && !sharedMediaData[0].loading) { + return 1; + } int count = (int) Math.ceil(sharedMediaData[0].messages.size() / (float) columnsCount); if (count != 0 && (!sharedMediaData[0].endReached[0] || !sharedMediaData[0].endReached[1])) { count++; @@ -3398,12 +3526,21 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter cache.add((SharedPhotoVideoCell) view); break; case 1: - default: - FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext); + FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext) { + @Override + public int getColumnsCount() { + return columnsCount; + } + }; flickerLoadingView.setIsSingleCell(true); 
flickerLoadingView.setViewType(FlickerLoadingView.PHOTOS_TYPE); view = flickerLoadingView; break; + default: + case 2: + View emptyStubView = createEmptyStubView(mContext, 0, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); } view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); return new RecyclerListView.Holder(view); @@ -3415,6 +3552,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter ArrayList messageObjects = sharedMediaData[0].messages; SharedPhotoVideoCell cell = (SharedPhotoVideoCell) holder.itemView; cell.setItemsCount(columnsCount); + // cell.setLoading(!sharedMediaData[0].endReached[0] || !sharedMediaData[0].endReached[1]); cell.setIsFirst(position == 0); for (int a = 0; a < columnsCount; a++) { int index = position * columnsCount + a; @@ -3431,12 +3569,20 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } cell.requestLayout(); + } else if (holder.getItemViewType() == 1) { + FlickerLoadingView flickerLoadingView = (FlickerLoadingView) holder.itemView; + int count = (int) Math.ceil(sharedMediaData[0].messages.size() / (float) columnsCount); + flickerLoadingView.skipDrawItemsCount(columnsCount - (columnsCount * count - sharedMediaData[0].messages.size())); } } @Override public int getItemViewType(int position) { - if (position < sharedMediaData[0].messages.size()) { + if (sharedMediaData[0].messages.size() == 0 && !sharedMediaData[0].loading) { + return 2; + } + int count = (int) Math.ceil(sharedMediaData[0].messages.size() / (float) columnsCount); + if (position < count) { return 0; } return 1; @@ -3507,13 +3653,25 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter AndroidUtilities.runOnUIThread(() -> { if (reqId != 0) { if 
(currentReqId == lastReqId) { + int oldItemCounts = getItemCount(); globalSearch = messageObjects; searchesInProgress--; int count = getItemCount(); if (searchesInProgress == 0 || count != 0) { switchToCurrentSelectedMode(false); } + + for (int a = 0; a < mediaPages.length; a++) { + if (mediaPages[a].selectedType == currentType) { + if (searchesInProgress == 0 && count == 0) { + mediaPages[a].emptyView.showProgress(false, true); + } else if (oldItemCounts == 0) { + animateItemsEnter(mediaPages[a].listView, 0); + } + } + } notifyDataSetChanged(); + } reqId = 0; } @@ -3522,11 +3680,18 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter profileActivity.getConnectionsManager().bindRequestToGuid(reqId, profileActivity.getClassGuid()); } - public void search(final String query) { + public void search(final String query, boolean animated) { if (searchRunnable != null) { AndroidUtilities.cancelRunOnUIThread(searchRunnable); searchRunnable = null; } + + if (!searchResult.isEmpty() || !globalSearch.isEmpty()) { + searchResult.clear(); + globalSearch.clear(); + notifyDataSetChanged(); + } + if (TextUtils.isEmpty(query)) { if (!searchResult.isEmpty() || !globalSearch.isEmpty() || searchesInProgress != 0) { searchResult.clear(); @@ -3537,18 +3702,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter searchesInProgress--; } } - notifyDataSetChanged(); } else { for (int a = 0; a < mediaPages.length; a++) { if (mediaPages[a].selectedType == currentType) { - //if (getItemCount() != 0) { - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - mediaPages[a].progressView.setVisibility(View.GONE); - /*} else { - mediaPages[a].listView.setEmptyView(null); - mediaPages[a].emptyView.setVisibility(View.GONE); - mediaPages[a].progressView.setVisibility(View.VISIBLE); - }*/ + mediaPages[a].emptyView.showProgress(true, animated); } } @@ -3635,26 +3792,26 @@ public class SharedMediaLayout extends FrameLayout implements 
NotificationCenter return; } searchesInProgress--; + int oldItemCount = getItemCount(); searchResult = documents; int count = getItemCount(); if (searchesInProgress == 0 || count != 0) { switchToCurrentSelectedMode(false); } - notifyDataSetChanged(); - }); - } - @Override - public void notifyDataSetChanged() { - super.notifyDataSetChanged(); - if (searchesInProgress == 0) { for (int a = 0; a < mediaPages.length; a++) { if (mediaPages[a].selectedType == currentType) { - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - mediaPages[a].progressView.setVisibility(View.GONE); + if (searchesInProgress == 0 && count == 0) { + mediaPages[a].emptyView.showProgress(false, true); + } else if (oldItemCount == 0) { + animateItemsEnter(mediaPages[a].listView, 0); + } } } - } + + notifyDataSetChanged(); + + }); } @Override @@ -3769,11 +3926,17 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { + if (sharedMediaData[5].messages.size() == 0 && !sharedMediaData[5].loading) { + return false; + } return true; } @Override public int getItemCount() { + if (sharedMediaData[5].messages.size() == 0 && !sharedMediaData[5].loading) { + return 1; + } return sharedMediaData[5].messages.size(); } @@ -3784,11 +3947,19 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getItemViewType(int position) { + if (sharedMediaData[5].messages.size() == 0 && !sharedMediaData[5].loading) { + return 1; + } return 0; } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + if (viewType == 1) { + View emptyStubView = createEmptyStubView(mContext, 5, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); + } ContextLinkCell cell = new ContextLinkCell(mContext, 
true); cell.setCanPreviewGif(true); return new RecyclerListView.Holder(cell); @@ -3796,15 +3967,17 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { - MessageObject messageObject = sharedMediaData[5].messages.get(position); - TLRPC.Document document = messageObject.getDocument(); - if (document != null) { - ContextLinkCell cell = (ContextLinkCell) holder.itemView; - cell.setGif(document, messageObject, messageObject.messageOwner.date, false); - if (isActionModeShowed) { - cell.setChecked(selectedFiles[messageObject.getDialogId() == dialog_id ? 0 : 1].indexOfKey(messageObject.getId()) >= 0, !scrolling); - } else { - cell.setChecked(false, !scrolling); + if (holder.getItemViewType() != 1) { + MessageObject messageObject = sharedMediaData[5].messages.get(position); + TLRPC.Document document = messageObject.getDocument(); + if (document != null) { + ContextLinkCell cell = (ContextLinkCell) holder.itemView; + cell.setGif(document, messageObject, messageObject.messageOwner.date, false); + if (isActionModeShowed) { + cell.setChecked(selectedFiles[messageObject.getDialogId() == dialog_id ? 
0 : 1].indexOfKey(messageObject.getId()) >= 0, !scrolling); + } else { + cell.setChecked(false, !scrolling); + } } } } @@ -3871,44 +4044,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter for (int a = 0; a < mediaPages.length; a++) { if (mediaPages[a].selectedType == 6) { - if (mediaPages[a].progressView != null) { - View v = mediaPages[a].progressView; - v.animate().alpha(0).setListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - v.setAlpha(1f); - v.setVisibility(View.GONE); - } - }); - } if (mediaPages[a].listView != null) { - if (mediaPages[a].listView.getEmptyView() == null) { - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - } final RecyclerListView listView = mediaPages[a].listView; if (firstLoaded) { - if (listView != null) { - getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - getViewTreeObserver().removeOnPreDrawListener(this); - int n = listView.getChildCount(); - AnimatorSet animatorSet = new AnimatorSet(); - for (int i = 0; i < n; i++) { - View child = listView.getChildAt(i); - child.setAlpha(0); - int s = Math.min(listView.getMeasuredHeight(), Math.max(0, child.getTop())); - int delay = (int) ((s / (float) listView.getMeasuredHeight()) * 100); - ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); - a.setStartDelay(delay); - a.setDuration(200); - animatorSet.playTogether(a); - } - animatorSet.start(); - return true; - } - }); - } + animateItemsEnter(listView, 0); } } } @@ -3927,6 +4066,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getItemCount() { + if (chats.isEmpty() && !loading) { + return 1; + } int count = chats.size(); if (!chats.isEmpty()) { if (!endReached) { @@ -3943,6 +4085,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter case 0: view = new 
ProfileSearchCell(mContext); break; + case 2: + View emptyStubView = createEmptyStubView(mContext, 6, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); case 1: default: FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext); @@ -3968,6 +4114,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getItemViewType(int i) { + if (chats.isEmpty() && !loading) { + return 2; + } if (i < chats.size()) { return 0; } else { @@ -3993,11 +4142,19 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getItemCount() { + if (chatInfo != null && chatInfo.participants.participants.isEmpty()) { + return 1; + } return chatInfo != null ? chatInfo.participants.participants.size() : 0; } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + if (viewType == 1) { + View emptyStubView = createEmptyStubView(mContext, 7, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); + } View view = new UserCell(mContext, 9, 0, true); view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); return new RecyclerListView.Holder(view); @@ -4043,6 +4200,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter @Override public int getItemViewType(int i) { + if (chatInfo != null && chatInfo.participants.participants.isEmpty()) { + return 1; + } return 0; } } @@ -4055,11 +4215,28 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter private Runnable searchRunnable; private int totalCount = 0; private TLRPC.Chat currentChat; + 
int searchCount = 0; public GroupUsersSearchAdapter(Context context) { mContext = context; searchAdapterHelper = new SearchAdapterHelper(true); - searchAdapterHelper.setDelegate(searchId -> notifyDataSetChanged()); + searchAdapterHelper.setDelegate(searchId -> { + notifyDataSetChanged(); + if (searchId == 1) { + searchCount--; + if (searchCount == 0) { + for (int a = 0; a < mediaPages.length; a++) { + if (mediaPages[a].selectedType == 7) { + if (getItemCount() == 0) { + mediaPages[a].emptyView.showProgress(false, true); + } else { + animateItemsEnter(mediaPages[a].listView, 0); + } + } + } + } + } + }); currentChat = profileActivity.getCurrentChat(); } @@ -4076,17 +4253,25 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter return profileActivity.onMemberClick((TLRPC.ChatParticipant) participant, true, resultOnly); } - public void search(final String query) { + public void search(final String query, boolean animated) { if (searchRunnable != null) { Utilities.searchQueue.cancelRunnable(searchRunnable); searchRunnable = null; } - if (TextUtils.isEmpty(query)) { - searchResultNames.clear(); - searchAdapterHelper.mergeResults(null); - searchAdapterHelper.queryServerSearch(null, true, false, true, false, false, ChatObject.isChannel(currentChat) ? currentChat.id : 0, false, 2, 0); - notifyDataSetChanged(); - } else { + searchResultNames.clear(); + searchAdapterHelper.mergeResults(null); + searchAdapterHelper.queryServerSearch(null, true, false, true, false, false, ChatObject.isChannel(currentChat) ? 
currentChat.id : 0, false, 2, 0); + notifyDataSetChanged(); + + for (int a = 0; a < mediaPages.length; a++) { + if (mediaPages[a].selectedType == 7) { + if (!TextUtils.isEmpty(query)) { + mediaPages[a].emptyView.showProgress(true, animated); + } + } + } + + if (!TextUtils.isEmpty(query)) { Utilities.searchQueue.postRunnable(searchRunnable = () -> processSearch(query), 300); } } @@ -4095,10 +4280,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter AndroidUtilities.runOnUIThread(() -> { searchRunnable = null; - int kickedType; final ArrayList participantsCopy = !ChatObject.isChannel(currentChat) && info != null ? new ArrayList<>(info.participants.participants) : null; - searchAdapterHelper.queryServerSearch(query, false, false, true, false, false, ChatObject.isChannel(currentChat) ? currentChat.id : 0, false, 2, 0); + searchCount = 2; if (participantsCopy != null) { Utilities.searchQueue.postRunnable(() -> { String search1 = query.trim().toLowerCase(); @@ -4160,7 +4344,10 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } updateSearchResults(resultArrayNames, resultArray2); }); + } else { + searchCount--; } + searchAdapterHelper.queryServerSearch(query, false, false, true, false, false, ChatObject.isChannel(currentChat) ? 
currentChat.id : 0, false, 2, 1); }); } @@ -4170,11 +4357,25 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter return; } searchResultNames = names; + searchCount--; if (!ChatObject.isChannel(currentChat)) { ArrayList search = searchAdapterHelper.getGroupSearch(); search.clear(); search.addAll(participants); } + + if (searchCount == 0) { + for (int a = 0; a < mediaPages.length; a++) { + if (mediaPages[a].selectedType == 7) { + if (getItemCount() == 0) { + mediaPages[a].emptyView.showProgress(false, true); + } else { + animateItemsEnter(mediaPages[a].listView, 0); + } + } + } + } + notifyDataSetChanged(); }); } @@ -4336,7 +4537,6 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_LISTGLOWCOLOR, null, null, null, null, Theme.key_actionBarDefault)); arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_SELECTOR, null, null, null, null, Theme.key_listSelector)); arrayList.add(new ThemeDescription(mediaPages[a].emptyView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_emptyListPlaceholder)); - arrayList.add(new ThemeDescription(mediaPages[a].emptyTextView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteGrayText2)); arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_SECTIONS, new Class[]{GraySectionCell.class}, new String[]{"textView"}, null, null, null, Theme.key_graySectionText)); arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_CELLBACKGROUNDCOLOR | ThemeDescription.FLAG_SECTIONS, new Class[]{GraySectionCell.class}, null, null, null, Theme.key_graySection)); @@ -4361,6 +4561,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundBlue)); 
arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundPink)); + arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{EmptyStubView.class}, new String[]{"emptyTextView"}, null,null, null, Theme.key_windowBackgroundWhiteGrayText2)); arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{SharedDocumentCell.class}, new String[]{"nameTextView"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{SharedDocumentCell.class}, new String[]{"dateTextView"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText3)); arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_PROGRESSBAR, new Class[]{SharedDocumentCell.class}, new String[]{"progressView"}, null, null, null, Theme.key_sharedMedia_startStopLoadIcon)); @@ -4398,6 +4599,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_CHECKBOXCHECK, new Class[]{ContextLinkCell.class}, null, null, cellDelegate, Theme.key_checkboxCheck)); arrayList.add(new ThemeDescription(mediaPages[a].listView, 0, null, null, new Drawable[]{pinnedHeaderShadowDrawable}, null, Theme.key_windowBackgroundGrayShadow)); + + arrayList.add(new ThemeDescription(mediaPages[a].emptyView.title, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + arrayList.add(new ThemeDescription(mediaPages[a].emptyView.subtitle, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); } return arrayList; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java index 
047a92c51..07adfa448 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java @@ -3,7 +3,7 @@ package org.telegram.ui.Components; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.content.Context; -import android.graphics.Paint; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.widget.FrameLayout; @@ -22,8 +22,11 @@ import org.telegram.ui.ActionBar.Theme; public class StickerEmptyView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate { + public final static int STICKER_TYPE_SEARCH = 1; + public final static int STICKER_TYPE_NO_CONTACTS = 0; + private LinearLayout linearLayout; - private BackupImageView stickerView; + public BackupImageView stickerView; private LoadingStickerDrawable stubDrawable; private RadialProgressView progressBar; public final TextView title; @@ -31,6 +34,8 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. private final String stickerSetName = "tg_placeholders"; private boolean progressShowing; + private int stickerType; + public final View progressView; int keyboardSize; @@ -52,10 +57,12 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. } } }; + private boolean animateLayoutChange; - public StickerEmptyView(@NonNull Context context, View progressView) { + public StickerEmptyView(@NonNull Context context, View progressView, int type) { super(context); this.progressView = progressView; + stickerType = type; linearLayout = new LinearLayout(context) { @Override @@ -72,11 +79,17 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. 
linearLayout.setOrientation(LinearLayout.VERTICAL); stickerView = new BackupImageView(context); stickerView.setOnClickListener(view -> stickerView.getImageReceiver().startAnimation()); - stubDrawable = new LoadingStickerDrawable(stickerView, "M503.1,302.3c-2-20-21.4-29.8-42.4-30.7c13.8-56.8-8.2-121-52.8-164.1C321.6,24,190,51.3,131.7,146.2\n" + - "\tc-21.2-30.5-65-34.3-91.1-7.6c-30,30.6-18.4,82.7,22.5,97.3c-4.7,2.4-6.4,7.6-5.7,12.4c-14.2,10.5-19,28.5-5.1,42.4\n" + - "\tc-5.4,15,13.2,28.8,26.9,18.8c10.5,6.9,21,15,27.8,28.8c-17.1,55.3-8.5,79.4,8.5,98.7v0c47.5,53.8,235.6,45.3,292.2,11.5\n" + - "\tc22.6-13.5,39.5-34.6,30.4-96.8C459.1,322.1,505.7,328.5,503.1,302.3z M107.4,234c0.1,2.8,0.2,5.8,0.4,8.8c-7-2.5-14-3.6-20.5-3.6\n" + - "\tC94.4,238.6,101.2,236.9,107.4,234z", AndroidUtilities.dp(130), AndroidUtilities.dp(130)); + String svg; + if (type == 1) { + svg = "M503.1,302.3c-2-20-21.4-29.8-42.4-30.7c13.8-56.8-8.2-121-52.8-164.1C321.6,24,190,51.3,131.7,146.2\n" + + "\tc-21.2-30.5-65-34.3-91.1-7.6c-30,30.6-18.4,82.7,22.5,97.3c-4.7,2.4-6.4,7.6-5.7,12.4c-14.2,10.5-19,28.5-5.1,42.4\n" + + "\tc-5.4,15,13.2,28.8,26.9,18.8c10.5,6.9,21,15,27.8,28.8c-17.1,55.3-8.5,79.4,8.5,98.7v0c47.5,53.8,235.6,45.3,292.2,11.5\n" + + "\tc22.6-13.5,39.5-34.6,30.4-96.8C459.1,322.1,505.7,328.5,503.1,302.3z M107.4,234c0.1,2.8,0.2,5.8,0.4,8.8c-7-2.5-14-3.6-20.5-3.6\n" + + "\tC94.4,238.6,101.2,236.9,107.4,234z"; + } else { + svg = ContactsEmptyView.svg; + } + stubDrawable = new LoadingStickerDrawable(stickerView, svg, AndroidUtilities.dp(130), AndroidUtilities.dp(130)); stickerView.setImageDrawable(stubDrawable); title = new TextView(context); @@ -84,13 +97,13 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. 
title.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); title.setTag(Theme.key_windowBackgroundWhiteBlackText); title.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); - title.setTextSize(20); + title.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); title.setGravity(Gravity.CENTER); subtitle = new TextView(context); subtitle.setTag(Theme.key_windowBackgroundWhiteGrayText); subtitle.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); - subtitle.setTextSize(14); + subtitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); subtitle.setGravity(Gravity.CENTER); linearLayout.addView(stickerView, LayoutHelper.createLinear(130, 130, Gravity.CENTER_HORIZONTAL)); @@ -107,13 +120,42 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. } } + private int lastH; + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + if (animateLayoutChange && lastH > 0 && lastH != getMeasuredHeight()) { + float y = (lastH - getMeasuredHeight()) / 2f; + linearLayout.setTranslationY(linearLayout.getTranslationY() + y); + linearLayout.animate().translationY(0).setInterpolator(CubicBezierInterpolator.DEFAULT).setDuration(250); + if (progressBar != null) { + progressBar.setTranslationY(progressBar.getTranslationY() + y); + progressBar.animate().translationY(0).setInterpolator(CubicBezierInterpolator.DEFAULT).setDuration(250); + } + } + lastH = getMeasuredHeight(); + } + + public void setColors(String titleKey, String subtitleKey, String key1, String key2) { + title.setTag(titleKey); + title.setTextColor(Theme.getColor(titleKey)); + + subtitle.setTag(subtitleKey); + subtitle.setTextColor(Theme.getColor(subtitleKey)); + + stubDrawable.setColors(key1, key2); + } + @Override public void setVisibility(int visibility) { if (getVisibility() != visibility) { if (visibility == VISIBLE) { if (progressShowing) { 
linearLayout.animate().alpha(0f).scaleY(0.8f).scaleX(0.8f).setDuration(150).start(); - showProgressRunnable.run(); + progressView.setVisibility(VISIBLE); + progressView.setAlpha(1f); + //showProgressRunnable.run(); } else { linearLayout.animate().alpha(1f).scaleY(1f).scaleX(1f).setDuration(150).start(); if (progressView != null) { @@ -135,6 +177,7 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. if (getVisibility() == VISIBLE) { setSticker(); } else { + lastH = 0; linearLayout.setAlpha(0f); linearLayout.setScaleX(0.8f); linearLayout.setScaleY(0.8f); @@ -178,7 +221,7 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. set = MediaDataController.getInstance(currentAccount).getStickerSetByEmojiOrName(stickerSetName); } if (set != null && set.documents.size() >= 2) { - TLRPC.Document document = set.documents.get(1); + TLRPC.Document document = set.documents.get(stickerType); ImageLocation imageLocation = ImageLocation.getForDocument(document); stickerView.setImage(imageLocation, "130_130", "tgs", stubDrawable, set); stickerView.getImageReceiver().setAutoRepeat(2); @@ -281,4 +324,8 @@ public class StickerEmptyView extends FrameLayout implements NotificationCenter. 
} } } + + public void setAnimateLayoutChange(boolean animate) { + this.animateLayoutChange = animate; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java index cdb75b727..c1a7dd659 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java @@ -32,11 +32,14 @@ import android.widget.LinearLayout; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.Emoji; +import org.telegram.messenger.FileLoader; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; @@ -109,7 +112,7 @@ public class StickerMasksAlert extends BottomSheet implements NotificationCenter private ContentPreviewViewer.ContentPreviewViewerDelegate contentPreviewViewerDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { @Override - public void sendSticker(TLRPC.Document sticker, Object parent, boolean notify, int scheduleDate) { + public void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { delegate.onStickerSelected(parent, sticker); } @@ -821,14 +824,13 @@ public class StickerMasksAlert extends BottomSheet implements NotificationCenter } for (int a = 0; a < stickerSets[currentType].size(); a++) { TLRPC.TL_messages_stickerSet stickerSet = stickerSets[currentType].get(a); - TLObject thumb; TLRPC.Document document = stickerSet.documents.get(0); - if (stickerSet.set.thumb instanceof TLRPC.TL_photoSize || 
stickerSet.set.thumb instanceof TLRPC.TL_photoSizeProgressive) { - thumb = stickerSet.set.thumb; - } else { + TLObject thumb = FileLoader.getClosestPhotoSizeWithSize(stickerSet.set.thumbs, 90); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(stickerSet.set.thumbs, Theme.key_windowBackgroundGray, 1.0f); + if (thumb == null) { thumb = document; } - stickersTab.addStickerTab(thumb, document, stickerSet).setContentDescription(stickerSet.set.title + ", " + LocaleController.getString("AccDescrStickerSet", R.string.AccDescrStickerSet)); + stickersTab.addStickerTab(thumb, svgThumb, document, stickerSet).setContentDescription(stickerSet.set.title + ", " + LocaleController.getString("AccDescrStickerSet", R.string.AccDescrStickerSet)); } stickersTab.commitUpdate(); stickersTab.updateTabStyles(); @@ -1046,7 +1048,7 @@ public class StickerMasksAlert extends BottomSheet implements NotificationCenter View view = null; switch (viewType) { case 0: - view = new StickerEmojiCell(context) { + view = new StickerEmojiCell(context, false) { public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(82), MeasureSpec.EXACTLY)); } @@ -1453,7 +1455,7 @@ public class StickerMasksAlert extends BottomSheet implements NotificationCenter View view = null; switch (viewType) { case 0: - view = new StickerEmojiCell(context) { + view = new StickerEmojiCell(context, false) { public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(82), MeasureSpec.EXACTLY)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java index f90e160be..cdfc41e7d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java @@ -56,10 +56,8 @@ public class StickerSetBulletinLayout extends Bulletin.TwoLineLayout { } if (sticker != null) { - TLObject object; - if (stickerSet.thumb instanceof TLRPC.TL_photoSize || stickerSet.thumb instanceof TLRPC.TL_photoSizeProgressive) { - object = stickerSet.thumb; - } else { + TLObject object = FileLoader.getClosestPhotoSizeWithSize(stickerSet.thumbs, 90); + if (object == null) { object = sticker; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java index 65fe1f42f..023fb5e0a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java @@ -76,7 +76,7 @@ import androidx.recyclerview.widget.RecyclerView; public class StickersAlert extends BottomSheet implements NotificationCenter.NotificationCenterDelegate { public interface StickersAlertDelegate { - void onStickerSelected(TLRPC.Document sticker, Object parent, boolean clearsInputField, boolean notify, int scheduleDate); + void onStickerSelected(TLRPC.Document sticker, String query, Object parent, boolean clearsInputField, boolean notify, int scheduleDate); boolean canSchedule(); boolean isInScheduleMode(); } @@ -142,11 +142,11 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not private ContentPreviewViewer.ContentPreviewViewerDelegate previewDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { @Override - public void sendSticker(TLRPC.Document sticker, Object parent, boolean notify, int scheduleDate) { + public void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { if (delegate == null) { return; } - delegate.onStickerSelected(sticker, parent, clearsInputField, notify, scheduleDate); + 
delegate.onStickerSelected(sticker, query, parent, clearsInputField, notify, scheduleDate); dismiss(); } @@ -281,41 +281,43 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { reqId = 0; if (error == null) { - Transition addTarget = new Transition() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { + Transition addTarget = new Transition() { - @Override - public void captureStartValues(TransitionValues transitionValues) { - transitionValues.values.put("start", true); - transitionValues.values.put("offset", containerView.getTop() + scrollOffsetY); - } + @Override + public void captureStartValues(TransitionValues transitionValues) { + transitionValues.values.put("start", true); + transitionValues.values.put("offset", containerView.getTop() + scrollOffsetY); + } - @Override - public void captureEndValues(TransitionValues transitionValues) { - transitionValues.values.put("start", false); - transitionValues.values.put("offset", containerView.getTop() + scrollOffsetY); - } + @Override + public void captureEndValues(TransitionValues transitionValues) { + transitionValues.values.put("start", false); + transitionValues.values.put("offset", containerView.getTop() + scrollOffsetY); + } - @Override - public Animator createAnimator(ViewGroup sceneRoot, TransitionValues startValues, TransitionValues endValues) { - int scrollOffsetY = StickersAlert.this.scrollOffsetY; - int startValue = (int) startValues.values.get("offset") - (int) endValues.values.get("offset"); - final ValueAnimator animator = ValueAnimator.ofFloat(0f, 1f); - animator.setDuration(250); - animator.addUpdateListener(a -> { - float fraction = a.getAnimatedFraction(); - gridView.setAlpha(fraction); - titleTextView.setAlpha(fraction); - if (startValue != 0) { - int value = (int) (startValue * (1f - fraction)); - setScrollOffsetY(scrollOffsetY + 
value); - gridView.setTranslationY(value); - } - }); - return animator; - } - }; - addTarget.addTarget(containerView); - TransitionManager.beginDelayedTransition(container, addTarget); + @Override + public Animator createAnimator(ViewGroup sceneRoot, TransitionValues startValues, TransitionValues endValues) { + int scrollOffsetY = StickersAlert.this.scrollOffsetY; + int startValue = (int) startValues.values.get("offset") - (int) endValues.values.get("offset"); + final ValueAnimator animator = ValueAnimator.ofFloat(0f, 1f); + animator.setDuration(250); + animator.addUpdateListener(a -> { + float fraction = a.getAnimatedFraction(); + gridView.setAlpha(fraction); + titleTextView.setAlpha(fraction); + if (startValue != 0) { + int value = (int) (startValue * (1f - fraction)); + setScrollOffsetY(scrollOffsetY + value); + gridView.setTranslationY(value); + } + }); + return animator; + } + }; + addTarget.addTarget(containerView); + TransitionManager.beginDelayedTransition(container, addTarget); + } optionsButton.setVisibility(View.VISIBLE); stickerSet = (TLRPC.TL_messages_stickerSet) response; showEmoji = !stickerSet.set.masks; @@ -658,7 +660,7 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not previewSendButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); stickerPreviewLayout.addView(previewSendButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM | Gravity.LEFT)); previewSendButton.setOnClickListener(v -> { - delegate.onStickerSelected(selectedSticker, stickerSet, clearsInputField, true, 0); + delegate.onStickerSelected(selectedSticker, null, stickerSet, clearsInputField, true, 0); dismiss(); }); @@ -703,7 +705,14 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not } String stickersUrl = "https://" + MessagesController.getInstance(currentAccount).linkPrefix + "/addstickers/" + stickerSet.set.short_name; if (id == 1) { - ShareAlert alert = new 
ShareAlert(getContext(), null, stickersUrl, false, stickersUrl, false); + Context context = parentActivity; + if (context == null && parentFragment != null) { + context = parentFragment.getParentActivity(); + } + if (context == null) { + context = getContext(); + } + ShareAlert alert = new ShareAlert(context, null, stickersUrl, false, stickersUrl, false); if (parentFragment != null) { parentFragment.showDialog(alert); } else { @@ -712,7 +721,7 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not } else if (id == 2) { try { AndroidUtilities.addToClipboard(stickersUrl); - Toast.makeText(ApplicationLoader.applicationContext, LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + BulletinFactory.of((FrameLayout) containerView).createCopyLinkBulletin().show(); } catch (Exception e) { FileLog.e(e); } @@ -747,9 +756,7 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not dismiss(); } }; - if (url != null) { - stringBuilder.setSpan(url, start, end, 0); - } + stringBuilder.setSpan(url, start, end, 0); } } catch (Exception e) { FileLog.e(e); @@ -758,7 +765,7 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not if (stickerSet.set == null || !MediaDataController.getInstance(currentAccount).isStickerPackInstalled(stickerSet.set.id)) { String text; - if (stickerSet.set.masks) { + if (stickerSet.set != null && stickerSet.set.masks) { text = LocaleController.formatString("AddStickersCount", R.string.AddStickersCount, LocaleController.formatPluralString("MasksCount", stickerSet.documents.size())).toUpperCase(); } else { text = LocaleController.formatString("AddStickersCount", R.string.AddStickersCount, LocaleController.formatPluralString("Stickers", stickerSet.documents.size())).toUpperCase(); @@ -929,6 +936,23 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not 
NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 4); } + @Override + protected void onStart() { + super.onStart(); + Bulletin.addDelegate((FrameLayout) containerView, new Bulletin.Delegate() { + @Override + public int getBottomOffset() { + return pickerBottomLayout != null ? pickerBottomLayout.getHeight() : 0; + } + }); + } + + @Override + protected void onStop() { + super.onStop(); + Bulletin.removeDelegate((FrameLayout) containerView); + } + @Override public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.emojiDidLoad) { @@ -1075,7 +1099,7 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not View view = null; switch (viewType) { case 0: - StickerEmojiCell cell = new StickerEmojiCell(context) { + StickerEmojiCell cell = new StickerEmojiCell(context, false) { public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(MeasureSpec.makeMeasureSpec(itemSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(82), MeasureSpec.EXACTLY)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java index 6117b5dcc..52cedb8d3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java @@ -17,6 +17,7 @@ import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.Bitmaps; import org.telegram.messenger.DocumentObject; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.ui.ActionBar.Theme; import java.io.File; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java index 
0e473ee89..fe40d4b90 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java @@ -5,7 +5,6 @@ import android.animation.AnimatorListenerAdapter; import android.animation.ValueAnimator; import android.content.Context; import android.content.res.Configuration; -import android.graphics.Paint; import android.util.LongSparseArray; import android.util.SparseArray; import android.view.Gravity; @@ -388,7 +387,7 @@ public class TrendingStickersLayout extends FrameLayout implements NotificationC if (delegate.canSendSticker()) { stickersAlertDelegate = new StickersAlert.StickersAlertDelegate() { @Override - public void onStickerSelected(TLRPC.Document sticker, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { + public void onStickerSelected(TLRPC.Document sticker, String query, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { delegate.onStickerSelected(sticker, parent, clearsInputField, notify, scheduleDate); } @@ -636,7 +635,7 @@ public class TrendingStickersLayout extends FrameLayout implements NotificationC View view = null; switch (viewType) { case 0: - StickerEmojiCell stickerCell = new StickerEmojiCell(context) { + StickerEmojiCell stickerCell = new StickerEmojiCell(context, false) { public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(82), MeasureSpec.EXACTLY)); } @@ -761,7 +760,6 @@ public class TrendingStickersLayout extends FrameLayout implements NotificationC installing = false; } else if (removing && !isSetInstalled) { removingStickerSets.remove(stickerSetCovered.set.id); - removing = false; } final FeaturedStickerSetInfoCell cell = (FeaturedStickerSetInfoCell) view; cell.setStickerSet(stickerSetCovered, unread, animated, 0, 0, forceInstalled); diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java index dac841bbf..e8bfd705a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java @@ -35,6 +35,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLog; +import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; @@ -51,6 +52,7 @@ public class UndoView extends FrameLayout { private TextView undoTextView; private ImageView undoImageView; private RLottieImageView leftImageView; + private BackupImageView avatarImageView; private LinearLayout undoButton; private int undoViewHeight; @@ -107,6 +109,12 @@ public class UndoView extends FrameLayout { public final static int ACTION_PROXIMITY_SET = 24; public final static int ACTION_PROXIMITY_REMOVED = 25; + public final static int ACTION_VOIP_MUTED = 30; + public final static int ACTION_VOIP_UNMUTED = 31; + public final static int ACTION_VOIP_REMOVED = 32; + public final static int ACTION_VOIP_LINK_COPIED = 33; + public final static int ACTION_VOIP_INVITED = 34; + private CharSequence infoText; public class LinkMovementMethodMy extends LinkMovementMethod { @@ -176,6 +184,10 @@ public class UndoView extends FrameLayout { leftImageView.setLayerColor("Oval.**", Theme.getColor(Theme.key_undo_infoColor)); addView(leftImageView, LayoutHelper.createFrame(54, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | Gravity.LEFT, 3, 0, 0, 0)); + avatarImageView = new BackupImageView(context); + avatarImageView.setRoundRadius(AndroidUtilities.dp(15)); + addView(avatarImageView, LayoutHelper.createFrame(30, 30, Gravity.CENTER_VERTICAL | Gravity.LEFT, 15, 0, 0, 0)); + 
undoButton = new LinearLayout(context); undoButton.setOrientation(LinearLayout.HORIZONTAL); addView(undoButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL | Gravity.RIGHT, 0, 0, 19, 0)); @@ -231,7 +243,8 @@ public class UndoView extends FrameLayout { currentAction == ACTION_ARCHIVE_PINNED || currentAction == ACTION_CONTACT_ADDED || currentAction == ACTION_OWNER_TRANSFERED_CHANNEL || currentAction == ACTION_OWNER_TRANSFERED_GROUP || currentAction == ACTION_QUIZ_CORRECT || currentAction == ACTION_QUIZ_INCORRECT || currentAction == ACTION_CACHE_WAS_CLEARED || currentAction == ACTION_ADDED_TO_FOLDER || currentAction == ACTION_REMOVED_FROM_FOLDER || currentAction == ACTION_PROFILE_PHOTO_CHANGED || - currentAction == ACTION_CHAT_UNARCHIVED; + currentAction == ACTION_CHAT_UNARCHIVED || currentAction == ACTION_VOIP_MUTED || currentAction == ACTION_VOIP_UNMUTED || currentAction == ACTION_VOIP_REMOVED || + currentAction == ACTION_VOIP_LINK_COPIED || currentAction == ACTION_VOIP_INVITED; } private boolean hasSubInfo() { @@ -337,6 +350,7 @@ public class UndoView extends FrameLayout { undoTextView.setText(LocaleController.getString("Undo", R.string.Undo).toUpperCase()); undoImageView.setVisibility(VISIBLE); infoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + avatarImageView.setVisibility(GONE); infoTextView.setGravity(Gravity.LEFT | Gravity.TOP); FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) infoTextView.getLayoutParams(); @@ -359,7 +373,41 @@ public class UndoView extends FrameLayout { int icon; int size = 36; boolean iconIsDrawable = false; - if (action == ACTION_OWNER_TRANSFERED_CHANNEL || action == ACTION_OWNER_TRANSFERED_GROUP) { + if (action == ACTION_VOIP_INVITED) { + TLRPC.User user = (TLRPC.User) infoObject; + infoText = AndroidUtilities.replaceTags(LocaleController.formatString("VoipGroupInvitedUser", R.string.VoipGroupInvitedUser, UserObject.getFirstName(user))); + subInfoText = 
null; + icon = 0; + AvatarDrawable avatarDrawable = new AvatarDrawable(); + avatarDrawable.setTextSize(AndroidUtilities.dp(12)); + avatarDrawable.setInfo(user); + avatarImageView.setImage(ImageLocation.getForUser(user, false), "50_50", avatarDrawable, user); + avatarImageView.setVisibility(VISIBLE); + timeLeft = 3000; + } else if (action == ACTION_VOIP_LINK_COPIED) { + infoText = LocaleController.getString("VoipGroupCopyInviteLinkCopied", R.string.VoipGroupCopyInviteLinkCopied); + subInfoText = null; + icon = R.raw.voip_invite; + timeLeft = 3000; + } else if (action == ACTION_VOIP_MUTED) { + TLRPC.User user = (TLRPC.User) infoObject; + infoText = AndroidUtilities.replaceTags(LocaleController.formatString("VoipGroupUserCantNowSpeak", R.string.VoipGroupUserCantNowSpeak, UserObject.getFirstName(user))); + subInfoText = null; + icon = R.raw.voip_muted; + timeLeft = 3000; + } else if (action == ACTION_VOIP_UNMUTED) { + TLRPC.User user = (TLRPC.User) infoObject; + infoText = AndroidUtilities.replaceTags(LocaleController.formatString("VoipGroupUserCanNowSpeak", R.string.VoipGroupUserCanNowSpeak, UserObject.getFirstName(user))); + subInfoText = null; + icon = R.raw.voip_unmuted; + timeLeft = 3000; + } else if (action == ACTION_VOIP_REMOVED) { + TLRPC.User user = (TLRPC.User) infoObject; + infoText = AndroidUtilities.replaceTags(LocaleController.formatString("VoipGroupRemovedFromGroup", R.string.VoipGroupRemovedFromGroup, UserObject.getFirstName(user))); + subInfoText = null; + icon = R.raw.voip_group_removed; + timeLeft = 3000; + } else if (action == ACTION_OWNER_TRANSFERED_CHANNEL || action == ACTION_OWNER_TRANSFERED_GROUP) { TLRPC.User user = (TLRPC.User) infoObject; if (action == ACTION_OWNER_TRANSFERED_CHANNEL) { infoText = AndroidUtilities.replaceTags(LocaleController.formatString("EditAdminTransferChannelToast", R.string.EditAdminTransferChannelToast, UserObject.getFirstName(user))); @@ -457,12 +505,6 @@ public class UndoView extends FrameLayout { } subInfoText = 
null; icon = R.raw.contact_check; - /*iconIsDrawable = true; - if (action == ACTION_ADDED_TO_FOLDER) { - icon = R.drawable.toast_folder; - } else { - icon = R.drawable.toast_folder_minus; - }*/ } else if (action == ACTION_CACHE_WAS_CLEARED) { infoText = this.infoText; subInfoText = null; @@ -482,18 +524,27 @@ public class UndoView extends FrameLayout { } infoTextView.setText(infoText); - if (iconIsDrawable) { - leftImageView.setImageResource(icon); + if (icon != 0) { + if (iconIsDrawable) { + leftImageView.setImageResource(icon); + } else { + leftImageView.setAnimation(icon, size, size); + } + leftImageView.setVisibility(VISIBLE); + if (!iconIsDrawable) { + leftImageView.setProgress(0); + leftImageView.playAnimation(); + } } else { - leftImageView.setAnimation(icon, size, size); + leftImageView.setVisibility(GONE); } if (subInfoText != null) { layoutParams.leftMargin = AndroidUtilities.dp(58); layoutParams.topMargin = AndroidUtilities.dp(6); - layoutParams.rightMargin = 0; + layoutParams.rightMargin = AndroidUtilities.dp(8); layoutParams = (FrameLayout.LayoutParams) subinfoTextView.getLayoutParams(); - layoutParams.rightMargin = 0; + layoutParams.rightMargin = AndroidUtilities.dp(8); subinfoTextView.setText(subInfoText); subinfoTextView.setVisibility(VISIBLE); infoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); @@ -501,19 +552,13 @@ public class UndoView extends FrameLayout { } else { layoutParams.leftMargin = AndroidUtilities.dp(58); layoutParams.topMargin = AndroidUtilities.dp(13); - layoutParams.rightMargin = 0; + layoutParams.rightMargin = AndroidUtilities.dp(8); subinfoTextView.setVisibility(GONE); infoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); infoTextView.setTypeface(Typeface.DEFAULT); } undoButton.setVisibility(GONE); - leftImageView.setVisibility(VISIBLE); - - if (!iconIsDrawable) { - leftImageView.setProgress(0); - leftImageView.playAnimation(); - } } else if (currentAction == ACTION_PROXIMITY_SET || currentAction == 
ACTION_PROXIMITY_REMOVED) { int radius = (Integer) infoObject; TLRPC.User user = (TLRPC.User) infoObject2; @@ -784,8 +829,8 @@ public class UndoView extends FrameLayout { undoViewHeight = AndroidUtilities.dp(52); } else if (getParent() instanceof ViewGroup) { ViewGroup parent = (ViewGroup) getParent(); - int width = parent.getMeasuredWidth(); - if (width == 0) { + int width = parent.getMeasuredWidth() - parent.getPaddingLeft() - parent.getPaddingRight(); + if (width <= 0) { width = AndroidUtilities.displaySize.x; } width -= AndroidUtilities.dp(16); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java index 0cbf97fdd..a25621ab3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java @@ -630,7 +630,7 @@ public class VideoPlayer implements ExoPlayer.EventListener, SimpleExoPlayer.Vid if (parent != null) { int i = parent.indexOfChild(textureView); parent.removeView(textureView); - parent.addView(textureView,i); + parent.addView(textureView, i); } player.clearVideoTextureView(textureView); player.setVideoTextureView(textureView); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ViewPagerFixed.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ViewPagerFixed.java similarity index 99% rename from TMessagesProj/src/main/java/org/telegram/ui/ViewPagerFixed.java rename to TMessagesProj/src/main/java/org/telegram/ui/Components/ViewPagerFixed.java index 0ef46b4fa..4c5c8d364 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ViewPagerFixed.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ViewPagerFixed.java @@ -1,4 +1,4 @@ -package org.telegram.ui; +package org.telegram.ui.Components; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; @@ -46,9 +46,6 @@ import org.telegram.messenger.UserConfig; 
import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; -import org.telegram.ui.Components.CubicBezierInterpolator; -import org.telegram.ui.Components.LayoutHelper; -import org.telegram.ui.Components.RecyclerListView; import java.util.ArrayList; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/WaveDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/WaveDrawable.java new file mode 100644 index 000000000..f4dbf1bf9 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/WaveDrawable.java @@ -0,0 +1,366 @@ +package org.telegram.ui.Components; + +import android.animation.Animator; +import android.animation.AnimatorSet; +import android.animation.ValueAnimator; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.os.Build; +import android.os.SystemClock; +import android.view.View; +import android.view.animation.Interpolator; +import android.view.animation.LinearInterpolator; + +import org.telegram.messenger.AndroidUtilities; + +public class WaveDrawable { + + public final static float MAX_AMPLITUDE = 1800f; + + private final static float ROTATION_SPEED = 0.36f * 0.1f; + public final static float SINE_WAVE_SPEED = 0.81f; + public final static float SMALL_WAVE_RADIUS = 0.55f; + public final static float SMALL_WAVE_SCALE = 0.40f; + public final static float SMALL_WAVE_SCALE_SPEED = 0.60f; + public final static float FLING_DISTANCE = 0.50f; + private final static float WAVE_ANGLE = 0.03f; + private final static float RANDOM_RADIUS_SIZE = 0.3f; + private final static float ANIMATION_SPEED_WAVE_HUGE = 0.65f; + private final static float ANIMATION_SPEED_WAVE_SMALL = 0.45f; + private final static float ANIMATION_SPEED_CIRCLE = 0.45f; + public final static float CIRCLE_ALPHA_1 = 0.30f; + public final static float CIRCLE_ALPHA_2 = 0.15f; + + private final static float IDLE_ROTATION_SPEED = 0.2f; + private final static float 
IDLE_WAVE_ANGLE = 0.5f; + private final static float IDLE_SCALE_SPEED = 0.3f; + private final static float IDLE_RADIUS = 0.56f; + private final static float IDLE_ROTATE_DIF = 0.1f * IDLE_ROTATION_SPEED; + + private final static float animationSpeed = 1f - ANIMATION_SPEED_WAVE_HUGE; + private final static float animationSpeedTiny = 1f - ANIMATION_SPEED_WAVE_SMALL; + public final static float animationSpeedCircle = 1f - ANIMATION_SPEED_CIRCLE; + + private Paint paintRecordWaveBig = new Paint(); + private Paint paintRecordWaveTin = new Paint(); + + public float fling; + private float animateToAmplitude; + private float amplitude; + private float slowAmplitude; + private float animateAmplitudeDiff; + private float animateAmplitudeSlowDiff; + float lastRadius; + float radiusDiff; + float waveDif; + double waveAngle; + private boolean incRandomAdditionals; + + public float rotation; + float idleRotation; + + private float circleRadius; + + private Interpolator linearInterpolator = new LinearInterpolator(); + + public float amplitudeWaveDif; + private final CircleBezierDrawable circleBezierDrawable; + public float amplitudeRadius; + private float idleRadius = 0; + private float idleRadiusK = 0.15f * IDLE_WAVE_ANGLE; + private boolean expandIdleRadius; + private boolean expandScale; + + private boolean isBig; + + private boolean isIdle = true; + private float scaleIdleDif; + private float scaleDif; + public float scaleSpeed = 0.00008f; + public float scaleSpeedIdle = 0.0002f * IDLE_SCALE_SPEED; + public float maxScale; + + private float flingRadius; + private Animator flingAnimator; + + private ValueAnimator animator; + + float randomAdditions = AndroidUtilities.dp(8) * RANDOM_RADIUS_SIZE; + + private final ValueAnimator.AnimatorUpdateListener flingUpdateListener = animation -> flingRadius = (float) animation.getAnimatedValue(); + private float idleGlobalRadius = AndroidUtilities.dp(10f) * IDLE_RADIUS; + + private float sineAngleMax; + + private WaveDrawable 
tinyWaveDrawable; + + private long lastUpdateTime; + private View parentView; + + public WaveDrawable(View parent, int n, float rotateDif, float radius, WaveDrawable tinyDrawable) { + parentView = parent; + circleBezierDrawable = new CircleBezierDrawable(n); + amplitudeRadius = radius; + isBig = tinyDrawable != null; + tinyWaveDrawable = tinyDrawable; + expandIdleRadius = isBig; + radiusDiff = AndroidUtilities.dp(34) * 0.0012f; + + if (Build.VERSION.SDK_INT >= 26) { + paintRecordWaveBig.setAntiAlias(true); + paintRecordWaveTin.setAntiAlias(true); + } + } + + public void setValue(float value) { + animateToAmplitude = value; + + if (isBig) { + if (animateToAmplitude > amplitude) { + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100f + 300f * animationSpeed); + } else { + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100 + 500f * animationSpeed); + } + animateAmplitudeSlowDiff = (animateToAmplitude - slowAmplitude) / (100f + 500 * animationSpeed); + } else { + if (animateToAmplitude > amplitude) { + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100f + 400f * animationSpeedTiny); + } else { + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100f + 500f * animationSpeedTiny); + } + animateAmplitudeSlowDiff = (animateToAmplitude - slowAmplitude) / (100f + 500 * animationSpeedTiny); + } + + boolean idle = value < 0.1f; + if (isIdle != idle && idle && isBig) { + float bRotation = rotation; + int k = 60; + float animateToBRotation = Math.round(rotation / k) * k + k / 2; + float tRotation = tinyWaveDrawable.rotation; + float animateToTRotation = Math.round(tRotation / k) * k; + + float bWaveDif = waveDif; + float tWaveDif = tinyWaveDrawable.waveDif; + animator = ValueAnimator.ofFloat(1f, 0f); + animator.addUpdateListener(animation -> { + float v = (float) animation.getAnimatedValue(); + rotation = animateToBRotation + (bRotation - animateToBRotation) * v; + tinyWaveDrawable.rotation = animateToTRotation + (tRotation - 
animateToTRotation) * v; + waveDif = 1f + (bWaveDif - 1f) * v; + tinyWaveDrawable.waveDif = 1 + (tWaveDif - 1f) * v; + + waveAngle = (float) Math.acos(waveDif); + tinyWaveDrawable.waveAngle = (float) Math.acos(-tinyWaveDrawable.waveDif); + }); + animator.setDuration(1200); + animator.start(); + } + + isIdle = idle; + + if (!isIdle && animator != null) { + animator.cancel(); + animator = null; + } + } + + private void startFling(float delta) { + if (flingAnimator != null) { + flingAnimator.cancel(); + } + float fling = this.fling * 2; + float flingDistance = delta * amplitudeRadius * (isBig ? 8 : 20) * 16 * fling; + ValueAnimator valueAnimator = ValueAnimator.ofFloat(flingRadius, flingDistance); + valueAnimator.addUpdateListener(flingUpdateListener); + + valueAnimator.setDuration((long) ((isBig ? 200 : 350) * fling)); + valueAnimator.setInterpolator(linearInterpolator); + ValueAnimator valueAnimator1 = ValueAnimator.ofFloat(flingDistance, 0); + valueAnimator1.addUpdateListener(flingUpdateListener); + + valueAnimator1.setInterpolator(linearInterpolator); + valueAnimator1.setDuration((long) ((isBig ? 
220 : 380) * fling)); + + AnimatorSet animatorSet = new AnimatorSet(); + flingAnimator = animatorSet; + animatorSet.playSequentially(valueAnimator, valueAnimator1); + animatorSet.start(); + + } + + boolean wasFling; + + public void tick(float circleRadius) { + long newTime = SystemClock.elapsedRealtime(); + long dt = newTime - lastUpdateTime; + lastUpdateTime = newTime; + if (dt > 20) { + dt = 17; + } + + if (animateToAmplitude != amplitude) { + amplitude += animateAmplitudeDiff * dt; + if (animateAmplitudeDiff > 0) { + if (amplitude > animateToAmplitude) { + amplitude = animateToAmplitude; + } + } else { + if (amplitude < animateToAmplitude) { + amplitude = animateToAmplitude; + } + } + + if (Math.abs(amplitude - animateToAmplitude) * amplitudeRadius < AndroidUtilities.dp(4)) { + if (!wasFling) { + startFling(animateAmplitudeDiff); + wasFling = true; + } + } else { + wasFling = false; + } + } + + if (animateToAmplitude != slowAmplitude) { + slowAmplitude += animateAmplitudeSlowDiff * dt; + if (Math.abs(slowAmplitude - amplitude) > 0.2f) { + slowAmplitude = amplitude + (slowAmplitude > amplitude ? 
+ 0.2f : -0.2f); + } + if (animateAmplitudeSlowDiff > 0) { + if (slowAmplitude > animateToAmplitude) { + slowAmplitude = animateToAmplitude; + } + } else { + if (slowAmplitude < animateToAmplitude) { + slowAmplitude = animateToAmplitude; + } + } + } + + + idleRadius = circleRadius * idleRadiusK; + if (expandIdleRadius) { + scaleIdleDif += scaleSpeedIdle * dt; + if (scaleIdleDif >= 0.05f) { + scaleIdleDif = 0.05f; + expandIdleRadius = false; + } + } else { + scaleIdleDif -= scaleSpeedIdle * dt; + if (scaleIdleDif < 0f) { + scaleIdleDif = 0f; + expandIdleRadius = true; + } + } + + if (maxScale > 0) { + if (expandScale) { + scaleDif += scaleSpeed * dt; + if (scaleDif >= maxScale) { + scaleDif = maxScale; + expandScale = false; + } + } else { + scaleDif -= scaleSpeed * dt; + if (scaleDif < 0f) { + scaleDif = 0f; + expandScale = true; + } + } + } + + + if (sineAngleMax > animateToAmplitude) { + sineAngleMax -= 0.25f; + if (sineAngleMax < animateToAmplitude) { + sineAngleMax = animateToAmplitude; + } + } else if (sineAngleMax < animateToAmplitude) { + sineAngleMax += 0.25f; + if (sineAngleMax > animateToAmplitude) { + sineAngleMax = animateToAmplitude; + } + } + + if (!isIdle) { + rotation += (ROTATION_SPEED * 0.5f + ROTATION_SPEED * 4f * (amplitude > 0.5f ? 
1 : amplitude / 0.5f)) * dt; + if (rotation > 360) rotation %= 360; + } else { + idleRotation += IDLE_ROTATE_DIF * dt; + if (idleRotation > 360) idleRotation %= 360; + } + + if (lastRadius < circleRadius) { + lastRadius = circleRadius; + } else { + lastRadius -= radiusDiff * dt; + if (lastRadius < circleRadius) { + lastRadius = circleRadius; + } + } + + lastRadius = circleRadius; + + if (!isIdle) { + waveAngle += (amplitudeWaveDif * sineAngleMax) * dt; + if (isBig) { + waveDif = (float) Math.cos(waveAngle); + } else { + waveDif = -(float) Math.cos(waveAngle); + } + + if (waveDif > 0f && incRandomAdditionals) { + circleBezierDrawable.calculateRandomAdditionals(); + incRandomAdditionals = false; + } else if (waveDif < 0f && !incRandomAdditionals) { + circleBezierDrawable.calculateRandomAdditionals(); + incRandomAdditionals = true; + } + } + + parentView.invalidate(); + } + + public void draw(float cx, float cy, float scale, Canvas canvas) { + float waveAmplitude = amplitude < 0.3f ? amplitude / 0.3f : 1f; + float radiusDiff = AndroidUtilities.dp(10) + AndroidUtilities.dp(50) * WAVE_ANGLE * animateToAmplitude; + + circleBezierDrawable.idleStateDiff = idleRadius * (1f - waveAmplitude); + + float kDiff = 0.35f * waveAmplitude * waveDif; + circleBezierDrawable.radiusDiff = radiusDiff * kDiff; + circleBezierDrawable.cubicBezierK = 1f + Math.abs(kDiff) * waveAmplitude + (1f - waveAmplitude) * idleRadiusK; + + + circleBezierDrawable.radius = (lastRadius + amplitudeRadius * amplitude) + idleGlobalRadius + (flingRadius * waveAmplitude); + + if (circleBezierDrawable.radius + circleBezierDrawable.radiusDiff < circleRadius) { + circleBezierDrawable.radiusDiff = circleRadius - circleBezierDrawable.radius; + } + + if (isBig) { + circleBezierDrawable.globalRotate = rotation + idleRotation; + } else { + circleBezierDrawable.globalRotate = -rotation + idleRotation; + } + + canvas.save(); + float s = scale + scaleIdleDif * (1f - waveAmplitude) + scaleDif * waveAmplitude; + 
canvas.scale(s, s, cx, cy); + circleBezierDrawable.setRandomAdditions(waveAmplitude * waveDif * randomAdditions); + + circleBezierDrawable.draw(cx, cy, canvas, isBig ? paintRecordWaveBig : paintRecordWaveTin); + canvas.restore(); + } + + public void setCircleRadius(float radius) { + circleRadius = radius; + } + + public void setColor(int color, int alpha) { + paintRecordWaveBig.setColor(color); + paintRecordWaveTin.setColor(color); + paintRecordWaveBig.setAlpha(alpha); + paintRecordWaveTin.setAlpha(alpha); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPButtonsLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPButtonsLayout.java index 0b7da3a6c..fb4548f89 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPButtonsLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPButtonsLayout.java @@ -19,6 +19,9 @@ public class VoIPButtonsLayout extends FrameLayout { int childWidth; int childPadding; + private int childSize = 68; + private boolean startPadding = true; + @Override public boolean dispatchTouchEvent(MotionEvent ev) { if (!isEnabled()) { @@ -30,14 +33,14 @@ public class VoIPButtonsLayout extends FrameLayout { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int width = MeasureSpec.getSize(widthMeasureSpec); - int heigth = MeasureSpec.getSize(heightMeasureSpec); + visibleChildCount = 0; for (int i = 0; i < getChildCount(); i++) { if (getChildAt(i).getVisibility() != View.GONE) { visibleChildCount++; } } - childWidth = AndroidUtilities.dp(68); + childWidth = AndroidUtilities.dp(childSize); int maxChildHeigth = 0; childPadding = (width / getChildCount() - childWidth) / 2; for (int i = 0; i < getChildCount(); i++) { @@ -55,13 +58,33 @@ public class VoIPButtonsLayout extends FrameLayout { @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { - int startFrom = (int) ((getChildCount() 
- visibleChildCount) / 2f * (childWidth + childPadding * 2)); - for (int i = 0; i < getChildCount(); i++) { - View child = getChildAt(i); - if (child.getVisibility() != View.GONE) { - child.layout(startFrom + childPadding, 0, startFrom + childPadding + child.getMeasuredWidth(), child.getMeasuredHeight()); - startFrom += childPadding * 2 + child.getMeasuredWidth(); + if (startPadding) { + int startFrom = (int) ((getChildCount() - visibleChildCount) / 2f * (childWidth + childPadding * 2)); + for (int i = 0; i < getChildCount(); i++) { + View child = getChildAt(i); + if (child.getVisibility() != View.GONE) { + child.layout(startFrom + childPadding, 0, startFrom + childPadding + child.getMeasuredWidth(), child.getMeasuredHeight()); + startFrom += childPadding * 2 + child.getMeasuredWidth(); + } + } + } else { + int padding = visibleChildCount > 0 ? (getMeasuredWidth() - childWidth) / (visibleChildCount - 1) : 0; + int k = 0; + for (int i = 0; i < getChildCount(); i++) { + View child = getChildAt(i); + if (child.getVisibility() != View.GONE) { + child.layout(k * padding, 0, k * padding + child.getMeasuredWidth(), child.getMeasuredHeight()); + k++; + } } } } + + public void setChildSize(int childSize) { + this.childSize = childSize; + } + + public void setUseStartPadding(boolean startPadding) { + this.startPadding = startPadding; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java index a58459923..efb3d8ea5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java @@ -10,6 +10,7 @@ import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Build; +import android.os.SystemClock; import android.provider.Settings; import android.text.InputType; import android.text.TextUtils; @@ 
-38,12 +39,14 @@ import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Cells.CheckBoxCell; import org.telegram.ui.Cells.TextCheckCell; import org.telegram.ui.Components.BetterRatingView; import org.telegram.ui.Components.EditTextBoldCursor; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.GroupCallActivity; import org.telegram.ui.LaunchActivity; import java.io.File; @@ -91,6 +94,7 @@ public class VoIPHelper { } if (Build.VERSION.SDK_INT >= 23) { + int code; ArrayList permissions = new ArrayList<>(); if (activity.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { permissions.add(Manifest.permission.RECORD_AUDIO); @@ -99,54 +103,135 @@ public class VoIPHelper { permissions.add(Manifest.permission.CAMERA); } if (permissions.isEmpty()) { - initiateCall(user, videoCall, canVideoCall, activity); + initiateCall(user, null, videoCall, canVideoCall, false, activity); } else { activity.requestPermissions(permissions.toArray(new String[0]), videoCall ? 
102 : 101); } } else { - initiateCall(user, videoCall, canVideoCall, activity); + initiateCall(user, null, videoCall, canVideoCall, false, activity); } } - private static void initiateCall(final TLRPC.User user, boolean videoCall, boolean canVideoCall, final Activity activity) { - if (activity == null || user == null) { + public static void startCall(TLRPC.Chat chat, boolean createCall, final Activity activity) { + if (activity == null) { + return; + } + if (ConnectionsManager.getInstance(UserConfig.selectedAccount).getConnectionState() != ConnectionsManager.ConnectionStateConnected) { + boolean isAirplaneMode = Settings.System.getInt(activity.getContentResolver(), Settings.System.AIRPLANE_MODE_ON, 0) != 0; + AlertDialog.Builder bldr = new AlertDialog.Builder(activity) + .setTitle(isAirplaneMode ? LocaleController.getString("VoipOfflineAirplaneTitle", R.string.VoipOfflineAirplaneTitle) : LocaleController.getString("VoipOfflineTitle", R.string.VoipOfflineTitle)) + .setMessage(isAirplaneMode ? 
LocaleController.getString("VoipGroupOfflineAirplane", R.string.VoipGroupOfflineAirplane) : LocaleController.getString("VoipGroupOffline", R.string.VoipGroupOffline)) + .setPositiveButton(LocaleController.getString("OK", R.string.OK), null); + if (isAirplaneMode) { + final Intent settingsIntent = new Intent(Settings.ACTION_AIRPLANE_MODE_SETTINGS); + if (settingsIntent.resolveActivity(activity.getPackageManager()) != null) { + bldr.setNeutralButton(LocaleController.getString("VoipOfflineOpenSettings", R.string.VoipOfflineOpenSettings), (dialog, which) -> activity.startActivity(settingsIntent)); + } + } + try { + bldr.show(); + } catch (Exception e) { + FileLog.e(e); + } + return; + } + + if (Build.VERSION.SDK_INT >= 23) { + ArrayList permissions = new ArrayList<>(); + if (activity.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { + permissions.add(Manifest.permission.RECORD_AUDIO); + } + if (permissions.isEmpty()) { + initiateCall(null, chat, false, false, createCall, activity); + } else { + activity.requestPermissions(permissions.toArray(new String[0]), 103); + } + } else { + initiateCall(null, chat, false, false, createCall, activity); + } + } + + private static void initiateCall(TLRPC.User user, TLRPC.Chat chat, boolean videoCall, boolean canVideoCall, boolean createCall, final Activity activity) { + if (activity == null || user == null && chat == null) { return; } if (VoIPService.getSharedInstance() != null) { - TLRPC.User callUser = VoIPService.getSharedInstance().getUser(); - if (callUser.id != user.id) { + int newId = user != null ? 
user.id : -chat.id; + int callerId = VoIPService.getSharedInstance().getCallerId(); + if (callerId != newId) { + String newName; + String oldName; + String key1; + int key2; + if (callerId > 0) { + TLRPC.User callUser = VoIPService.getSharedInstance().getUser(); + oldName = ContactsController.formatName(callUser.first_name, callUser.last_name); + if (newId > 0) { + key1 = "VoipOngoingAlert"; + key2 = R.string.VoipOngoingAlert; + } else { + key1 = "VoipOngoingAlert2"; + key2 = R.string.VoipOngoingAlert2; + } + } else { + TLRPC.Chat callChat = VoIPService.getSharedInstance().getChat(); + oldName = callChat.title; + if (newId > 0) { + key1 = "VoipOngoingChatAlert2"; + key2 = R.string.VoipOngoingChatAlert2; + } else { + key1 = "VoipOngoingChatAlert"; + key2 = R.string.VoipOngoingChatAlert; + } + } + if (user != null) { + newName = ContactsController.formatName(user.first_name, user.last_name); + } else { + newName = chat.title; + } + new AlertDialog.Builder(activity) - .setTitle(LocaleController.getString("VoipOngoingAlertTitle", R.string.VoipOngoingAlertTitle)) - .setMessage(AndroidUtilities.replaceTags(LocaleController.formatString("VoipOngoingAlert", R.string.VoipOngoingAlert, - ContactsController.formatName(callUser.first_name, callUser.last_name), - ContactsController.formatName(user.first_name, user.last_name)))) + .setTitle(callerId < 0 ? 
LocaleController.getString("VoipOngoingChatAlertTitle", R.string.VoipOngoingChatAlertTitle) : LocaleController.getString("VoipOngoingAlertTitle", R.string.VoipOngoingAlertTitle)) + .setMessage(AndroidUtilities.replaceTags(LocaleController.formatString(key1, key2, oldName, newName))) .setPositiveButton(LocaleController.getString("OK", R.string.OK), (dialog, which) -> { if (VoIPService.getSharedInstance() != null) { - VoIPService.getSharedInstance().hangUp(() -> doInitiateCall(user, videoCall, canVideoCall, activity)); + VoIPService.getSharedInstance().hangUp(() -> { + lastCallTime = 0; + doInitiateCall(user, chat, videoCall, canVideoCall, createCall, activity); + }); } else { - doInitiateCall(user, videoCall, canVideoCall, activity); + doInitiateCall(user, chat, videoCall, canVideoCall, createCall, activity); } }) .setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null) .show(); } else { - activity.startActivity(new Intent(activity, LaunchActivity.class).setAction("voip")); + if (user != null || !(activity instanceof LaunchActivity)) { + activity.startActivity(new Intent(activity, LaunchActivity.class).setAction(user != null ? 
"voip" : "voip_chat")); + } else { + GroupCallActivity.create((LaunchActivity) activity, AccountInstance.getInstance(UserConfig.selectedAccount)); + } } } else if (VoIPService.callIShouldHavePutIntoIntent == null) { - doInitiateCall(user, videoCall, canVideoCall, activity); + doInitiateCall(user, chat, videoCall, canVideoCall, createCall, activity); } } - private static void doInitiateCall(TLRPC.User user, boolean videoCall, boolean canVideoCall, Activity activity) { - if (activity == null || user == null) { + private static void doInitiateCall(TLRPC.User user, TLRPC.Chat chat, boolean videoCall, boolean canVideoCall, boolean createCall, Activity activity) { + if (activity == null || user == null && chat == null) { return; } - if (System.currentTimeMillis() - lastCallTime < 2000) { + if (SystemClock.elapsedRealtime() - lastCallTime < (chat != null ? 200 : 2000)) { return; } - lastCallTime = System.currentTimeMillis(); + lastCallTime = SystemClock.elapsedRealtime(); Intent intent = new Intent(activity, VoIPService.class); - intent.putExtra("user_id", user.id); + if (user != null) { + intent.putExtra("user_id", user.id); + } else { + intent.putExtra("chat_id", chat.id); + intent.putExtra("createGroupCall", createCall); + } intent.putExtra("is_outgoing", true); intent.putExtra("start_incall_activity", true); intent.putExtra("video_call", Build.VERSION.SDK_INT >= 18 && videoCall); @@ -303,7 +388,7 @@ public class VoIPHelper { commentBox.setHintTextColor(Theme.getColor(Theme.key_dialogTextHint)); commentBox.setBackgroundDrawable(Theme.createEditTextDrawable(context, true)); commentBox.setPadding(0, AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4)); - commentBox.setTextSize(18); + commentBox.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); commentBox.setVisibility(View.GONE); alertView.addView(commentBox, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 8, 8, 8, 0)); @@ -537,4 +622,27 @@ public class VoIPHelper { } return new File(logsDir, 
callId + ".log").getAbsolutePath(); } + + public static void showGroupCallAlert(BaseFragment fragment, TLRPC.Chat currentChat, boolean recreate) { + if (fragment == null || fragment.getParentActivity() == null) { + return; + } + AlertDialog.Builder builder = new AlertDialog.Builder(fragment.getParentActivity()); + + builder.setTitle(LocaleController.getString("StartVoipChatTitle", R.string.StartVoipChatTitle)); + if (recreate) { + builder.setMessage(LocaleController.getString("VoipGroupEndedStartNew", R.string.VoipGroupEndedStartNew)); + } else { + builder.setMessage(LocaleController.getString("StartVoipChatAlertText", R.string.StartVoipChatAlertText)); + } + + builder.setPositiveButton(LocaleController.getString("Start", R.string.Start), (dialogInterface, i) -> { + if (fragment.getParentActivity() == null) { + return; + } + VoIPHelper.startCall(currentChat, true, fragment.getParentActivity()); + }); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + fragment.showDialog(builder.create()); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPNotificationsLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPNotificationsLayout.java index ccc6ec1b8..d34b13d07 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPNotificationsLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPNotificationsLayout.java @@ -12,6 +12,7 @@ import android.transition.TransitionManager; import android.transition.TransitionSet; import android.transition.TransitionValues; import android.transition.Visibility; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; @@ -197,7 +198,7 @@ public class VoIPNotificationsLayout extends LinearLayout { textView = new TextView(context); textView.setTextColor(Color.WHITE); - textView.setTextSize(14); + 
textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 44, 4, 16, 4)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java index 7baceb7bb..2670f00fe 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java @@ -88,7 +88,6 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC }; float[] point = new float[2]; - int[] location = new int[2]; public int xOffset; public int yOffset; @@ -143,7 +142,12 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC WindowManager.LayoutParams windowLayoutParams = createWindowLayoutParams(activity, parentWidth, parentHeight, SCALE_NORMAL); instance = new VoIPPiPView(activity, parentWidth, parentHeight, false); - WindowManager wm = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE); + WindowManager wm; + if (AndroidUtilities.checkInlinePermissions(activity)) { + wm = (WindowManager) ApplicationLoader.applicationContext.getSystemService(Context.WINDOW_SERVICE); + } else { + wm = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE); + } instance.currentAccount = account; instance.windowManager = wm; instance.windowLayoutParams = windowLayoutParams; @@ -383,7 +387,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC @Override public void onStateChanged(int state) { if (state == VoIPBaseService.STATE_ENDED || state == VoIPService.STATE_BUSY || state == VoIPService.STATE_FAILED || state == VoIPService.STATE_HANGING_UP) { - AndroidUtilities.runOnUIThread(() -> finish(), 200); + AndroidUtilities.runOnUIThread(VoIPPiPView::finish, 200); } VoIPService service = 
VoIPService.getSharedInstance(); if (service == null) { @@ -492,7 +496,6 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC } } - private class FloatingView extends FrameLayout { float touchSlop; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java index b5aea6be6..2f7dde5ab 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java @@ -11,6 +11,7 @@ import android.text.SpannableStringBuilder; import android.text.TextPaint; import android.text.TextUtils; import android.text.style.CharacterStyle; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.widget.FrameLayout; @@ -47,7 +48,7 @@ public class VoIPStatusTextView extends FrameLayout { super(context); for (int i = 0; i < 2; i++) { textView[i] = new TextView(context); - textView[i].setTextSize(15); + textView[i].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); textView[i].setShadowLayer(AndroidUtilities.dp(3), 0, AndroidUtilities.dp(.666666667f), 0x4C000000); textView[i].setTextColor(Color.WHITE); textView[i].setGravity(Gravity.CENTER_HORIZONTAL); @@ -55,7 +56,7 @@ public class VoIPStatusTextView extends FrameLayout { } reconnectTextView = new TextView(context); - reconnectTextView.setTextSize(15); + reconnectTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); reconnectTextView.setShadowLayer(AndroidUtilities.dp(3), 0, AndroidUtilities.dp(.666666667f), 0x4C000000); reconnectTextView.setTextColor(Color.WHITE); reconnectTextView.setGravity(Gravity.CENTER_HORIZONTAL); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java index 49b0cff47..ca548ac64 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java @@ -13,6 +13,7 @@ import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.PorterDuffXfermode; import android.graphics.drawable.Drawable; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; @@ -32,6 +33,7 @@ import org.telegram.ui.Components.LayoutHelper; public class VoIPToggleButton extends FrameLayout { Paint circlePaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private boolean drawBackground = true; Drawable[] icon = new Drawable[2]; TextView[] textView = new TextView[2]; @@ -66,18 +68,28 @@ public class VoIPToggleButton extends FrameLayout { private boolean checkable; private boolean checked; + private float checkedProgress; + private int backgroundCheck1; + private int backgroundCheck2; + + private float radius; + private ValueAnimator checkAnimator; public VoIPToggleButton(@NonNull Context context) { + this(context, 52f); + } + public VoIPToggleButton(@NonNull Context context, float radius) { super(context); + this.radius = radius; setWillNotDraw(false); for (int i = 0; i < 2; i++) { TextView textView = new TextView(context); textView.setGravity(Gravity.CENTER_HORIZONTAL); - textView.setTextSize(11f); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 11); textView.setTextColor(Color.WHITE); textView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); - addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, 58, 0, 0)); + addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, radius + 4, 0, 0)); this.textView[i] = textView; } textView[1].setVisibility(View.GONE); @@ -89,12 +101,19 @@ public class VoIPToggleButton extends FrameLayout { 
crossPaint.setStrokeWidth(AndroidUtilities.dp(2)); crossPaint.setStrokeCap(Paint.Cap.ROUND); - rippleDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(52), 0, Color.BLACK); - rippleDrawable.setCallback(this); - bitmapPaint.setFilterBitmap(true); } + public void setTextSize(int size) { + for (int i = 0; i < 2; i++) { + textView[i].setTextSize(TypedValue.COMPLEX_UNIT_DIP, size); + } + } + + public void setDrawBackground(boolean value) { + drawBackground = value; + } + @SuppressLint("DrawAllocation") @Override protected void onDraw(Canvas canvas) { @@ -105,9 +124,15 @@ public class VoIPToggleButton extends FrameLayout { } float cx = getWidth() / 2f; - float cy = AndroidUtilities.dp(52) / 2f; - float radius = AndroidUtilities.dp(52) / 2f; - canvas.drawCircle(cx, cy, AndroidUtilities.dp(52) / 2f, circlePaint); + float cy = AndroidUtilities.dp(radius) / 2f; + float radius = AndroidUtilities.dp(this.radius) / 2f; + if (drawBackground) { + canvas.drawCircle(cx, cy, AndroidUtilities.dp(this.radius) / 2f, circlePaint); + } + if (rippleDrawable == null) { + rippleDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(this.radius), 0, Color.BLACK); + rippleDrawable.setCallback(this); + } rippleDrawable.setBounds((int) (cx - radius), (int) (cy - radius), (int) (cx + radius), (int) (cy + radius)); rippleDrawable.draw(canvas); @@ -179,7 +204,7 @@ public class VoIPToggleButton extends FrameLayout { for (int i = 0; i < ((replaceProgress == 0 || iconChangeColor) ? 1 : 2); i++) { if (icon[i] != null) { canvas.save(); - if (replaceProgress != 0 && !iconChangeColor) { + if (replaceProgress != 0 && !iconChangeColor && icon[0] != null && icon[1] != null) { float p = i == 0 ? 
1f - replaceProgress : replaceProgress; canvas.scale(p, p, cx, cy); icon[i].setAlpha((int) (255 * p)); @@ -203,22 +228,39 @@ public class VoIPToggleButton extends FrameLayout { } } + public void setBackgroundColor(int backgroundColor, int backgroundColorChecked) { + backgroundCheck1 = backgroundColor; + backgroundCheck2 = backgroundColorChecked; + this.backgroundColor = ColorUtils.blendARGB(backgroundColor, backgroundColorChecked, checkedProgress); + invalidate(); + } + public void setData(int iconRes, int iconColor, int backgroundColor, String text, boolean cross, boolean animated) { + setData(iconRes, iconColor, backgroundColor, 1.0f, true, text, cross, animated); + } + + public void setData(int iconRes, int iconColor, int backgroundColor, float selectorAlpha, boolean recreateRipple, String text, boolean cross, boolean animated) { if (getVisibility() != View.VISIBLE) { animated = false; setVisibility(View.VISIBLE); } - if (currentIconRes == iconRes && currentIconColor == iconColor && currentBackgroundColor == backgroundColor && (currentText != null && currentText.equals(text))) { + if (currentIconRes == iconRes && currentIconColor == iconColor && (checkable || currentBackgroundColor == backgroundColor) && (currentText != null && currentText.equals(text))) { return; } - if (Color.alpha(backgroundColor) == 255 && AndroidUtilities.computePerceivedBrightness(backgroundColor) > 0.5) { - rippleDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(52), 0, ColorUtils.setAlphaComponent(Color.BLACK, (int) (255 * 0.1f))); - rippleDrawable.setCallback(this); - } else { - rippleDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(52), 0, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.3f))); - rippleDrawable.setCallback(this); + if (rippleDrawable == null || recreateRipple) { + if (Color.alpha(backgroundColor) == 255 && AndroidUtilities.computePerceivedBrightness(backgroundColor) > 0.5) { + rippleDrawable = 
Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(radius), 0, ColorUtils.setAlphaComponent(Color.BLACK, (int) (255 * 0.1f * selectorAlpha))); + rippleDrawable.setCallback(this); + } else { + rippleDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(radius), 0, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.3f * selectorAlpha))); + rippleDrawable.setCallback(this); + } + } + + if (replaceAnimator != null) { + replaceAnimator.cancel(); } iconChangeColor = currentIconRes == iconRes; @@ -231,15 +273,13 @@ public class VoIPToggleButton extends FrameLayout { currentText = text; drawCross = cross; - if (replaceAnimator != null) { - replaceAnimator.cancel(); - } - if (!animated) { icon[0] = ContextCompat.getDrawable(getContext(), iconRes).mutate(); icon[0].setColorFilter(new PorterDuffColorFilter(iconColor, PorterDuff.Mode.MULTIPLY)); crossPaint.setColor(iconColor); - this.backgroundColor = backgroundColor; + if (!checkable) { + this.backgroundColor = backgroundColor; + } textView[0].setText(text); crossProgress = drawCross ? 
1f : 0; iconChangeColor = false; @@ -250,7 +290,9 @@ public class VoIPToggleButton extends FrameLayout { icon[1] = ContextCompat.getDrawable(getContext(), iconRes).mutate(); icon[1].setColorFilter(new PorterDuffColorFilter(iconColor, PorterDuff.Mode.MULTIPLY)); } - this.animateToBackgroundColor = backgroundColor; + if (!checkable) { + this.animateToBackgroundColor = backgroundColor; + } boolean animateText = !textView[0].getText().toString().equals(text); @@ -281,6 +323,7 @@ public class VoIPToggleButton extends FrameLayout { replaceAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { + replaceAnimator = null; if (animateText) { TextView tv = textView[0]; textView[0] = textView[1]; @@ -310,7 +353,9 @@ public class VoIPToggleButton extends FrameLayout { @Override protected void drawableStateChanged() { super.drawableStateChanged(); - rippleDrawable.setState(getDrawableState()); + if (rippleDrawable != null) { + rippleDrawable.setState(getDrawableState()); + } } @Override @@ -330,8 +375,35 @@ public class VoIPToggleButton extends FrameLayout { this.checkable = checkable; } - public void setChecked(boolean checked) { + public void setChecked(boolean checked, boolean animated) { this.checked = checked; + if (animated) { + if (checkAnimator != null) { + checkAnimator.removeAllListeners(); + checkAnimator.cancel(); + } + checkAnimator = ValueAnimator.ofFloat(checkedProgress, checked ? 1f : 0); + checkAnimator.addUpdateListener(valueAnimator -> { + checkedProgress = (float) valueAnimator.getAnimatedValue(); + setBackgroundColor(backgroundCheck1, backgroundCheck2); + }); + checkAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + checkedProgress = checked ? 1f : 0; + setBackgroundColor(backgroundCheck1, backgroundCheck2); + } + }); + checkAnimator.setDuration(150); + checkAnimator.start(); + } else { + checkedProgress = checked ? 
1f : 0; + setBackgroundColor(backgroundCheck1, backgroundCheck2); + } + } + + public boolean isChecked() { + return checked; } @Override @@ -344,4 +416,9 @@ public class VoIPToggleButton extends FrameLayout { info.setChecked(checked); } } + + public void shakeView() { + AndroidUtilities.shakeView(textView[0], 2, 0); + AndroidUtilities.shakeView(textView[1], 2, 0); + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java index 64499fce6..8717d8a3f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java @@ -9,9 +9,12 @@ package org.telegram.ui; import android.Manifest; +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; import android.animation.StateListAnimator; +import android.animation.ValueAnimator; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.app.Activity; @@ -43,30 +46,38 @@ import android.view.ViewGroup; import android.view.ViewOutlineProvider; import android.view.ViewTreeObserver; import android.view.animation.AccelerateDecelerateInterpolator; +import android.view.animation.DecelerateInterpolator; import android.view.inputmethod.EditorInfo; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.Toast; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; +import org.telegram.messenger.ContactsController; +import org.telegram.messenger.FileLog; import 
org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessagesController; import org.telegram.messenger.MessagesStorage; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; import org.telegram.messenger.SecretChatHelper; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; -import org.telegram.tgnet.TLRPC; -import org.telegram.messenger.ContactsController; -import org.telegram.messenger.FileLog; -import org.telegram.messenger.MessagesController; -import org.telegram.messenger.NotificationCenter; -import org.telegram.messenger.R; import org.telegram.messenger.Utilities; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.ActionBarMenu; +import org.telegram.ui.ActionBar.ActionBarMenuItem; import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Adapters.ContactsAdapter; @@ -76,26 +87,22 @@ import org.telegram.ui.Cells.LetterSectionCell; import org.telegram.ui.Cells.ProfileSearchCell; import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Cells.UserCell; -import org.telegram.ui.ActionBar.ActionBar; -import org.telegram.ui.ActionBar.ActionBarMenu; -import org.telegram.ui.ActionBar.ActionBarMenuItem; -import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.Components.AlertsCreator; import org.telegram.ui.Components.CombinedDrawable; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.EditTextBoldCursor; -import org.telegram.ui.Components.EmptyTextProgressView; +import org.telegram.ui.Components.FlickerLoadingView; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.RLottieImageView; import org.telegram.ui.Components.RecyclerListView; +import 
org.telegram.ui.Components.StickerEmptyView; import java.util.ArrayList; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.RecyclerView; - public class ContactsActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { private ContactsAdapter listViewAdapter; - private EmptyTextProgressView emptyView; + private StickerEmptyView emptyView; private RecyclerListView listView; private LinearLayoutManager layoutManager; private SearchAdapter searchListViewAdapter; @@ -103,7 +110,7 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter private ActionBarMenuItem sortItem; private boolean sortByName; - private ImageView floatingButton; + private RLottieImageView floatingButton; private FrameLayout floatingButtonContainer; private AccelerateDecelerateInterpolator floatingInterpolator = new AccelerateDecelerateInterpolator(); private int prevPosition; @@ -142,6 +149,8 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter private final static int search_button = 0; private final static int sort_button = 1; + private AnimatorSet bounceIconAnimator; + private int animationIndex = -1; public interface ContactsActivityDelegate { void didSelectContact(TLRPC.User user, String param, ContactsActivity activity); @@ -197,6 +206,13 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.encryptedChatCreated); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.closeChats); delegate = null; + AndroidUtilities.removeAdjustResize(getParentActivity(), classGuid); + } + + @Override + protected void onTransitionAnimationProgress(boolean isOpen, float progress) { + super.onTransitionAnimationProgress(isOpen, progress); + fragmentView.invalidate(); } @Override @@ -257,8 +273,7 @@ public class ContactsActivity 
extends BaseFragment implements NotificationCenter listViewAdapter.notifyDataSetChanged(); listView.setFastScrollVisible(true); listView.setVerticalScrollBarEnabled(false); - listView.setEmptyView(null); - emptyView.setText(LocaleController.getString("NoContacts", R.string.NoContacts)); + // emptyView.setText(LocaleController.getString("NoContacts", R.string.NoContacts)); if (floatingButtonContainer != null) { floatingButtonContainer.setVisibility(View.VISIBLE); floatingHidden = true; @@ -285,12 +300,14 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter listView.setFastScrollVisible(false); listView.setVerticalScrollBarEnabled(true); } - if (emptyView != null) { - listView.setEmptyView(emptyView); - emptyView.setText(LocaleController.getString("NoResult", R.string.NoResult)); + emptyView.showProgress(true, true); + searchListViewAdapter.searchDialogs(text); + } else { + if (listView != null) { + listView.setAdapter(listViewAdapter); + listView.setSectionsType(1); } } - searchListViewAdapter.searchDialogs(text); } }); item.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); @@ -300,7 +317,15 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter sortItem.setContentDescription(LocaleController.getString("AccDescrContactSorting", R.string.AccDescrContactSorting)); } - searchListViewAdapter = new SearchAdapter(context, ignoreUsers, allowUsernameSearch, false, false, allowBots, allowSelf, true, 0); + searchListViewAdapter = new SearchAdapter(context, ignoreUsers, allowUsernameSearch, false, false, allowBots, allowSelf, true, 0) { + @Override + protected void onSearchProgressChanged() { + if (!searchInProgress() && getItemCount() == 0) { + emptyView.showProgress(false, true); + } + showItemsAnimated(); + } + }; int inviteViaLink; if (chatId != 0) { TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(chatId); @@ -323,10 +348,10 @@ public class ContactsActivity extends 
BaseFragment implements NotificationCenter if (listView != null && listView.getAdapter() == this) { int count = super.getItemCount(); if (needPhonebook) { - emptyView.setVisibility(count == 2 ? View.VISIBLE : View.GONE); + // emptyView.setVisibility(count == 2 ? View.VISIBLE : View.GONE); listView.setFastScrollVisible(count != 2); } else { - emptyView.setVisibility(count == 0 ? View.VISIBLE : View.GONE); + //emptyView.setVisibility(count == 0 ? View.VISIBLE : View.GONE); listView.setFastScrollVisible(count != 0); } } @@ -350,10 +375,16 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter }; FrameLayout frameLayout = (FrameLayout) fragmentView; - emptyView = new EmptyTextProgressView(context); - emptyView.setShowAtCenter(true); - emptyView.setText(LocaleController.getString("NoContacts", R.string.NoContacts)); - emptyView.showTextView(); + FlickerLoadingView flickerLoadingView = new FlickerLoadingView(context); + flickerLoadingView.setViewType(FlickerLoadingView.USERS_TYPE); + flickerLoadingView.showDate(false); + + emptyView = new StickerEmptyView(context, flickerLoadingView, StickerEmptyView.STICKER_TYPE_SEARCH); + emptyView.addView(flickerLoadingView, 0); + emptyView.setAnimateLayoutChange(true); + emptyView.showProgress(true, false); + emptyView.title.setText(LocaleController.getString("NoResult", R.string.NoResult)); + emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitle2", R.string.SearchEmptyViewFilteredSubtitle2)); frameLayout.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); listView = new RecyclerListView(context) { @@ -372,8 +403,11 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter listView.setAdapter(listViewAdapter); frameLayout.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + listView.setEmptyView(emptyView); + listView.setAnimateEmptyView(true, 0); + 
listView.setOnItemClickListener((view, position) -> { - if (searching && searchWas) { + if (listView.getAdapter() == searchListViewAdapter) { Object object = searchListViewAdapter.getItem(position); if (object instanceof TLRPC.User) { TLRPC.User user = (TLRPC.User) object; @@ -579,10 +613,10 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter if (!createSecretChat && !returnAsResult) { floatingButtonContainer = new FrameLayout(context); - frameLayout.addView(floatingButtonContainer, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 14, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.BOTTOM, LocaleController.isRTL ? 4 : 0, 0, LocaleController.isRTL ? 0 : 4, 0)); + frameLayout.addView(floatingButtonContainer, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.BOTTOM, LocaleController.isRTL ? 4 : 0, 0, LocaleController.isRTL ? 
0 : 4, 0)); floatingButtonContainer.setOnClickListener(v -> presentFragment(new NewContactActivity())); - floatingButton = new ImageView(context); + floatingButton = new RLottieImageView(context); floatingButton.setScaleType(ImageView.ScaleType.CENTER); Drawable drawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(56), Theme.getColor(Theme.key_chats_actionBackground), Theme.getColor(Theme.key_chats_actionPressedBackground)); if (Build.VERSION.SDK_INT < 21) { @@ -594,7 +628,7 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter } floatingButton.setBackgroundDrawable(drawable); floatingButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chats_actionIcon), PorterDuff.Mode.MULTIPLY)); - floatingButton.setImageResource(R.drawable.add_contact_new); + floatingButton.setAnimation(R.raw.write_contacts_fab_icon, 52, 52); floatingButtonContainer.setContentDescription(LocaleController.getString("CreateNewContact", R.string.CreateNewContact)); if (Build.VERSION.SDK_INT >= 21) { StateListAnimator animator = new StateListAnimator(); @@ -609,7 +643,7 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter } }); } - floatingButtonContainer.addView(floatingButton, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60), (Build.VERSION.SDK_INT >= 21 ? 56 : 60), Gravity.LEFT | Gravity.TOP, 10, 0, 10, 0)); + floatingButtonContainer.addView(floatingButton, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60), (Build.VERSION.SDK_INT >= 21 ? 
56 : 60), Gravity.LEFT | Gravity.TOP, 10, 6, 10, 0)); } if (initialSearchString != null) { @@ -739,6 +773,7 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter @Override public void onResume() { super.onResume(); + AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); if (listViewAdapter != null) { listViewAdapter.notifyDataSetChanged(); } @@ -917,6 +952,240 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter this.initialSearchString = initialSearchString; } + private void showItemsAnimated() { + int from = layoutManager == null ? 0 : layoutManager.findLastVisibleItemPosition(); + listView.invalidate(); + listView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + int n = listView.getChildCount(); + AnimatorSet animatorSet = new AnimatorSet(); + for (int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + if (listView.getChildAdapterPosition(child) <= from) { + continue; + } + child.setAlpha(0); + int s = Math.min(listView.getMeasuredHeight(), Math.max(0, child.getTop())); + int delay = (int) ((s / (float) listView.getMeasuredHeight()) * 100); + ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); + a.setStartDelay(delay); + a.setDuration(200); + animatorSet.playTogether(a); + } + animatorSet.start(); + return true; + } + }); + } + + @Override + protected AnimatorSet onCustomTransitionAnimation(boolean isOpen, Runnable callback) { + ValueAnimator valueAnimator = isOpen ? ValueAnimator.ofFloat(1f, 0) : ValueAnimator.ofFloat(0, 1f); + ViewGroup parent = (ViewGroup) fragmentView.getParent(); + BaseFragment previousFragment = parentLayout.fragmentsStack.size() > 1 ? 
parentLayout.fragmentsStack.get(parentLayout.fragmentsStack.size() - 2) : null; + DialogsActivity dialogsActivity = null; + if (previousFragment instanceof DialogsActivity) { + dialogsActivity = (DialogsActivity) previousFragment; + } + if (dialogsActivity == null) { + return null; + } + RLottieImageView previousFab = dialogsActivity.getFloatingButton(); + if (previousFab == null || floatingButtonContainer == null || previousFab.getVisibility() != View.VISIBLE || Math.abs(previousFab.getTranslationY()) > AndroidUtilities.dp(4) || Math.abs(floatingButtonContainer.getTranslationY()) > AndroidUtilities.dp(4)) { + return null; + } + previousFab.setVisibility(View.GONE); + if (isOpen) { + parent.setAlpha(0f); + } + valueAnimator.addUpdateListener(valueAnimator1 -> { + float v = (float) valueAnimator.getAnimatedValue(); + parent.setTranslationX(AndroidUtilities.dp(48) * v); + parent.setAlpha(1f - v); + }); + if (floatingButtonContainer != null) { + ((ViewGroup) fragmentView).removeView(floatingButtonContainer); + ((FrameLayout) parent.getParent()).addView(floatingButtonContainer); + } + valueAnimator.setDuration(150); + valueAnimator.setInterpolator(new DecelerateInterpolator(1.5f)); + + final int currentAccount = this.currentAccount; + + AnimatorSet animatorSet = new AnimatorSet(); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (floatingButtonContainer != null) { + ViewGroup viewParent; + if (floatingButtonContainer.getParent() instanceof ViewGroup) { + viewParent = (ViewGroup) floatingButtonContainer.getParent(); + viewParent.removeView(floatingButtonContainer); + } + ((ViewGroup) fragmentView).addView(floatingButtonContainer); + + previousFab.setVisibility(View.VISIBLE); + if (!isOpen) { + previousFab.setAnimation(R.raw.write_contacts_fab_icon_reverse, 52, 52); + previousFab.getAnimatedDrawable().setCurrentFrame(floatingButton.getAnimatedDrawable().getCurrentFrame()); + 
previousFab.playAnimation(); + } + } + callback.run(); + } + }); + animatorSet.playTogether(valueAnimator); + AndroidUtilities.runOnUIThread(() -> { + animationIndex = NotificationCenter.getInstance(currentAccount).setAnimationInProgress(animationIndex, null); + animatorSet.start(); + if (isOpen) { + floatingButton.setAnimation(R.raw.write_contacts_fab_icon, 52, 52); + floatingButton.playAnimation(); + } else { + floatingButton.setAnimation(R.raw.write_contacts_fab_icon_reverse, 52, 52); + floatingButton.playAnimation(); + } + if (bounceIconAnimator != null) { + bounceIconAnimator.cancel(); + } + bounceIconAnimator = new AnimatorSet(); + float totalDuration = floatingButton.getAnimatedDrawable().getDuration(); + long delay = 0; + if (isOpen) { + for (int i = 0; i < 6; i++) { + AnimatorSet set = new AnimatorSet(); + if (i == 0) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 1f, 0.9f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 1f, 0.9f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 1f, 0.9f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 1f, 0.9f) + ); + set.setDuration((long) (6f / 47f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_OUT); + } else if (i == 1) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 0.9f, 1.06f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 0.9f, 1.06f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 0.9f, 1.06f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 0.9f, 1.06f) + ); + set.setDuration((long) (17f / 47f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + } else if (i == 2) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 1.06f, 0.9f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 1.06f, 0.9f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 1.06f, 0.9f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 1.06f, 0.9f) + ); + 
set.setDuration((long) (10f / 47f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + } else if (i == 3) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 0.9f, 1.03f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 0.9f, 1.03f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 0.9f, 1.03f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 0.9f, 1.03f) + ); + set.setDuration((long) (5f / 47f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + } else if (i == 4) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 1.03f, 0.98f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 1.03f, 0.98f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 1.03f, 0.98f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 1.03f, 0.98f) + ); + set.setDuration((long) (5f / 47f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + } else { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 0.98f, 1f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 0.98f, 1f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 0.98f, 1f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 0.98f, 1f) + ); + + set.setDuration((long) (4f / 47f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_IN); + } + set.setStartDelay(delay); + delay += set.getDuration(); + bounceIconAnimator.playTogether(set); + } + } else { + for (int i = 0; i < 5; i++) { + AnimatorSet set = new AnimatorSet(); + if (i == 0) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 1f, 0.9f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 1f, 0.9f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 1f, 0.9f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 1f, 0.9f) + ); + set.setDuration((long) (7f / 36f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_OUT); + } else if (i == 1) 
{ + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 0.9f, 1.06f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 0.9f, 1.06f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 0.9f, 1.06f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 0.9f, 1.06f) + ); + set.setDuration((long) (8f / 36f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + } else if (i == 2) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 1.06f, 0.92f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 1.06f, 0.92f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 1.06f, 0.92f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 1.06f, 0.92f) + ); + set.setDuration((long) (7f / 36f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + } else if (i == 3) { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 0.92f, 1.02f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 0.92f, 1.02f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 0.92f, 1.02f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 0.92f, 1.02f) + ); + set.setDuration((long) (9f / 36f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_BOTH); + } else { + set.playTogether( + ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 1.02f, 1f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 1.02f, 1f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_X, 1.02f, 1f), + ObjectAnimator.ofFloat(previousFab, View.SCALE_Y, 1.02f, 1f) + ); + set.setDuration((long) (5f / 47f * totalDuration)); + set.setInterpolator(CubicBezierInterpolator.EASE_IN); + } + set.setStartDelay(delay); + delay += set.getDuration(); + bounceIconAnimator.playTogether(set); + } + } + bounceIconAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + floatingButton.setScaleX(1f); + floatingButton.setScaleY(1f); + 
previousFab.setScaleX(1f); + previousFab.setScaleY(1f); + bounceIconAnimator = null; + NotificationCenter.getInstance(currentAccount).onAnimationFinish(animationIndex); + } + }); + bounceIconAnimator.start(); + }, 50); + return animatorSet; + } + @Override public ArrayList getThemeDescriptions() { ArrayList themeDescriptions = new ArrayList<>(); @@ -951,8 +1220,6 @@ public class ContactsActivity extends BaseFragment implements NotificationCenter themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, Theme.dividerPaint, null, null, Theme.key_divider)); - themeDescriptions.add(new ThemeDescription(emptyView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_emptyListPlaceholder)); - themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_FASTSCROLL, null, null, null, null, Theme.key_fastScrollActive)); themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_FASTSCROLL, null, null, null, null, Theme.key_fastScrollInactive)); themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_FASTSCROLL, null, null, null, null, Theme.key_fastScrollText)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java b/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java index 44df6ef6d..1025898d6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java @@ -72,13 +72,17 @@ public class ContentPreviewViewer { } public interface ContentPreviewViewerDelegate { - void sendSticker(TLRPC.Document sticker, Object parent, boolean notify, int scheduleDate); + void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate); void openSet(TLRPC.InputStickerSet set, boolean clearInputField); boolean needSend(); boolean canSchedule(); boolean isInScheduleMode(); long getDialogId(); + default String getQuery(boolean 
isGif) { + return null; + } + default boolean needOpen() { return true; } @@ -188,7 +192,7 @@ public class ContentPreviewViewer { } if (actions.get(which) == 0) { if (delegate != null) { - delegate.sendSticker(currentDocument, parentObject, true, 0); + delegate.sendSticker(currentDocument, currentQuery, parentObject, true, 0); } } else if (actions.get(which) == 1) { if (delegate != null) { @@ -199,8 +203,9 @@ public class ContentPreviewViewer { } else if (actions.get(which) == 3) { TLRPC.Document sticker = currentDocument; Object parent = parentObject; + String query = currentQuery; ContentPreviewViewerDelegate stickerPreviewViewerDelegate = delegate; - AlertsCreator.createScheduleDatePickerDialog(parentActivity, stickerPreviewViewerDelegate.getDialogId(), (notify, scheduleDate) -> stickerPreviewViewerDelegate.sendSticker(sticker, parent, notify, scheduleDate)); + AlertsCreator.createScheduleDatePickerDialog(parentActivity, stickerPreviewViewerDelegate.getDialogId(), (notify, scheduleDate) -> stickerPreviewViewerDelegate.sendSticker(sticker, query, parent, notify, scheduleDate)); } else if (actions.get(which) == 4) { MediaDataController.getInstance(currentAccount).addRecentSticker(MediaDataController.TYPE_IMAGE, parentObject, currentDocument, (int) (System.currentTimeMillis() / 1000), true); } @@ -304,6 +309,7 @@ public class ContentPreviewViewer { private int currentContentType; private TLRPC.Document currentDocument; + private String currentQuery; private TLRPC.BotInlineResult inlineResult; private TLRPC.InputStickerSet currentStickerSet; private Object parentObject; @@ -448,16 +454,16 @@ public class ContentPreviewViewer { clearsInputField = false; if (currentPreviewCell instanceof StickerEmojiCell) { StickerEmojiCell stickerEmojiCell = (StickerEmojiCell) currentPreviewCell; - open(stickerEmojiCell.getSticker(), null, contentType, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); + open(stickerEmojiCell.getSticker(), delegate != null ? 
delegate.getQuery(false) : null, null, contentType, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); stickerEmojiCell.setScaled(true); } else if (currentPreviewCell instanceof StickerCell) { StickerCell stickerCell = (StickerCell) currentPreviewCell; - open(stickerCell.getSticker(), null, contentType, false, stickerCell.getParentObject()); + open(stickerCell.getSticker(), delegate != null ? delegate.getQuery(false) : null, null, contentType, false, stickerCell.getParentObject()); stickerCell.setScaled(true); clearsInputField = stickerCell.isClearsInputField(); } else if (currentPreviewCell instanceof ContextLinkCell) { ContextLinkCell contextLinkCell = (ContextLinkCell) currentPreviewCell; - open(contextLinkCell.getDocument(), contextLinkCell.getBotInlineResult(), contentType, false, contextLinkCell.getInlineBot()); + open(contextLinkCell.getDocument(), delegate != null ? delegate.getQuery(true) : null, contextLinkCell.getBotInlineResult(), contentType, false, contextLinkCell.getInlineBot()); if (contentType != CONTENT_TYPE_GIF) { contextLinkCell.setScaled(true); } @@ -545,16 +551,16 @@ public class ContentPreviewViewer { clearsInputField = false; if (currentPreviewCell instanceof StickerEmojiCell) { StickerEmojiCell stickerEmojiCell = (StickerEmojiCell) currentPreviewCell; - open(stickerEmojiCell.getSticker(), null, contentTypeFinal, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); + open(stickerEmojiCell.getSticker(), delegate != null ? delegate.getQuery(false) : null, null, contentTypeFinal, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); stickerEmojiCell.setScaled(true); } else if (currentPreviewCell instanceof StickerCell) { StickerCell stickerCell = (StickerCell) currentPreviewCell; - open(stickerCell.getSticker(), null, contentTypeFinal, false, stickerCell.getParentObject()); + open(stickerCell.getSticker(), delegate != null ? 
delegate.getQuery(false) : null, null, contentTypeFinal, false, stickerCell.getParentObject()); stickerCell.setScaled(true); clearsInputField = stickerCell.isClearsInputField(); } else if (currentPreviewCell instanceof ContextLinkCell) { ContextLinkCell contextLinkCell = (ContextLinkCell) currentPreviewCell; - open(contextLinkCell.getDocument(), contextLinkCell.getBotInlineResult(), contentTypeFinal, false, contextLinkCell.getInlineBot()); + open(contextLinkCell.getDocument(), delegate != null ? delegate.getQuery(true) : null, contextLinkCell.getBotInlineResult(), contentTypeFinal, false, contextLinkCell.getInlineBot()); if (contentTypeFinal != CONTENT_TYPE_GIF) { contextLinkCell.setScaled(true); } @@ -623,7 +629,7 @@ public class ContentPreviewViewer { keyboardHeight = height; } - public void open(TLRPC.Document document, TLRPC.BotInlineResult botInlineResult, int contentType, boolean isRecent, Object parent) { + public void open(TLRPC.Document document, String query, TLRPC.BotInlineResult botInlineResult, int contentType, boolean isRecent, Object parent) { if (parentActivity == null || windowView == null) { return; } @@ -701,6 +707,7 @@ public class ContentPreviewViewer { currentContentType = contentType; currentDocument = document; + currentQuery = query; inlineResult = botInlineResult; parentObject = parent; containerView.invalidate(); @@ -751,6 +758,7 @@ public class ContentPreviewViewer { } currentDocument = null; currentStickerSet = null; + currentQuery = null; delegate = null; isVisible = false; NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 8); @@ -760,6 +768,7 @@ public class ContentPreviewViewer { isVisible = false; delegate = null; currentDocument = null; + currentQuery = null; currentStickerSet = null; try { if (visibleDialog != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java index 
de3f4b40f..fbe5090d7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java @@ -8,20 +8,23 @@ package org.telegram.ui; -import android.animation.AnimatorSet; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.content.SharedPreferences; +import android.os.Build; +import android.text.TextUtils; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; +import android.widget.LinearLayout; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.BuildVars; import org.telegram.messenger.DownloadController; +import org.telegram.messenger.ImageLoader; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; @@ -35,6 +38,7 @@ import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.NotificationsCheckCell; +import org.telegram.ui.Cells.RadioColorCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextCheckCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; @@ -44,6 +48,7 @@ import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.voip.VoIPHelper; +import java.io.File; import java.util.ArrayList; import androidx.recyclerview.widget.LinearLayoutManager; @@ -55,12 +60,14 @@ public class DataSettingsActivity extends BaseFragment { private RecyclerListView listView; @SuppressWarnings("FieldCanBeLocal") private LinearLayoutManager layoutManager; - private AnimatorSet animatorSet; + + private ArrayList storageDirs; private int mediaDownloadSectionRow; private int mobileRow; private int roamingRow; private int wifiRow; 
+ private int storageNumRow; private int resetDownloadRow; private int mediaDownloadSection2Row; private int usageSectionRow; @@ -98,6 +105,13 @@ public class DataSettingsActivity extends BaseFragment { usageSectionRow = rowCount++; storageUsageRow = rowCount++; dataUsageRow = rowCount++; + storageNumRow = -1; + if (Build.VERSION.SDK_INT >= 19) { + storageDirs = AndroidUtilities.getRootDirs(); + if (storageDirs.size() > 1) { + storageNumRow = rowCount++; + } + } usageSection2Row = rowCount++; mediaDownloadSectionRow = rowCount++; mobileRow = rowCount++; @@ -323,6 +337,42 @@ public class DataSettingsActivity extends BaseFragment { dlg.show(); } else if (position == dataUsageRow) { presentFragment(new DataUsageActivity()); + } else if (position == storageNumRow) { + AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); + builder.setTitle(LocaleController.getString("StoragePath", R.string.StoragePath)); + final LinearLayout linearLayout = new LinearLayout(getParentActivity()); + linearLayout.setOrientation(LinearLayout.VERTICAL); + builder.setView(linearLayout); + + String dir = storageDirs.get(0).getAbsolutePath(); + if (!TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + for (int a = 0, N = storageDirs.size(); a < N; a++) { + String path = storageDirs.get(a).getAbsolutePath(); + if (path.startsWith(SharedConfig.storageCacheDir)) { + dir = path; + break; + } + } + } + + for (int a = 0, N = storageDirs.size(); a < N; a++) { + String storageDir = storageDirs.get(a).getAbsolutePath(); + RadioColorCell cell = new RadioColorCell(context); + cell.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); + cell.setTag(a); + cell.setCheckColor(Theme.getColor(Theme.key_radioBackground), Theme.getColor(Theme.key_dialogRadioBackgroundChecked)); + cell.setTextAndValue(storageDir, storageDir.startsWith(dir)); + linearLayout.addView(cell); + cell.setOnClickListener(v -> { + SharedConfig.storageCacheDir = storageDir; + SharedConfig.saveConfig(); + 
ImageLoader.getInstance().checkMediaPaths(); + builder.getDismissRunnable().run(); + listAdapter.notifyItemChanged(storageNumRow); + }); + } + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + showDialog(builder.create()); } else if (position == proxyRow) { presentFragment(new ProxyListActivity()); } else if (position == enableStreamRow) { @@ -436,7 +486,19 @@ public class DataSettingsActivity extends BaseFragment { } textCell.setTextAndValue(LocaleController.getString("VoipUseLessData", R.string.VoipUseLessData), value, true); } else if (position == dataUsageRow) { - textCell.setText(LocaleController.getString("NetworkUsage", R.string.NetworkUsage), false); + textCell.setText(LocaleController.getString("NetworkUsage", R.string.NetworkUsage), storageNumRow != -1); + } else if (position == storageNumRow) { + String dir = storageDirs.get(0).getAbsolutePath(); + if (!TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + for (int a = 0, N = storageDirs.size(); a < N; a++) { + String path = storageDirs.get(a).getAbsolutePath(); + if (path.startsWith(SharedConfig.storageCacheDir)) { + dir = path; + break; + } + } + } + textCell.setTextAndValue(LocaleController.getString("StoragePath", R.string.StoragePath), dir, false); } else if (position == proxyRow) { textCell.setText(LocaleController.getString("ProxySettings", R.string.ProxySettings), false); } else if (position == resetDownloadRow) { @@ -587,7 +649,8 @@ public class DataSettingsActivity extends BaseFragment { !controller.highPreset.equals(controller.getCurrentWiFiPreset()) || controller.highPreset.isEnabled() != controller.wifiPreset.enabled; } return position == mobileRow || position == roamingRow || position == wifiRow || position == storageUsageRow || position == useLessDataForCallsRow || position == dataUsageRow || position == proxyRow || position == clearDraftsRow || - position == enableCacheStreamRow || position == enableStreamRow || position == enableAllStreamRow || 
position == enableMkvRow || position == quickRepliesRow || position == autoplayVideoRow || position == autoplayGifsRow; + position == enableCacheStreamRow || position == enableStreamRow || position == enableAllStreamRow || position == enableMkvRow || position == quickRepliesRow || position == autoplayVideoRow || position == autoplayGifsRow || + position == storageNumRow; } @Override @@ -619,6 +682,7 @@ public class DataSettingsActivity extends BaseFragment { view.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); break; case 5: + default: view = new NotificationsCheckCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java index cd50329b6..e52e20d06 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java @@ -39,6 +39,14 @@ import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Vibrator; + +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.ItemTouchHelper; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.LinearSmoothScrollerCustom; +import androidx.recyclerview.widget.RecyclerView; +import androidx.viewpager.widget.ViewPager; + import android.text.TextUtils; import android.util.Property; import android.util.StateSet; @@ -64,13 +72,9 @@ import android.widget.LinearLayout; import android.widget.ScrollView; import android.widget.TextView; -import androidx.annotation.NonNull; import androidx.core.graphics.ColorUtils; -import androidx.recyclerview.widget.ItemTouchHelper; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.LinearSmoothScrollerCustom; -import 
androidx.recyclerview.widget.RecyclerView; -import androidx.viewpager.widget.ViewPager; + +import com.google.android.exoplayer2.util.Log; import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; @@ -152,6 +156,7 @@ import org.telegram.ui.Components.PacmanAnimation; import org.telegram.ui.Components.ProxyDrawable; import org.telegram.ui.Components.PullForegroundDrawable; import org.telegram.ui.Components.RLottieDrawable; +import org.telegram.ui.Components.RLottieImageView; import org.telegram.ui.Components.RadialProgressView; import org.telegram.ui.Components.RecyclerAnimationScrollHelper; import org.telegram.ui.Components.RecyclerListView; @@ -159,12 +164,14 @@ import org.telegram.ui.Components.SearchViewPager; import org.telegram.ui.Components.SizeNotifierFrameLayout; import org.telegram.ui.Components.StickersAlert; import org.telegram.ui.Components.UndoView; +import org.telegram.ui.Components.ViewPagerFixed; import java.util.ArrayList; public class DialogsActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { - private ViewPagerFixed.TabsView searchTabsView; + private boolean canShowFilterTabsView; + private boolean filterTabsViewIsVisible; private class ViewPage extends FrameLayout { private DialogsRecyclerView listView; @@ -190,6 +197,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } + private ViewPagerFixed.TabsView searchTabsView; private ViewPage[] viewPages; private FiltersView filtersView; private ActionBarMenuItem passcodeItem; @@ -199,7 +207,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
private ActionBarMenuItem searchItem; private ActionBarMenuItem doneItem; private ProxyDrawable proxyDrawable; - private ImageView floatingButton; + private RLottieImageView floatingButton; private FrameLayout floatingButtonContainer; private ChatAvatarContainer avatarContainer; private UndoView[] undoView = new UndoView[2]; @@ -326,7 +334,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private int topPadding; private int lastMeasuredTopPadding; - + private int folderId; private final static int pin = 100; @@ -367,7 +375,13 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private boolean searchWasFullyShowed; private boolean whiteActionBar; private float progressToActionMode; - ValueAnimator actionBarColorAnimator; + private ValueAnimator actionBarColorAnimator; + + private ValueAnimator filtersTabAnimator; + private float filterTabsProgress; + private float filterTabsMoveFrom; + private float tabsYOffset; + private float scrollAdditionalOffset; public final Property SCROLL_Y = new AnimationProperties.FloatProperty("animationValue") { @Override @@ -464,7 +478,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. float h = actionBar.getHeight(); float filtersTabsHeight = 0; if (filterTabsView != null && filterTabsView.getVisibility() != GONE) { - filtersTabsHeight = filterTabsView.getMeasuredHeight(); + filtersTabsHeight = filterTabsView.getMeasuredHeight() - (1f - filterTabsProgress) * filterTabsView.getMeasuredHeight(); } float searchTabsHeight = 0; if (searchTabsView != null && searchTabsView.getVisibility() != View.GONE) { @@ -476,6 +490,9 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
@Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == fragmentContextView && fragmentContextView.getCurrentStyle() == 3) { + return true; + } boolean result; if (child == viewPages[0] || (viewPages.length > 1 && child == viewPages[1]) || child == fragmentContextView || child == fragmentLocationContextView) { canvas.save(); @@ -563,12 +580,31 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. canvas.drawRect(0, top, getMeasuredWidth(), top + actionBarHeight, actionBarDefaultPaint); } } + tabsYOffset = 0; + if (filtersTabAnimator != null && filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE) { + tabsYOffset = - (1f - filterTabsProgress) * filterTabsView.getMeasuredHeight(); + filterTabsView.setTranslationY(actionBar.getTranslationY() + tabsYOffset); + filterTabsView.setAlpha(filterTabsProgress); + viewPages[0].setTranslationY(-(1f - filterTabsProgress) * filterTabsMoveFrom); + } else if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE) { + filterTabsView.setTranslationY(actionBar.getTranslationY()); + filterTabsView.setAlpha(1f); + } + updateContextViewPosition(); super.dispatchDraw(canvas); if (whiteActionBar && searchAnimationProgress > 0 && searchAnimationProgress < 1f && searchTabsView != null) { windowBackgroundPaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); windowBackgroundPaint.setAlpha((int) (windowBackgroundPaint.getAlpha() * searchAnimationProgress)); canvas.drawRect(0, top + actionBarHeight, getMeasuredWidth(), top + actionBar.getMeasuredHeight() + searchTabsView.getMeasuredHeight(), windowBackgroundPaint); } + if (fragmentContextView != null && fragmentContextView.getCurrentStyle() == 3) { + canvas.save(); + canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); + fragmentContextView.setDrawOverlay(true); + fragmentContextView.draw(canvas); + fragmentContextView.setDrawOverlay(false); + 
canvas.restore(); + } if (scrimView != null) { canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); canvas.save(); @@ -642,6 +678,12 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } else { h = heightSize - inputFieldHeight + AndroidUtilities.dp(2) - (onlySelect ? 0 : actionBar.getMeasuredHeight()) - topPadding; } + + if (filtersTabAnimator != null && filterTabsView != null && filterTabsView.getVisibility() == VISIBLE) { + h += filterTabsMoveFrom; + } else { + child.setTranslationY(0); + } child.measure(contentWidthSpec, View.MeasureSpec.makeMeasureSpec(Math.max(AndroidUtilities.dp(10), h), View.MeasureSpec.EXACTLY)); child.setPivotX(child.getMeasuredWidth() / 2); } else if (child == searchViewPager) { @@ -806,6 +848,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. filterTabsView.selectTabWithId(viewPages[0].selectedType, 1f); filterTabsView.selectTabWithId(viewPages[1].selectedType, additionalOffset / viewPages[0].getMeasuredWidth()); switchToCurrentSelectedMode(true); + viewPages[0].dialogsAdapter.resume(); + viewPages[1].dialogsAdapter.pause(); } } else { if (startedTrackingX < viewPages[1].getMeasuredWidth() + viewPages[1].getTranslationX()) { @@ -817,6 +861,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. filterTabsView.selectTabWithId(viewPages[0].selectedType, 1f); filterTabsView.selectTabWithId(viewPages[1].selectedType, -additionalOffset / viewPages[0].getMeasuredWidth()); switchToCurrentSelectedMode(true); + viewPages[0].dialogsAdapter.resume(); + viewPages[1].dialogsAdapter.pause(); } else { additionalOffset = viewPages[0].getTranslationX(); } @@ -949,6 +995,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
viewPages[1] = tempPage; filterTabsView.selectTabWithId(viewPages[0].selectedType, 1.0f); updateCounters(false); + viewPages[0].dialogsAdapter.resume(); + viewPages[1].dialogsAdapter.pause(); } if (parentLayout != null) { parentLayout.getDrawerLayoutContainer().setAllowOpenDrawerBySwipe(viewPages[0].selectedType == filterTabsView.getFirstTabId() || searchIsShowed); @@ -996,6 +1044,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private boolean ignoreLayout; private ViewPage parentPage; private int appliedPaddingTop; + private int lastTop; + private int lastListPadding; public DialogsRecyclerView(Context context, ViewPage page) { super(context); @@ -1058,6 +1108,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (slidingView != null && pacmanAnimation != null) { pacmanAnimation.draw(canvas, slidingView.getTop() + slidingView.getMeasuredHeight() / 2); } + } @Override @@ -1078,6 +1129,25 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. @Override protected void onMeasure(int widthSpec, int heightSpec) { int t = 0; + if (!onlySelect) { + if (filterTabsView != null && filterTabsView.getVisibility() == VISIBLE) { + t = AndroidUtilities.dp(44); + } else { + t = actionBar.getMeasuredHeight(); + } + } + + int pos = parentPage.layoutManager.findFirstVisibleItemPosition(); + if (pos != RecyclerView.NO_POSITION) { + RecyclerView.ViewHolder holder = parentPage.listView.findViewHolderForAdapterPosition(pos); + if (holder != null) { + int top = holder.itemView.getTop(); + + ignoreLayout = true; + parentPage.layoutManager.scrollToPositionWithOffset(pos, (int) (top - lastListPadding + scrollAdditionalOffset)); + ignoreLayout = false; + } + } if (!onlySelect) { ignoreLayout = true; if (filterTabsView != null && filterTabsView.getVisibility() == VISIBLE) { @@ -1087,16 +1157,6 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
} setTopGlowOffset(t); setPadding(0, t, 0, 0); - if (appliedPaddingTop != t) { - int pos = parentPage.layoutManager.findFirstVisibleItemPosition(); - if (pos != RecyclerView.NO_POSITION) { - RecyclerView.ViewHolder holder = parentPage.listView.findViewHolderForAdapterPosition(pos); - if (holder != null) { - int top = holder.itemView.getTop(); - parentPage.layoutManager.scrollToPositionWithOffset(pos, top - appliedPaddingTop); - } - } - } ignoreLayout = false; } @@ -1121,7 +1181,10 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { super.onLayout(changed, l, t, r, b); - appliedPaddingTop = getPaddingTop(); + lastListPadding = getPaddingTop(); + lastTop = t; + scrollAdditionalOffset = 0; + if ((dialogRemoveFinished != 0 || dialogInsertFinished != 0 || dialogChangeFinished != 0) && !parentPage.dialogsItemAnimator.isRunning()) { onDialogAnimationFinished(); } @@ -1559,6 +1622,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. getNotificationCenter().addObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().addObserver(this, NotificationCenter.didDatabaseCleared); + if (!dialogsLoaded[currentAccount]) { MessagesController messagesController = getMessagesController(); messagesController.loadGlobalNotificationsSettings(); @@ -1610,6 +1675,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetPasscode); } + + getNotificationCenter().removeObserver(this, NotificationCenter.didDatabaseCleared); if (commentView != null) { commentView.onDestroy(); } @@ -1622,6 +1689,13 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
@Override protected ActionBar createActionBar(Context context) { ActionBar actionBar = new ActionBar(context) { + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + fragmentView.invalidate(); + } + @Override protected boolean shouldClipChild(View child) { return super.shouldClipChild(child) || child == doneItem; @@ -1848,7 +1922,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. }; filterTabsView.setVisibility(View.GONE); - filterTabsView.setTag(null); + canShowFilterTabsView = false; filterTabsView.setDelegate(new FilterTabsView.FilterTabsViewDelegate() { private void showDeleteAlert(MessagesController.DialogFilter dialogFilter) { @@ -1862,12 +1936,12 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { })); - if (getMessagesController().dialogFilters.size() > 1) { - filterTabsView.beginCrossfade(); - } +// if (getMessagesController().dialogFilters.size() > 1) { +// filterTabsView.beginCrossfade(); +// } getMessagesController().removeFilter(dialogFilter); getMessagesStorage().deleteDialogFilter(dialogFilter); - filterTabsView.commitCrossfade(); + // filterTabsView.commitCrossfade(); }); AlertDialog alertDialog = builder.create(); showDialog(alertDialog); @@ -1938,6 +2012,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. showScrollbars(true); updateCounters(false); checkListLoad(viewPages[0]); + viewPages[0].dialogsAdapter.resume(); + viewPages[1].dialogsAdapter.pause(); } } @@ -2028,7 +2104,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. linearLayout.setOrientation(LinearLayout.VERTICAL); scrimPopupWindowItems = new ActionBarMenuSubItem[3]; for (int a = 0, N = (tabView.getId() == Integer.MAX_VALUE ? 
2 : 3); a < N; a++) { - ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getParentActivity()); + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getParentActivity(), a == 0, a == N - 1); if (a == 0) { if (getMessagesController().dialogFilters.size() <= 1) { continue; @@ -2504,7 +2580,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } } - if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE && recyclerView == viewPages[0].listView && !searching && !actionBar.isActionModeShowed() && !disableActionBarScrolling) { + if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE && recyclerView == viewPages[0].listView && !searching && !actionBar.isActionModeShowed() && !disableActionBarScrolling && filterTabsViewIsVisible) { if (dy > 0 && hasHiddenArchive() && viewPages[0].dialogsType == 0) { View child = recyclerView.getChildAt(0); if (child != null) { @@ -2748,7 +2824,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. floatingButtonContainer = new FrameLayout(context); floatingButtonContainer.setVisibility(onlySelect || folderId != 0 ? View.GONE : View.VISIBLE); - contentView.addView(floatingButtonContainer, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 14, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.BOTTOM, LocaleController.isRTL ? 4 : 0, 0, LocaleController.isRTL ? 0 : 4, 0)); + contentView.addView(floatingButtonContainer, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.BOTTOM, LocaleController.isRTL ? 4 : 0, 0, LocaleController.isRTL ? 
0 : 4, 0)); floatingButtonContainer.setOnClickListener(v -> { Bundle args = new Bundle(); args.putBoolean("destroyAfterSelect", true); @@ -2756,7 +2832,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. }); - floatingButton = new ImageView(context); + floatingButton = new RLottieImageView(context); floatingButton.setScaleType(ImageView.ScaleType.CENTER); Drawable drawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(56), Theme.getColor(Theme.key_chats_actionBackground), Theme.getColor(Theme.key_chats_actionPressedBackground)); if (Build.VERSION.SDK_INT < 21) { @@ -2768,7 +2844,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } floatingButton.setBackgroundDrawable(drawable); floatingButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chats_actionIcon), PorterDuff.Mode.MULTIPLY)); - floatingButton.setImageResource(R.drawable.floating_pencil); + floatingButton.setAnimation(R.raw.write_contacts_fab_icon, 52, 52); if (Build.VERSION.SDK_INT >= 21) { StateListAnimator animator = new StateListAnimator(); animator.addState(new int[]{android.R.attr.state_pressed}, ObjectAnimator.ofFloat(floatingButton, View.TRANSLATION_Z, AndroidUtilities.dp(2), AndroidUtilities.dp(4)).setDuration(200)); @@ -2783,22 +2859,9 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. }); } floatingButtonContainer.setContentDescription(LocaleController.getString("NewMessageTitle", R.string.NewMessageTitle)); - floatingButtonContainer.addView(floatingButton, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60), (Build.VERSION.SDK_INT >= 21 ? 56 : 60), Gravity.LEFT | Gravity.TOP, 10, 0, 10, 0)); + floatingButtonContainer.addView(floatingButton, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60), (Build.VERSION.SDK_INT >= 21 ? 
56 : 60), Gravity.LEFT | Gravity.TOP, 10, 6, 10, 0)); searchTabsView = null; - if (searchString != null) { - showSearch(true, false); - actionBar.openSearchField(searchString, false); - } else if (initialSearchString != null) { - showSearch(true, false); - actionBar.openSearchField(initialSearchString, false); - initialSearchString = null; - if (filterTabsView != null) { - filterTabsView.setTranslationY(-AndroidUtilities.dp(44)); - } - } else { - showSearch(false, false); - } if (!onlySelect && initialDialogsType == 0) { fragmentLocationContextView = new FragmentContextView(context, this, true); @@ -3010,10 +3073,24 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } - updateFilterTabs(false); + updateFilterTabs(false, false); + + if (searchString != null) { + showSearch(true, false); + actionBar.openSearchField(searchString, false); + } else if (initialSearchString != null) { + showSearch(true, false); + actionBar.openSearchField(initialSearchString, false); + initialSearchString = null; + if (filterTabsView != null) { + filterTabsView.setTranslationY(-AndroidUtilities.dp(44)); + } + } else { + showSearch(false, false); + } if (folderId != 0) { - FiltersView.MediaFilterData filterData = new FiltersView.MediaFilterData(R.drawable.chats_archive, R.drawable.chats_archive, LocaleController.getString("Archive", R.string.Archive), null, FiltersView.FILTER_TYPE_ARCHIVE); + FiltersView.MediaFilterData filterData = new FiltersView.MediaFilterData(R.drawable.chats_archive, R.drawable.chats_archive, LocaleController.getString("ArchiveSearchFilter", R.string.ArchiveSearchFilter), null, FiltersView.FILTER_TYPE_ARCHIVE); filterData.removable = false; actionBar.setSearchFilter(filterData); searchItem.collapseSearchFilters(); @@ -3036,14 +3113,14 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
if (fragmentLocationContextView != null && fragmentLocationContextView.getVisibility() == View.VISIBLE) { from += AndroidUtilities.dp(36); } - fragmentContextView.setTranslationY(from + fragmentContextView.getTopPadding() + actionBar.getTranslationY() + filtersTabsHeight * (1f - searchAnimationProgress) + searchTabsHeight * searchAnimationProgress); + fragmentContextView.setTranslationY(from + fragmentContextView.getTopPadding() + actionBar.getTranslationY() + filtersTabsHeight * (1f - searchAnimationProgress) + searchTabsHeight * searchAnimationProgress + tabsYOffset); } if (fragmentLocationContextView != null) { float from = 0; - if (fragmentContextView != null) { - from += fragmentContextView.getTopPadding(); + if (fragmentContextView != null && fragmentContextView.getVisibility() == View.VISIBLE) { + from += AndroidUtilities.dp(fragmentContextView.getStyleHeight()) + fragmentContextView.getTopPadding(); } - fragmentLocationContextView.setTranslationY(from + fragmentLocationContextView.getTopPadding() + actionBar.getTranslationY() + filtersTabsHeight * (1f - searchAnimationProgress) + searchTabsHeight * searchAnimationProgress); + fragmentLocationContextView.setTranslationY(from + fragmentLocationContextView.getTopPadding() + actionBar.getTranslationY() + filtersTabsHeight * (1f - searchAnimationProgress) + searchTabsHeight * searchAnimationProgress + tabsYOffset); } } @@ -3309,7 +3386,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. updateCounters(false); } - private void updateFilterTabs(boolean force) { + private void updateFilterTabs(boolean force, boolean animated) { if (filterTabsView == null || inPreviewMode || searchIsShowed) { return; } @@ -3321,20 +3398,28 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); if (!filters.isEmpty()) { if (force || filterTabsView.getVisibility() != View.VISIBLE) { - filterTabsView.setVisibility(View.VISIBLE); - filterTabsView.setTag(1); + boolean animatedUpdateItems = animated; + if (filterTabsView.getVisibility() != View.VISIBLE) { + animatedUpdateItems = false; + } + canShowFilterTabsView = true; + updateFilterTabsVisibility(animated); int id = filterTabsView.getCurrentTabId(); if (id != Integer.MAX_VALUE && id >= filters.size()) { filterTabsView.resetTabId(); } filterTabsView.removeTabs(); - filterTabsView.addTab(Integer.MAX_VALUE, LocaleController.getString("FilterAllChats", R.string.FilterAllChats)); + filterTabsView.addTab(Integer.MAX_VALUE, 0, LocaleController.getString("FilterAllChats", R.string.FilterAllChats)); for (int a = 0, N = filters.size(); a < N; a++) { - filterTabsView.addTab(a, filters.get(a).name); + filterTabsView.addTab(a, filters.get(a).localId, filters.get(a).name); } id = filterTabsView.getCurrentTabId(); + boolean updateCurrentTab = false; if (id >= 0) { - viewPages[0].selectedType = id; + if (viewPages[0].selectedType != id) { + updateCurrentTab = true; + viewPages[0].selectedType = id; + } } for (int a = 0; a < viewPages.length; a++) { if (viewPages[a].selectedType != Integer.MAX_VALUE && viewPages[a].selectedType >= filters.size()) { @@ -3342,8 +3427,10 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
} viewPages[a].listView.setScrollingTouchSlop(RecyclerView.TOUCH_SLOP_PAGING); } - filterTabsView.finishAddingTabs(); - switchToCurrentSelectedMode(false); + filterTabsView.finishAddingTabs(animatedUpdateItems); + if (updateCurrentTab) { + switchToCurrentSelectedMode(false); + } if (parentLayout != null) { parentLayout.getDrawerLayoutContainer().setAllowOpenDrawerBySwipe(id == filterTabsView.getFirstTabId()); } @@ -3370,8 +3457,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. viewPages[1].dialogsAdapter.setDialogsType(0); viewPages[1].dialogsType = 0; viewPages[1].dialogsAdapter.notifyDataSetChanged(); - filterTabsView.setVisibility(View.GONE); - filterTabsView.setTag(null); + canShowFilterTabsView = false; + updateFilterTabsVisibility(animated); for (int a = 0; a < viewPages.length; a++) { if (viewPages[a].dialogsType == 0 && viewPages[a].archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && hasHiddenArchive()) { int p = viewPages[a].layoutManager.findFirstVisibleItemPosition(); @@ -3516,12 +3603,20 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (viewPages[a].dialogsType == 0 && viewPages[a].archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && viewPages[a].layoutManager.findFirstVisibleItemPosition() == 0 && hasHiddenArchive()) { viewPages[a].layoutManager.scrollToPositionWithOffset(1, 0); } + if (a == 0) { + viewPages[a].dialogsAdapter.resume(); + } else { + viewPages[a].dialogsAdapter.pause(); + } } } showNextSupportedSuggestion(); Bulletin.addDelegate(this, new Bulletin.Delegate() { @Override public void onOffsetChange(float offset) { + if (undoView[0] != null && undoView[0].getVisibility() == View.VISIBLE) { + return; + } additionalFloatingTranslation = offset; if (additionalFloatingTranslation < 0) { additionalFloatingTranslation = 0; @@ -3530,12 +3625,32 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
updateFloatingButtonOffset(); } } + + @Override + public void onShow(Bulletin bulletin) { + if (undoView[0] != null && undoView[0].getVisibility() == View.VISIBLE) { + undoView[0].hide(true, 2); + } + } }); if (searchIsShowed) { AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); } } + @Override + public boolean presentFragment(BaseFragment fragment) { + boolean b = super.presentFragment(fragment); + if (b) { + if (viewPages != null) { + for (int a = 0; a < viewPages.length; a++) { + viewPages[a].dialogsAdapter.pause(); + } + } + } + return b; + } + @Override public void onPause() { super.onPause(); @@ -3549,6 +3664,12 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. undoView[0].hide(true, 0); } Bulletin.removeDelegate(this); + + if (viewPages != null) { + for (int a = 0; a < viewPages.length; a++) { + viewPages[a].dialogsAdapter.pause(); + } + } } @Override @@ -3589,7 +3710,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } closeSearchFieldOnHide = false; } - if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE) { + if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE && filterTabsViewIsVisible) { int scrollY = (int) -actionBar.getTranslationY(); int actionBarHeight = ActionBar.getCurrentActionBarHeight(); if (scrollY != 0 && scrollY != actionBarHeight) { @@ -3613,7 +3734,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. ((ViewGroup.MarginLayoutParams) actionBar.getLayoutParams()).topMargin = 0; actionBar.removeView(avatarContainer); avatarContainer = null; - updateFilterTabs(false); + updateFilterTabs(false, false); floatingButton.setVisibility(View.VISIBLE); final ContentView contentView = (ContentView) fragmentView; if (fragmentContextView != null) { @@ -3647,7 +3768,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
} private void showSearch(boolean show, boolean animated) { - if (initialDialogsType != 0) { + if (initialDialogsType != 0 && initialDialogsType != 3) { animated = false; } if (searchAnimator != null) { @@ -3712,7 +3833,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } else { AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); } - if (!show && filterTabsView != null && filterTabsView.getTag() != null) { + if (!show && filterTabsView != null && canShowFilterTabsView) { filterTabsView.setVisibility(View.VISIBLE); } if (animated) { @@ -3851,7 +3972,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. filterTabsView.getTabsContainer().setAlpha(show ? 0.0f : 1.0f); } if (filterTabsView != null) { - if (filterTabsView.getTag() != null && !show) { + if (canShowFilterTabsView && !show) { filterTabsView.setVisibility(View.VISIBLE); } else { filterTabsView.setVisibility(View.GONE); @@ -3863,6 +3984,75 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } + private void updateFilterTabsVisibility(boolean animated) { + if (isPaused) { + animated = false; + } + if (searchIsShowed) { + if (filtersTabAnimator != null) { + filtersTabAnimator.cancel(); + } + filterTabsViewIsVisible = canShowFilterTabsView; + filterTabsProgress = filterTabsViewIsVisible ? 
1f : 0; + return; + } + boolean visible = canShowFilterTabsView; + if (filterTabsViewIsVisible != visible) { + if (filtersTabAnimator != null) { + filtersTabAnimator.cancel(); + } + filterTabsViewIsVisible = visible; + if (animated) { + if (visible) { + if (filterTabsView.getVisibility() != View.VISIBLE) { + filterTabsView.setVisibility(View.VISIBLE); + } + filtersTabAnimator = ValueAnimator.ofFloat(0, 1f); + filterTabsMoveFrom = AndroidUtilities.dp(44); + } else { + filtersTabAnimator = ValueAnimator.ofFloat(1f, 0f); + filterTabsMoveFrom = Math.max(0, AndroidUtilities.dp(44) + actionBar.getTranslationY()); + } + float animateFromScrollY = actionBar.getTranslationY(); + final int account = currentAccount; + filtersTabAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + filtersTabAnimator = null; + scrollAdditionalOffset = AndroidUtilities.dp(44) - filterTabsMoveFrom; + if (!visible) { + filterTabsView.setVisibility(View.GONE); + } + if (fragmentView != null) { + fragmentView.requestLayout(); + } + NotificationCenter.getInstance(account).onAnimationFinish(animationIndex); + } + }); + filtersTabAnimator.addUpdateListener(valueAnimator -> { + filterTabsProgress = (float) valueAnimator.getAnimatedValue(); + if (!visible) { + setScrollY(animateFromScrollY * filterTabsProgress); + } + if (fragmentView != null) { + fragmentView.invalidate(); + } + }); + filtersTabAnimator.setDuration(220); + filtersTabAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + animationIndex = NotificationCenter.getInstance(account).setAnimationInProgress(animationIndex, null); + filtersTabAnimator.start(); + fragmentView.requestLayout(); + } else { + filterTabsProgress = visible ? 1f : 0; + filterTabsView.setVisibility(visible ? 
View.VISIBLE : View.GONE); + if (fragmentView != null) { + fragmentView.invalidate(); + } + } + } + } + private void setSearchAnimationProgress(float progress) { searchAnimationProgress = progress; if (whiteActionBar) { @@ -5524,7 +5714,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. finishFragment(); } } else if (id == NotificationCenter.dialogFiltersUpdated) { - updateFilterTabs(true); + updateFilterTabs(true, true); } else if (id == NotificationCenter.filterSettingsUpdated) { showFiltersHint(); } else if (id == NotificationCenter.newSuggestionsAvailable) { @@ -5535,6 +5725,10 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. int channelId = (int) args[1]; searchViewPager.messagesDeleted(channelId, markAsDeletedMessages); } + } else if (id == NotificationCenter.didDatabaseCleared) { + for (int a = 0; a < viewPages.length; a++) { + viewPages[a].dialogsAdapter.didDatabaseCleared(); + } } } @@ -5886,6 +6080,10 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } + public RLottieImageView getFloatingButton() { + return floatingButton; + } + @Override public ArrayList getThemeDescriptions() { ThemeDescription.ThemeDescriptionDelegate cellDelegate = () -> { @@ -6357,6 +6555,25 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_actionBarIconBlue)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_groupcreate_spanBackground)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayGreen1)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayGreen2)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayBlue1)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayBlue2)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelGreen1)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelGreen2)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelBlue1)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelBlue2)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_topPanelGray)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientMuted)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientMuted2)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientUnmuted)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertGradientUnmuted2)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_mutedByAdminGradient)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_mutedByAdminGradient2)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, 
Theme.key_voipgroup_mutedByAdminGradient3)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertMutedByAdmin)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertMutedByAdmin2)); + if (filtersView != null) { arrayList.addAll(filtersView.getThemeDescriptions()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java b/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java index 349f7951a..75dd7824f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java @@ -8,14 +8,9 @@ import android.app.Activity; import android.content.Context; import android.content.res.Configuration; import android.graphics.Canvas; -import android.graphics.LinearGradient; -import android.graphics.Matrix; import android.graphics.Paint; -import android.graphics.RectF; -import android.graphics.Shader; import android.graphics.drawable.Drawable; import android.os.Build; -import android.os.SystemClock; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.util.SparseArray; @@ -272,7 +267,34 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente parentFragment = fragment; Context context = parentActivity = fragment.getParentActivity(); setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - recyclerListView = new RecyclerListView(context); + recyclerListView = new RecyclerListView(context) { + + @Override + protected void dispatchDraw(Canvas canvas) { + if (getAdapter() == sharedPhotoVideoAdapter) { + for (int i = 0; i < getChildCount(); i++) { + if (getChildViewHolder(getChildAt(i)).getItemViewType() == 1) { + canvas.save(); + canvas.translate(getChildAt(i).getX(), getChildAt(i).getY() - getChildAt(i).getMeasuredHeight() + AndroidUtilities.dp(2)); + getChildAt(i).draw(canvas); + 
canvas.restore(); + invalidate(); + } + } + } + super.dispatchDraw(canvas); + } + + @Override + public boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (getAdapter() == sharedPhotoVideoAdapter) { + if (getChildViewHolder(child).getItemViewType() == 1) { + return true; + } + } + return super.drawChild(canvas, child, drawingTime); + } + }; recyclerListView.setOnItemClickListener((view, position) -> { if (view instanceof SharedDocumentCell) { FilteredSearchView.this.onItemClick(position, view, ((SharedDocumentCell) view).getMessage(), 0); @@ -386,7 +408,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente sharedAudioAdapter = new SharedDocumentsAdapter(getContext(), 4); sharedVoiceAdapter = new SharedDocumentsAdapter(getContext(), 2); - emptyView = new StickerEmptyView(context, loadingView); + emptyView = new StickerEmptyView(context, loadingView, StickerEmptyView.STICKER_TYPE_SEARCH); addView(emptyView); recyclerListView.setEmptyView(emptyView); emptyView.setVisibility(View.GONE); @@ -719,7 +741,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente if (progressView != null) { recyclerListView.removeView(progressView); } - if (loadingView.getVisibility() == View.VISIBLE && recyclerListView.getChildCount() == 0 || progressView != null) { + if ((loadingView.getVisibility() == View.VISIBLE && recyclerListView.getChildCount() == 0) || (recyclerListView.getAdapter() != sharedPhotoVideoAdapter && progressView != null)) { int finalProgressViewPosition = progressViewPosition; getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { @Override @@ -848,7 +870,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente if (messages.isEmpty()) { return 0; } - return (int) Math.ceil(messages.size() / (float) columnsCount); + return (int) Math.ceil(messages.size() / (float) columnsCount) + (endReached ? 
0 : 1); } @Override @@ -885,7 +907,15 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente break; case 1: default: - view = new LoadingCell(mContext, AndroidUtilities.dp(32), AndroidUtilities.dp(74)); + FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext) { + @Override + public int getColumnsCount() { + return columnsCount; + } + }; + flickerLoadingView.setIsSingleCell(true); + flickerLoadingView.setViewType(FlickerLoadingView.PHOTOS_TYPE); + view = flickerLoadingView; break; } view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); @@ -927,12 +957,18 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente } else { cell.setChecked(false, animated); } + } else if (holder.getItemViewType() == 1) { + FlickerLoadingView flickerLoadingView = (FlickerLoadingView) holder.itemView; + int count = (int) Math.ceil(messages.size() / (float) columnsCount); + flickerLoadingView.skipDrawItemsCount(columnsCount - (columnsCount * count - messages.size())); + } } @Override public int getItemViewType(int position) { - if (position < messages.size()) { + int count = (int) Math.ceil(messages.size() / (float) columnsCount); + if (position < count) { return 0; } return 1; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java new file mode 100644 index 000000000..c998526cf --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java @@ -0,0 +1,2872 @@ +package org.telegram.ui; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.AnimatorSet; +import android.animation.ObjectAnimator; +import android.animation.ValueAnimator; +import android.annotation.SuppressLint; +import android.content.Context; +import android.content.DialogInterface; +import 
android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.RadialGradient; +import android.graphics.Rect; +import android.graphics.RectF; +import android.graphics.Shader; +import android.graphics.drawable.Drawable; +import android.os.Build; +import android.os.Bundle; +import android.os.SystemClock; +import android.provider.Settings; +import android.os.Vibrator; +import android.text.TextUtils; +import android.util.Property; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.HapticFeedbackConstants; +import android.view.KeyEvent; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.view.accessibility.AccessibilityNodeInfo; +import android.view.animation.OvershootInterpolator; +import android.widget.Button; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.ScrollView; +import android.widget.TextView; + +import androidx.annotation.Nullable; +import androidx.core.graphics.ColorUtils; +import androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.DiffUtil; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.ListUpdateCallback; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.FileLog; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; 
+import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; +import org.telegram.messenger.Utilities; +import org.telegram.messenger.voip.VoIPBaseService; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.ActionBarMenuItem; +import org.telegram.ui.ActionBar.ActionBarMenuSubItem; +import org.telegram.ui.ActionBar.ActionBarPopupWindow; +import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.BottomSheet; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.ActionBar.ThemeDescription; +import org.telegram.ui.Cells.CheckBoxCell; +import org.telegram.ui.Cells.GroupCallInvitedCell; +import org.telegram.ui.Cells.GroupCallTextCell; +import org.telegram.ui.Cells.GroupCallUserCell; +import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.AnimationProperties; +import org.telegram.ui.Components.AvatarDrawable; +import org.telegram.ui.Components.BackupImageView; +import org.telegram.ui.Components.BlobDrawable; +import org.telegram.ui.Components.CheckBoxSquare; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.EditTextBoldCursor; +import org.telegram.ui.Components.FillLastLinearLayoutManager; +import org.telegram.ui.Components.GroupVoipInviteAlert; +import org.telegram.ui.Components.GroupCallPip; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.RLottieDrawable; +import org.telegram.ui.Components.RLottieImageView; +import org.telegram.ui.Components.RadialProgressView; +import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.ShareAlert; +import org.telegram.ui.Components.UndoView; +import org.telegram.ui.Components.WaveDrawable; +import org.telegram.ui.Components.voip.VoIPToggleButton; + +import java.util.ArrayList; + +public 
class GroupCallActivity extends BottomSheet implements NotificationCenter.NotificationCenterDelegate, VoIPBaseService.StateListener { + + private static final int eveyone_can_speak_item = 1; + private static final int admin_can_speak_item = 2; + private static final int share_invite_link_item = 3; + private static final int leave_item = 4; + + private static final int MUTE_BUTTON_STATE_UNMUTE = 0; + private static final int MUTE_BUTTON_STATE_MUTE = 1; + private static final int MUTE_BUTTON_STATE_MUTED_BY_ADMIN = 2; + private static final int MUTE_BUTTON_STATE_CONNECTING = 3; + + public static GroupCallActivity groupCallInstance; + public static boolean groupCallUiVisible; + + private AccountInstance accountInstance; + + private View actionBarBackground; + private ActionBar actionBar; + private ListAdapter listAdapter; + private RecyclerListView listView; + private FillLastLinearLayoutManager layoutManager; + private VoIPToggleButton soundButton; + private VoIPToggleButton leaveButton; + private RLottieImageView muteButton; + private TextView[] muteLabel = new TextView[2]; + private TextView[] muteSubLabel = new TextView[2]; + private FrameLayout buttonsContainer; + private RadialProgressView radialProgressView; + private Drawable shadowDrawable; + private View actionBarShadow; + private AnimatorSet actionBarAnimation; + private LaunchActivity parentActivity; + private UndoView[] undoView = new UndoView[2]; + + private ShareAlert shareAlert; + + private boolean delayedGroupCallUpdated; + + private RectF rect = new RectF(); + + private boolean enterEventSent; + private boolean anyEnterEventSent; + + private float scrollOffsetY; + + private boolean scrolling; + + private TLRPC.TL_groupCallParticipant selfDummyParticipant; + + private Paint listViewBackgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + private ArrayList oldParticipants = new ArrayList<>(); + private ArrayList oldInvited = new ArrayList<>(); + private int oldCount; + + private RLottieDrawable 
bigMicDrawable; + + private final BlobDrawable tinyWaveDrawable; + private final BlobDrawable bigWaveDrawable; + + private float amplitude; + private float animateToAmplitude; + private float animateAmplitudeDiff; + + private RadialGradient radialGradient; + private final Matrix radialMatrix; + private final Paint radialPaint; + + private ValueAnimator muteButtonAnimator; + + public TLRPC.Chat currentChat; + public ChatObject.Call call; + + private TextView titleTextView; + private ActionBarMenuItem otherItem; + private ActionBarMenuItem pipItem; + private ActionBarMenuSubItem inviteItem; + private ActionBarMenuSubItem everyoneItem; + private ActionBarMenuSubItem adminItem; + private ActionBarMenuSubItem leaveItem; + private View dividerItem; + private final LinearLayout menuItemsContainer; + + private GroupVoipInviteAlert groupVoipInviteAlert; + + private int muteButtonState = MUTE_BUTTON_STATE_UNMUTE; + + private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG | Paint.FILTER_BITMAP_FLAG); + private Paint paintTmp = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG | Paint.FILTER_BITMAP_FLAG); + private Paint leaveBackgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private WeavingState[] states = new WeavingState[4]; + private float switchProgress = 1.0f; + private WeavingState prevState; + private WeavingState currentState; + private long lastUpdateTime; + private int shaderBitmapSize = 200; + private float showWavesProgress; + private float showLightingProgress; + + private boolean scheduled; + private boolean pressed; + + private int currentCallState; + + private float colorProgress; + private int backgroundColor; + private boolean invalidateColors = true; + private final int[] colorsTmp = new int[3]; + + private Runnable unmuteRunnable = () -> VoIPService.getSharedInstance().setMicMute(false, true, false); + + private Runnable pressRunnable = () -> { + if (!scheduled || VoIPService.getSharedInstance() == null) { + return; + } + 
muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + updateMuteButton(MUTE_BUTTON_STATE_MUTE, true); + AndroidUtilities.runOnUIThread(unmuteRunnable, 80); + scheduled = false; + pressed = true; + }; + + public static final Property COLOR_PROGRESS = new AnimationProperties.FloatProperty("colorProgress") { + @Override + public void setValue(GroupCallActivity object, float value) { + object.setColorProgress(value); + } + + @Override + public Float get(GroupCallActivity object) { + return object.getColorProgress(); + } + }; + + private class WeavingState { + private float targetX = -1f; + private float targetY = -1f; + private float startX; + private float startY; + private float duration; + private float time; + private Shader shader; + private Matrix matrix = new Matrix(); + private int currentState; + + public WeavingState(int state) { + currentState = state; + } + + public void update(int top, int left, int size, long dt) { + if (shader == null) { + return; + } + if (duration == 0 || time >= duration) { + duration = Utilities.random.nextInt(700) + 500; + time = 0; + if (targetX == -1f) { + setTarget(); + } + startX = targetX; + startY = targetY; + setTarget(); + } + time += dt * (0.5f + BlobDrawable.GRADIENT_SPEED_MIN) + dt * (BlobDrawable.GRADIENT_SPEED_MAX * 2) * amplitude; + if (time > duration) { + time = duration; + } + float interpolation = CubicBezierInterpolator.EASE_OUT.getInterpolation(time / duration); + float x = left + size * (startX + (targetX - startX) * interpolation) - 200; + float y = top + size * (startY + (targetY - startY) * interpolation) - 200; + + float s; + if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + s = 1f; + } else { + s = currentState == MUTE_BUTTON_STATE_MUTE ? 
4 : 2.5f; + } + float scale = AndroidUtilities.dp(122) / 400.0f * s; + matrix.reset(); + matrix.postTranslate(x, y); + matrix.postScale(scale, scale, x + 200, y + 200); + + shader.setLocalMatrix(matrix); + } + + private void setTarget() { + if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + targetX = 0.85f + 0.25f * Utilities.random.nextInt(100) / 100f; + targetY = 1f; + } else if (currentState == MUTE_BUTTON_STATE_MUTE) { + targetX = 0.2f + 0.3f * Utilities.random.nextInt(100) / 100f; + targetY = 0.7f + 0.3f * Utilities.random.nextInt(100) / 100f; + } else { + targetX = 0.8f + 0.2f * (Utilities.random.nextInt(100) / 100f); + targetY = Utilities.random.nextInt(100) / 100f; + } + } + } + + @SuppressWarnings("FieldCanBeLocal") + private static class LabeledButton extends FrameLayout { + + private ImageView imageView; + private TextView textView; + + public LabeledButton(Context context, String text, int resId, int color) { + super(context); + + imageView = new ImageView(context); + if (Build.VERSION.SDK_INT >= 21) { + imageView.setBackground(Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(50), color, 0x1fffffff)); + } else { + imageView.setBackground(Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(50), color, color)); + } + imageView.setImageResource(resId); + imageView.setScaleType(ImageView.ScaleType.CENTER); + addView(imageView, LayoutHelper.createFrame(50, 50, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); + + textView = new TextView(context); + textView.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 12); + textView.setGravity(Gravity.CENTER_HORIZONTAL); + textView.setText(text); + addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 50 + 5, 0, 0)); + } + + public void setColor(int color) { + Theme.setSelectorDrawableColor(imageView.getBackground(), color, false); + if 
(Build.VERSION.SDK_INT < 21) { + Theme.setSelectorDrawableColor(imageView.getBackground(), color, true); + } + imageView.invalidate(); + } + } + + @Override + protected boolean canDismissWithSwipe() { + return false; + } + + @Override + protected boolean onCustomOpenAnimation() { + groupCallUiVisible = true; + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.groupCallVisibilityChanged); + GroupCallPip.updateVisibility(getContext()); + return super.onCustomOpenAnimation(); + } + + @Override + public void dismiss() { + groupCallUiVisible = false; + if (groupVoipInviteAlert != null) { + groupVoipInviteAlert.dismiss(); + } + delayedGroupCallUpdated = true; + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.groupCallVisibilityChanged); + accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.needShowAlert); + accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.groupCallUpdated); + accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoDidLoad); + accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.didLoadChatAdmins); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didEndCall); + super.dismiss(); + } + + @Override + public void didReceivedNotification(int id, int account, Object... 
args) { + if (id == NotificationCenter.groupCallUpdated) { + Long callId = (Long) args[1]; + if (call != null && call.call.id == callId) { + if (call.call instanceof TLRPC.TL_groupCallDiscarded) { + dismiss(); + } else { + updateItems(); + for (int a = 0, N = listView.getChildCount(); a < N; a++) { + View child = listView.getChildAt(a); + if (child instanceof GroupCallUserCell) { + ((GroupCallUserCell) child).applyParticipantChanges(true); + } + } + if (scrimView != null) { + delayedGroupCallUpdated = true; + } else { + applyCallParticipantUpdates(); + } + + if (actionBar != null) { + int count = call.call.participants_count + (listAdapter.addSelfToCounter() ? 1 : 0); + actionBar.setSubtitle(LocaleController.formatPluralString("Members", count)); + } + updateState(true, (Boolean) args[2]); + } + } + } else if (id == NotificationCenter.webRtcMicAmplitudeEvent) { + float amplitude = (float) args[0]; + setAmplitude(amplitude * 4000.0f); + if (listView != null) { + TLRPC.TL_groupCallParticipant participant = call.participants.get(accountInstance.getUserConfig().getClientUserId()); + if (participant != null) { + ArrayList array = delayedGroupCallUpdated ? 
oldParticipants : call.sortedParticipants; + int idx = array.indexOf(participant); + if (idx >= 0) { + RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(idx + listAdapter.usersStartRow); + if (holder != null && holder.itemView instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) holder.itemView; + cell.setAmplitude(amplitude * 15.0f); + if (holder.itemView == scrimView) { + containerView.invalidate(); + } + } + } + } + } + } else if (id == NotificationCenter.needShowAlert) { + int num = (Integer) args[0]; + if (num == 6) { + String text = (String) args[1]; + String error; + if ("ANONYMOUS_CALLS_DISABLED".equals(text) || "GROUPCALL_ANONYMOUS_FORBIDDEN".equals(text)) { + error = LocaleController.getString("VoipGroupJoinAnonymousAdmin", R.string.VoipGroupJoinAnonymousAdmin); + } else { + error = LocaleController.getString("ErrorOccurred", R.string.ErrorOccurred) + "\n" + text; + } + + AlertDialog.Builder builder = AlertsCreator.createSimpleAlert(getContext(), LocaleController.getString("VoipGroupVoiceChat", R.string.VoipGroupVoiceChat), error); + builder.setOnDismissListener(dialog -> dismiss()); + try { + builder.show(); + } catch (Exception e) { + FileLog.e(e); + } + } + } else if (id == NotificationCenter.didEndCall) { + if (VoIPService.getSharedInstance() == null) { + dismiss(); + } + } else if (id == NotificationCenter.chatInfoDidLoad) { + TLRPC.ChatFull chatFull = (TLRPC.ChatFull) args[0]; + if (chatFull.id == currentChat.id) { + updateItems(); + updateState(isShowing(), false); + } + } else if (id == NotificationCenter.didLoadChatAdmins) { + int chatId = (Integer) args[0]; + if (chatId == currentChat.id) { + updateItems(); + updateState(isShowing(), false); + } + } + } + + private void applyCallParticipantUpdates() { + int count = listView.getChildCount(); + View minChild = null; + int minPosition = 0; + for (int a = 0; a < count; a++) { + View child = listView.getChildAt(a); + RecyclerView.ViewHolder holder = 
listView.findContainingViewHolder(child); + if (holder != null) { + if (minChild == null || minPosition > holder.getAdapterPosition()) { + minChild = child; + minPosition = holder.getAdapterPosition(); + } + } + } + try { + UpdateCallback updateCallback = new UpdateCallback(listAdapter); + setOldRows(listAdapter.addMemberRow, listAdapter.selfUserRow, listAdapter.usersStartRow, listAdapter.usersEndRow, listAdapter.invitedStartRow, listAdapter.invitedEndRow); + listAdapter.updateRows(); + DiffUtil.calculateDiff(diffUtilsCallback).dispatchUpdatesTo(updateCallback); + } catch (Exception e) { + FileLog.e(e); + listAdapter.notifyDataSetChanged(); + } + if (minChild != null) { + layoutManager.scrollToPositionWithOffset(minPosition, minChild.getTop() - listView.getPaddingTop()); + } + oldParticipants.clear(); + oldParticipants.addAll(call.sortedParticipants); + oldInvited.clear(); + oldInvited.addAll(call.invitedUsers); + oldCount = listAdapter.getItemCount(); + for (int a = 0; a < count; a++) { + View child = listView.getChildAt(a); + if (child instanceof GroupCallUserCell || child instanceof GroupCallInvitedCell) { + RecyclerView.ViewHolder holder = listView.findContainingViewHolder(child); + if (holder != null) { + if (child instanceof GroupCallUserCell) { + ((GroupCallUserCell) child).setDrawDivider(holder.getAdapterPosition() != listAdapter.getItemCount() - 2); + } else if (child instanceof GroupCallInvitedCell) { + ((GroupCallInvitedCell) child).setDrawDivider(holder.getAdapterPosition() != listAdapter.getItemCount() - 2); + } + } + } + } + } + + private void updateItems() { + TLRPC.Chat newChat = accountInstance.getMessagesController().getChat(currentChat.id); + if (newChat != null) { + currentChat = newChat; + } + boolean anyVisible = false; + if (ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_INVITE)) { + inviteItem.setVisibility(View.VISIBLE); + anyVisible = true; + } else { + inviteItem.setVisibility(View.GONE); + } + if 
(ChatObject.canManageCalls(currentChat)) { + leaveItem.setVisibility(View.VISIBLE); + anyVisible = true; + } else { + leaveItem.setVisibility(View.GONE); + } + if (ChatObject.canManageCalls(currentChat) && call.call.can_change_join_muted) { + everyoneItem.setVisibility(View.VISIBLE); + adminItem.setVisibility(View.VISIBLE); + dividerItem.setVisibility(View.VISIBLE); + anyVisible = true; + } else { + everyoneItem.setVisibility(View.GONE); + adminItem.setVisibility(View.GONE); + dividerItem.setVisibility(View.GONE); + } + otherItem.setVisibility(anyVisible ? View.VISIBLE : View.GONE); + ((FrameLayout.LayoutParams) menuItemsContainer.getLayoutParams()).rightMargin = anyVisible ? 0 : AndroidUtilities.dp(6); + actionBar.setTitleRightMargin(AndroidUtilities.dp(48) * (anyVisible ? 2 : 1)); + } + + protected void makeFocusable(EditTextBoldCursor editText, boolean showKeyboard) { + if (!enterEventSent) { + BaseFragment fragment = parentActivity.getActionBarLayout().fragmentsStack.get(parentActivity.getActionBarLayout().fragmentsStack.size() - 1); + if (fragment instanceof ChatActivity) { + boolean keyboardVisible = ((ChatActivity) fragment).needEnterText(); + enterEventSent = true; + anyEnterEventSent = true; + AndroidUtilities.runOnUIThread(() -> { + if (groupVoipInviteAlert != null) { + groupVoipInviteAlert.setFocusable(true); + editText.requestFocus(); + if (showKeyboard) { + AndroidUtilities.runOnUIThread(() -> AndroidUtilities.showKeyboard(editText)); + } + } + }, keyboardVisible ? 
200 : 0); + } else { + enterEventSent = true; + anyEnterEventSent = true; + groupVoipInviteAlert.setFocusable(true); + editText.requestFocus(); + if (showKeyboard) { + AndroidUtilities.runOnUIThread(() -> AndroidUtilities.showKeyboard(editText)); + } + } + } + } + + public static void create(LaunchActivity activity, AccountInstance account) { + if (groupCallInstance != null || VoIPService.getSharedInstance() == null) { + return; + } + ChatObject.Call call = VoIPService.getSharedInstance().groupCall; + if (call == null) { + return; + } + TLRPC.Chat chat = account.getMessagesController().getChat(call.chatId); + if (chat == null) { + return; + } + groupCallInstance = new GroupCallActivity(activity, account, call, chat); + groupCallInstance.parentActivity = activity; + groupCallInstance.show(); + } + + public GroupCallActivity(Context context, AccountInstance account, ChatObject.Call call, TLRPC.Chat chat) { + super(context, false); + this.accountInstance = account; + this.call = call; + this.currentChat = chat; + this.currentAccount = account.getCurrentAccount(); + drawNavigationBar = true; + scrollNavBar = true; + navBarColorKey = null; + //useLightNavBar = true; + + scrimPaint = new Paint() { + @Override + public void setAlpha(int a) { + super.setAlpha(a); + if (containerView != null) { + containerView.invalidate(); + } + } + }; + setOnDismissListener(dialog -> { + BaseFragment fragment = parentActivity.getActionBarLayout().fragmentsStack.get(parentActivity.getActionBarLayout().fragmentsStack.size() - 1); + if (anyEnterEventSent) { + if (fragment instanceof ChatActivity) { + ((ChatActivity) fragment).onEditTextDialogClose(true); + } + } + }); + + setDimBehindAlpha(75); + + oldParticipants.addAll(call.sortedParticipants); + oldInvited.addAll(call.invitedUsers); + + selfDummyParticipant = new TLRPC.TL_groupCallParticipant(); + selfDummyParticipant.user_id = accountInstance.getUserConfig().getClientUserId(); + selfDummyParticipant.muted = true; + 
selfDummyParticipant.can_self_unmute = true; + selfDummyParticipant.date = accountInstance.getConnectionsManager().getCurrentTime(); + + currentCallState = VoIPService.getSharedInstance().getCallState(); + + VoIPService.audioLevelsCallback = (uids, levels, voice) -> { + for (int a = 0; a < uids.length; a++) { + TLRPC.TL_groupCallParticipant participant = call.participantsBySources.get(uids[a]); + if (participant != null) { + ArrayList array = delayedGroupCallUpdated ? oldParticipants : call.sortedParticipants; + int idx = array.indexOf(participant); + if (idx >= 0) { + RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(idx + listAdapter.usersStartRow); + if (holder != null) { + GroupCallUserCell cell = (GroupCallUserCell) holder.itemView; + cell.setAmplitude(levels[a] * 15.0f); + if (holder.itemView == scrimView) { + containerView.invalidate(); + } + } + } + } + } + }; + + accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.groupCallUpdated); + accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.needShowAlert); + accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.chatInfoDidLoad); + accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.didLoadChatAdmins); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didEndCall); + + shadowDrawable = context.getResources().getDrawable(R.drawable.sheet_shadow_round).mutate(); + + bigMicDrawable = new RLottieDrawable(R.raw.voice_outlined, "" + R.raw.voice_outlined, AndroidUtilities.dp(28), AndroidUtilities.dp(38), true, null); + + containerView = new FrameLayout(context) { + + private boolean ignoreLayout = false; + private RectF rect = new RectF(); + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int totalHeight = 
MeasureSpec.getSize(heightMeasureSpec); + if (Build.VERSION.SDK_INT >= 21) { + ignoreLayout = true; + setPadding(backgroundPaddingLeft, AndroidUtilities.statusBarHeight, backgroundPaddingLeft, 0); + ignoreLayout = false; + } + int availableHeight = totalHeight - getPaddingTop() - AndroidUtilities.dp(14 + 231); + + LayoutParams layoutParams = (LayoutParams) listView.getLayoutParams(); + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight() + AndroidUtilities.dp(14); + + layoutParams = (LayoutParams) actionBarShadow.getLayoutParams(); + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight(); + + int contentSize = Math.max(AndroidUtilities.dp(64 + 50 + 58 * 2.5f), availableHeight / 5 * 3); + int padding = Math.max(0, availableHeight - contentSize + AndroidUtilities.dp(8)); + if (listView.getPaddingTop() != padding) { + ignoreLayout = true; + listView.setPadding(0, padding, 0, 0); + ignoreLayout = false; + } + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(totalHeight, MeasureSpec.EXACTLY)); + } + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); + updateLayout(false); + } + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + if (ev.getAction() == MotionEvent.ACTION_DOWN && scrollOffsetY != 0 && ev.getY() < scrollOffsetY - AndroidUtilities.dp(37) && actionBar.getAlpha() == 0.0f) { + dismiss(); + return true; + } + return super.onInterceptTouchEvent(ev); + } + + @Override + public boolean onTouchEvent(MotionEvent e) { + return !isDismissed() && super.onTouchEvent(e); + } + + @Override + public void requestLayout() { + if (ignoreLayout) { + return; + } + super.requestLayout(); + } + + @Override + protected void onDraw(Canvas canvas) { + int offset = AndroidUtilities.dp(74); + float top = scrollOffsetY - offset; + + int height = getMeasuredHeight() + AndroidUtilities.dp(15) + backgroundPaddingTop; + float rad = 1.0f; + + if (top + 
backgroundPaddingTop < ActionBar.getCurrentActionBarHeight()) { + int willMoveUpTo = offset - backgroundPaddingTop - AndroidUtilities.dp(14); + float moveProgress = Math.min(1.0f, (ActionBar.getCurrentActionBarHeight() - top - backgroundPaddingTop) / willMoveUpTo); + int diff = (int) ((ActionBar.getCurrentActionBarHeight() - willMoveUpTo) * moveProgress); + top -= diff; + height += diff; + rad = 1.0f - moveProgress; + } + + top += getPaddingTop(); + + shadowDrawable.setBounds(0, (int) top, getMeasuredWidth(), height); + shadowDrawable.draw(canvas); + + if (rad != 1.0f) { + Theme.dialogs_onlineCirclePaint.setColor(backgroundColor); + rect.set(backgroundPaddingLeft, backgroundPaddingTop + top, getMeasuredWidth() - backgroundPaddingLeft, backgroundPaddingTop + top + AndroidUtilities.dp(24)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(12) * rad, AndroidUtilities.dp(12) * rad, Theme.dialogs_onlineCirclePaint); + } + + int finalColor = Color.argb((int) (255 * actionBar.getAlpha()), (int) (Color.red(backgroundColor) * 0.8f), (int) (Color.green(backgroundColor) * 0.8f), (int) (Color.blue(backgroundColor) * 0.8f)); + Theme.dialogs_onlineCirclePaint.setColor(finalColor); + canvas.drawRect(backgroundPaddingLeft, 0, getMeasuredWidth() - backgroundPaddingLeft, AndroidUtilities.statusBarHeight, Theme.dialogs_onlineCirclePaint); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + if (scrimView != null) { + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + float listTop = listView.getY(); + + boolean groupedBackgroundWasDraw = false; + + int count = listView.getChildCount(); + for (int num = 0; num < count; num++) { + View child = listView.getChildAt(num); + if (child != scrimView) { + continue; + } + + float viewClipLeft = Math.max(listView.getLeft(), listView.getLeft() + child.getX()); + float viewClipTop = Math.max(listTop, listView.getTop() + child.getY()); + float viewClipRight = 
Math.min(listView.getRight(), listView.getLeft() + child.getX() + child.getMeasuredWidth()); + float viewClipBottom = Math.min(listView.getY() + listView.getMeasuredHeight(), listView.getY() + child.getY() + child.getMeasuredHeight()); + + if (viewClipTop < viewClipBottom) { + if (child.getAlpha() != 1f) { + canvas.saveLayerAlpha(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom, (int) (255 * child.getAlpha()), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + + canvas.clipRect(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom); + canvas.translate(listView.getLeft() + child.getX(), listView.getY() + child.getY()); + rect.set(0, 0, child.getMeasuredWidth(), child.getMeasuredHeight()); + canvas.drawRoundRect(rect, AndroidUtilities.dp(13), AndroidUtilities.dp(13), listViewBackgroundPaint); + child.draw(canvas); + canvas.restore(); + } + } + } + } + }; + containerView.setWillNotDraw(false); + containerView.setPadding(backgroundPaddingLeft, 0, backgroundPaddingLeft, 0); + containerView.setKeepScreenOn(true); + containerView.setClipChildren(false); + + listView = new RecyclerListView(context) { + + @Override + public boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == scrimView) { + return false; + } + return super.drawChild(canvas, child, drawingTime); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + float maxBottom = 0; + float minTop = 0; + for (int a = 0, N = getChildCount(); a < N; a++) { + View child = getChildAt(a); + ViewHolder holder = findContainingViewHolder(child); + if (holder.getItemViewType() == 3) { + continue; + } + maxBottom = Math.max(maxBottom, child.getY() + child.getMeasuredHeight()); + if (a == 0) { + minTop = Math.max(0, child.getY()); + } else { + minTop = Math.min(minTop, Math.max(0, child.getY())); + } + } + rect.set(0, minTop, getMeasuredWidth(), Math.min(getMeasuredHeight(), maxBottom)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(13), AndroidUtilities.dp(13), 
listViewBackgroundPaint); + + canvas.save(); + canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight()); + super.dispatchDraw(canvas); + canvas.restore(); + } + }; + listView.setClipToPadding(false); + listView.setClipChildren(false); + DefaultItemAnimator itemAnimator = new DefaultItemAnimator() { + @Override + protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { + listView.invalidate(); + updateLayout(true); + } + }; + itemAnimator.setDelayAnimations(false); + listView.setItemAnimator(itemAnimator); + listView.setOnScrollListener(new RecyclerView.OnScrollListener() { + @Override + public void onScrolled(RecyclerView recyclerView, int dx, int dy) { + if (listView.getChildCount() <= 0) { + return; + } + if (!call.loadingMembers && !call.membersLoadEndReached && layoutManager.findLastVisibleItemPosition() > listAdapter.getItemCount() - 5) { + call.loadMembers(false); + } + updateLayout(true); + } + + @Override + public void onScrollStateChanged(RecyclerView recyclerView, int newState) { + if (newState == RecyclerView.SCROLL_STATE_IDLE) { + int offset = AndroidUtilities.dp(74); + float top = scrollOffsetY - offset; + if (top + backgroundPaddingTop < ActionBar.getCurrentActionBarHeight() && listView.canScrollVertically(1)) { + View child = listView.getChildAt(0); + RecyclerListView.Holder holder = (RecyclerListView.Holder) listView.findViewHolderForAdapterPosition(0); + if (holder != null && holder.itemView.getTop() > 0) { + listView.smoothScrollBy(0, holder.itemView.getTop()); + } + } + } + scrolling = newState == RecyclerView.SCROLL_STATE_DRAGGING; + } + }); + + listView.setVerticalScrollBarEnabled(false); + listView.setLayoutManager(layoutManager = new FillLastLinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false, 0, listView)); + layoutManager.setBind(false); + containerView.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT, 14, 14, 14, 231)); + 
listView.setAdapter(listAdapter = new ListAdapter(context)); + listView.setTopBottomSelectorRadius(13); + listView.setSelectorDrawableColor(Theme.getColor(Theme.key_voipgroup_listSelector)); + listView.setOnItemClickListener((view, position, x, y) -> { + if (view instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) view; + if (cell.isSelfUser()) { + return; + } + Bundle args = new Bundle(); + args.putInt("user_id", cell.getParticipant().user_id); + parentActivity.presentFragment(new ProfileActivity(args)); + dismiss(); + } else if (view instanceof GroupCallInvitedCell) { + GroupCallInvitedCell cell = (GroupCallInvitedCell) view; + if (cell.getUser() == null) { + return; + } + Bundle args = new Bundle(); + args.putInt("user_id", cell.getUser().id); + parentActivity.presentFragment(new ProfileActivity(args)); + dismiss(); + } else if (position == listAdapter.addMemberRow) { + TLRPC.ChatFull chatFull = accountInstance.getMessagesController().getChatFull(currentChat.id); + if (chatFull == null) { + return; + } + enterEventSent = false; + groupVoipInviteAlert = new GroupVoipInviteAlert(getContext(), accountInstance.getCurrentAccount(), currentChat, chatFull, call.participants, call.invitedUsersMap); + groupVoipInviteAlert.setOnDismissListener(dialog -> groupVoipInviteAlert = null); + groupVoipInviteAlert.setDelegate(new GroupVoipInviteAlert.GroupVoipInviteAlertDelegate() { + @Override + public void copyInviteLink() { + getLink(true); + } + + @Override + public void inviteUser(int id) { + inviteUserToCall(id, true); + } + + @Override + public void needOpenSearch(MotionEvent ev, EditTextBoldCursor editText) { + if (!enterEventSent) { + if (ev.getX() > editText.getLeft() && ev.getX() < editText.getRight() + && ev.getY() > editText.getTop() && ev.getY() < editText.getBottom()) { + makeFocusable(editText, true); + } else { + makeFocusable(editText, false); + } + } + } + }); + groupVoipInviteAlert.show(); + } + }); + 
listView.setOnItemLongClickListener((view, position) -> { + if (view instanceof GroupCallUserCell) { + updateItems(); + if (!ChatObject.canManageCalls(currentChat)) { + return false; + } + GroupCallUserCell cell = (GroupCallUserCell) view; + return cell.clickMuteButton(); + } + return false; + }); + + buttonsContainer = new FrameLayout(context) { + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + int cw = AndroidUtilities.dp(122); + int w = (getMeasuredWidth() - cw) / 2; + int h = getMeasuredHeight(); + + int x = (w - soundButton.getMeasuredWidth()) / 2; + int y = (h - leaveButton.getMeasuredHeight()) / 2 - AndroidUtilities.dp(9); + soundButton.layout(x, y, x + soundButton.getMeasuredWidth(), y + soundButton.getMeasuredHeight()); + + x = getMeasuredWidth() - w + (w - leaveButton.getMeasuredWidth()) / 2; + leaveButton.layout(x, y, x + leaveButton.getMeasuredWidth(), y + leaveButton.getMeasuredHeight()); + + x = (getMeasuredWidth() - muteButton.getMeasuredWidth()) / 2; + y = (h - muteButton.getMeasuredHeight()) / 2 - AndroidUtilities.dp(18); + muteButton.layout(x, y, x + muteButton.getMeasuredWidth(), y + muteButton.getMeasuredHeight()); + + for (int a = 0; a < 2; a++) { + x = (getMeasuredWidth() - muteLabel[a].getMeasuredWidth()) / 2; + y = h - AndroidUtilities.dp(35) - muteLabel[a].getMeasuredHeight(); + muteLabel[a].layout(x, y, x + muteLabel[a].getMeasuredWidth(), y + muteLabel[a].getMeasuredHeight()); + + x = (getMeasuredWidth() - muteSubLabel[a].getMeasuredWidth()) / 2; + y = h - AndroidUtilities.dp(17) - muteSubLabel[a].getMeasuredHeight(); + muteSubLabel[a].layout(x, y, x + muteSubLabel[a].getMeasuredWidth(), y + muteSubLabel[a].getMeasuredHeight()); + } + } + + final OvershootInterpolator overshootInterpolator = new OvershootInterpolator(1.5f); + int currentLightColor; + + @SuppressLint("DrawAllocation") + @Override + protected void dispatchDraw(Canvas canvas) { + int offset = (getMeasuredWidth() - 
getMeasuredHeight()) / 2; + + long newTime = SystemClock.elapsedRealtime(); + long dt = newTime - lastUpdateTime; + lastUpdateTime = newTime; + if (dt > 20) { + dt = 17; + } + + if (currentState != null) { + currentState.update(0, offset, getMeasuredHeight(), dt); + } + + tinyWaveDrawable.minRadius = AndroidUtilities.dp(62); + tinyWaveDrawable.maxRadius = AndroidUtilities.dp(62) + AndroidUtilities.dp(20) * BlobDrawable.FORM_SMALL_MAX; + + bigWaveDrawable.minRadius = AndroidUtilities.dp(65); + bigWaveDrawable.maxRadius = AndroidUtilities.dp(65) + AndroidUtilities.dp(20) * BlobDrawable.FORM_BIG_MAX; + + if (animateToAmplitude != amplitude) { + amplitude += animateAmplitudeDiff * dt; + if (animateAmplitudeDiff > 0) { + if (amplitude > animateToAmplitude) { + amplitude = animateToAmplitude; + } + } else { + if (amplitude < animateToAmplitude) { + amplitude = animateToAmplitude; + } + } + invalidate(); + } + + boolean canSwitchProgress = true; + if (prevState != null && prevState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + radialProgressView.toCircle(true, true); + if (!radialProgressView.isCircle()) { + canSwitchProgress = false; + } + } else if (prevState != null && currentState != null && currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + radialProgressView.toCircle(true, false); + } + if (canSwitchProgress) { + if (switchProgress != 1f) { + if (prevState != null && prevState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + switchProgress += dt / 100f; + } else { + switchProgress += dt / 180f; + } + + if (switchProgress >= 1.0f) { + switchProgress = 1f; + prevState = null; + if (currentState != null && currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + radialProgressView.toCircle(false, true); + } + } + invalidateColors = true; + invalidate(); + } + + if (invalidateColors && currentState != null) { + invalidateColors = false; + int lightingColor; + int soundButtonColor; + int soundButtonColorChecked; + if (prevState != null) { + 
fillColors(prevState.currentState, colorsTmp); + int oldLight = colorsTmp[0]; + int oldSound = colorsTmp[1]; + int oldSound2 = colorsTmp[2]; + fillColors(currentState.currentState, colorsTmp); + lightingColor = ColorUtils.blendARGB(oldLight, colorsTmp[0], switchProgress); + soundButtonColorChecked = ColorUtils.blendARGB(oldSound, colorsTmp[1], switchProgress); + soundButtonColor = ColorUtils.blendARGB(oldSound2, colorsTmp[2], switchProgress); + } else { + fillColors(currentState.currentState, colorsTmp); + lightingColor = colorsTmp[0]; + soundButtonColorChecked = colorsTmp[1]; + soundButtonColor = colorsTmp[2]; + } + if (currentLightColor != lightingColor) { + radialGradient = new RadialGradient(0, 0, AndroidUtilities.dp(100), new int[]{ColorUtils.setAlphaComponent(lightingColor, 60), ColorUtils.setAlphaComponent(lightingColor, 0)}, null, Shader.TileMode.CLAMP); + radialPaint.setShader(radialGradient); + currentLightColor = lightingColor; + } + + soundButton.setBackgroundColor(soundButtonColor, soundButtonColorChecked); + } + + boolean showWaves = false; + boolean showLighting = false; + if (currentState != null) { + showWaves = currentState.currentState == MUTE_BUTTON_STATE_MUTE || currentState.currentState == MUTE_BUTTON_STATE_UNMUTE; + showLighting = currentState.currentState != MUTE_BUTTON_STATE_CONNECTING; + } + + if (prevState != null && currentState != null && currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + showWavesProgress -= dt / 180f; + if (showWavesProgress < 0f) { + showWavesProgress = 0f; + } + invalidate(); + } else { + if (showWaves && showWavesProgress != 1f) { + showWavesProgress += dt / 350f; + if (showWavesProgress > 1f) { + showWavesProgress = 1f; + } + invalidate(); + } else if (!showWaves && showWavesProgress != 0) { + showWavesProgress -= dt / 350f; + if (showWavesProgress < 0f) { + showWavesProgress = 0f; + } + invalidate(); + } + } + + if (showLighting && showLightingProgress != 1f) { + showLightingProgress += dt / 350f; + 
if (showLightingProgress > 1f) { + showLightingProgress = 1f; + } + invalidate(); + } else if (!showLighting && showLightingProgress != 0) { + showLightingProgress -= dt / 350f; + if (showLightingProgress < 0f) { + showLightingProgress = 0f; + } + invalidate(); + } + } + + float showWavesProgressInterpolated = overshootInterpolator.getInterpolation(GroupCallActivity.this.showWavesProgress); + + showWavesProgressInterpolated = 0.4f + 0.6f * showWavesProgressInterpolated; + + bigWaveDrawable.update(amplitude, 1f); + tinyWaveDrawable.update(amplitude, 1f); + + if (prevState != null && currentState != null && (currentState.currentState == MUTE_BUTTON_STATE_CONNECTING || prevState.currentState == MUTE_BUTTON_STATE_CONNECTING)) { + float progress; + if (currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + progress = switchProgress; + paint.setShader(prevState.shader); + } else { + progress = 1f - switchProgress; + paint.setShader(currentState.shader); + } + + paintTmp.setColor(AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_disabledButton), colorProgress, 1.0f)); + + int cx = muteButton.getLeft() + muteButton.getMeasuredWidth() / 2; + int cy = muteButton.getTop() + muteButton.getMeasuredHeight() / 2; + radialMatrix.setTranslate(cx, cy); + radialGradient.setLocalMatrix(radialMatrix); + + paint.setAlpha(76); + + float radius = AndroidUtilities.dp(52) / 2f; + canvas.drawCircle(leaveButton.getX() + leaveButton.getMeasuredWidth() / 2f, leaveButton.getY() + radius, radius, leaveBackgroundPaint); + + + canvas.save(); + + canvas.scale(BlobDrawable.GLOBAL_SCALE, BlobDrawable.GLOBAL_SCALE, cx, cy); + + canvas.save(); + float scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude * 0.5f; + canvas.scale(scale * showLightingProgress, scale * showLightingProgress, cx, cy); + + float scaleLight = 0.7f + BlobDrawable.LIGHT_GRADIENT_SIZE; + canvas.save(); + 
canvas.scale(scaleLight, scaleLight, cx, cy); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(160), radialPaint); + canvas.restore(); + + canvas.restore(); + canvas.save(); + scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude; + canvas.scale(scale * showWavesProgressInterpolated, scale * showWavesProgressInterpolated, cx, cy); + bigWaveDrawable.draw(cx, cy, canvas, paint); + canvas.restore(); + + canvas.save(); + scale = BlobDrawable.SCALE_SMALL_MIN + BlobDrawable.SCALE_SMALL * amplitude; + canvas.scale(scale * showWavesProgressInterpolated, scale * showWavesProgressInterpolated, cx, cy); + tinyWaveDrawable.draw(cx, cy, canvas, paint); + canvas.restore(); + + paint.setAlpha(255); + + if (canSwitchProgress) { + canvas.drawCircle(cx, cy, AndroidUtilities.dp(57), paint); + paint.setColor(Theme.getColor(Theme.key_voipgroup_connectingProgress)); + if (progress != 0) { + paint.setAlpha((int) (255 * progress)); + paint.setShader(null); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(57), paint); + } + } + canvas.drawCircle(cx, cy, AndroidUtilities.dp(55) * progress, paintTmp); + if (!canSwitchProgress) { + radialProgressView.draw(canvas, cx, cy); + } + canvas.restore(); + invalidate(); + } else { + for (int i = 0; i < 2; i++) { + float alpha; + float buttonRadius = AndroidUtilities.dp(57); + if (i == 0 && prevState != null) { + paint.setShader(prevState.shader); + alpha = 1f - switchProgress; + if (prevState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + buttonRadius -= alpha * AndroidUtilities.dp(2); + } + } else if (i == 1) { + paint.setShader(currentState.shader); + alpha = switchProgress; + if (currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + buttonRadius -= alpha * AndroidUtilities.dp(2); + } + } else { + continue; + } + if (paint.getShader() == null) { + paint.setColor(AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_disabledButton), 
colorProgress, 1.0f)); + } + + int cx = muteButton.getLeft() + muteButton.getMeasuredWidth() / 2; + int cy = muteButton.getTop() + muteButton.getMeasuredHeight() / 2; + radialMatrix.setTranslate(cx, cy); + radialGradient.setLocalMatrix(radialMatrix); + + paint.setAlpha((int) (76 * alpha)); + if (i == 1) { + float radius = AndroidUtilities.dp(52) / 2f; + canvas.drawCircle(leaveButton.getX() + leaveButton.getMeasuredWidth() / 2, leaveButton.getY() + radius, radius, leaveBackgroundPaint); + } + + canvas.save(); + canvas.scale(BlobDrawable.GLOBAL_SCALE, BlobDrawable.GLOBAL_SCALE, cx, cy); + + canvas.save(); + float scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude * 0.5f; + canvas.scale(scale * showLightingProgress, scale * showLightingProgress, cx, cy); + if (i == 1) { + float scaleLight = 0.7f + BlobDrawable.LIGHT_GRADIENT_SIZE; + canvas.save(); + canvas.scale(scaleLight, scaleLight, cx, cy); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(160), radialPaint); + canvas.restore(); + } + canvas.restore(); + canvas.save(); + scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude; + canvas.scale(scale * showWavesProgressInterpolated, scale * showWavesProgressInterpolated, cx, cy); + bigWaveDrawable.draw(cx, cy, canvas, paint); + canvas.restore(); + + canvas.save(); + scale = BlobDrawable.SCALE_SMALL_MIN + BlobDrawable.SCALE_SMALL * amplitude; + canvas.scale(scale * showWavesProgressInterpolated, scale * showWavesProgressInterpolated, cx, cy); + tinyWaveDrawable.draw(cx, cy, canvas, paint); + canvas.restore(); + if (i == 0) { + paint.setAlpha(255); + } else { + paint.setAlpha((int) (255 * alpha)); + } + canvas.drawCircle(cx, cy, buttonRadius, paint); + + canvas.restore(); + + if (i == 1 && currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { + radialProgressView.draw(canvas, cx, cy); + } + } + invalidate(); + } + super.dispatchDraw(canvas); + } + + }; + buttonsContainer.setWillNotDraw(false); + 
containerView.addView(buttonsContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 231, Gravity.LEFT | Gravity.BOTTOM)); + + int color = Theme.getColor(Theme.key_voipgroup_unmuteButton2); + int r = Color.red(color); + int g = Color.green(color); + int b = Color.blue(color); + radialMatrix = new Matrix(); + radialGradient = new RadialGradient(0, 0, AndroidUtilities.dp(160), new int[]{Color.argb(50, r, g, b), Color.argb(0, r, g, b)}, null, Shader.TileMode.CLAMP); + radialPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + radialPaint.setShader(radialGradient); + + tinyWaveDrawable = new BlobDrawable(9); + bigWaveDrawable = new BlobDrawable(12); + + tinyWaveDrawable.minRadius = AndroidUtilities.dp(62); + tinyWaveDrawable.maxRadius = AndroidUtilities.dp(72); + tinyWaveDrawable.generateBlob(); + + bigWaveDrawable.minRadius = AndroidUtilities.dp(65); + bigWaveDrawable.maxRadius = AndroidUtilities.dp(75); + bigWaveDrawable.generateBlob(); + + tinyWaveDrawable.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_unmuteButton), (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + bigWaveDrawable.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_unmuteButton), (int) (255 * WaveDrawable.CIRCLE_ALPHA_1))); + + soundButton = new VoIPToggleButton(context); + soundButton.setCheckable(true); + soundButton.setTextSize(12); + buttonsContainer.addView(soundButton, LayoutHelper.createFrame(68, 90)); + soundButton.setOnClickListener(v -> { + if (VoIPService.getSharedInstance() == null) { + return; + } + VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(getContext(), false); + }); + + leaveButton = new VoIPToggleButton(context); + leaveButton.setDrawBackground(false); + leaveButton.setTextSize(12); + leaveButton.setData(R.drawable.calls_decline, 0xffffffff, Theme.getColor(Theme.key_voipgroup_leaveButton), 0.3f, false, LocaleController.getString("VoipGroupLeave", R.string.VoipGroupLeave), false, false); + 
buttonsContainer.addView(leaveButton, LayoutHelper.createFrame(68, 80)); + leaveButton.setOnClickListener(v -> { + updateItems(); + onLeaveClick(context, this::dismiss, false); + }); + + muteButton = new RLottieImageView(context) { + + @Override + public boolean onTouchEvent(MotionEvent event) { + if (event.getAction() == MotionEvent.ACTION_DOWN && muteButtonState == MUTE_BUTTON_STATE_UNMUTE) { + AndroidUtilities.runOnUIThread(pressRunnable, 300); + scheduled = true; + } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { + if (scheduled) { + AndroidUtilities.cancelRunOnUIThread(pressRunnable); + scheduled = false; + } else if (pressed) { + AndroidUtilities.cancelRunOnUIThread(unmuteRunnable); + updateMuteButton(MUTE_BUTTON_STATE_UNMUTE, true); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setMicMute(true, true, false); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + pressed = false; + MotionEvent cancel = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); + super.onTouchEvent(cancel); + cancel.recycle(); + return true; + } + } + return super.onTouchEvent(event); + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + + info.setClassName(Button.class.getName()); + info.setEnabled(muteButtonState == MUTE_BUTTON_STATE_UNMUTE || muteButtonState == MUTE_BUTTON_STATE_MUTE); + + if (muteButtonState == MUTE_BUTTON_STATE_MUTE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, LocaleController.getString("VoipMute", R.string.VoipMute))); + } + } + }; + muteButton.setAnimation(bigMicDrawable); + muteButton.setScaleType(ImageView.ScaleType.CENTER); + buttonsContainer.addView(muteButton, 
LayoutHelper.createFrame(122, 122, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); + muteButton.setOnClickListener(v -> { + if (VoIPService.getSharedInstance() == null || muteButtonState == MUTE_BUTTON_STATE_CONNECTING) { + return; + } + if (muteButtonState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + AndroidUtilities.shakeView(muteLabel[0], 2, 0); + AndroidUtilities.shakeView(muteSubLabel[0], 2, 0); + try { + Vibrator vibrator = (Vibrator) parentActivity.getSystemService(Context.VIBRATOR_SERVICE); + if (vibrator != null) { + vibrator.vibrate(200); + } + } catch (Exception e) { + FileLog.e(e); + } + return; + } + if (muteButtonState == MUTE_BUTTON_STATE_UNMUTE) { + updateMuteButton(MUTE_BUTTON_STATE_MUTE, true); + VoIPService.getSharedInstance().setMicMute(false, false, true); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } else { + updateMuteButton(MUTE_BUTTON_STATE_UNMUTE, true); + VoIPService.getSharedInstance().setMicMute(true, false, true); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + }); + + radialProgressView = new RadialProgressView(context); + radialProgressView.setSize(AndroidUtilities.dp(110)); + radialProgressView.setStrokeWidth(4); + radialProgressView.setProgressColor(Theme.getColor(Theme.key_voipgroup_connectingProgress)); + //buttonsContainer.addView(radialProgressView, LayoutHelper.createFrame(126, 126, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); + + for (int a = 0; a < 2; a++) { + muteLabel[a] = new TextView(context); + muteLabel[a].setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + muteLabel[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); + muteLabel[a].setGravity(Gravity.CENTER_HORIZONTAL); + buttonsContainer.addView(muteLabel[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM, 0, 0, 0, 26)); + + 
muteSubLabel[a] = new TextView(context); + muteSubLabel[a].setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + muteSubLabel[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 12); + muteSubLabel[a].setGravity(Gravity.CENTER_HORIZONTAL); + buttonsContainer.addView(muteSubLabel[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM, 0, 0, 0, 10)); + if (a == 1) { + muteLabel[a].setVisibility(View.INVISIBLE); + muteSubLabel[a].setVisibility(View.INVISIBLE); + } + } + + actionBar = new ActionBar(context) { + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + containerView.invalidate(); + } + }; + actionBar.setBackButtonImage(R.drawable.ic_ab_back); + actionBar.setOccupyStatusBar(false); + actionBar.setAllowOverlayTitle(false); + actionBar.setItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), false); + actionBar.setItemsBackgroundColor(Theme.getColor(Theme.key_actionBarActionModeDefaultSelector), false); + actionBar.setTitleColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + actionBar.setSubtitleColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled)); + actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == -1) { + dismiss(); + } else if (id == eveyone_can_speak_item) { + call.call.join_muted = false; + toggleAdminSpeak(); + } else if (id == admin_can_speak_item) { + call.call.join_muted = true; + toggleAdminSpeak(); + } else if (id == share_invite_link_item) { + getLink(false); + } else if (id == leave_item) { + AlertDialog.Builder builder = new AlertDialog.Builder(getContext()); + + builder.setTitle(LocaleController.getString("VoipGroupEndAlertTitle", R.string.VoipGroupEndAlertTitle)); + builder.setMessage(LocaleController.getString("VoipGroupEndAlertText", R.string.VoipGroupEndAlertText)); + + 
builder.setPositiveButton(LocaleController.getString("VoipGroupEnd", R.string.VoipGroupEnd), (dialogInterface, i) -> { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().hangUp(1); + } + dismiss(); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didStartedCall); + }); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + AlertDialog dialog = builder.create(); + + dialog.setBackgroundColor(Theme.getColor(Theme.key_voipgroup_dialogBackground)); + dialog.show(); + TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); + if (button != null) { + button.setTextColor(Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); + } + dialog.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + } + } + }); + + actionBar.setAlpha(0.0f); + actionBar.getBackButton().setScaleX(0.9f); + actionBar.getBackButton().setScaleY(0.9f); + actionBar.getBackButton().setTranslationX(-AndroidUtilities.dp(14)); + + actionBar.getTitleTextView().setTranslationY(AndroidUtilities.dp(23)); + actionBar.getSubtitleTextView().setTranslationY(AndroidUtilities.dp(20)); + + otherItem = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_voipgroup_actionBarItems)); + otherItem.setLongClickEnabled(false); + otherItem.setIcon(R.drawable.ic_ab_other); + otherItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); + otherItem.setSubMenuOpenSide(2); + otherItem.setDelegate(id -> actionBar.getActionBarMenuOnItemClick().onItemClick(id)); + otherItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_voipgroup_actionBarItemsSelector), 6)); + otherItem.setOnClickListener(v -> { + updateItems(); + if (call.call.join_muted) { + everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + everyoneItem.setChecked(false); + 
adminItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + adminItem.setChecked(true); + } else { + everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + everyoneItem.setChecked(true); + adminItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + adminItem.setChecked(false); + } + otherItem.toggleSubMenu(); + }); + otherItem.setPopupItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), false); + otherItem.setPopupItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), true); + + pipItem = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_voipgroup_actionBarItems)); + pipItem.setLongClickEnabled(false); + pipItem.setIcon(R.drawable.msg_voice_pip); + pipItem.setContentDescription(LocaleController.getString("AccDescrPipMode", R.string.AccDescrPipMode)); + pipItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_voipgroup_actionBarItemsSelector), 6)); + pipItem.setOnClickListener(v -> { + if (Build.VERSION.SDK_INT < 23 || Settings.canDrawOverlays(parentActivity)) { + GroupCallPip.clearForce(); + dismiss(); + } else { + AlertsCreator.createDrawOverlayGroupCallPermissionDialog(getContext()).show(); + } + }); + + titleTextView = new TextView(context); + titleTextView.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + titleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); + titleTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + titleTextView.setGravity(Gravity.LEFT | Gravity.TOP); + titleTextView.setText(LocaleController.getString("VoipGroupVoiceChat", R.string.VoipGroupVoiceChat)); + + actionBarBackground = new View(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), 
ActionBar.getCurrentActionBarHeight()); + } + }; + actionBarBackground.setAlpha(0.0f); + + containerView.addView(actionBarBackground, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 0)); + containerView.addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 23, 0, 48, 0)); + containerView.addView(actionBar, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 0)); + + menuItemsContainer = new LinearLayout(context); + menuItemsContainer.setOrientation(LinearLayout.HORIZONTAL); + menuItemsContainer.addView(pipItem, LayoutHelper.createLinear(48, 48)); + menuItemsContainer.addView(otherItem, LayoutHelper.createLinear(48, 48)); + containerView.addView(menuItemsContainer, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.RIGHT)); + + actionBarShadow = new View(context); + actionBarShadow.setAlpha(0.0f); + actionBarShadow.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); + containerView.addView(actionBarShadow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1)); + + for (int a = 0; a < 2; a++) { + undoView[a] = new UndoView(context); + undoView[a].setAdditionalTranslationY(AndroidUtilities.dp(10)); + if (Build.VERSION.SDK_INT >= 21) { + undoView[a].setTranslationZ(AndroidUtilities.dp(5)); + } + containerView.addView(undoView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 8, 0, 8, 8)); + } + + everyoneItem = otherItem.addSubItem(eveyone_can_speak_item, 0, LocaleController.getString("VoipGroupAllCanSpeak", R.string.VoipGroupAllCanSpeak), true); + adminItem = otherItem.addSubItem(admin_can_speak_item, 0, LocaleController.getString("VoipGroupOnlyAdminsCanSpeak", R.string.VoipGroupOnlyAdminsCanSpeak), true); + + 
everyoneItem.setCheckColor(Theme.getColor(Theme.key_voipgroup_checkMenu)); + everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + adminItem.setCheckColor(Theme.getColor(Theme.key_voipgroup_checkMenu)); + adminItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + dividerItem = otherItem.addDivider(Theme.getColor(Theme.key_voipgroup_listViewBackground)); + + inviteItem = otherItem.addSubItem(share_invite_link_item, R.drawable.msg_link, LocaleController.getString("VoipGroupShareInviteLink", R.string.VoipGroupShareInviteLink)); + leaveItem = otherItem.addSubItem(leave_item, R.drawable.msg_endcall, LocaleController.getString("VoipGroupEndChat", R.string.VoipGroupEndChat)); + otherItem.setPopupItemsSelectorColor(Theme.getColor(Theme.key_voipgroup_listSelector)); + + leaveItem.setColors(Theme.getColor(Theme.key_voipgroup_leaveCallMenu), Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); + inviteItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + + listAdapter.notifyDataSetChanged(); + oldCount = listAdapter.getItemCount(); + + actionBar.setTitle(currentChat.title); + actionBar.setSubtitle(LocaleController.formatPluralString("Participants", call.call.participants_count + (listAdapter.addSelfToCounter() ? 
1 : 0))); + actionBar.setTitleRightMargin(AndroidUtilities.dp(48) * 2); + + VoIPService.getSharedInstance().registerStateListener(this); + updateItems(); + updateSpeakerPhoneIcon(false); + updateState(false, false); + setColorProgress(0.0f); + + leaveBackgroundPaint.setColor(Theme.getColor(Theme.key_voipgroup_leaveButton)); + } + + @Override + public void dismissInternal() { + super.dismissInternal(); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().unregisterStateListener(this); + } + if (groupCallInstance == this) { + groupCallInstance = null; + } + groupCallUiVisible = false; + + VoIPService.audioLevelsCallback = null; + GroupCallPip.updateVisibility(getContext()); + } + + public final static float MAX_AMPLITUDE = 8_500f; + + private void setAmplitude(double value) { + animateToAmplitude = (float) (Math.min(MAX_AMPLITUDE, value) / MAX_AMPLITUDE); + animateAmplitudeDiff = (animateToAmplitude - amplitude) / (100 + 500.0f * BlobDrawable.AMPLITUDE_SPEED); + } + + @Override + public void onStateChanged(int state) { + currentCallState = state; + updateState(isShowing(), false); + } + + private UndoView getUndoView() { + if (undoView[0].getVisibility() == View.VISIBLE) { + UndoView old = undoView[0]; + undoView[0] = undoView[1]; + undoView[1] = old; + old.hide(true, 2); + containerView.removeView(undoView[0]); + containerView.addView(undoView[0]); + } + return undoView[0]; + } + + private float getColorProgress() { + return colorProgress; + } + + private void setColorProgress(float progress) { + colorProgress = progress; + backgroundColor = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_actionBarUnscrolled), Theme.getColor(Theme.key_voipgroup_actionBar), progress, 1.0f); + actionBarBackground.setBackgroundColor(backgroundColor); + otherItem.redrawPopup(0xff232A33); + shadowDrawable.setColorFilter(new PorterDuffColorFilter(backgroundColor, PorterDuff.Mode.MULTIPLY)); + navBarColor = backgroundColor; + + int color = 
AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_listViewBackground), progress, 1.0f); + dividerItem.setBackgroundColor(color); + listViewBackgroundPaint.setColor(color); + listView.setGlowColor(color); + + if (muteButtonState == MUTE_BUTTON_STATE_CONNECTING || muteButtonState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + muteButton.invalidate(); + } + + color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_leaveButton), Theme.getColor(Theme.key_voipgroup_leaveButtonScrolled), progress, 1.0f); + leaveButton.setBackgroundColor(color, color); + + color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_lastSeenText), progress, 1.0f); + int color2 = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_mutedIconUnscrolled), Theme.getColor(Theme.key_voipgroup_mutedIcon), progress, 1.0f); + for (int a = 0, N = listView.getChildCount(); a < N; a++) { + View child = listView.getChildAt(a); + if (child instanceof GroupCallTextCell) { + GroupCallTextCell cell = (GroupCallTextCell) child; + cell.setColors(color2, color); + } else if (child instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) child; + cell.setGrayIconColor(actionBar.getTag() != null ? Theme.key_voipgroup_mutedIcon : Theme.key_voipgroup_mutedIconUnscrolled, color2); + } else if (child instanceof GroupCallInvitedCell) { + GroupCallInvitedCell cell = (GroupCallInvitedCell) child; + cell.setGrayIconColor(actionBar.getTag() != null ? 
Theme.key_voipgroup_mutedIcon : Theme.key_voipgroup_mutedIconUnscrolled, color2); + } + } + containerView.invalidate(); + listView.invalidate(); + container.invalidate(); + } + + private void getLink(boolean copy) { + TLRPC.ChatFull chatFull = accountInstance.getMessagesController().getChatFull(currentChat.id); + String url; + if (!TextUtils.isEmpty(currentChat.username)) { + url = accountInstance.getMessagesController().linkPrefix + "/" + currentChat.username; + } else { + url = chatFull != null && chatFull.exported_invite instanceof TLRPC.TL_chatInviteExported ? chatFull.exported_invite.link : null; + } + if (TextUtils.isEmpty(url)) { + TLRPC.TL_messages_exportChatInvite req = new TLRPC.TL_messages_exportChatInvite(); + req.peer = MessagesController.getInputPeer(currentChat); + accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (response instanceof TLRPC.TL_chatInviteExported) { + TLRPC.ExportedChatInvite invite = (TLRPC.ExportedChatInvite) response; + if (chatFull != null) { + chatFull.exported_invite = invite; + } + openShareAlert(invite.link, copy); + } + })); + } else { + openShareAlert(url, copy); + } + } + + private void openShareAlert(String url, boolean copy) { + if (copy) { + AndroidUtilities.addToClipboard(url); + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_LINK_COPIED, null, null, null, null); + } else { + String message = LocaleController.formatString("VoipGroupInviteText", R.string.VoipGroupInviteText, url); + boolean keyboardIsOpen = false; + if (parentActivity != null) { + BaseFragment fragment = parentActivity.getActionBarLayout().fragmentsStack.get(parentActivity.getActionBarLayout().fragmentsStack.size() - 1); + if (fragment instanceof ChatActivity) { + keyboardIsOpen = ((ChatActivity) fragment).needEnterText(); + anyEnterEventSent = true; + enterEventSent = true; + } + } + + shareAlert = new ShareAlert(getContext(), null, message, false, url, false); + 
shareAlert.setDelegate(new ShareAlert.ShareAlertDelegate() { + @Override + public boolean didCopy() { + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_LINK_COPIED, null, null, null, null); + return true; + } + }); + shareAlert.setOnDismissListener(dialog -> shareAlert = null); + AndroidUtilities.runOnUIThread(() -> { + if (shareAlert != null) { + shareAlert.show(); + } + }, keyboardIsOpen ? 200 : 0); + } + } + + private void inviteUserToCall(int id, boolean shouldAdd) { + TLRPC.User user = accountInstance.getMessagesController().getUser(id); + if (user == null) { + return; + } + final AlertDialog[] progressDialog = new AlertDialog[]{new AlertDialog(getContext(), 3)}; + TLRPC.TL_phone_inviteToGroupCall req = new TLRPC.TL_phone_inviteToGroupCall(); + req.call = call.getInputGroupCall(); + TLRPC.TL_inputUser inputUser = new TLRPC.TL_inputUser(); + inputUser.user_id = user.id; + inputUser.access_hash = user.access_hash; + req.users.add(inputUser); + int requestId = accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { + if (response != null) { + accountInstance.getMessagesController().processUpdates((TLRPC.Updates) response, false); + AndroidUtilities.runOnUIThread(() -> { + if (call != null && !delayedGroupCallUpdated) { + call.addInvitedUser(id); + applyCallParticipantUpdates(); + if (groupVoipInviteAlert != null) { + groupVoipInviteAlert.dismiss(); + } + try { + progressDialog[0].dismiss(); + } catch (Throwable ignore) { + + } + progressDialog[0] = null; + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_INVITED, user, null, null, null); + } + }); + } else { + AndroidUtilities.runOnUIThread(() -> { + try { + progressDialog[0].dismiss(); + } catch (Throwable ignore) { + + } + progressDialog[0] = null; + if (shouldAdd && "USER_NOT_PARTICIPANT".equals(error.text)) { + processSelectedOption(id, 3); + } else { + BaseFragment fragment = 
parentActivity.getActionBarLayout().fragmentsStack.get(parentActivity.getActionBarLayout().fragmentsStack.size() - 1); + AlertsCreator.processError(currentAccount, error, fragment, req); + } + }); + } + }); + if (requestId != 0) { + AndroidUtilities.runOnUIThread(() -> { + if (progressDialog[0] == null) { + return; + } + progressDialog[0].setOnCancelListener(dialog -> accountInstance.getConnectionsManager().cancelRequest(requestId, true)); + progressDialog[0].show(); + }, 500); + } + } + + private void updateLayout(boolean animated) { + if (listView.getChildCount() <= 0) { + listView.setTopGlowOffset((int) (scrollOffsetY = listView.getPaddingTop())); + containerView.invalidate(); + return; + } + RecyclerListView.Holder holder = null; + for (int a = 0, N = listView.getChildCount(); a < N; a++) { + View child = listView.getChildAt(a); + holder = (RecyclerListView.Holder) listView.findContainingViewHolder(child); + if (holder != null) { + if (holder.getAdapterPosition() == 0) { + break; + } else { + holder = null; + } + } + } + float newOffset = holder != null ? Math.max(0, holder.itemView.getY()) : 0; + boolean show = newOffset <= ActionBar.getCurrentActionBarHeight() - AndroidUtilities.dp(14); + + if (show && actionBar.getTag() == null || !show && actionBar.getTag() != null) { + actionBar.setTag(show ? 1 : null); + if (actionBarAnimation != null) { + actionBarAnimation.cancel(); + actionBarAnimation = null; + } + + actionBar.getBackButton().animate() + .scaleX(show ? 1.0f : 0.9f) + .scaleY(show ? 1.0f : 0.9f) + .translationX(show ? 0.0f : -AndroidUtilities.dp(14)) + .setDuration(300) + .setInterpolator(CubicBezierInterpolator.DEFAULT) + .start(); + + actionBar.getTitleTextView().animate() + .translationY(show ? 0.0f : AndroidUtilities.dp(23)) + .setDuration(300) + .setInterpolator(CubicBezierInterpolator.DEFAULT) + .start(); + + actionBar.getSubtitleTextView().animate() + .translationY(show ? 
0.0f : AndroidUtilities.dp(20)) + .setDuration(300) + .setInterpolator(CubicBezierInterpolator.DEFAULT) + .start(); + + /*titleTextView.animate() + .scaleX(show ? 0.9f : 1.0f) + .scaleY(show ? 0.9f : 1.0f) + .alpha(show ? 0.0f : 1.0f) + .setDuration(140) + .setInterpolator(CubicBezierInterpolator.DEFAULT) + .start();*/ + + actionBarAnimation = new AnimatorSet(); + actionBarAnimation.setDuration(140); + actionBarAnimation.playTogether( + ObjectAnimator.ofFloat(actionBar, View.ALPHA, show ? 1.0f : 0.0f), + ObjectAnimator.ofFloat(actionBarBackground, View.ALPHA, show ? 1.0f : 0.0f), + ObjectAnimator.ofFloat(actionBarShadow, View.ALPHA, show ? 1.0f : 0.0f)); + actionBarAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + actionBarAnimation = null; + } + }); + actionBarAnimation.start(); + } + + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) listView.getLayoutParams(); + newOffset += layoutParams.topMargin; + if (scrollOffsetY != newOffset) { + listView.setTopGlowOffset((int) ((scrollOffsetY = newOffset) - layoutParams.topMargin)); + + int offset = AndroidUtilities.dp(74); + float t = scrollOffsetY - offset; + int diff; + if (t + backgroundPaddingTop < ActionBar.getCurrentActionBarHeight() * 2) { + int willMoveUpTo = offset - backgroundPaddingTop - AndroidUtilities.dp(14) + ActionBar.getCurrentActionBarHeight(); + float moveProgress = Math.min(1.0f, (ActionBar.getCurrentActionBarHeight() * 2 - t - backgroundPaddingTop) / willMoveUpTo); + diff = (int) (AndroidUtilities.dp(AndroidUtilities.isTablet() ? 
/* NOTE(review): continuation of updateLayout() — scroll-driven title scale/fade math; fragment starts mid-expression. */ 17 : 13) * moveProgress); + float newProgress = Math.min(1.0f, moveProgress); + if (Math.abs(newProgress - colorProgress) > 0.0001f) { + setColorProgress(Math.min(1.0f, moveProgress)); + } + titleTextView.setScaleX(Math.max(0.9f, 1.0f - 0.1f * moveProgress * 1.2f)); + titleTextView.setScaleY(Math.max(0.9f, 1.0f - 0.1f * moveProgress * 1.2f)); + titleTextView.setAlpha(Math.max(0.0f, 1.0f - moveProgress * 1.2f)); + } else { + diff = 0; + titleTextView.setScaleX(1.0f); + titleTextView.setScaleY(1.0f); + titleTextView.setAlpha(1.0f); + if (colorProgress > 0.0001f) { + setColorProgress(0.0f); + } + } + + menuItemsContainer.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(53) - diff)); + titleTextView.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(44) - diff)); + containerView.invalidate(); + } + } + + /** Cancels a pending or in-progress press-to-talk gesture: unschedules pressRunnable if it was scheduled and, if the button is currently pressed, delivers a synthetic ACTION_CANCEL MotionEvent to muteButton (the obtained event is recycled afterwards). */ private void cancelMutePress() { + if (scheduled) { + scheduled = false; + AndroidUtilities.cancelRunOnUIThread(pressRunnable); + } + if (pressed) { + pressed = false; + MotionEvent cancel = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); + muteButton.onTouchEvent(cancel); + cancel.recycle(); + } + } + + /** Recomputes the big mute-button state. While the call is still being established (WAIT_INIT / WAIT_INIT_ACK / CREATING / RECONNECTING) it forces MUTE_BUTTON_STATE_CONNECTING; otherwise it derives the state from the local participant's muted / can_self_unmute flags and admin rights. selfUpdated marks an update originating from this client (continues on the next source line). */ private void updateState(boolean animated, boolean selfUpdated) { + if (currentCallState == VoIPService.STATE_WAIT_INIT || currentCallState == VoIPService.STATE_WAIT_INIT_ACK || currentCallState == VoIPService.STATE_CREATING || currentCallState == VoIPService.STATE_RECONNECTING) { + cancelMutePress(); + updateMuteButton(MUTE_BUTTON_STATE_CONNECTING, animated); + } else { + if (VoIPService.getSharedInstance() == null) { + return; + } + TLRPC.TL_groupCallParticipant participant = call.participants.get(accountInstance.getUserConfig().getClientUserId()); + /* Muted by an admin and not allowed to self-unmute (and we cannot manage calls): show the locked state and force the mic off. */ if (participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(currentChat)) { + cancelMutePress(); + updateMuteButton(MUTE_BUTTON_STATE_MUTED_BY_ADMIN, animated); + 
/* NOTE(review): continuation of updateState() from the previous source line — completes the muted-by-admin branch. */ VoIPService.getSharedInstance().setMicMute(true, false, false); + } else { + boolean micMuted = VoIPService.getSharedInstance().isMicMute(); + /* Server says we are muted but the local mic is still live: sync local state to the server before choosing the button state. */ if (selfUpdated && participant != null && participant.muted && !micMuted) { + cancelMutePress(); + VoIPService.getSharedInstance().setMicMute(true, false, false); + micMuted = true; + } + if (micMuted) { + updateMuteButton(MUTE_BUTTON_STATE_UNMUTE, animated); + } else { + updateMuteButton(MUTE_BUTTON_STATE_MUTE, animated); + } + } + } + } + + /** Audio-route change callback: refreshes the speakerphone/bluetooth button and re-applies participant state to every visible GroupCallUserCell in the list. */ @Override + public void onAudioSettingsChanged() { + updateSpeakerPhoneIcon(true); + for (int a = 0, N = listView.getChildCount(); a < N; a++) { + View child = listView.getChildAt(a); + if (child instanceof GroupCallUserCell) { + ((GroupCallUserCell) child).applyParticipantChanges(true); + } + } + } + + /** Updates soundButton to reflect the audio route reported by VoIPService: bluetooth (on, or about to turn on) takes precedence, then speakerphone, then wired headset, falling back to the speaker icon. No-op when the button is not built yet or the service is gone. */ private void updateSpeakerPhoneIcon(boolean animated) { + if (soundButton == null) { + return; + } + VoIPService service = VoIPService.getSharedInstance(); + if (service == null) { + return; + } + + boolean bluetooth = service.isBluetoothOn() || service.isBluetoothWillOn(); + /* "checked" highlights the button only when audio actually goes to the loudspeaker. */ boolean checked = !bluetooth && service.isSpeakerphoneOn(); + + if (bluetooth) { + soundButton.setData(R.drawable.calls_bluetooth, Color.WHITE, 0, 0.1f, true, LocaleController.getString("VoipAudioRoutingBluetooth", R.string.VoipAudioRoutingBluetooth), false, animated); + } else if (checked) { + soundButton.setData(R.drawable.calls_speaker, Color.WHITE, 0, 0.3f, true, LocaleController.getString("VoipSpeaker", R.string.VoipSpeaker), false, animated); + } else { + if (service.isHeadsetPlugged()) { + soundButton.setData(R.drawable.calls_headphones, Color.WHITE, 0, 0.1f, true, LocaleController.getString("VoipAudioRoutingHeadset", R.string.VoipAudioRoutingHeadset), false, animated); + } else { + soundButton.setData(R.drawable.calls_speaker, Color.WHITE, 0, 0.1f, true, LocaleController.getString("VoipSpeaker", R.string.VoipSpeaker), false, animated); + } + } + soundButton.setChecked(checked, animated); + } + + private void 
updateMuteButton(int state, boolean animated) { + if (muteButtonState == state && animated) { + return; + } + if (muteButtonAnimator != null) { + muteButtonAnimator.cancel(); + muteButtonAnimator = null; + } + + String newText; + String newSubtext; + + boolean changed; + if (state == MUTE_BUTTON_STATE_UNMUTE) { + newText = LocaleController.getString("VoipGroupUnmute", R.string.VoipGroupUnmute); + newSubtext = LocaleController.getString("VoipHoldAndTalk", R.string.VoipHoldAndTalk); + changed = bigMicDrawable.setCustomEndFrame(13); + } else if (state == MUTE_BUTTON_STATE_MUTE) { + newText = LocaleController.getString("VoipTapToMute", R.string.VoipTapToMute); + newSubtext = ""; + changed = bigMicDrawable.setCustomEndFrame(24); + } else { + if (state == MUTE_BUTTON_STATE_CONNECTING) { + newText = LocaleController.getString("Connecting", R.string.Connecting); + newSubtext = ""; + } else { + newText = LocaleController.getString("VoipMutedByAdmin", R.string.VoipMutedByAdmin); + newSubtext = LocaleController.getString("VoipMutedByAdminInfo", R.string.VoipMutedByAdminInfo); + } + changed = bigMicDrawable.setCustomEndFrame(13); + } + + final String contentDescription; + if (!TextUtils.isEmpty(newSubtext)) { + contentDescription = newText + " " + newSubtext; + } else { + contentDescription = newText; + } + muteButton.setContentDescription(contentDescription); + + if (animated) { + if (changed) { + if (state == MUTE_BUTTON_STATE_MUTE) { + bigMicDrawable.setCurrentFrame(12); + } else { + bigMicDrawable.setCurrentFrame(0); + } + } + muteButton.playAnimation(); + muteLabel[1].setVisibility(View.VISIBLE); + muteLabel[1].setAlpha(0.0f); + muteLabel[1].setTranslationY(-AndroidUtilities.dp(5)); + muteLabel[1].setText(newText); + muteSubLabel[1].setVisibility(View.VISIBLE); + muteSubLabel[1].setAlpha(0.0f); + muteSubLabel[1].setTranslationY(-AndroidUtilities.dp(5)); + muteSubLabel[1].setText(newSubtext); + + muteButtonAnimator = ValueAnimator.ofFloat(0.0f, 1.0f); + 
muteButtonAnimator.addUpdateListener(animation -> { + float v = (float) animation.getAnimatedValue(); + muteLabel[0].setAlpha(1.0f - v); + muteLabel[0].setTranslationY(AndroidUtilities.dp(5) * v); + muteSubLabel[0].setAlpha(1.0f - v); + muteSubLabel[0].setTranslationY(AndroidUtilities.dp(5) * v); + muteLabel[1].setAlpha(v); + muteLabel[1].setTranslationY(AndroidUtilities.dp(-5 + 5 * v)); + muteSubLabel[1].setAlpha(v); + muteSubLabel[1].setTranslationY(AndroidUtilities.dp(-5 + 5 * v)); + }); + muteButtonAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + muteButtonAnimator = null; + TextView temp = muteLabel[0]; + muteLabel[0] = muteLabel[1]; + muteLabel[1] = temp; + temp.setVisibility(View.INVISIBLE); + temp = muteSubLabel[0]; + muteSubLabel[0] = muteSubLabel[1]; + muteSubLabel[1] = temp; + temp.setVisibility(View.INVISIBLE); + for (int a = 0; a < 2; a++) { + muteLabel[a].setTranslationY(0); + muteSubLabel[a].setTranslationY(0); + } + } + }); + muteButtonAnimator.setDuration(180); + muteButtonAnimator.start(); + muteButtonState = state; + } else { + muteButtonState = state; + bigMicDrawable.setCurrentFrame(bigMicDrawable.getCustomEndFrame() - 1, false, true); + muteLabel[0].setText(newText); + muteSubLabel[0].setText(newSubtext); + } + updateMuteButtonState(animated); + } + + private void fillColors(int state, int[] colorsToSet) { + if (state == MUTE_BUTTON_STATE_UNMUTE) { + colorsToSet[0] = Theme.getColor(Theme.key_voipgroup_unmuteButton2); + colorsToSet[1] = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_soundButtonActive), Theme.getColor(Theme.key_voipgroup_soundButtonActiveScrolled), colorProgress, 1.0f); + colorsToSet[2] = Theme.getColor(Theme.key_voipgroup_soundButton); + } else if (state == MUTE_BUTTON_STATE_MUTE) { + colorsToSet[0] = Theme.getColor(Theme.key_voipgroup_muteButton2); + colorsToSet[1] = 
AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_soundButtonActive2), Theme.getColor(Theme.key_voipgroup_soundButtonActive2Scrolled), colorProgress, 1.0f); + colorsToSet[2] = Theme.getColor(Theme.key_voipgroup_soundButton2); + } else if (state == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + colorsToSet[0] = Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient3); + colorsToSet[1] = Theme.getColor(Theme.key_voipgroup_mutedByAdminMuteButton); + colorsToSet[2] = Theme.getColor(Theme.key_voipgroup_mutedByAdminMuteButtonDisabled); + } else { + colorsToSet[0] = Theme.getColor(Theme.key_voipgroup_disabledButton); + colorsToSet[1] = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_disabledButtonActive), Theme.getColor(Theme.key_voipgroup_disabledButtonActiveScrolled), colorProgress, 1.0f); + colorsToSet[2] = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_disabledButton), colorProgress, 1.0f); + } + } + + private void updateMuteButtonState(boolean animated) { + muteButton.invalidate(); + + if (states[muteButtonState] == null) { + states[muteButtonState] = new WeavingState(muteButtonState); + if (muteButtonState == MUTE_BUTTON_STATE_CONNECTING) { + states[muteButtonState].shader = null; + } else { + if (muteButtonState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + states[muteButtonState].shader = new LinearGradient(0,400, 400, 0, new int[]{Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient),Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient3), Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient2)}, null, Shader.TileMode.CLAMP); + } else if (muteButtonState == MUTE_BUTTON_STATE_MUTE) { + states[muteButtonState].shader = new RadialGradient(200, 200, 200, new int[]{Theme.getColor(Theme.key_voipgroup_muteButton), Theme.getColor(Theme.key_voipgroup_muteButton3)}, null, Shader.TileMode.CLAMP); + } else { + states[muteButtonState].shader = new 
RadialGradient(200, 200, 200, new int[]{Theme.getColor(Theme.key_voipgroup_unmuteButton2), Theme.getColor(Theme.key_voipgroup_unmuteButton)}, null, Shader.TileMode.CLAMP); + } + } + } + if (states[muteButtonState] != currentState) { + prevState = currentState; + currentState = states[muteButtonState]; + if (prevState == null || !animated) { + switchProgress = 1; + prevState = null; + } else { + switchProgress = 0; + } + } + + if (!animated) { + boolean showWaves = false; + boolean showLighting = false; + if (currentState != null) { + showWaves = currentState.currentState == MUTE_BUTTON_STATE_MUTE || currentState.currentState == MUTE_BUTTON_STATE_UNMUTE; + showLighting = currentState.currentState != MUTE_BUTTON_STATE_CONNECTING; + } + showWavesProgress = showWaves ? 1f : 0f; + showLightingProgress = showLighting ? 1f : 0f; + } + + buttonsContainer.invalidate(); + } + + private static void processOnLeave(ChatObject.Call call, boolean discard, Runnable onLeave) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().hangUp(discard ? 
1 : 0); + } + if (call != null) { + int selfUserId = UserConfig.getInstance(call.currentAccount).clientUserId; + TLRPC.TL_groupCallParticipant participant = call.participants.get(selfUserId); + if (participant != null) { + call.participants.delete(selfUserId); + call.sortedParticipants.remove(participant); + call.call.participants_count--; + } + } + if (onLeave != null) { + onLeave.run(); + } + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didStartedCall); + } + + public static void onLeaveClick(Context context, Runnable onLeave, boolean fromOverlayWindow) { + VoIPService service = VoIPService.getSharedInstance(); + if (service == null) { + return; + } + TLRPC.Chat currentChat = service.getChat(); + ChatObject.Call call = service.groupCall; + if (!ChatObject.canManageCalls(currentChat)) { + processOnLeave(call, false, onLeave); + return; + } + AlertDialog.Builder builder = new AlertDialog.Builder(context); + + builder.setTitle(LocaleController.getString("VoipGroupLeaveAlertTitle", R.string.VoipGroupLeaveAlertTitle)); + builder.setMessage(LocaleController.getString("VoipGroupLeaveAlertText", R.string.VoipGroupLeaveAlertText)); + + int currentAccount = service.getAccount(); + + CheckBoxCell[] cells = new CheckBoxCell[1]; + + LinearLayout linearLayout = new LinearLayout(context); + linearLayout.setOrientation(LinearLayout.VERTICAL); + + cells[0] = new CheckBoxCell(context, 1); + cells[0].setBackgroundDrawable(Theme.getSelectorDrawable(false)); + if (fromOverlayWindow) { + cells[0].setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + } else { + cells[0].setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + CheckBoxSquare checkBox = (CheckBoxSquare) cells[0].getCheckBoxView(); + checkBox.setColors(Theme.key_voipgroup_mutedIcon, Theme.key_voipgroup_listeningText, Theme.key_voipgroup_nameText); + } + cells[0].setTag(0); + cells[0].setText(LocaleController.getString("VoipGroupLeaveAlertEndChat", 
R.string.VoipGroupLeaveAlertEndChat), "", false, false); + + cells[0].setPadding(LocaleController.isRTL ? AndroidUtilities.dp(16) : AndroidUtilities.dp(8), 0, LocaleController.isRTL ? AndroidUtilities.dp(8) : AndroidUtilities.dp(16), 0); + linearLayout.addView(cells[0], LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + cells[0].setOnClickListener(v -> { + Integer num = (Integer) v.getTag(); + cells[num].setChecked(!cells[num].isChecked(), true); + }); + + builder.setCustomViewOffset(12); + builder.setView(linearLayout); + + builder.setPositiveButton(LocaleController.getString("VoipGroupLeave", R.string.VoipGroupLeave), (dialogInterface, position) -> processOnLeave(call, cells[0].isChecked(), onLeave)); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + if (fromOverlayWindow) { + builder.setDimEnabled(false); + } + AlertDialog dialog = builder.create(); + if (fromOverlayWindow) { + if (Build.VERSION.SDK_INT >= 26) { + dialog.getWindow().setType(WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY); + } else { + dialog.getWindow().setType(WindowManager.LayoutParams.TYPE_SYSTEM_ALERT); + } + dialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND); + } + if (!fromOverlayWindow) { + dialog.setBackgroundColor(Theme.getColor(Theme.key_voipgroup_dialogBackground)); + } + dialog.show(); + if (!fromOverlayWindow) { + TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); + if (button != null) { + button.setTextColor(Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); + } + dialog.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + } + } + + private Paint scrimPaint; + private View scrimView; + private int popupAnimationIndex = -1; + private AnimatorSet scrimAnimatorSet; + private ActionBarPopupWindow scrimPopupWindow; + private ActionBarMenuSubItem[] scrimPopupWindowItems; + + private void processSelectedOption(int userId, int option) { + 
TLRPC.User user = accountInstance.getMessagesController().getUser(userId); + if (option == 0 || option == 2 || option == 3) { + if (option == 0) { + if (VoIPService.getSharedInstance() == null) { + return; + } + VoIPService.getSharedInstance().editCallMember(user, true); + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_MUTED, user, null, null, null); + return; + } + AlertDialog.Builder builder = new AlertDialog.Builder(getContext()); + + TextView messageTextView = new TextView(getContext()); + messageTextView.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + messageTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + messageTextView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP); + + FrameLayout frameLayout = new FrameLayout(getContext()); + builder.setView(frameLayout); + + AvatarDrawable avatarDrawable = new AvatarDrawable(); + avatarDrawable.setTextSize(AndroidUtilities.dp(12)); + + BackupImageView imageView = new BackupImageView(getContext()); + imageView.setRoundRadius(AndroidUtilities.dp(20)); + frameLayout.addView(imageView, LayoutHelper.createFrame(40, 40, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, 22, 5, 22, 0)); + + avatarDrawable.setInfo(user); + imageView.setImage(ImageLocation.getForUser(user, false), "50_50", avatarDrawable, user); + String name = UserObject.getFirstName(user); + + TextView textView = new TextView(getContext()); + textView.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setLines(1); + textView.setMaxLines(1); + textView.setSingleLine(true); + textView.setGravity((LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL); + textView.setEllipsize(TextUtils.TruncateAt.END); + if (option == 0) { + textView.setText(LocaleController.getString("VoipGroupMuteMemberAlertTitle", R.string.VoipGroupMuteMemberAlertTitle)); + messageTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("VoipGroupMuteMemberAlertText", R.string.VoipGroupMuteMemberAlertText, name))); + } else if (option == 2) { + textView.setText(LocaleController.getString("VoipGroupRemoveMemberAlertTitle", R.string.VoipGroupRemoveMemberAlertTitle)); + messageTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("VoipGroupRemoveMemberAlertText", R.string.VoipGroupRemoveMemberAlertText, name))); + } else { + textView.setText(LocaleController.getString("VoipGroupAddMemberTitle", R.string.VoipGroupAddMemberTitle)); + messageTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("VoipGroupAddMemberText", R.string.VoipGroupAddMemberText, name, currentChat.title))); + } + + frameLayout.addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, (LocaleController.isRTL ? 21 : 76), 11, (LocaleController.isRTL ? 76 : 21), 0)); + frameLayout.addView(messageTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, 24, 57, 24, 9)); + + if (option == 0) { + builder.setPositiveButton(LocaleController.getString("VoipGroupMute", R.string.VoipGroupMute), (dialogInterface, i) -> { + if (VoIPService.getSharedInstance() == null) { + return; + } + VoIPService.getSharedInstance().editCallMember(user, true); + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_MUTED, user, null, null, null); + }); + } else if (option == 2) { + builder.setPositiveButton(LocaleController.getString("VoipGroupUserRemove", R.string.VoipGroupUserRemove), (dialogInterface, i) -> { + accountInstance.getMessagesController().deleteUserFromChat(currentChat.id, user, null); + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_REMOVED, user, null, null, null); + }); + } else { + builder.setPositiveButton(LocaleController.getString("VoipGroupAdd", R.string.VoipGroupAdd), (dialogInterface, i) -> { + BaseFragment fragment = parentActivity.getActionBarLayout().fragmentsStack.get(parentActivity.getActionBarLayout().fragmentsStack.size() - 1); + accountInstance.getMessagesController().addUserToChat(currentChat.id, user, 0, null, fragment, () -> inviteUserToCall(userId, false)); + }); + } + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + AlertDialog dialog = builder.create(); + dialog.setBackgroundColor(Theme.getColor(Theme.key_voipgroup_dialogBackground)); + dialog.show(); + if (option == 2) { + TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); + if (button != null) { + button.setTextColor(Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); + } + } + } else { + VoIPService.getSharedInstance().editCallMember(user, false); + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_UNMUTED, user, null, null, null); + } + } + + private boolean showMenuForCell(GroupCallUserCell view) { + if (scrimPopupWindow != null) { + scrimPopupWindow.dismiss(); + scrimPopupWindow = null; + scrimPopupWindowItems = null; + 
return false; + } + + Rect rect = new Rect(); + + ActionBarPopupWindow.ActionBarPopupWindowLayout popupLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(getContext()); + popupLayout.setOnTouchListener(new View.OnTouchListener() { + + private int[] pos = new int[2]; + + @Override + public boolean onTouch(View v, MotionEvent event) { + if (event.getActionMasked() == MotionEvent.ACTION_DOWN) { + if (scrimPopupWindow != null && scrimPopupWindow.isShowing()) { + View contentView = scrimPopupWindow.getContentView(); + contentView.getLocationInWindow(pos); + rect.set(pos[0], pos[1], pos[0] + contentView.getMeasuredWidth(), pos[1] + contentView.getMeasuredHeight()); + if (!rect.contains((int) event.getX(), (int) event.getY())) { + scrimPopupWindow.dismiss(); + } + } + } else if (event.getActionMasked() == MotionEvent.ACTION_OUTSIDE) { + if (scrimPopupWindow != null && scrimPopupWindow.isShowing()) { + scrimPopupWindow.dismiss(); + } + } + return false; + } + }); + popupLayout.setDispatchKeyEventListener(keyEvent -> { + if (keyEvent.getKeyCode() == KeyEvent.KEYCODE_BACK && keyEvent.getRepeatCount() == 0 && scrimPopupWindow != null && scrimPopupWindow.isShowing()) { + scrimPopupWindow.dismiss(); + } + }); + Rect backgroundPaddings = new Rect(); + Drawable shadowDrawable = getContext().getResources().getDrawable(R.drawable.popup_fixed_alert).mutate(); + shadowDrawable.getPadding(backgroundPaddings); + popupLayout.setBackgroundDrawable(shadowDrawable); + popupLayout.setBackgroundColor(backgroundColor); + + LinearLayout linearLayout = new LinearLayout(getContext()); + ScrollView scrollView; + if (Build.VERSION.SDK_INT >= 21) { + scrollView = new ScrollView(getContext(), null, 0, R.style.scrollbarShapeStyle) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + setMeasuredDimension(linearLayout.getMeasuredWidth(), getMeasuredHeight()); + } + }; + } else { + scrollView = new 
ScrollView(getContext()); + } + scrollView.setClipToPadding(false); + popupLayout.addView(scrollView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); + + linearLayout.setMinimumWidth(AndroidUtilities.dp(200)); + linearLayout.setOrientation(LinearLayout.VERTICAL); + + TLRPC.TL_groupCallParticipant participant = view.getParticipant(); + ArrayList items = new ArrayList<>(2); + ArrayList icons = new ArrayList<>(2); + ArrayList options = new ArrayList<>(2); + boolean isAdmin = false; + if (currentChat.megagroup) { + isAdmin = accountInstance.getMessagesController().getAdminRank(currentChat.id, participant.user_id) != null; + } else { + TLRPC.ChatFull chatFull = accountInstance.getMessagesController().getChatFull(currentChat.id); + if (chatFull != null) { + for (int a = 0, N = chatFull.participants.participants.size(); a < N; a++) { + TLRPC.ChatParticipant chatParticipant = chatFull.participants.participants.get(a); + if (chatParticipant.user_id == participant.user_id) { + isAdmin = chatParticipant instanceof TLRPC.TL_chatParticipantAdmin || chatParticipant instanceof TLRPC.TL_chatParticipantCreator; + break; + } + } + } + } + if (!isAdmin || !participant.muted) { + if (!participant.muted || participant.can_self_unmute) { + items.add(LocaleController.getString("VoipGroupMute", R.string.VoipGroupMute)); + icons.add(R.drawable.msg_voice_muted); + options.add(0); + } else { + items.add(LocaleController.getString("VoipGroupAllowToSpeak", R.string.VoipGroupAllowToSpeak)); + icons.add(R.drawable.msg_voice_unmuted); + options.add(1); + } + } + if (!isAdmin) { + items.add(LocaleController.getString("VoipGroupUserRemove", R.string.VoipGroupUserRemove)); + icons.add(R.drawable.msg_block2); + options.add(2); + } + if (options.isEmpty()) { + return false; + } + + scrimPopupWindowItems = new ActionBarMenuSubItem[items.size()]; + for (int a = 0, N = items.size(); a < N; a++) { + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getContext(), a == 0, 
a == N - 1); + if (options.get(a) != 2) { + cell.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + } else { + cell.setColors(Theme.getColor(Theme.key_voipgroup_leaveCallMenu), Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); + } + cell.setSelectorColor(Theme.getColor(Theme.key_voipgroup_listSelector)); + cell.setTextAndIcon(items.get(a), icons.get(a)); + scrimPopupWindowItems[a] = cell; + linearLayout.addView(cell); + final int i = a; + cell.setOnClickListener(v1 -> { + if (i >= options.size()) { + return; + } + processSelectedOption(participant.user_id, options.get(i)); + if (scrimPopupWindow != null) { + scrimPopupWindow.dismiss(); + } + }); + } + scrollView.addView(linearLayout, LayoutHelper.createScroll(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP)); + scrimPopupWindow = new ActionBarPopupWindow(popupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { + @Override + public void dismiss() { + super.dismiss(); + if (scrimPopupWindow != this) { + return; + } + scrimPopupWindow = null; + scrimPopupWindowItems = null; + if (scrimAnimatorSet != null) { + scrimAnimatorSet.cancel(); + scrimAnimatorSet = null; + } + layoutManager.setCanScrollVertically(true); + scrimAnimatorSet = new AnimatorSet(); + ArrayList animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofInt(scrimPaint, AnimationProperties.PAINT_ALPHA, 0)); + scrimAnimatorSet.playTogether(animators); + scrimAnimatorSet.setDuration(220); + scrimAnimatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + scrimView = null; + containerView.invalidate(); + listView.invalidate(); + if (delayedGroupCallUpdated) { + delayedGroupCallUpdated = false; + applyCallParticipantUpdates(); + } + } + }); + scrimAnimatorSet.start(); + } + }; + scrimPopupWindow.setPauseNotifications(true); + scrimPopupWindow.setDismissAnimationDuration(220); + 
scrimPopupWindow.setOutsideTouchable(true); + scrimPopupWindow.setClippingEnabled(true); + scrimPopupWindow.setAnimationStyle(R.style.PopupContextAnimation); + scrimPopupWindow.setFocusable(true); + popupLayout.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST)); + scrimPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); + scrimPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); + scrimPopupWindow.getContentView().setFocusableInTouchMode(true); + int popupX = AndroidUtilities.dp(14) + listView.getMeasuredWidth() + AndroidUtilities.dp(6) - popupLayout.getMeasuredWidth(); + int popupY = (int) (listView.getY() + view.getY() + view.getMeasuredHeight()); + + scrimPopupWindow.showAtLocation(listView, Gravity.LEFT | Gravity.TOP, popupX, popupY); + listView.stopScroll(); + layoutManager.setCanScrollVertically(false); + scrimView = view; + containerView.invalidate(); + listView.invalidate(); + if (scrimAnimatorSet != null) { + scrimAnimatorSet.cancel(); + } + scrimAnimatorSet = new AnimatorSet(); + ArrayList animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofInt(scrimPaint, AnimationProperties.PAINT_ALPHA, 0, 100)); + scrimAnimatorSet.playTogether(animators); + scrimAnimatorSet.setDuration(150); + scrimAnimatorSet.start(); + return true; + } + + private class ListAdapter extends RecyclerListView.SelectionAdapter { + + private Context mContext; + private int usersStartRow; + private int usersEndRow; + private int invitedStartRow; + private int invitedEndRow; + private int addMemberRow; + private int selfUserRow; + private int lastRow; + private int rowsCount; + + public ListAdapter(Context context) { + mContext = context; + } + + public boolean addSelfToCounter() { + if (selfUserRow < 0) { + return false; + } + if (VoIPService.getSharedInstance() == null) { + return false; + 
} + return !VoIPService.getSharedInstance().isJoined(); + } + + @Override + public int getItemCount() { + return rowsCount; + } + + private void updateRows() { + if (delayedGroupCallUpdated) { + return; + } + rowsCount = 0; + if (ChatObject.canWriteToChat(currentChat)) { + addMemberRow = rowsCount++; + } else { + addMemberRow = -1; + } + if (call.participants.indexOfKey(selfDummyParticipant.user_id) < 0) { + selfUserRow = rowsCount++; + } else { + selfUserRow = -1; + } + usersStartRow = rowsCount; + rowsCount += call.sortedParticipants.size(); + usersEndRow = rowsCount; + if (call.invitedUsers.isEmpty()) { + invitedStartRow = -1; + invitedEndRow = -1; + } else { + invitedStartRow = rowsCount; + rowsCount += call.invitedUsers.size(); + invitedEndRow = rowsCount; + } + lastRow = rowsCount++; + } + + @Override + public void notifyDataSetChanged() { + updateRows(); + super.notifyDataSetChanged(); + } + + @Override + public void notifyItemChanged(int position) { + updateRows(); + super.notifyItemChanged(position); + } + + @Override + public void notifyItemChanged(int position, @Nullable Object payload) { + updateRows(); + super.notifyItemChanged(position, payload); + } + + @Override + public void notifyItemRangeChanged(int positionStart, int itemCount) { + updateRows(); + super.notifyItemRangeChanged(positionStart, itemCount); + } + + @Override + public void notifyItemRangeChanged(int positionStart, int itemCount, @Nullable Object payload) { + updateRows(); + super.notifyItemRangeChanged(positionStart, itemCount, payload); + } + + @Override + public void notifyItemInserted(int position) { + updateRows(); + super.notifyItemInserted(position); + } + + @Override + public void notifyItemMoved(int fromPosition, int toPosition) { + updateRows(); + super.notifyItemMoved(fromPosition, toPosition); + } + + @Override + public void notifyItemRangeInserted(int positionStart, int itemCount) { + updateRows(); + super.notifyItemRangeInserted(positionStart, itemCount); + } + + 
@Override + public void notifyItemRemoved(int position) { + updateRows(); + super.notifyItemRemoved(position); + } + + @Override + public void notifyItemRangeRemoved(int positionStart, int itemCount) { + updateRows(); + super.notifyItemRangeRemoved(positionStart, itemCount); + } + + @Override + public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { + int type = holder.getItemViewType(); + if (type == 1) { + GroupCallUserCell cell = (GroupCallUserCell) holder.itemView; + String key = actionBar.getTag() != null ? Theme.key_voipgroup_mutedIcon : Theme.key_voipgroup_mutedIconUnscrolled; + cell.setGrayIconColor(key, Theme.getColor(key)); + cell.setDrawDivider(holder.getAdapterPosition() != getItemCount() - 2); + } else if (type == 2) { + GroupCallInvitedCell cell = (GroupCallInvitedCell) holder.itemView; + String key = actionBar.getTag() != null ? Theme.key_voipgroup_mutedIcon : Theme.key_voipgroup_mutedIconUnscrolled; + cell.setGrayIconColor(key, Theme.getColor(key)); + cell.setDrawDivider(holder.getAdapterPosition() != getItemCount() - 2); + } + } + + @Override + public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { + switch (holder.getItemViewType()) { + case 0: + GroupCallTextCell textCell = (GroupCallTextCell) holder.itemView; + int color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_lastSeenText), actionBar.getTag() != null ? 
1.0f : 0.0f, 1.0f); + textCell.setColors(color, color); + textCell.setTextAndIcon(LocaleController.getString("VoipGroupInviteMember", R.string.VoipGroupInviteMember), R.drawable.actions_addmember2, true); + break; + case 1: + GroupCallUserCell userCell = (GroupCallUserCell) holder.itemView; + TLRPC.TL_groupCallParticipant participant; + if (position == selfUserRow) { + participant = selfDummyParticipant; + } else { + int row = position - usersStartRow; + if (delayedGroupCallUpdated) { + if (row >= 0 && row < oldParticipants.size()) { + participant = oldParticipants.get(row); + } else { + participant = null; + } + } else { + if (row >= 0 && row < call.sortedParticipants.size()) { + participant = call.sortedParticipants.get(row); + } else { + participant = null; + } + } + } + if (participant != null) { + userCell.setData(accountInstance, participant, call); + } + break; + case 2: + GroupCallInvitedCell invitedCell = (GroupCallInvitedCell) holder.itemView; + Integer uid; + int row = position - invitedStartRow; + if (delayedGroupCallUpdated) { + if (row >= 0 && row < oldInvited.size()) { + uid = oldInvited.get(row); + } else { + uid = null; + } + } else { + if (row >= 0 && row < call.invitedUsers.size()) { + uid = call.invitedUsers.get(row); + } else { + uid = null; + } + } + if (uid != null) { + invitedCell.setData(currentAccount, uid); + } + break; + } + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + int type = holder.getItemViewType(); + if (type == 1) { + GroupCallUserCell userCell = (GroupCallUserCell) holder.itemView; + return !userCell.isSelfUser(); + } else if (type == 3) { + return false; + } + return true; + } + + @Override + public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View view; + switch (viewType) { + case 0: + view = new GroupCallTextCell(mContext); + break; + case 1: + view = new GroupCallUserCell(mContext) { + @Override + protected void onMuteClick(GroupCallUserCell cell) { + 
showMenuForCell(cell); + } + }; + break; + case 2: + view = new GroupCallInvitedCell(mContext); + break; + case 3: + default: + view = new View(mContext); + break; + } + view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT)); + return new RecyclerListView.Holder(view); + } + + @Override + public int getItemViewType(int position) { + if (position == lastRow) { + return 3; + } + if (position == addMemberRow) { + return 0; + } + if (position == selfUserRow || position >= usersStartRow && position < usersEndRow) { + return 1; + } + return 2; + } + } + + private int oldAddMemberRow; + private int oldSelfUserRow; + private int oldUsersStartRow; + private int oldUsersEndRow; + private int oldInvitedStartRow; + private int oldInvitedEndRow; + + public void setOldRows(int addMemberRow, int selfUserRow, int usersStartRow, int usersEndRow, int invitedStartRow, int invitedEndRow) { + oldAddMemberRow = addMemberRow; + oldSelfUserRow = selfUserRow; + oldUsersStartRow = usersStartRow; + oldUsersEndRow = usersEndRow; + oldInvitedStartRow = invitedStartRow; + oldInvitedEndRow = invitedEndRow; + } + + private DiffUtil.Callback diffUtilsCallback = new DiffUtil.Callback() { + + @Override + public int getOldListSize() { + return oldCount; + } + + @Override + public int getNewListSize() { + return listAdapter.rowsCount; + } + + @Override + public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { + if (listAdapter.addMemberRow >= 0) { + if (oldItemPosition == oldAddMemberRow && newItemPosition == listAdapter.addMemberRow) { + return true; + } else if (oldItemPosition == oldAddMemberRow && newItemPosition != listAdapter.addMemberRow || + oldItemPosition != oldAddMemberRow && newItemPosition == listAdapter.addMemberRow) { + return false; + } + } + if (oldItemPosition == oldCount - 1 && newItemPosition == listAdapter.rowsCount - 1) { + return true; + } else if (oldItemPosition == oldCount - 1 || 
newItemPosition == listAdapter.rowsCount - 1) { + return false; + } + if ((newItemPosition == listAdapter.selfUserRow || newItemPosition >= listAdapter.usersStartRow && newItemPosition < listAdapter.usersEndRow) && + (oldItemPosition == oldSelfUserRow || oldItemPosition >= oldUsersStartRow && oldItemPosition < oldUsersEndRow)) { + TLRPC.TL_groupCallParticipant oldItem; + TLRPC.TL_groupCallParticipant newItem; + if (oldItemPosition == oldSelfUserRow) { + oldItem = selfDummyParticipant; + } else { + oldItem = oldParticipants.get(oldItemPosition - oldUsersStartRow); + } + if (newItemPosition == listAdapter.selfUserRow) { + newItem = selfDummyParticipant; + } else { + newItem = call.sortedParticipants.get(newItemPosition - listAdapter.usersStartRow); + } + return oldItem.user_id == newItem.user_id; + } else if (newItemPosition >= listAdapter.invitedStartRow && newItemPosition < listAdapter.invitedEndRow && + oldItemPosition >= oldInvitedStartRow && oldItemPosition < oldInvitedEndRow) { + Integer oldItem = oldInvited.get(oldItemPosition - oldInvitedStartRow); + Integer newItem = call.invitedUsers.get(newItemPosition - listAdapter.invitedStartRow); + return oldItem.equals(newItem); + } + return false; + } + + @Override + public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { + return true; + } + }; + + private static class UpdateCallback implements ListUpdateCallback { + + final RecyclerView.Adapter adapter; + boolean changed; + + private UpdateCallback(RecyclerView.Adapter adapter) { + this.adapter = adapter; + } + + @Override + public void onInserted(int position, int count) { + changed = true; + adapter.notifyItemRangeInserted(position, count); + } + + @Override + public void onRemoved(int position, int count) { + changed = true; + adapter.notifyItemRangeRemoved(position, count); + } + + @Override + public void onMoved(int fromPosition, int toPosition) { + changed = true; + adapter.notifyItemMoved(fromPosition, toPosition); + } + + @Override + 
public void onChanged(int position, int count, @Nullable Object payload) { + adapter.notifyItemRangeChanged(position, count, payload); + } + } + + private void toggleAdminSpeak() { + TLRPC.TL_phone_toggleGroupCallSettings req = new TLRPC.TL_phone_toggleGroupCallSettings(); + req.call = call.getInputGroupCall(); + req.join_muted = call.call.join_muted; + req.flags |= 1; + accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { + if (response != null) { + accountInstance.getMessagesController().processUpdates((TLRPC.Updates) response, false); + } + }); + } + + @Override + public ArrayList getThemeDescriptions() { + return new ArrayList<>(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java index f24f69ff5..7aa38b9dd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java @@ -45,6 +45,7 @@ import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.ViewOutlineProvider; +import android.view.ViewTreeObserver; import android.view.inputmethod.EditorInfo; import android.widget.ImageView; import android.widget.LinearLayout; @@ -76,7 +77,8 @@ import org.telegram.ui.Cells.GroupCreateUserCell; import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Components.CombinedDrawable; import org.telegram.ui.Components.EditTextBoldCursor; -import org.telegram.ui.Components.EmptyTextProgressView; +import org.telegram.ui.Components.FlickerLoadingView; +import org.telegram.ui.Components.StickerEmptyView; import org.telegram.ui.Components.VerticalPositionAutoAnimator; import org.telegram.ui.Components.GroupCreateDividerItemDecoration; import org.telegram.ui.Components.GroupCreateSpan; @@ -94,7 +96,7 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen private SpansContainer 
spansContainer; private EditTextBoldCursor editText; private RecyclerListView listView; - private EmptyTextProgressView emptyView; + private StickerEmptyView emptyView; private GroupCreateAdapter adapter; private GroupCreateActivityDelegate delegate; private ContactsAddActivityDelegate delegate2; @@ -488,14 +490,19 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen } } + @Override + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + parentLayout.drawHeaderShadow(canvas, Math.min(maxSize, measuredContainerHeight + containerHeight - measuredContainerHeight)); + } + @Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - if (child == listView || child == emptyView) { + if (child == listView) { canvas.save(); canvas.clipRect(child.getLeft(), Math.min(maxSize, measuredContainerHeight + containerHeight - measuredContainerHeight), child.getRight(), child.getBottom()); boolean result = super.drawChild(canvas, child, drawingTime); canvas.restore(); - parentLayout.drawHeaderShadow(canvas, Math.min(maxSize, measuredContainerHeight + containerHeight - measuredContainerHeight)); return result; } else if (child == scrollView) { canvas.save(); @@ -568,17 +575,7 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen editText.setImeOptions(EditorInfo.IME_ACTION_DONE | EditorInfo.IME_FLAG_NO_EXTRACT_UI); editText.setGravity((LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL); spansContainer.addView(editText); - if (chatType == ChatObject.CHAT_TYPE_CHANNEL) { - editText.setHintText(LocaleController.getString("AddMutual", R.string.AddMutual)); - } else { - if (addToGroup) { - editText.setHintText(LocaleController.getString("SearchForPeople", R.string.SearchForPeople)); - } else if (isAlwaysShare || isNeverShare) { - editText.setHintText(LocaleController.getString("SearchForPeopleAndGroups", R.string.SearchForPeopleAndGroups)); - } else { - editText.setHintText(LocaleController.getString("SendMessageTo", R.string.SendMessageTo)); - } - } + updateEditTextHint(); editText.setCustomSelectionActionModeCallback(new ActionMode.Callback() { public boolean onPrepareActionMode(ActionMode mode, Menu menu) { return false; @@ -637,24 +634,24 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen itemDecoration.setSearching(true); listView.setFastScrollVisible(false); listView.setVerticalScrollBarEnabled(true); - emptyView.setText(LocaleController.getString("NoResult", R.string.NoResult)); - emptyView.showProgress(); } adapter.searchDialogs(editText.getText().toString()); + emptyView.showProgress(true, false); } else { closeSearch(); } } }); - emptyView = new EmptyTextProgressView(context); - if (ContactsController.getInstance(currentAccount).isLoadingContacts()) { - emptyView.showProgress(); - } else { - emptyView.showTextView(); - } - emptyView.setShowAtCenter(true); - emptyView.setText(LocaleController.getString("NoContacts", R.string.NoContacts)); + FlickerLoadingView flickerLoadingView = new FlickerLoadingView(context); + flickerLoadingView.setViewType(FlickerLoadingView.USERS_TYPE); + flickerLoadingView.showDate(false); + + emptyView = new StickerEmptyView(context, flickerLoadingView, StickerEmptyView.STICKER_TYPE_SEARCH); + emptyView.addView(flickerLoadingView); + emptyView.showProgress(true, false); + 
emptyView.title.setText(LocaleController.getString("NoResult", R.string.NoResult)); + frameLayout.addView(emptyView); LinearLayoutManager linearLayoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false); @@ -742,7 +739,7 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen } } MessagesController.getInstance(currentAccount).putUser(user, !searching); - } else if (object instanceof TLRPC.Chat) { + } else { TLRPC.Chat chat = (TLRPC.Chat) object; MessagesController.getInstance(currentAccount).putChat(chat, !searching); } @@ -770,6 +767,7 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen } } }); + listView.setAnimateEmptyView(true, 0); floatingButton = new ImageView(context); floatingButton.setScaleType(ImageView.ScaleType.CENTER); @@ -818,6 +816,52 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen return fragmentView; } + private void updateEditTextHint() { + if (editText == null) { + return; + } + if (chatType == ChatObject.CHAT_TYPE_CHANNEL) { + editText.setHintText(LocaleController.getString("AddMutual", R.string.AddMutual)); + } else { + if (addToGroup || (adapter != null && adapter.noContactsStubRow == 0)) { + editText.setHintText(LocaleController.getString("SearchForPeople", R.string.SearchForPeople)); + } else if (isAlwaysShare || isNeverShare) { + editText.setHintText(LocaleController.getString("SearchForPeopleAndGroups", R.string.SearchForPeopleAndGroups)); + } else { + editText.setHintText(LocaleController.getString("SendMessageTo", R.string.SendMessageTo)); + } + } + } + + private void showItemsAnimated(int from) { + if (isPaused) { + return; + } + listView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + int n = listView.getChildCount(); + AnimatorSet animatorSet = new AnimatorSet(); + for 
(int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + if (listView.getChildAdapterPosition(child) < from) { + continue; + } + child.setAlpha(0); + int s = Math.min(listView.getMeasuredHeight(), Math.max(0, child.getTop())); + int delay = (int) ((s / (float) listView.getMeasuredHeight()) * 100); + ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); + a.setStartDelay(delay); + a.setDuration(200); + animatorSet.playTogether(a); + } + animatorSet.start(); + return true; + } + }); + } + @Override public void onResume() { super.onResume(); @@ -830,9 +874,6 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen @Override public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.contactsDidLoad) { - if (emptyView != null) { - emptyView.showTextView(); - } if (adapter != null) { adapter.notifyDataSetChanged(); } @@ -1029,7 +1070,7 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen adapter.searchDialogs(null); listView.setFastScrollVisible(true); listView.setVerticalScrollBarEnabled(false); - emptyView.setText(LocaleController.getString("NoContacts", R.string.NoContacts)); + showItemsAnimated(0); } private void updateHint() { @@ -1098,6 +1139,14 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen private ArrayList contacts = new ArrayList<>(); private int usersStartRow; private int inviteViaLink; + private int noContactsStubRow; + private int currentItemsCount; + + @Override + public void notifyDataSetChanged() { + super.notifyDataSetChanged(); + updateEditTextHint(); + } public GroupCreateAdapter(Context ctx) { context = ctx; @@ -1144,8 +1193,9 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen searchAdapterHelper = new SearchAdapterHelper(false); searchAdapterHelper.setDelegate((searchId) -> { - if (searchRunnable == null && !searchAdapterHelper.isSearchInProgress()) { - 
emptyView.showTextView(); + showItemsAnimated(currentItemsCount); + if (searchRunnable == null && !searchAdapterHelper.isSearchInProgress() && getItemCount() == 0) { + emptyView.showProgress(false, true); } notifyDataSetChanged(); }); @@ -1195,6 +1245,7 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen @Override public int getItemCount() { int count; + noContactsStubRow = -1; if (searching) { count = searchResult.size(); int localServerCount = searchAdapterHelper.getLocalServerSearch().size(); @@ -1203,6 +1254,7 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen if (globalCount != 0) { count += globalCount + 1; } + currentItemsCount = count; return count; } else { count = contacts.size(); @@ -1221,7 +1273,12 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen count++; } } + if (count == 0) { + noContactsStubRow = 0; + count++; + } } + currentItemsCount = count; return count; } @@ -1235,6 +1292,20 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen case 1: view = new GroupCreateUserCell(context, true, 0, false); break; + case 3: + StickerEmptyView stickerEmptyView = new StickerEmptyView(context, null, StickerEmptyView.STICKER_TYPE_NO_CONTACTS) { + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + stickerView.getImageReceiver().startAnimation(); + } + }; + stickerEmptyView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + stickerEmptyView.subtitle.setVisibility(View.GONE); + stickerEmptyView.title.setText(LocaleController.getString("NoContacts", R.string.NoContacts)); + stickerEmptyView.setAnimateLayoutChange(true); + view = stickerEmptyView; + break; case 2: default: view = new TextCell(context); @@ -1358,6 +1429,9 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen if (inviteViaLink != 0 && position 
== 0) { return 2; } + if (noContactsStubRow == position) { + return 3; + } return 1; } } @@ -1392,13 +1466,14 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen Utilities.searchQueue.cancelRunnable(searchRunnable); searchRunnable = null; } - if (query == null) { - searchResult.clear(); - searchResultNames.clear(); - searchAdapterHelper.mergeResults(null); - searchAdapterHelper.queryServerSearch(null, true, isAlwaysShare || isNeverShare, false, false, false, 0, false, 0, 0); - notifyDataSetChanged(); - } else { + + searchResult.clear(); + searchResultNames.clear(); + searchAdapterHelper.mergeResults(null); + searchAdapterHelper.queryServerSearch(null, true, isAlwaysShare || isNeverShare, false, false, false, 0, false, 0, 0); + notifyDataSetChanged(); + + if (!TextUtils.isEmpty(query)){ Utilities.searchQueue.postRunnable(searchRunnable = () -> AndroidUtilities.runOnUIThread(() -> { searchAdapterHelper.queryServerSearch(query, true, isAlwaysShare || isNeverShare, true, false, false, 0, false, 0, 0); Utilities.searchQueue.postRunnable(searchRunnable = () -> { @@ -1480,10 +1555,11 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen searchResult = users; searchResultNames = names; searchAdapterHelper.mergeResults(searchResult); - if (searching && !searchAdapterHelper.isSearchInProgress()) { - emptyView.showTextView(); - } + showItemsAnimated(currentItemsCount); notifyDataSetChanged(); + if (searching && !searchAdapterHelper.isSearchInProgress() && getItemCount() == 0) { + emptyView.showProgress(false, true); + } }); } } @@ -1553,6 +1629,9 @@ public class GroupCreateActivity extends BaseFragment implements NotificationCen themeDescriptions.add(new ThemeDescription(spansContainer, 0, new Class[]{GroupCreateSpan.class}, null, null, null, Theme.key_groupcreate_spanDelete)); themeDescriptions.add(new ThemeDescription(spansContainer, 0, new Class[]{GroupCreateSpan.class}, null, null, null, 
Theme.key_avatar_backgroundBlue)); + themeDescriptions.add(new ThemeDescription(emptyView.title, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + themeDescriptions.add(new ThemeDescription(emptyView.subtitle, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); + return themeDescriptions; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupInviteActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupInviteActivity.java index e19eb70a2..0c7dd37a0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/GroupInviteActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupInviteActivity.java @@ -14,7 +14,6 @@ import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; -import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; @@ -34,6 +33,7 @@ import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Cells.TextBlockCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.TextSettingsCell; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.EmptyTextProgressView; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RecyclerListView; @@ -132,7 +132,7 @@ public class GroupInviteActivity extends BaseFragment implements NotificationCen android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", invite.link); clipboard.setPrimaryClip(clip); - Toast.makeText(getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + BulletinFactory.createCopyLinkBulletin(this).show(); } catch 
(Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java index 3182f4741..404237bcb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java @@ -414,6 +414,9 @@ public class LanguageSelectActivity extends BaseFragment implements Notification @Override public int getItemViewType(int i) { + if (search) { + return 0; + } if (!unofficialLanguages.isEmpty() && (i == unofficialLanguages.size() || i == unofficialLanguages.size() + sortedLanguages.size() + 1) || unofficialLanguages.isEmpty() && i == sortedLanguages.size()) { return 1; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java index 9307066aa..ff3b39ef1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java @@ -14,6 +14,7 @@ import android.animation.AnimatorListenerAdapter; import android.animation.ObjectAnimator; import android.app.Activity; import android.app.ActivityManager; +import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageInfo; @@ -26,6 +27,7 @@ import android.graphics.Color; import android.graphics.Point; import android.graphics.Shader; import android.graphics.drawable.BitmapDrawable; +import android.location.LocationManager; import android.net.Uri; import android.os.Build; import android.os.Bundle; @@ -33,6 +35,7 @@ import android.os.Parcelable; import android.os.StatFs; import android.os.SystemClock; import android.provider.ContactsContract; +import android.provider.Settings; import android.text.TextUtils; import android.util.Base64; import android.view.ActionMode; @@ -52,6 +55,7 @@ import 
android.widget.RelativeLayout; import android.widget.Toast; import androidx.annotation.NonNull; +import androidx.arch.core.util.Function; import androidx.core.content.pm.ShortcutInfoCompat; import androidx.core.content.pm.ShortcutManagerCompat; import androidx.recyclerview.widget.ItemTouchHelper; @@ -107,8 +111,11 @@ import org.telegram.ui.Cells.LanguageCell; import org.telegram.ui.Components.AlertsCreator; import org.telegram.ui.Components.AudioPlayerAlert; import org.telegram.ui.Components.BlockingUpdateView; +import org.telegram.ui.Components.Bulletin; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.Easings; import org.telegram.ui.Components.EmbedBottomSheet; +import org.telegram.ui.Components.GroupCallPip; import org.telegram.ui.Components.JoinGroupAlert; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.PasscodeView; @@ -221,7 +228,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } SharedPreferences preferences = MessagesController.getGlobalMainSettings(); long crashed_time = preferences.getLong("intro_crashed_time", 0); - boolean fromIntro = intent.getBooleanExtra("fromIntro", false); + boolean fromIntro = intent != null && intent.getBooleanExtra("fromIntro", false); if (fromIntro) { preferences.edit().putLong("intro_crashed_time", 0).commit(); } @@ -238,7 +245,6 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } } - requestWindowFeature(Window.FEATURE_NO_TITLE); setTheme(R.style.Theme_TMessages); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { @@ -253,7 +259,6 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } - getWindow().setBackgroundDrawableResource(R.drawable.transparent); if (SharedConfig.passcodeHash.length() > 0 && !SharedConfig.allowScreenCapture) { try { @@ -267,11 +272,11 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa if 
(Build.VERSION.SDK_INT >= 24) { AndroidUtilities.isInMultiwindow = isInMultiWindowMode(); } - Theme.createChatResources(this, false); + Theme.createChatResources(this, false); //TODO optimize if (SharedConfig.passcodeHash.length() != 0 && SharedConfig.appLocked) { SharedConfig.lastPauseTime = (int) (SystemClock.elapsedRealtime() / 1000); } - + //FileLog.d("UI create5 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); AndroidUtilities.fillStatusBarHeight(this); actionBarLayout = new ActionBarLayout(this) { @Override @@ -311,7 +316,6 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa frameLayout.addView(themeSwitchSunView, LayoutHelper.createFrame(48, 48)); themeSwitchSunView.setVisibility(View.GONE); } - if (AndroidUtilities.isTablet()) { getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); @@ -442,7 +446,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } else { drawerLayoutContainer.addView(actionBarLayout, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); } - + //FileLog.d("UI create7 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); sideMenu = new RecyclerListView(this) { @Override public boolean drawChild(Canvas canvas, View child, long drawingTime) { @@ -460,6 +464,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa return result; } }; + //FileLog.d("UI create34 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); itemAnimator = new SideMenultItemAnimator(sideMenu); sideMenu.setItemAnimator(itemAnimator); sideMenu.setBackgroundColor(Theme.getColor(Theme.key_chats_menuBackground)); @@ -540,9 +545,36 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa args.putInt("user_id", UserConfig.getInstance(currentAccount).getClientUserId()); presentFragment(new ChatActivity(args)); 
drawerLayoutContainer.closeDrawer(false); + } else if (id == 12) { + if (Build.VERSION.SDK_INT >= 23) { + if (checkSelfPermission(Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) { + presentFragment(new ActionIntroActivity(ActionIntroActivity.ACTION_TYPE_NEARBY_LOCATION_ACCESS)); + drawerLayoutContainer.closeDrawer(false); + return; + } + } + boolean enabled = true; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + LocationManager lm = (LocationManager) ApplicationLoader.applicationContext.getSystemService(Context.LOCATION_SERVICE); + enabled = lm.isLocationEnabled(); + } else if (Build.VERSION.SDK_INT >= 19) { + try { + int mode = Settings.Secure.getInt(ApplicationLoader.applicationContext.getContentResolver(), Settings.Secure.LOCATION_MODE, Settings.Secure.LOCATION_MODE_OFF); + enabled = (mode != Settings.Secure.LOCATION_MODE_OFF); + } catch (Throwable e) { + FileLog.e(e); + } + } + if (enabled) { + presentFragment(new PeopleNearbyActivity()); + } else { + presentFragment(new ActionIntroActivity(ActionIntroActivity.ACTION_TYPE_NEARBY_LOCATION_ENABLED)); + } + drawerLayoutContainer.closeDrawer(false); } } }); + //FileLog.d("UI create33 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); final ItemTouchHelper sideMenuTouchHelper = new ItemTouchHelper(new ItemTouchHelper.SimpleCallback(ItemTouchHelper.UP | ItemTouchHelper.DOWN, 0) { private RecyclerView.ViewHolder selectedViewHolder; @@ -619,6 +651,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa view.setTranslationY(dY); } }); + //FileLog.d("UI create32 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); sideMenuTouchHelper.attachToRecyclerView(sideMenu); sideMenu.setOnItemLongClickListener((view, position) -> { if (view instanceof DrawerUserCell) { @@ -651,13 +684,14 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } return false; }); - + //FileLog.d("UI 
create31 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); drawerLayoutContainer.setParentActionBarLayout(actionBarLayout); actionBarLayout.setDrawerLayoutContainer(drawerLayoutContainer); actionBarLayout.init(mainFragmentsStack); actionBarLayout.setDelegate(this); - + //FileLog.d("UI create30 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); Theme.loadWallpaper(); + //FileLog.d("UI create8 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); passcodeView = new PasscodeView(this); drawerLayoutContainer.addView(passcodeView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); @@ -668,7 +702,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.closeOtherAppActivities, this); currentConnectionState = ConnectionsManager.getInstance(currentAccount).getConnectionState(); - + //FileLog.d("UI create10 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.needShowAlert); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.reloadInterface); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.suggestedLangpack); @@ -766,11 +800,12 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } drawerLayoutContainer.setAllowOpenDrawer(allowOpen, false); } + //FileLog.d("UI create11 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); checkLayout(); checkSystemBarColors(); - + //FileLog.d("UI create12 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); handleIntent(getIntent(), false, savedInstanceState != null, false); - + //FileLog.d("UI create9 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); try { String os1 = Build.DISPLAY; String 
os2 = Build.USER; @@ -809,6 +844,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } MediaController.getInstance().setBaseActivity(this, true); AndroidUtilities.startAppCenter(this); + //FileLog.d("UI create time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); } private void openSettings(boolean expanded) { @@ -1107,15 +1143,17 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } return true; } + //FileLog.d("UI create13 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); if (PhotoViewer.hasInstance() && PhotoViewer.getInstance().isVisible()) { if (intent == null || !Intent.ACTION_MAIN.equals(intent.getAction())) { PhotoViewer.getInstance().closePhoto(false, true); } } int flags = intent.getFlags(); + String action = intent.getAction(); final int[] intentAccount = new int[]{intent.getIntExtra("currentAccount", UserConfig.selectedAccount)}; switchToAccount(intentAccount[0], true); - boolean isVoipIntent = intent.getAction() != null && intent.getAction().equals("voip"); + boolean isVoipIntent = action != null && action.equals("voip"); if (!fromPassword && (AndroidUtilities.needShowPasscode(true) || SharedConfig.isWaitingForPasscodeEnter)) { showPasscodeActivity(); UserConfig.getInstance(currentAccount).saveConfig(false); @@ -1127,7 +1165,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } boolean pushOpened = false; - + //FileLog.d("UI create14 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); int push_user_id = 0; int push_chat_id = 0; int push_enc_id = 0; @@ -1138,6 +1176,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa boolean showDialogsList = false; boolean showPlayer = false; boolean showLocations = false; + boolean showGroupVoip = false; boolean showCallLog = false; boolean audioCallUser = false; boolean videoCallUser = false; @@ -1471,7 +1510,10 @@ 
public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } } else if (path.startsWith("login/")) { - code = path.replace("login/", ""); + int intCode = Utilities.parseInt(path.replace("login/", "")); + if (intCode != 0) { + code = "" + intCode; + } } else if (path.startsWith("joinchat/")) { group = path.replace("joinchat/", ""); } else if (path.startsWith("addstickers/")) { @@ -1706,7 +1748,10 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa url = url.replace("tg:login", "tg://telegram.org").replace("tg://login", "tg://telegram.org"); data = Uri.parse(url); login = data.getQueryParameter("token"); - code = data.getQueryParameter("code"); + int intCode = Utilities.parseInt(data.getQueryParameter("code")); + if (intCode != 0) { + code = "" + intCode; + } } else if (url.startsWith("tg:openmessage") || url.startsWith("tg://openmessage")) { url = url.replace("tg:openmessage", "tg://telegram.org").replace("tg://openmessage", "tg://telegram.org"); data = Uri.parse(url); @@ -1834,11 +1879,11 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } if (intent.hasExtra(EXTRA_ACTION_TOKEN)) { final boolean success = UserConfig.getInstance(currentAccount).isClientActivated() && "tg".equals(scheme) && unsupportedUrl == null; - final Action action = new AssistActionBuilder() + final Action assistAction = new AssistActionBuilder() .setActionToken(intent.getStringExtra(EXTRA_ACTION_TOKEN)) .setActionStatus(success ? 
Action.Builder.STATUS_TYPE_COMPLETED : Action.Builder.STATUS_TYPE_FAILED) .build(); - FirebaseUserActions.getInstance().end(action); + FirebaseUserActions.getInstance().end(assistAction); intent.removeExtra(EXTRA_ACTION_TOKEN); } if (code != null || UserConfig.getInstance(currentAccount).isClientActivated()) { @@ -1905,10 +1950,12 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa showPlayer = true; } else if (intent.getAction().equals("org.tmessages.openlocations")) { showLocations = true; + } else if (action.equals("voip_chat")) { + showGroupVoip = true; } } } - + //FileLog.d("UI create15 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); if (UserConfig.getInstance(currentAccount).isClientActivated()) { if (searchQuery != null) { final BaseFragment lastFragment = actionBarLayout.getLastFragment(); @@ -2159,6 +2206,11 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa drawerLayoutContainer.setAllowOpenDrawer(true, false); } pushOpened = true; + } else if (showGroupVoip) { + GroupCallActivity.create(this, AccountInstance.getInstance(currentAccount)); + if (GroupCallActivity.groupCallInstance != null) { + GroupCallActivity.groupCallUiVisible = true; + } } else if (newContactAlert) { final BaseFragment lastFragment = actionBarLayout.getLastFragment(); if (lastFragment != null && lastFragment.getParentActivity() != null) { @@ -2193,7 +2245,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa pushOpened = true; } } - + //FileLog.d("UI create16 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); if (!pushOpened && !isNew) { if (AndroidUtilities.isTablet()) { if (!UserConfig.getInstance(currentAccount).isClientActivated()) { @@ -2234,12 +2286,13 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa rightActionBarLayout.showLastFragment(); } } - + //FileLog.d("UI create17 time = " + 
(SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); if (isVoipIntent) { VoIPFragment.show(this, intentAccount[0]); } intent.setAction(null); + //FileLog.d("UI create18 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); return pushOpened; } @@ -2435,7 +2488,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa args12.putInt("chat_id", -(int) did); if (mainFragmentsStack.isEmpty() || MessagesController.getInstance(intentAccount).checkCanOpenChat(args12, mainFragmentsStack.get(mainFragmentsStack.size() - 1))) { NotificationCenter.getInstance(intentAccount).postNotificationName(NotificationCenter.closeChats); - MessagesController.getInstance(intentAccount).addUserToChat(-(int) did, user, null, 0, botChat, null, null); + MessagesController.getInstance(intentAccount).addUserToChat(-(int) did, user, 0, botChat, null, null); actionBarLayout.presentFragment(new ChatActivity(args12), true, false, true, false); } }); @@ -2444,15 +2497,12 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa long dialog_id; boolean isBot = false; Bundle args = new Bundle(); - final TLRPC.Chat chat; if (!res.chats.isEmpty()) { args.putInt("chat_id", res.chats.get(0).id); dialog_id = -res.chats.get(0).id; - chat = res.chats.get(0); } else { args.putInt("user_id", res.users.get(0).id); dialog_id = res.users.get(0).id; - chat = null; } if (botUser != null && res.users.size() > 0 && res.users.get(0).bot) { args.putString("botUser", botUser); @@ -2466,25 +2516,31 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa if (isBot && lastFragment instanceof ChatActivity && ((ChatActivity) lastFragment).getDialogId() == dialog_id) { ((ChatActivity) lastFragment).setBotUser(botUser); } else { - MessagesController.getInstance(intentAccount).ensureMessagesLoaded(dialog_id, ChatObject.isChannel(chat), messageId == null ? 
0 : messageId, () -> { - try { - progressDialog.dismiss(); - } catch (Exception e) { - FileLog.e(e); + MessagesController.getInstance(intentAccount).ensureMessagesLoaded(dialog_id, messageId == null ? 0 : messageId, new MessagesController.MessagesLoadedCallback() { + @Override + public void onMessagesLoaded(boolean fromCache) { + try { + progressDialog.dismiss(); + } catch (Exception e) { + FileLog.e(e); + } + if (!LaunchActivity.this.isFinishing()) { + ChatActivity fragment = new ChatActivity(args); + actionBarLayout.presentFragment(fragment); + } } - if (!LaunchActivity.this.isFinishing()) { - ChatActivity fragment = new ChatActivity(args); - actionBarLayout.presentFragment(fragment); - } - }, () -> { - if (!LaunchActivity.this.isFinishing()) { - BaseFragment fragment = mainFragmentsStack.get(mainFragmentsStack.size() - 1); - AlertsCreator.showSimpleAlert(fragment, LocaleController.getString("JoinToGroupErrorNotExist", R.string.JoinToGroupErrorNotExist)); - } - try { - progressDialog.dismiss(); - } catch (Exception e) { - FileLog.e(e); + + @Override + public void onError() { + if (!LaunchActivity.this.isFinishing()) { + BaseFragment fragment = mainFragmentsStack.get(mainFragmentsStack.size() - 1); + AlertsCreator.showSimpleAlert(fragment, LocaleController.getString("JoinToGroupErrorNotExist", R.string.JoinToGroupErrorNotExist)); + } + try { + progressDialog.dismiss(); + } catch (Exception e) { + FileLog.e(e); + } } }); hideProgressDialog = false; @@ -2493,7 +2549,11 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } else { try { - Toast.makeText(LaunchActivity.this, LocaleController.getString("NoUsernameFound", R.string.NoUsernameFound), Toast.LENGTH_SHORT).show(); + if (error != null && error.text != null && error.text.startsWith("FLOOD_WAIT")) { + Toast.makeText(LaunchActivity.this, LocaleController.getString("FloodWait", R.string.FloodWait), Toast.LENGTH_SHORT).show(); + } else { + Toast.makeText(LaunchActivity.this, 
LocaleController.getString("NoUsernameFound", R.string.NoUsernameFound), Toast.LENGTH_SHORT).show(); + } } catch (Exception e) { FileLog.e(e); } @@ -2507,7 +2567,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } } - })); + }, ConnectionsManager.RequestFlagFailOnServerErrors)); } else if (group != null) { if (state == 0) { final TLRPC.TL_messages_checkChatInvite req = new TLRPC.TL_messages_checkChatInvite(); @@ -2528,29 +2588,35 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa boolean[] canceled = new boolean[1]; progressDialog.setOnCancelListener(dialog -> canceled[0] = true); - MessagesController.getInstance(intentAccount).ensureMessagesLoaded(-invite.chat.id, ChatObject.isChannel(invite.chat), 0, () -> { - try { - progressDialog.dismiss(); - } catch (Exception e) { - FileLog.e(e); + MessagesController.getInstance(intentAccount).ensureMessagesLoaded(-invite.chat.id, 0, new MessagesController.MessagesLoadedCallback() { + @Override + public void onMessagesLoaded(boolean fromCache) { + try { + progressDialog.dismiss(); + } catch (Exception e) { + FileLog.e(e); + } + if (canceled[0]) { + return; + } + ChatActivity fragment = new ChatActivity(args); + if (invite instanceof TLRPC.TL_chatInvitePeek) { + fragment.setChatInvite(invite); + } + actionBarLayout.presentFragment(fragment); } - if (canceled[0]) { - return; - } - ChatActivity fragment = new ChatActivity(args); - if (invite instanceof TLRPC.TL_chatInvitePeek) { - fragment.setChatInvite(invite); - } - actionBarLayout.presentFragment(fragment); - }, () -> { - if (!LaunchActivity.this.isFinishing()) { - BaseFragment fragment = mainFragmentsStack.get(mainFragmentsStack.size() - 1); - AlertsCreator.showSimpleAlert(fragment, LocaleController.getString("JoinToGroupErrorNotExist", R.string.JoinToGroupErrorNotExist)); - } - try { - progressDialog.dismiss(); - } catch (Exception e) { - FileLog.e(e); + + @Override + public void onError() { + if 
(!LaunchActivity.this.isFinishing()) { + BaseFragment fragment = mainFragmentsStack.get(mainFragmentsStack.size() - 1); + AlertsCreator.showSimpleAlert(fragment, LocaleController.getString("JoinToGroupErrorNotExist", R.string.JoinToGroupErrorNotExist)); + } + try { + progressDialog.dismiss(); + } catch (Exception e) { + FileLog.e(e); + } } }); hideProgressDialog = false; @@ -3122,6 +3188,20 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa return null; } + public void showBulletin(Function createBulletin) { + BaseFragment topFragment = null; + if (!layerFragmentsStack.isEmpty()) { + topFragment = layerFragmentsStack.get(layerFragmentsStack.size() - 1); + } else if (!rightFragmentsStack.isEmpty()) { + topFragment = rightFragmentsStack.get(rightFragmentsStack.size() - 1); + } else if (!mainFragmentsStack.isEmpty()) { + topFragment = mainFragmentsStack.get(mainFragmentsStack.size() - 1); + } + if (BulletinFactory.canShowBulletin(topFragment)) { + createBulletin.apply(BulletinFactory.of(topFragment)).show(); + } + } + @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); @@ -3327,6 +3407,18 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } UserConfig.getInstance(currentAccount).saveConfig(false); } + if (requestCode == 105) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ApplicationLoader.canDrawOverlays = Settings.canDrawOverlays(this)) { + GroupCallActivity.groupCallInstance.dismissInternal(); + AndroidUtilities.runOnUIThread(() -> { + GroupCallPip.clearForce(); + GroupCallPip.updateVisibility(LaunchActivity.this); + }, 200); + } + } + return; + } super.onActivityResult(requestCode, resultCode, data); if (requestCode == PLAY_SERVICES_REQUEST_CHECK_SETTINGS) { LocationController.getInstance(currentAccount).startFusedLocationRequest(resultCode == Activity.RESULT_OK); @@ -3477,12 +3569,16 @@ public class LaunchActivity extends Activity implements 
ActionBarLayout.ActionBa protected void onStart() { super.onStart(); Browser.bindCustomTabsService(this); + ApplicationLoader.mainInterfaceStopped = false; + GroupCallPip.updateVisibility(this); } @Override protected void onStop() { super.onStop(); Browser.unbindCustomTabsService(this); + ApplicationLoader.mainInterfaceStopped = true; + GroupCallPip.updateVisibility(this); } @Override @@ -3502,6 +3598,9 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa if (ContentPreviewViewer.hasInstance()) { ContentPreviewViewer.getInstance().destroy(); } + if (GroupCallActivity.groupCallInstance != null) { + GroupCallActivity.groupCallInstance.dismissInternal(); + } PipRoundVideoView pipRoundVideoView = PipRoundVideoView.getInstance(); MediaController.getInstance().setBaseActivity(this, false); MediaController.getInstance().setFeedbackView(actionBarLayout, false); @@ -3540,6 +3639,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa @Override protected void onResume() { super.onResume(); + //FileLog.d("UI resume time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 4096); MediaController.getInstance().setFeedbackView(actionBarLayout, true); ApplicationLoader.mainInterfacePaused = false; @@ -3584,6 +3684,9 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } checkAppUpdate(false); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + ApplicationLoader.canDrawOverlays = Settings.canDrawOverlays(this); + } if (VoIPFragment.getInstance() != null) { VoIPFragment.onResume(); } @@ -3644,7 +3747,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa drawerLayoutAdapter.notifyDataSetChanged(); } else if (id == NotificationCenter.needShowAlert) { final Integer reason = (Integer) args[0]; - if (reason == 3 && proxyErrorDialog != null) 
{ + if (reason == 6 || reason == 3 && proxyErrorDialog != null) { return; } else if (reason == 4) { showTosActivity(account, (TLRPC.TL_help_termsOfService) args[1]); @@ -3652,7 +3755,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(LocaleController.getString("AppName", R.string.AppName)); - if (reason != 2 && reason != 3) { + if (reason != 2 && reason != 3 && reason != 6) { builder.setNegativeButton(LocaleController.getString("MoreInfo", R.string.MoreInfo), (dialogInterface, i) -> { if (!mainFragmentsStack.isEmpty()) { MessagesController.getInstance(account).openByUserName("spambot", mainFragmentsStack.get(mainFragmentsStack.size() - 1), 1); @@ -4459,7 +4562,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa @Override public boolean dispatchKeyEvent(KeyEvent event) { int keyCode = event.getKeyCode(); - if (!mainFragmentsStack.isEmpty() && (!PhotoViewer.hasInstance() || !PhotoViewer.getInstance().isVisible()) && event.getRepeatCount() == 0 && event.getAction() == KeyEvent.ACTION_DOWN && (event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_UP || event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_DOWN)) { + if (VoIPService.getSharedInstance() == null && !mainFragmentsStack.isEmpty() && (!PhotoViewer.hasInstance() || !PhotoViewer.getInstance().isVisible()) && event.getRepeatCount() == 0 && event.getAction() == KeyEvent.ACTION_DOWN && (event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_UP || event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_DOWN)) { BaseFragment fragment = mainFragmentsStack.get(mainFragmentsStack.size() - 1); if (fragment instanceof ChatActivity) { if (((ChatActivity) fragment).maybePlayVisibleVideo()) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java index 4e87dd2fc..1c765d52e 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java @@ -431,6 +431,7 @@ public class LoginActivity extends BaseFragment { ConnectionsManager.getInstance(currentAccount).setAppPaused(false, false); } AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + fragmentView.requestLayout(); try { if (currentViewNum >= 1 && currentViewNum <= 4 && views[currentViewNum] instanceof LoginActivitySmsView) { int time = ((LoginActivitySmsView) views[currentViewNum]).openTime; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java index 42aef8d74..7c8ae3216 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java @@ -25,7 +25,6 @@ import android.os.Bundle; import android.text.TextUtils; import android.util.Property; import android.util.SparseArray; -import android.util.TypedValue; import android.view.Gravity; import android.view.MotionEvent; import android.view.Surface; @@ -39,8 +38,6 @@ import android.view.animation.DecelerateInterpolator; import android.view.animation.Interpolator; import android.widget.EditText; import android.widget.FrameLayout; -import android.widget.ImageView; -import android.widget.LinearLayout; import android.widget.TextView; import androidx.annotation.NonNull; @@ -86,13 +83,14 @@ import org.telegram.ui.Components.AnimationProperties; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.ClippingImageView; import org.telegram.ui.Components.EmbedBottomSheet; +import org.telegram.ui.Components.FlickerLoadingView; import org.telegram.ui.Components.FragmentContextView; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.NumberTextView; -import org.telegram.ui.Components.RadialProgressView; import 
org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.ScrollSlidingTextTabStrip; import org.telegram.ui.Components.SharedMediaLayout; +import org.telegram.ui.Components.StickerEmptyView; import java.util.ArrayList; import java.util.Collections; @@ -103,12 +101,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No private static class MediaPage extends FrameLayout { private RecyclerListView listView; - private LinearLayout progressView; - private TextView emptyTextView; + private FlickerLoadingView progressView; + private StickerEmptyView emptyView; private LinearLayoutManager layoutManager; - private ImageView emptyImageView; - private LinearLayout emptyView; - private RadialProgressView progressBar; private ClippingImageView animatingImageView; private int selectedType; @@ -791,6 +786,22 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No if (parentLayout != null) { parentLayout.drawHeaderShadow(canvas, actionBar.getMeasuredHeight() + (int) actionBar.getTranslationY()); } + if (fragmentContextView != null && fragmentContextView.getCurrentStyle() == 3) { + canvas.save(); + canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); + fragmentContextView.setDrawOverlay(true); + fragmentContextView.draw(canvas); + fragmentContextView.setDrawOverlay(false); + canvas.restore(); + } + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == fragmentContextView && fragmentContextView.getCurrentStyle() == 3) { + return true; + } + return super.drawChild(canvas, child, drawingTime); } @Override @@ -1178,48 +1189,54 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No mediaPages[a].animatingImageView.setVisibility(View.GONE); mediaPages[a].listView.addOverlayView(mediaPages[a].animatingImageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - 
mediaPages[a].emptyView = new LinearLayout(context) { + mediaPages[a].progressView = new FlickerLoadingView(context) { + + @Override + public int getColumnsCount() { + return columnsCount; + } + + @Override + public int getViewType() { + if (mediaPage.selectedType == 0 || mediaPage.selectedType == 5) { + return 2; + } else if (mediaPage.selectedType == 1) { + return 3; + } else if (mediaPage.selectedType == 2 || mediaPage.selectedType == 4) { + return 4; + } else if (mediaPage.selectedType == 3) { + return 5; + } else if (mediaPage.selectedType == 7) { + return FlickerLoadingView.USERS_TYPE; + } + return 1; + } + @Override protected void onDraw(Canvas canvas) { - backgroundPaint.setColor(Theme.getColor(Theme.key_windowBackgroundGray)); - canvas.drawRect(0, actionBar.getMeasuredHeight() + actionBar.getTranslationY(), getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); + backgroundPaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); + super.onDraw(canvas); } }; - mediaPages[a].emptyView.setWillNotDraw(false); - mediaPages[a].emptyView.setOrientation(LinearLayout.VERTICAL); - mediaPages[a].emptyView.setGravity(Gravity.CENTER); - mediaPages[a].emptyView.setVisibility(View.GONE); - mediaPages[a].addView(mediaPages[a].emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - mediaPages[a].emptyView.setOnTouchListener((v, event) -> true); - - mediaPages[a].emptyImageView = new ImageView(context); - mediaPages[a].emptyView.addView(mediaPages[a].emptyImageView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); - - mediaPages[a].emptyTextView = new TextView(context); - mediaPages[a].emptyTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText2)); - mediaPages[a].emptyTextView.setGravity(Gravity.CENTER); - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); - 
mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); - mediaPages[a].emptyView.addView(mediaPages[a].emptyTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 24, 0, 0)); - - mediaPages[a].progressView = new LinearLayout(context) { - @Override - protected void onDraw(Canvas canvas) { - backgroundPaint.setColor(Theme.getColor(Theme.key_windowBackgroundGray)); - canvas.drawRect(0, actionBar.getMeasuredHeight() + actionBar.getTranslationY(), getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); - } - }; - mediaPages[a].progressView.setWillNotDraw(false); - mediaPages[a].progressView.setGravity(Gravity.CENTER); - mediaPages[a].progressView.setOrientation(LinearLayout.VERTICAL); - mediaPages[a].progressView.setVisibility(View.GONE); - mediaPages[a].addView(mediaPages[a].progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - - mediaPages[a].progressBar = new RadialProgressView(context); - mediaPages[a].progressView.addView(mediaPages[a].progressBar, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); + mediaPages[a].progressView.setUseHeaderOffset(true); + mediaPages[a].progressView.showDate(false); if (a != 0) { mediaPages[a].setVisibility(View.GONE); } + + mediaPages[a].emptyView = new StickerEmptyView(context, mediaPages[a].progressView, StickerEmptyView.STICKER_TYPE_SEARCH); + mediaPages[a].emptyView.setVisibility(View.GONE); + mediaPages[a].emptyView.setAnimateLayoutChange(true); + mediaPages[a].addView(mediaPages[a].emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + mediaPages[a].emptyView.setOnTouchListener((v, event) -> true); + mediaPages[a].emptyView.showProgress(true, false); + mediaPages[a].emptyView.title.setText(LocaleController.getString("NoResult", R.string.NoResult)); + 
mediaPages[a].emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitle2", R.string.SearchEmptyViewFilteredSubtitle2)); + mediaPages[a].emptyView.addView(mediaPages[a].progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); + mediaPages[a].listView.setAnimateEmptyView(true, 0); } if (!AndroidUtilities.isTablet()) { @@ -1321,30 +1338,22 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No mediaPages[a].listView.stopScroll(); } } - int newItemCount = adapter.getItemCount(); - if (oldItemCount > 1) { - adapter.notifyItemChanged(oldItemCount - 2); - } - if (newItemCount > oldItemCount) { - adapter.notifyItemRangeInserted(oldItemCount, newItemCount); - } else if (newItemCount < oldItemCount) { - adapter.notifyItemRangeRemoved(newItemCount, (oldItemCount - newItemCount)); + if (sharedMediaData[type].messages.size() == 0 && !sharedMediaData[type].loading) { + adapter.notifyDataSetChanged(); + } else { + int newItemCount = adapter.getItemCount(); + if (oldItemCount > 1) { + adapter.notifyItemChanged(oldItemCount - 2); + } + if (newItemCount > oldItemCount) { + adapter.notifyItemRangeInserted(oldItemCount, newItemCount); + } else if (newItemCount < oldItemCount) { + adapter.notifyItemRangeRemoved(newItemCount, (oldItemCount - newItemCount)); + } } } scrolling = true; for (int a = 0; a < mediaPages.length; a++) { - if (mediaPages[a].selectedType == type) { - if (!sharedMediaData[type].loading) { - if (mediaPages[a].progressView != null) { - mediaPages[a].progressView.setVisibility(View.GONE); - } - if (mediaPages[a].selectedType == type && mediaPages[a].listView != null) { - if (mediaPages[a].listView.getEmptyView() == null) { - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - } - } - } - } if (oldItemCount == 0 && actionBar.getTranslationY() != 0 && mediaPages[a].listView.getAdapter() == adapter) 
{ mediaPages[a].layoutManager.scrollToPositionWithOffset(0, (int) actionBar.getTranslationY()); } @@ -1738,12 +1747,6 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No } } } - if (searchItemState != 2 && mediaPages[a].emptyTextView != null) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoResult", R.string.NoResult)); - mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(30)); - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); - mediaPages[a].emptyImageView.setVisibility(View.GONE); - } } } else { if (mediaPages[a].listView != null) { @@ -1767,17 +1770,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No audioSearchAdapter.notifyDataSetChanged(); } } - if (searchItemState != 2 && mediaPages[a].emptyTextView != null) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoResult", R.string.NoResult)); - mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(30)); - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); - mediaPages[a].emptyImageView.setVisibility(View.GONE); - } } } else { - mediaPages[a].emptyTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); - mediaPages[a].emptyImageView.setVisibility(View.VISIBLE); mediaPages[a].listView.setPinnedHeaderShadowDrawable(null); if (mediaPages[a].selectedType == 0) { @@ -1786,58 +1781,27 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No mediaPages[a].listView.setAdapter(photoVideoAdapter); } mediaPages[a].listView.setPinnedHeaderShadowDrawable(pinnedHeaderShadowDrawable); - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip1); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoMediaSecret", R.string.NoMediaSecret)); - } else { - 
mediaPages[a].emptyTextView.setText(LocaleController.getString("NoMedia", R.string.NoMedia)); - } } else if (mediaPages[a].selectedType == 1) { if (currentAdapter != documentsAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(documentsAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip2); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedFilesSecret", R.string.NoSharedFilesSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedFiles", R.string.NoSharedFiles)); - } } else if (mediaPages[a].selectedType == 2) { if (currentAdapter != voiceAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(voiceAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip5); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedVoiceSecret", R.string.NoSharedVoiceSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedVoice", R.string.NoSharedVoice)); - } } else if (mediaPages[a].selectedType == 3) { if (currentAdapter != linksAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(linksAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip3); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedLinksSecret", R.string.NoSharedLinksSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedLinks", R.string.NoSharedLinks)); - } } else if (mediaPages[a].selectedType == 4) { if (currentAdapter != audioAdapter) { recycleAdapter(currentAdapter); mediaPages[a].listView.setAdapter(audioAdapter); } - mediaPages[a].emptyImageView.setImageResource(R.drawable.tip4); - if ((int) dialog_id == 0) { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedAudioSecret", 
R.string.NoSharedAudioSecret)); - } else { - mediaPages[a].emptyTextView.setText(LocaleController.getString("NoSharedAudio", R.string.NoSharedAudio)); - } } - mediaPages[a].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); if (mediaPages[a].selectedType == 0 || mediaPages[a].selectedType == 2) { if (animated) { searchItemState = 2; @@ -1864,14 +1828,6 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No sharedMediaData[mediaPages[a].selectedType].loading = true; MediaDataController.getInstance(currentAccount).loadMedia(dialog_id, 50, 0, mediaPages[a].selectedType, 1, classGuid); } - if (sharedMediaData[mediaPages[a].selectedType].loading && sharedMediaData[mediaPages[a].selectedType].messages.isEmpty()) { - mediaPages[a].progressView.setVisibility(View.VISIBLE); - mediaPages[a].listView.setEmptyView(null); - mediaPages[a].emptyView.setVisibility(View.GONE); - } else { - mediaPages[a].progressView.setVisibility(View.GONE); - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - } mediaPages[a].listView.setVisibility(View.VISIBLE); } if (searchItemState == 2 && actionBar.isSearchFieldVisible()) { @@ -2063,14 +2019,11 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No if (AndroidUtilities.isTablet()) { columnsCount = 3; - mediaPages[num].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); } else { if (rotation == Surface.ROTATION_270 || rotation == Surface.ROTATION_90) { columnsCount = 6; - mediaPages[num].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), 0); } else { columnsCount = 3; - mediaPages[num].emptyTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), AndroidUtilities.dp(128)); } } if (num == 0) { @@ -2154,11 +2107,17 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public int 
getSectionCount() { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + return 1; + } return sharedMediaData[3].sections.size() + (sharedMediaData[3].sections.isEmpty() || sharedMediaData[3].endReached[0] && sharedMediaData[3].endReached[1] ? 0 : 1); } @Override public int getCountForSection(int section) { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + return 1; + } if (section < sharedMediaData[3].sections.size()) { return sharedMediaData[3].sectionArrays.get(sharedMediaData[3].sections.get(section)).size() + 1; } @@ -2171,7 +2130,10 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No view = new GraySectionCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_graySection) & 0xf2ffffff); } - if (section < sharedMediaData[3].sections.size()) { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + view.setAlpha(0); + } else if (section < sharedMediaData[3].sections.size()) { + view.setAlpha(1f); String name = sharedMediaData[3].sections.get(section); ArrayList messageObjects = sharedMediaData[3].sectionArrays.get(name); MessageObject messageObject = messageObjects.get(0); @@ -2191,6 +2153,10 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No view = new SharedLinkCell(mContext); ((SharedLinkCell) view).setDelegate(sharedLinkCellDelegate); break; + case 3: + View emptyStubView = SharedMediaLayout.createEmptyStubView(mContext, 3, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); case 2: default: view = new LoadingCell(mContext, AndroidUtilities.dp(32), AndroidUtilities.dp(54)); @@ -2201,7 +2167,7 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public void onBindViewHolder(int section, int position, 
RecyclerView.ViewHolder holder) { - if (holder.getItemViewType() != 2) { + if (holder.getItemViewType() != 2 && holder.getItemViewType() != 3) { String name = sharedMediaData[3].sections.get(section); ArrayList messageObjects = sharedMediaData[3].sectionArrays.get(name); switch (holder.getItemViewType()) { @@ -2227,6 +2193,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public int getItemViewType(int section, int position) { + if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { + return 3; + } if (section < sharedMediaData[3].sections.size()) { if (position == 0) { return 0; @@ -2265,6 +2234,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public int getSectionCount() { + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + return 1; + } return sharedMediaData[currentType].sections.size() + (sharedMediaData[currentType].sections.isEmpty() || sharedMediaData[currentType].endReached[0] && sharedMediaData[currentType].endReached[1] ? 
0 : 1); } @@ -2275,6 +2247,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public int getCountForSection(int section) { + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + return 1; + } if (section < sharedMediaData[currentType].sections.size()) { return sharedMediaData[currentType].sectionArrays.get(sharedMediaData[currentType].sections.get(section)).size() + 1; } @@ -2287,7 +2262,10 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No view = new GraySectionCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_graySection) & 0xf2ffffff); } - if (section < sharedMediaData[currentType].sections.size()) { + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + view.setAlpha(0); + } else if (section < sharedMediaData[currentType].sections.size()) { + view.setAlpha(1f); String name = sharedMediaData[currentType].sections.get(section); ArrayList messageObjects = sharedMediaData[currentType].sectionArrays.get(name); MessageObject messageObject = messageObjects.get(0); @@ -2309,6 +2287,10 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No case 2: view = new LoadingCell(mContext, AndroidUtilities.dp(32), AndroidUtilities.dp(54)); break; + case 4: + View emptyStubView = SharedMediaLayout.createEmptyStubView(mContext, currentType, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); case 3: default: if (currentType == MediaDataController.MEDIA_MUSIC && !audioCellCache.isEmpty()) { @@ -2343,7 +2325,7 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public void onBindViewHolder(int section, int position, RecyclerView.ViewHolder holder) { - if (holder.getItemViewType() != 
2) { + if (holder.getItemViewType() != 2 && holder.getItemViewType() != 4) { String name = sharedMediaData[currentType].sections.get(section); ArrayList messageObjects = sharedMediaData[currentType].sectionArrays.get(name); switch (holder.getItemViewType()) { @@ -2380,6 +2362,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public int getItemViewType(int section, int position) { + if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { + return 4; + } if (section < sharedMediaData[currentType].sections.size()) { if (position == 0) { return 0; @@ -2425,11 +2410,17 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public int getSectionCount() { + if (sharedMediaData[0].sections.size() == 0 && !sharedMediaData[0].loading) { + return 1; + } return sharedMediaData[0].sections.size() + (sharedMediaData[0].sections.isEmpty() || sharedMediaData[0].endReached[0] && sharedMediaData[0].endReached[1] ? 
0 : 1); } @Override public int getCountForSection(int section) { + if (sharedMediaData[0].sections.size() == 0 && !sharedMediaData[0].loading) { + return 1; + } if (section < sharedMediaData[0].sections.size()) { return (int) Math.ceil(sharedMediaData[0].sectionArrays.get(sharedMediaData[0].sections.get(section)).size() / (float) columnsCount) + 1; } @@ -2442,7 +2433,10 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No view = new SharedMediaSectionCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite) & 0xe5ffffff); } - if (section < sharedMediaData[0].sections.size()) { + if (sharedMediaData[0].sections.size() == 0 && !sharedMediaData[0].loading) { + view.setAlpha(0); + } else if (section < sharedMediaData[0].sections.size()) { + view.setAlpha(1f); String name = sharedMediaData[0].sections.get(section); ArrayList messageObjects = sharedMediaData[0].sectionArrays.get(name); MessageObject messageObject = messageObjects.get(0); @@ -2487,6 +2481,10 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No }); cache.add((SharedPhotoVideoCell) view); break; + case 3: + View emptyStubView = SharedMediaLayout.createEmptyStubView(mContext, 0, dialog_id); + emptyStubView.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + return new RecyclerListView.Holder(emptyStubView); case 2: default: view = new LoadingCell(mContext, AndroidUtilities.dp(32), AndroidUtilities.dp(74)); @@ -2497,7 +2495,7 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public void onBindViewHolder(int section, int position, RecyclerView.ViewHolder holder) { - if (holder.getItemViewType() != 2) { + if (holder.getItemViewType() != 2 && holder.getItemViewType() != 3) { String name = sharedMediaData[0].sections.get(section); ArrayList messageObjects = sharedMediaData[0].sectionArrays.get(name); switch 
(holder.getItemViewType()) { @@ -2533,6 +2531,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No @Override public int getItemViewType(int section, int position) { + if (sharedMediaData[0].sections.size() == 0 && !sharedMediaData[0].loading) { + return 3; + } if (section < sharedMediaData[0].sections.size()) { if (position == 0) { return 0; @@ -2622,10 +2623,17 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No searchesInProgress--; int count = getItemCount(); notifyDataSetChanged(); + for (int a = 0; a < mediaPages.length; a++) { if (mediaPages[a].listView.getAdapter() == this && count == 0 && actionBar.getTranslationY() != 0) { mediaPages[a].layoutManager.scrollToPositionWithOffset(0, (int) actionBar.getTranslationY()); - break; + } + if (mediaPages[a].selectedType == currentType) { + if (searchesInProgress == 0 && count == 0) { + mediaPages[a].emptyView.showProgress(false, true); + } else if (count == 0) { + animateItemsEnter(mediaPages[a].listView, 0); + } } } } @@ -2641,28 +2649,21 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No AndroidUtilities.cancelRunOnUIThread(searchRunnable); searchRunnable = null; } - if (TextUtils.isEmpty(query)) { - if (!searchResult.isEmpty() || !globalSearch.isEmpty() || searchesInProgress != 0) { - searchResult.clear(); - globalSearch.clear(); - if (reqId != 0) { - ConnectionsManager.getInstance(currentAccount).cancelRequest(reqId, true); - reqId = 0; - searchesInProgress--; - } + + if (!searchResult.isEmpty() || !globalSearch.isEmpty() || searchesInProgress != 0) { + searchResult.clear(); + globalSearch.clear(); + if (reqId != 0) { + ConnectionsManager.getInstance(currentAccount).cancelRequest(reqId, true); + reqId = 0; + searchesInProgress--; } - notifyDataSetChanged(); - } else { + } + notifyDataSetChanged(); + if (!TextUtils.isEmpty(query)) { for (int a = 0; a < mediaPages.length; a++) { if (mediaPages[a].selectedType == 
currentType) { - //if (getItemCount() != 0) { - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - mediaPages[a].progressView.setVisibility(View.GONE); - /*} else { - mediaPages[a].listView.setEmptyView(null); - mediaPages[a].emptyView.setVisibility(View.GONE); - mediaPages[a].progressView.setVisibility(View.VISIBLE); - }*/ + mediaPages[a].emptyView.showProgress(true, true); } } @@ -2761,18 +2762,6 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No }); } - @Override - public void notifyDataSetChanged() { - super.notifyDataSetChanged(); - if (searchesInProgress == 0) { - for (int a = 0; a < mediaPages.length; a++) { - if (mediaPages[a].selectedType == currentType) { - mediaPages[a].listView.setEmptyView(mediaPages[a].emptyView); - mediaPages[a].progressView.setVisibility(View.GONE); - } - } - } - } @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { @@ -2875,6 +2864,64 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No } } + private void animateItemsEnter(RecyclerListView listView, int oldItemCount) { + if (listView == null) { + return; + } + int n = listView.getChildCount(); + View progressView = null; + for (int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + if (child instanceof FlickerLoadingView) { + progressView = child; + } + } + final View finalProgressView = progressView; + if (progressView != null) { + listView.removeView(progressView); + } + listView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + int n = listView.getChildCount(); + AnimatorSet animatorSet = new AnimatorSet(); + for (int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + if (listView.getChildAdapterPosition(child) >= oldItemCount - 1) { + child.setAlpha(0); + int s = Math.min(listView.getMeasuredHeight(), Math.max(0, 
child.getTop())); + int delay = (int) ((s / (float) listView.getMeasuredHeight()) * 100); + ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); + a.setStartDelay(delay); + a.setDuration(200); + animatorSet.playTogether(a); + } + if (finalProgressView != null && finalProgressView.getParent() == null) { + listView.addView(finalProgressView); + RecyclerView.LayoutManager layoutManager = listView.getLayoutManager(); + if (layoutManager != null) { + layoutManager.ignoreView(finalProgressView); + Animator animator = ObjectAnimator.ofFloat(finalProgressView, View.ALPHA, finalProgressView.getAlpha(), 0); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + finalProgressView.setAlpha(1f); + layoutManager.stopIgnoringView(finalProgressView); + listView.removeView(finalProgressView); + } + }); + animator.start(); + } + } + } + + animatorSet.start(); + return true; + } + }); + } + @Override public ArrayList getThemeDescriptions() { ArrayList arrayList = new ArrayList<>(); @@ -2932,10 +2979,6 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_SELECTOR, null, null, null, null, Theme.key_listSelector)); arrayList.add(new ThemeDescription(mediaPages[a].emptyView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_emptyListPlaceholder)); - arrayList.add(new ThemeDescription(mediaPages[a].progressBar, ThemeDescription.FLAG_PROGRESSBAR, null, null, null, null, Theme.key_progressCircle)); - - arrayList.add(new ThemeDescription(mediaPages[a].emptyTextView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteGrayText2)); - arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_SECTIONS, new Class[]{GraySectionCell.class}, new String[]{"textView"}, null, null, null, Theme.key_graySectionText)); arrayList.add(new 
ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_CELLBACKGROUNDCOLOR | ThemeDescription.FLAG_SECTIONS, new Class[]{GraySectionCell.class}, null, null, null, Theme.key_graySection)); @@ -2972,6 +3015,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No arrayList.add(new ThemeDescription(mediaPages[a].listView, ThemeDescription.FLAG_CHECKBOXCHECK, new Class[]{SharedPhotoVideoCell.class}, null, null, cellDelegate, Theme.key_checkboxCheck)); arrayList.add(new ThemeDescription(mediaPages[a].listView, 0, null, null, new Drawable[]{pinnedHeaderShadowDrawable}, null, Theme.key_windowBackgroundGrayShadow)); + + arrayList.add(new ThemeDescription(mediaPages[a].emptyView.title, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + arrayList.add(new ThemeDescription(mediaPages[a].emptyView.subtitle, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); } return arrayList; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/MessageStatisticActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/MessageStatisticActivity.java index ed7ad469f..15a8685a3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/MessageStatisticActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/MessageStatisticActivity.java @@ -18,6 +18,7 @@ import android.os.Bundle; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.TextUtils; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; @@ -253,7 +254,7 @@ public class MessageStatisticActivity extends BaseFragment implements Notificati imageView.playAnimation(); TextView loadingTitle = new TextView(context); - loadingTitle.setTextSize(20); + loadingTitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); loadingTitle.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); 
loadingTitle.setTextColor(Theme.getColor(Theme.key_player_actionBarTitle)); loadingTitle.setTag(Theme.key_player_actionBarTitle); @@ -261,7 +262,7 @@ public class MessageStatisticActivity extends BaseFragment implements Notificati loadingTitle.setGravity(Gravity.CENTER_HORIZONTAL); TextView loadingSubtitle = new TextView(context); - loadingSubtitle.setTextSize(15); + loadingSubtitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); loadingSubtitle.setTextColor(Theme.getColor(Theme.key_player_actionBarSubtitle)); loadingSubtitle.setTag(Theme.key_player_actionBarSubtitle); loadingSubtitle.setText(LocaleController.getString("LoadingStatsDescription", R.string.LoadingStatsDescription)); @@ -371,7 +372,7 @@ public class MessageStatisticActivity extends BaseFragment implements Notificati if (!messageObject.needDrawBluredPreview() && (messageObject.isPhoto() || messageObject.isNewGif() || messageObject.isVideo())) { String type = messageObject.isWebpage() ? messageObject.messageOwner.media.webpage.type : null; - if (!("app".equals(type) || "profile".equals(type) || "article".equals(type))) { + if (!("app".equals(type) || "profile".equals(type) || "article".equals(type) || type != null && type.startsWith("telegram_"))) { TLRPC.PhotoSize smallThumb = FileLoader.getClosestPhotoSizeWithSize(messageObject.photoThumbs, 40); TLRPC.PhotoSize bigThumb = FileLoader.getClosestPhotoSizeWithSize(messageObject.photoThumbs, AndroidUtilities.getPhotoSize()); if (smallThumb == bigThumb) { @@ -848,8 +849,8 @@ public class MessageStatisticActivity extends BaseFragment implements Notificati title[j] = new TextView(context); primary[j].setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - primary[j].setTextSize(17); - title[j].setTextSize(13); + primary[j].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); + title[j].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); contentCell.addView(primary[j]); contentCell.addView(title[j]); diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java index f6026c711..62004b223 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java @@ -849,6 +849,7 @@ public class NotificationsCustomSettingsActivity extends BaseFragment { editor.putString("ChannelSoundPath", "NoSound"); } } + getNotificationsController().deleteNotificationChannelGlobal(currentType); editor.commit(); getNotificationsController().updateServerNotificationsSettings(currentType); RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(requestCode); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java index 2c37ff071..6173a75ba 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java @@ -587,9 +587,9 @@ public class PasscodeActivity extends BaseFragment implements NotificationCenter dropDownContainer.setLayoutParams(layoutParams); } if (!AndroidUtilities.isTablet() && ApplicationLoader.applicationContext.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) { - dropDown.setTextSize(18); + dropDown.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); } else { - dropDown.setTextSize(20); + dropDown.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java index bc9549c48..6166fc04c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java @@ -286,7 +286,7 @@ public class 
PeopleNearbyActivity extends BaseFragment implements NotificationCe args1.putBoolean("expandPhoto", true); } args1.putInt("nearby_distance", peerLocated.distance); - MessagesController.getInstance(currentAccount).ensureMessagesLoaded(peerLocated.peer.user_id, false, 0, null, null); + MessagesController.getInstance(currentAccount).ensureMessagesLoaded(peerLocated.peer.user_id, 0, null); presentFragment(new ProfileActivity(args1)); } } else if (position >= chatsStartRow && position < chatsEndRow) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java index aa03b1f67..9b5afb8a2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java @@ -21,6 +21,7 @@ import android.graphics.drawable.Drawable; import android.os.Build; import android.text.InputFilter; import android.text.TextPaint; +import android.util.TypedValue; import android.view.Gravity; import android.view.HapticFeedbackConstants; import android.view.KeyEvent; @@ -335,7 +336,7 @@ public class PhotoAlbumPickerActivity extends BaseFragment implements Notificati emptyView = new TextView(context); emptyView.setTextColor(0xff808080); - emptyView.setTextSize(20); + emptyView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); emptyView.setGravity(Gravity.CENTER); emptyView.setVisibility(View.GONE); emptyView.setText(LocaleController.getString("NoPhotos", R.string.NoPhotos)); @@ -471,7 +472,7 @@ public class PhotoAlbumPickerActivity extends BaseFragment implements Notificati continue; } int num = a; - itemCells[a] = new ActionBarMenuSubItem(getParentActivity()); + itemCells[a] = new ActionBarMenuSubItem(getParentActivity(), a == 0, a == 1); if (num == 0) { if (UserObject.isUserSelf(user)) { itemCells[a].setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.msg_schedule); 
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java index e71cc466c..d1a5813ab 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java @@ -1061,7 +1061,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen continue; } int num = a; - itemCells[a] = new ActionBarMenuSubItem(getParentActivity()); + itemCells[a] = new ActionBarMenuSubItem(getParentActivity(), a == 0, a == 1); if (num == 0) { if (UserObject.isUserSelf(user)) { itemCells[a].setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.msg_schedule); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java index c9e330b76..50b84d57b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java @@ -45,6 +45,7 @@ import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.SystemClock; +import android.os.Vibrator; import android.provider.Settings; import androidx.annotation.Keep; import androidx.annotation.NonNull; @@ -52,6 +53,7 @@ import androidx.annotation.Nullable; import androidx.collection.ArrayMap; import androidx.core.content.ContextCompat; import androidx.core.content.FileProvider; +import androidx.core.graphics.ColorUtils; import androidx.core.view.ViewCompat; import androidx.core.widget.NestedScrollView; import androidx.dynamicanimation.animation.DynamicAnimation; @@ -208,6 +210,7 @@ import org.telegram.ui.Components.SizeNotifierFrameLayoutPhoto; import org.telegram.ui.Components.StickersAlert; import org.telegram.ui.Components.TextViewSwitcher; import org.telegram.ui.Components.Tooltip; +import org.telegram.ui.Components.URLSpanReplacement; import 
org.telegram.ui.Components.URLSpanUserMentionPhotoViewer; import org.telegram.ui.Components.UndoView; import org.telegram.ui.Components.VideoEditTextureView; @@ -482,6 +485,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } }; + private TextView captionLimitView; + private Drawable pickerViewSendDrawable; + public void addPhoto(MessageObject message, int classGuid) { if (classGuid != this.classGuid) { return; @@ -558,7 +564,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat ((LaunchActivity) parentActivity).presentFragment(fragment, false, true); closePhoto(false, false); } - } else if (parentChatActivity != null && AndroidUtilities.shouldShowUrlInAlert(url)) { + } else if (parentChatActivity != null && (link instanceof URLSpanReplacement || AndroidUtilities.shouldShowUrlInAlert(url))) { AlertsCreator.showOpenUrlAlert(parentChatActivity, url, true, true); } else { link.onClick(widget); @@ -677,7 +683,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat videoPlayerSeekbarView.invalidate(); if (shouldSavePositionForCurrentVideo != null) { float value = progress; - if (value >= 0 && shouldSavePositionForCurrentVideo != null && SystemClock.elapsedRealtime() - lastSaveTime >= 1000) { + if (value >= 0 && SystemClock.elapsedRealtime() - lastSaveTime >= 1000) { String saveFor = shouldSavePositionForCurrentVideo; lastSaveTime = SystemClock.elapsedRealtime(); Utilities.globalQueue.postRunnable(() -> { @@ -1048,9 +1054,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } }, 50); } else { - if (parentAlert != null) { - parentAlert.setAllowDrawContent(allowDrawContent); - } + parentAlert.setAllowDrawContent(true); } } } @@ -1378,8 +1382,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } public void setAlpha(float value) { - alphas[0] = animAlphas[0] = value; - checkVisibility(); + setIndexedAlpha(0, 
value, false); } public void setScale(float value) { @@ -1561,6 +1564,16 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } + @Override + public void replaceButtonPressed(int index, VideoEditedInfo videoEditedInfo) { + + } + + @Override + public boolean canReplace(int index) { + return false; + } + @Override public int getSelectedCount() { return 0; @@ -1571,6 +1584,11 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } + @Override + public boolean allowSendingSubmenu() { + return true; + } + @Override public boolean allowCaption() { return true; @@ -1646,6 +1664,20 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat public CharSequence getSubtitleFor(int i) { return null; } + + @Override + public MessageObject getEditingMessageObject() { + return null; + } + + @Override + public void onCaptionChanged(CharSequence caption) { + } + + @Override + public boolean closeKeyboard() { + return false; + } } public interface PhotoViewerProvider { @@ -1659,8 +1691,11 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat boolean cancelButtonPressed(); void needAddMorePhotos(); void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate); + void replaceButtonPressed(int index, VideoEditedInfo videoEditedInfo); + boolean canReplace(int index); int getSelectedCount(); void updatePhotoAtIndex(int index); + boolean allowSendingSubmenu(); boolean allowCaption(); boolean scaleToFill(); ArrayList getSelectedPhotosOrder(); @@ -1675,6 +1710,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat boolean loadMore(); CharSequence getTitleFor(int index); CharSequence getSubtitleFor(int index); + MessageObject getEditingMessageObject(); + void onCaptionChanged(CharSequence caption); + boolean closeKeyboard(); } private class FrameLayoutDrawer extends SizeNotifierFrameLayoutPhoto { @@ -1750,6 
+1788,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (cameraItem != null) { cameraItem.setTranslationY(y); } + if (captionLimitView != null) { + captionLimitView.setTranslationY(y); + } invalidate(); } @@ -2031,7 +2072,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat canvas.restore(); return r; } - } else if (child == cameraItem || child == muteItem || child == pickerView || child == videoTimelineView || child == pickerViewSendButton || child == captionTextViewSwitcher || muteItem.getVisibility() == VISIBLE && child == bottomLayout) { + } else if (child == cameraItem || child == muteItem || child == pickerView || child == videoTimelineView || child == pickerViewSendButton || child == captionLimitView || child == captionTextViewSwitcher || muteItem.getVisibility() == VISIBLE && child == bottomLayout) { if (captionEditText.isPopupAnimatig()) { child.setTranslationY(captionEditText.getEmojiPadding()); bottomTouchEnabled = false; @@ -2095,6 +2136,14 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat adjustPanLayoutHelper.onDetach(); Bulletin.removeDelegate(this); } + + @Override + public void notifyHeightChanged() { + super.notifyHeightChanged(); + if (isCurrentVideo) { + photoProgressViews[0].setIndexedAlpha(2, getKeyboardHeight() <= AndroidUtilities.dp(20) ? 
1.0f : 0.0f, true); + } + } } private static final Property VPC_PROGRESS; @@ -2827,26 +2876,22 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } else if (id == NotificationCenter.mediaCountDidLoad) { long uid = (Long) args[0]; if (uid == currentDialogId || uid == mergeDialogId) { - if (uid == currentDialogId) { - totalImagesCount = (Integer) args[1]; - /*if ((Boolean) args[2]) { - SharedMediaQuery.getMediaCount(currentDialogId, sharedMediaType, classGuid, false); - }*/ - } else if (uid == mergeDialogId) { - totalImagesCountMerge = (Integer) args[1]; - /*if ((Boolean) args[2]) { - SharedMediaQuery.getMediaCount(mergeDialogId, sharedMediaType, classGuid, false); - }*/ - } - if (needSearchImageInArr && isFirstLoading) { - isFirstLoading = false; - loadingMoreImages = true; - MediaDataController.getInstance(currentAccount).loadMedia(currentDialogId, 20, 0, sharedMediaType, 1, classGuid); - } else if (!imagesArr.isEmpty()) { - if (opennedFromMedia) { - actionBar.setTitle(LocaleController.formatString("Of", R.string.Of, currentIndex + 1, totalImagesCount + totalImagesCountMerge)); + if (currentMessageObject == null || MediaDataController.getMediaType(currentMessageObject.messageOwner) == sharedMediaType) { + if (uid == currentDialogId) { + totalImagesCount = (Integer) args[1]; } else { - actionBar.setTitle(LocaleController.formatString("Of", R.string.Of, (totalImagesCount + totalImagesCountMerge - imagesArr.size()) + currentIndex + 1, totalImagesCount + totalImagesCountMerge)); + totalImagesCountMerge = (Integer) args[1]; + } + if (needSearchImageInArr && isFirstLoading) { + isFirstLoading = false; + loadingMoreImages = true; + MediaDataController.getInstance(currentAccount).loadMedia(currentDialogId, 20, 0, sharedMediaType, 1, classGuid); + } else if (!imagesArr.isEmpty()) { + if (opennedFromMedia) { + actionBar.setTitle(LocaleController.formatString("Of", R.string.Of, currentIndex + 1, totalImagesCount + totalImagesCountMerge)); + } 
else { + actionBar.setTitle(LocaleController.formatString("Of", R.string.Of, (totalImagesCount + totalImagesCountMerge - imagesArr.size()) + currentIndex + 1, totalImagesCount + totalImagesCountMerge)); + } } } } @@ -2871,6 +2916,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat for (int a = 0; a < arr.size(); a++) { MessageObject message = arr.get(a); if (imagesByIdsTemp[loadIndex].indexOfKey(message.getId()) < 0) { + FileLog.d("add message " + message.getId() + " media = " + message.messageOwner.media); imagesByIdsTemp[loadIndex].put(message.getId(), message); if (opennedFromMedia) { imagesArrTemp.add(message); @@ -3402,9 +3448,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } if (f != null && f.exists()) { - MediaController.saveFile(f.toString(), parentActivity, isVideo ? 1 : 0, null, null, () -> { - BulletinFactory.createSaveToGalleryBulletin(containerView, isVideo, 0xf9222222, 0xffffffff).show(); - }); + MediaController.saveFile(f.toString(), parentActivity, isVideo ? 
1 : 0, null, null, () -> BulletinFactory.createSaveToGalleryBulletin(containerView, isVideo, 0xf9222222, 0xffffffff).show()); } else { showDownloadAlert(); } @@ -3428,7 +3472,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } Bundle args = new Bundle(); long dialogId = currentDialogId; - if (dialogId == 0 && currentMessageObject != null) { + if (currentMessageObject != null) { dialogId = currentMessageObject.getDialogId(); } int lower_part = (int) dialogId; @@ -3486,7 +3530,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (lower_part != 0) { if (lower_part > 0) { args1.putInt("user_id", lower_part); - } else if (lower_part < 0) { + } else { args1.putInt("chat_id", -lower_part); } } else { @@ -3927,7 +3971,15 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } }); - bottomLayout = new FrameLayout(activityContext); + bottomLayout = new FrameLayout(activityContext) { + @Override + protected void measureChildWithMargins(View child, int parentWidthMeasureSpec, int widthUsed, int parentHeightMeasureSpec, int heightUsed) { + if (child == nameTextView || child == dateTextView) { + widthUsed = bottomButtonsLayout.getMeasuredWidth(); + } + super.measureChildWithMargins(child, parentWidthMeasureSpec, widthUsed, parentHeightMeasureSpec, heightUsed); + } + }; bottomLayout.setBackgroundColor(0x7f000000); containerView.addView(bottomLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM | Gravity.LEFT)); @@ -4088,7 +4140,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } }; - bottomLayout.addView(nameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 16, 5, 60, 0)); + bottomLayout.addView(nameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 16, 5, 8, 0)); dateTextView = new 
FadingTextViewLayout(containerView.getContext(), true) { @@ -4134,7 +4186,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } }; - bottomLayout.addView(dateTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 16, 25, 50, 0)); + bottomLayout.addView(dateTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 16, 25, 8, 0)); createVideoControlsInterface(); @@ -4408,26 +4460,62 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat public boolean onTouchEvent(MotionEvent event) { return bottomTouchEnabled && super.onTouchEvent(event); } + + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + if (captionEditText.getCaptionLimitOffset() < 0) { + captionLimitView.setVisibility(visibility); + } else { + captionLimitView.setVisibility(View.GONE); + } + } + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + captionLimitView.setTranslationY(translationY); + } + + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + captionLimitView.setAlpha(alpha); + } }; pickerViewSendButton.setScaleType(ImageView.ScaleType.CENTER); - Drawable drawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(56), Theme.getColor(Theme.key_dialogFloatingButton), Theme.getColor(Build.VERSION.SDK_INT >= 21 ? Theme.key_dialogFloatingButtonPressed : Theme.key_dialogFloatingButton)); - pickerViewSendButton.setBackgroundDrawable(drawable); + pickerViewSendDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(56), Theme.getColor(Theme.key_dialogFloatingButton), Theme.getColor(Build.VERSION.SDK_INT >= 21 ? 
Theme.key_dialogFloatingButtonPressed : Theme.key_dialogFloatingButton)); + pickerViewSendButton.setBackgroundDrawable(pickerViewSendDrawable); pickerViewSendButton.setColorFilter(new PorterDuffColorFilter(0xffffffff, PorterDuff.Mode.MULTIPLY)); pickerViewSendButton.setImageResource(R.drawable.attach_send); pickerViewSendButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_dialogFloatingIcon), PorterDuff.Mode.MULTIPLY)); containerView.addView(pickerViewSendButton, LayoutHelper.createFrame(56, 56, Gravity.RIGHT | Gravity.BOTTOM, 0, 0, 14, 14)); pickerViewSendButton.setContentDescription(LocaleController.getString("Send", R.string.Send)); pickerViewSendButton.setOnClickListener(v -> { + if (captionEditText.getCaptionLimitOffset() < 0) { + AndroidUtilities.shakeView(captionLimitView, 2, 0); + Vibrator vibrator = (Vibrator) captionLimitView.getContext().getSystemService(Context.VIBRATOR_SERVICE); + if (vibrator != null) { + vibrator.vibrate(200); + } + return; + } if (parentChatActivity != null && parentChatActivity.isInScheduleMode() && !parentChatActivity.isEditingMessageMedia()) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentChatActivity.getDialogId(), this::sendPressed); + showScheduleDatePickerDialog(); } else { sendPressed(true, 0); } }); pickerViewSendButton.setOnLongClickListener(view -> { + if (placeProvider != null && !placeProvider.allowSendingSubmenu()) { + return false; + } if (parentChatActivity == null || parentChatActivity.isInScheduleMode()) { return false; } + if (captionEditText.getCaptionLimitOffset() < 0) { + return false; + } TLRPC.Chat chat = parentChatActivity.getCurrentChat(); TLRPC.User user = parentChatActivity.getCurrentUser(); @@ -4452,8 +4540,13 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat sendPopupLayout.setShowedFromBotton(false); sendPopupLayout.setBackgroundColor(0xf9222222); - int i = 0; - for (int a = 0; a < 2; a++) { + final boolean canReplace = 
placeProvider != null && placeProvider.canReplace(currentIndex); + final int[] order = {3, 2, 0, 1}; + for (int i = 0; i < 4; i++) { + final int a = order[i]; + if (a != 2 && a != 3 && canReplace) { + continue; + } if (a == 0 && !parentChatActivity.canScheduleMessage()) { continue; } @@ -4481,17 +4574,22 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } } else if (a == 1 && UserObject.isUserSelf(user)) { continue; + } else if ((a == 2 || a == 3) && !canReplace) { + continue; } - int num = a; - ActionBarMenuSubItem cell = new ActionBarMenuSubItem(parentActivity); - if (num == 0) { + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(parentActivity, a == 0, a == 3); + if (a == 0) { if (UserObject.isUserSelf(user)) { cell.setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.msg_schedule); } else { cell.setTextAndIcon(LocaleController.getString("ScheduleMessage", R.string.ScheduleMessage), R.drawable.msg_schedule); } - } else if (num == 1) { + } else if (a == 1) { cell.setTextAndIcon(LocaleController.getString("SendWithoutSound", R.string.SendWithoutSound), R.drawable.input_notify_off); + } else if (a == 2) { + cell.setTextAndIcon(LocaleController.getString("ReplacePhoto", R.string.ReplacePhoto), R.drawable.msg_replace); + } else if (a == 3) { + cell.setTextAndIcon(LocaleController.getString("SendAsNewPhoto", R.string.SendAsNewPhoto), R.drawable.msg_sendphoto); } cell.setMinimumWidth(AndroidUtilities.dp(196)); cell.setColors(0xffffffff, 0xffffffff); @@ -4500,13 +4598,16 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (sendPopupWindow != null && sendPopupWindow.isShowing()) { sendPopupWindow.dismiss(); } - if (num == 0) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentChatActivity.getDialogId(), this::sendPressed); - } else if (num == 1) { + if (a == 0) { + showScheduleDatePickerDialog(); + } else if (a == 1) { sendPressed(false, 0); + 
} else if (a == 2) { + replacePressed(); + } else if (a == 3) { + sendPressed(true, 0); } }); - i++; } sendPopupLayout.setupRadialSelectors(0x24ffffff); @@ -4530,6 +4631,14 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat return false; }); + + captionLimitView = new TextView(parentActivity); + captionLimitView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + captionLimitView.setTextColor(0xffEC7777); + captionLimitView.setGravity(Gravity.CENTER); + captionLimitView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + containerView.addView(captionLimitView, LayoutHelper.createFrame(56, 20, Gravity.BOTTOM | Gravity.RIGHT, 3, 0, 14, 78)); + itemsLayout = new LinearLayout(parentActivity) { boolean ignoreLayout; @@ -4552,12 +4661,10 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat int itemWidth = Math.min(AndroidUtilities.dp(70), width / visibleItemsCount); if (compressItem.getVisibility() == VISIBLE) { ignoreLayout = true; - int compressIconWidth = 64; + int compressIconWidth; if (selectedCompression < 2) { compressIconWidth = 48; - } else if (selectedCompression == 2) { - compressIconWidth = 64; - } else if (selectedCompression == 3) { + } else { compressIconWidth = 64; } int padding = Math.max(0, (itemWidth - AndroidUtilities.dp(compressIconWidth)) / 2); @@ -5094,6 +5201,18 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (mentionsAdapter != null && captionEditText != null && parentChatActivity != null && text != null) { mentionsAdapter.searchUsernameOrHashtag(text.toString(), captionEditText.getCursorPosition(), parentChatActivity.messages, false); } + int color = Theme.getColor(Theme.key_dialogFloatingIcon); + if (captionEditText.getCaptionLimitOffset() < 0) { + captionLimitView.setText(Integer.toString(captionEditText.getCaptionLimitOffset())); + captionLimitView.setVisibility(pickerViewSendButton.getVisibility()); + 
pickerViewSendButton.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (Color.alpha(color) * 0.58f)), PorterDuff.Mode.MULTIPLY)); + } else { + pickerViewSendButton.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + captionLimitView.setVisibility(View.GONE); + } + if (placeProvider != null) { + placeProvider.onCaptionChanged(text); + } } @Override @@ -5320,7 +5439,20 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } } + private void showScheduleDatePickerDialog() { + final AlertsCreator.ScheduleDatePickerColors colors = new AlertsCreator.ScheduleDatePickerColors(0xffffffff, 0xff252525, 0xffffffff, 0x1effffff, 0xffffffff, 0xf9222222, 0x24ffffff); + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentChatActivity.getDialogId(), this::sendPressed, colors); + } + private void sendPressed(boolean notify, int scheduleDate) { + sendPressed(notify, scheduleDate, false); + } + + private void replacePressed() { + sendPressed(false, 0, true); + } + + private void sendPressed(boolean notify, int scheduleDate, boolean replace) { if (captionEditText.getTag() != null) { return; } @@ -5328,7 +5460,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (sendPhotoType == SELECT_TYPE_AVATAR) { applyCurrentEditMode(); } - if (parentChatActivity != null) { + if (!replace && parentChatActivity != null) { TLRPC.Chat chat = parentChatActivity.getCurrentChat(); TLRPC.User user = parentChatActivity.getCurrentUser(); if (user != null || ChatObject.isChannel(chat) && chat.megagroup || !ChatObject.isChannel(chat)) { @@ -5343,7 +5475,11 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } } doneButtonPressed = true; - placeProvider.sendButtonPressed(currentIndex, videoEditedInfo, notify, scheduleDate); + if (!replace) { + placeProvider.sendButtonPressed(currentIndex, videoEditedInfo, notify, scheduleDate); + } else { + 
placeProvider.replaceButtonPressed(currentIndex, videoEditedInfo); + } closePhoto(false, false); } } @@ -5355,17 +5491,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (Build.VERSION.SDK_INT < 23 || Settings.canDrawOverlays(parentActivity)) { return true; } else { - new AlertDialog.Builder(parentActivity).setTitle(LocaleController.getString("AppName", R.string.AppName)) - .setMessage(LocaleController.getString("PermissionDrawAboveOtherApps", R.string.PermissionDrawAboveOtherApps)) - .setPositiveButton(LocaleController.getString("PermissionOpenSettings", R.string.PermissionOpenSettings), (dialog, which) -> { - if (parentActivity != null) { - try { - parentActivity.startActivity(new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + parentActivity.getPackageName()))); - } catch (Exception e) { - FileLog.e(e); - } - } - }).show(); + AlertsCreator.createDrawOverlayPermissionDialog(parentActivity, null).show(); } return false; } @@ -5810,6 +5936,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (imageMoveAnimation != null || changeModeAnimation != null || currentEditMode != 0 || sendPhotoType == SELECT_TYPE_AVATAR || sendPhotoType == SELECT_TYPE_WALLPAPER || sendPhotoType == SELECT_TYPE_QR) { return; } + if (!windowView.isFocusable()) { + makeFocusable(); + } selectedPhotosListView.setEnabled(false); photosCounterView.setRotationX(0.0f); isPhotosListViewVisible = false; @@ -6440,9 +6569,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (changingTextureView) { changingTextureView = false; if (isInline) { - if (isInline) { - waitingForFirstTextureUpload = 1; - } + waitingForFirstTextureUpload = 1; changedTextureView.setSurfaceTexture(surfaceTexture); changedTextureView.setSurfaceTextureListener(surfaceTextureListener); changedTextureView.setVisibility(View.VISIBLE); @@ -8454,7 +8581,7 @@ public class PhotoViewer implements 
NotificationCenter.NotificationCenterDelegat } else { return location.location.volume_id + "_" + location.location.local_id + ".jpg"; } - } else if (!imagesArr.isEmpty()) { + } else { if (index >= imagesArr.size()) { return null; } @@ -9058,9 +9185,11 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (currentAnimation == null && !isEvent) { if (currentDialogId != 0 && totalImagesCount == 0 && currentMessageObject != null && !currentMessageObject.scheduled) { - MediaDataController.getInstance(currentAccount).getMediaCount(currentDialogId, sharedMediaType, classGuid, true); - if (mergeDialogId != 0) { - MediaDataController.getInstance(currentAccount).getMediaCount(mergeDialogId, sharedMediaType, classGuid, true); + if (MediaDataController.getMediaType(currentMessageObject.messageOwner) == sharedMediaType) { + MediaDataController.getInstance(currentAccount).getMediaCount(currentDialogId, sharedMediaType, classGuid, true); + if (mergeDialogId != 0) { + MediaDataController.getInstance(currentAccount).getMediaCount(mergeDialogId, sharedMediaType, classGuid, true); + } } } else if (avatarsDialogId != 0) { MessagesController.getInstance(currentAccount).loadDialogPhotos(avatarsDialogId, 80, 0, true, classGuid); @@ -9074,6 +9203,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat CharSequence caption = null; TLRPC.User user = parentChatActivity != null ? 
parentChatActivity.getCurrentUser() : null; boolean allowTimeItem = !isDocumentsPicker && parentChatActivity != null && !parentChatActivity.isSecretChat() && !parentChatActivity.isInScheduleMode() && user != null && !user.bot && !UserObject.isUserSelf(user) && !parentChatActivity.isEditingMessageMedia(); + if (placeProvider != null && placeProvider.getEditingMessageObject() != null) { + allowTimeItem = false; + } if (entry instanceof TLRPC.BotInlineResult) { allowTimeItem = false; } else if (entry instanceof MediaController.PhotoEntry) { @@ -9202,6 +9334,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } else { dateTextView.setText(dateString, animated); } + } else if (!TextUtils.isEmpty(placeProvider.getTitleFor(switchingToIndex))) { + nameTextView.setText(""); + dateTextView.setText(""); } caption = newMessageObject.caption; } @@ -10584,7 +10719,13 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat public static boolean isShowingImage(MessageObject object) { boolean result = false; if (Instance != null) { - result = !Instance.pipAnimationInProgress && Instance.isVisible && !Instance.disableShowCheck && object != null && Instance.currentMessageObject != null && Instance.currentMessageObject.getId() == object.getId() && Instance.currentMessageObject.getDialogId() == object.getDialogId(); + if (!Instance.pipAnimationInProgress && Instance.isVisible && !Instance.disableShowCheck && object != null) { + MessageObject currentMessageObject = Instance.currentMessageObject; + if (currentMessageObject == null && Instance.placeProvider != null) { + currentMessageObject = Instance.placeProvider.getEditingMessageObject(); + } + result = currentMessageObject != null && currentMessageObject.getId() == object.getId() && currentMessageObject.getDialogId() == object.getDialogId(); + } } if (!result && PipInstance != null) { result = PipInstance.isVisible && !PipInstance.disableShowCheck && object != null && 
PipInstance.currentMessageObject != null && PipInstance.currentMessageObject.getId() == object.getId() && PipInstance.currentMessageObject.getDialogId() == object.getDialogId(); @@ -10719,18 +10860,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat skipFirstBufferingProgress = false; playerInjected = false; - if (Build.VERSION.SDK_INT >= 21) { - windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | - WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR | - WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS; - } else { - windowLayoutParams.flags = 0; - } - windowLayoutParams.softInputMode = (useSmoothKeyboard ? WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN : WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE) | WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION; - WindowManager wm = (WindowManager) parentActivity.getSystemService(Context.WINDOW_SERVICE); - wm.updateViewLayout(windowView, windowLayoutParams); - windowView.setFocusable(true); - containerView.setFocusable(true); + makeFocusable(); backgroundDrawable.setAlpha(255); containerView.setAlpha(1.0f); @@ -10750,8 +10880,12 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat File file = null; boolean isVideo = false; + boolean capReplace = false; + MessageObject messageObject = null; if (currentMessageObject != null) { + messageObject = currentMessageObject; + capReplace = currentMessageObject.canEditMedia() && !currentMessageObject.isDocument(); isVideo = currentMessageObject.isVideo(); if (!TextUtils.isEmpty(currentMessageObject.messageOwner.attachPath)) { file = new File(currentMessageObject.messageOwner.attachPath); @@ -10772,6 +10906,8 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat File finalFile = file; boolean finalIsVideo = isVideo; + boolean finalCanReplace = capReplace; + MessageObject finalMessageObject = messageObject; AndroidUtilities.runOnUIThread(() 
-> { int orientation = 0; try { @@ -10795,10 +10931,16 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat sendPhotoType = 2; doneButtonPressed = false; + final PhotoViewerProvider chatPhotoProvider = placeProvider; placeProvider = new EmptyPhotoViewerProvider() { private final ImageReceiver.BitmapHolder thumbHolder = centerImage.getBitmapSafe(); + @Override + public PlaceProviderObject getPlaceForPhoto(MessageObject messageObject, TLRPC.FileLocation fileLocation, int index, boolean needPreview) { + return chatPhotoProvider != null ? chatPhotoProvider.getPlaceForPhoto(finalMessageObject, null, 0, needPreview) : null; + } + @Override public ImageReceiver.BitmapHolder getThumbForPhoto(MessageObject messageObject, TLRPC.FileLocation fileLocation, int index) { return thumbHolder; @@ -10806,15 +10948,53 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat @Override public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { - if (parentChatActivity != null) { - parentChatActivity.sendMedia(photoEntry, videoEditedInfo, notify, scheduleDate); + sendMedia(videoEditedInfo, notify, scheduleDate, false); + } + + @Override + public void replaceButtonPressed(int index, VideoEditedInfo videoEditedInfo) { + if (photoEntry.isCropped || photoEntry.isPainted || photoEntry.isFiltered || videoEditedInfo != null || !TextUtils.isEmpty(photoEntry.caption)) { + sendMedia(videoEditedInfo, false, 0, true); } } + @Override + public boolean canReplace(int index) { + return chatPhotoProvider != null && finalCanReplace; + } + + @Override + public MessageObject getEditingMessageObject() { + return finalMessageObject; + } + @Override public boolean canCaptureMorePhotos() { return false; } + + private void sendMedia(VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean replace) { + if (parentChatActivity != null) { + final MessageObject editingMessageObject = 
replace ? finalMessageObject : null; + if (editingMessageObject != null && !TextUtils.isEmpty(photoEntry.caption)) { + editingMessageObject.editingMessage = photoEntry.caption; + editingMessageObject.editingMessageEntities = photoEntry.entities; + } + if (photoEntry.isVideo) { + if (videoEditedInfo != null) { + SendMessagesHelper.prepareSendingVideo(parentChatActivity.getAccountInstance(), photoEntry.path, videoEditedInfo, parentChatActivity.getDialogId(), null, parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate); + } else { + SendMessagesHelper.prepareSendingVideo(parentChatActivity.getAccountInstance(), photoEntry.path, null, parentChatActivity.getDialogId(), null, parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate); + } + } else { + if (photoEntry.imagePath != null) { + SendMessagesHelper.prepareSendingPhoto(parentChatActivity.getAccountInstance(), photoEntry.imagePath, photoEntry.thumbPath, null, parentChatActivity.getDialogId(), null, parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate); + } else if (photoEntry.path != null) { + SendMessagesHelper.prepareSendingPhoto(parentChatActivity.getAccountInstance(), photoEntry.path, photoEntry.thumbPath, null, parentChatActivity.getDialogId(), null, parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate); + } + } + } + } }; selectedPhotosAdapter.notifyDataSetChanged(); @@ -10825,18 +11005,11 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat togglePhotosListView(false, false); toggleActionBar(true, false); - if (Build.VERSION.SDK_INT >= 21) { - 
windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | - WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR | - WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS; + if (parentChatActivity != null && parentChatActivity.getChatActivityEnterView() != null && parentChatActivity.isKeyboardVisible()) { + parentChatActivity.getChatActivityEnterView().closeKeyboard(); } else { - windowLayoutParams.flags = 0; + makeFocusable(); } - windowLayoutParams.softInputMode = (useSmoothKeyboard ? WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN : WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE) | WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION; - WindowManager wm = (WindowManager) parentActivity.getSystemService(Context.WINDOW_SERVICE); - wm.updateViewLayout(windowView, windowLayoutParams); - windowView.setFocusable(true); - containerView.setFocusable(true); backgroundDrawable.setAlpha(255); containerView.setAlpha(1.0f); @@ -11181,18 +11354,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat hideAfterAnimation.imageReceiver.setVisible(false, true); } if (photos != null && sendPhotoType != 3) { - if (Build.VERSION.SDK_INT >= 21) { - windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | - WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR | - WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS; - } else { - windowLayoutParams.flags = 0; + if (placeProvider == null || !placeProvider.closeKeyboard()) { + makeFocusable(); } - windowLayoutParams.softInputMode = (useSmoothKeyboard ? 
WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN : WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE) | WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION; - WindowManager wm1 = (WindowManager) parentActivity.getSystemService(Context.WINDOW_SERVICE); - wm1.updateViewLayout(windowView, windowLayoutParams); - windowView.setFocusable(true); - containerView.setFocusable(true); } if (videoPlayer != null && videoPlayer.isPlaying() && isCurrentVideo && !imagesArrLocals.isEmpty()) { seekAnimatedStickersTo(videoPlayer.getCurrentPosition()); @@ -11261,17 +11425,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat }); } else { if (photos != null && sendPhotoType != 3) { - if (Build.VERSION.SDK_INT >= 21) { - windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | - WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR | - WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS; - } else { - windowLayoutParams.flags = 0; + if (placeProvider == null || !placeProvider.closeKeyboard()) { + makeFocusable(); } - windowLayoutParams.softInputMode = (useSmoothKeyboard ? WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN : WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE) | WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION; - wm.updateViewLayout(windowView, windowLayoutParams); - windowView.setFocusable(true); - containerView.setFocusable(true); } backgroundDrawable.setAlpha(255); @@ -11292,6 +11448,25 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat return true; } + private void makeFocusable() { + if (Build.VERSION.SDK_INT >= 21) { + windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | + WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR | + WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS; + } else { + windowLayoutParams.flags = 0; + } + windowLayoutParams.softInputMode = (useSmoothKeyboard ? 
WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN : WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE) | WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION; + WindowManager wm1 = (WindowManager) parentActivity.getSystemService(Context.WINDOW_SERVICE); + try { + wm1.updateViewLayout(windowView, windowLayoutParams); + } catch (Exception e) { + FileLog.e(e); + } + windowView.setFocusable(true); + containerView.setFocusable(true); + } + public void injectVideoPlayerToMediaController() { if (videoPlayer.isPlaying()) { if (playerLooping) { @@ -12762,11 +12937,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } boolean drawProgress; if (isCurrentVideo) { - if (containerView.getKeyboardHeight() > 0) { - drawProgress = false; - } else { - drawProgress = (videoTimelineView == null || !videoTimelineView.isDragging()) && (sendPhotoType != SELECT_TYPE_AVATAR || manuallyPaused) && (videoPlayer == null || !videoPlayer.isPlaying()); - } + drawProgress = (videoTimelineView == null || !videoTimelineView.isDragging()) && (sendPhotoType != SELECT_TYPE_AVATAR || manuallyPaused) && (videoPlayer == null || !videoPlayer.isPlaying()); } else { drawProgress = true; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java index 6f47cf86d..58c2bf68f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java @@ -48,6 +48,7 @@ import android.text.TextUtils; import android.text.style.ForegroundColorSpan; import android.util.Property; import android.util.SparseArray; +import android.util.SparseIntArray; import android.util.TypedValue; import android.view.Display; import android.view.Gravity; @@ -74,6 +75,7 @@ import androidx.core.view.NestedScrollingParent3; import androidx.core.view.NestedScrollingParentHelper; import androidx.core.view.ViewCompat; import 
androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.DiffUtil; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; import androidx.viewpager.widget.PagerAdapter; @@ -132,6 +134,7 @@ import org.telegram.ui.Cells.UserCell; import org.telegram.ui.Components.AlertsCreator; import org.telegram.ui.Components.AnimatedFileDrawable; import org.telegram.ui.Components.AnimationProperties; +import org.telegram.ui.Components.AudioPlayerAlert; import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.BulletinFactory; @@ -140,6 +143,7 @@ import org.telegram.ui.Components.CombinedDrawable; import org.telegram.ui.Components.CrossfadeDrawable; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.EmptyTextProgressView; +import org.telegram.ui.Components.FragmentContextView; import org.telegram.ui.Components.IdenticonDrawable; import org.telegram.ui.Components.ImageUpdater; import org.telegram.ui.Components.LayoutHelper; @@ -176,7 +180,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. private ListAdapter listAdapter; private SearchAdapter searchAdapter; private SimpleTextView[] nameTextView = new SimpleTextView[2]; - private SimpleTextView[] onlineTextView = new SimpleTextView[3]; + private SimpleTextView[] onlineTextView = new SimpleTextView[2]; + private AudioPlayerAlert.ClippingTextViewSwitcher mediaCounterTextView; private ImageView writeButton; private AnimatorSet writeButtonAnimation; private Drawable lockIconDrawable; @@ -233,6 +238,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
private long mergeDialogId; private boolean expandPhoto; private boolean needSendMessage; + private boolean hasVoiceChatItem; private boolean scrolling; @@ -275,6 +281,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. private boolean allowPullingDown; private boolean isPulledDown; + private Paint whitePaint = new Paint(); + private boolean isBot; private TLRPC.ChatFull chatInfo; @@ -634,7 +642,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. private boolean isOverlaysVisible; private float currentAnimationValue; - private float alpha = 1.0f; + private float alpha = 0f; private float[] alphas = null; private long lastTime; private float previousSelectedProgress; @@ -786,7 +794,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (count > 1 && count <= 20) { if (overlayCountVisible == 0) { - alpha = 1.0f; + alpha = 0.0f; overlayCountVisible = 3; } else if (overlayCountVisible == 1) { alpha = 0.0f; @@ -1065,6 +1073,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. setScaleY(value); setAlpha(value); }); + boolean expanded = expandPhoto; animator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { @@ -1084,7 +1093,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. @Override public void onAnimationStart(Animator animation) { final View menuItem = getSecondaryMenuItem(); - if (menuItem != null) { + if (menuItem != null && (menuItem != searchItem || !expanded)) { menuItem.setVisibility(VISIBLE); } if (videoCallItemVisible) { @@ -1265,6 +1274,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
getNotificationCenter().addObserver(this, NotificationCenter.blockedUsersDidLoad); getNotificationCenter().addObserver(this, NotificationCenter.botInfoDidLoad); getNotificationCenter().addObserver(this, NotificationCenter.userInfoDidLoad); + userBlocked = getMessagesController().blockePeers.indexOfKey(user_id) >= 0; if (user.bot) { isBot = true; @@ -1310,6 +1320,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } getNotificationCenter().addObserver(this, NotificationCenter.chatInfoDidLoad); getNotificationCenter().addObserver(this, NotificationCenter.chatOnlineCountDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.groupCallUpdated); sortedUsers = new ArrayList<>(); updateOnlineCount(); @@ -1391,6 +1402,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } else if (chat_id != 0) { getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoDidLoad); getNotificationCenter().removeObserver(this, NotificationCenter.chatOnlineCountDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.groupCallUpdated); } if (avatarImage != null) { avatarImage.setImageDrawable(null); @@ -1449,7 +1461,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (!isBot || MessagesController.isSupportUser(user)) { if (userBlocked) { getMessagesController().unblockPeer(user_id); - AlertsCreator.showSimpleToast(ProfileActivity.this, LocaleController.getString("UserUnblocked", R.string.UserUnblocked)); + if (BulletinFactory.canShowBulletin(ProfileActivity.this)) { + BulletinFactory.createBanBulletin(ProfileActivity.this, false).show(); + } } else { if (reportSpam) { AlertsCreator.showBlockReportSpamAlert(ProfileActivity.this, user_id, user, null, currentEncryptedChat, false, null, param -> { @@ -1468,7 +1482,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
builder.setMessage(AndroidUtilities.replaceTags(LocaleController.formatString("AreYouSureBlockContact2", R.string.AreYouSureBlockContact2, ContactsController.formatName(user.first_name, user.last_name)))); builder.setPositiveButton(LocaleController.getString("BlockContact", R.string.BlockContact), (dialogInterface, i) -> { getMessagesController().blockPeer(user_id); - AlertsCreator.showSimpleToast(ProfileActivity.this, LocaleController.getString("UserBlocked", R.string.UserBlocked)); + if (BulletinFactory.canShowBulletin(ProfileActivity.this)) { + BulletinFactory.createBanBulletin(ProfileActivity.this, true).show(); + } }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); AlertDialog dialog = builder.create(); @@ -1556,7 +1572,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. getNotificationCenter().removeObserver(ProfileActivity.this, NotificationCenter.closeChats); getNotificationCenter().postNotificationName(NotificationCenter.closeChats); - getMessagesController().addUserToChat(-(int) did, user, null, 0, null, ProfileActivity.this, null); + getMessagesController().addUserToChat(-(int) did, user, 0, null, ProfileActivity.this, null); presentFragment(new ChatActivity(args1), true); removeSelfFromStack(); }); @@ -1612,9 +1628,13 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
FileLog.e(e); } } else if (id == call_item || id == video_call_item) { - TLRPC.User user = getMessagesController().getUser(user_id); - if (user != null) { - VoIPHelper.startCall(user, id == video_call_item, userInfo != null && userInfo.video_calls_available, getParentActivity(), userInfo); + if (user_id != 0) { + TLRPC.User user = getMessagesController().getUser(user_id); + if (user != null) { + VoIPHelper.startCall(user, id == video_call_item, userInfo != null && userInfo.video_calls_available, getParentActivity(), userInfo); + } + } else if (chat_id != 0) { + VoIPHelper.showGroupCallAlert(ProfileActivity.this, currentChat, false); } } else if (id == search_members) { Bundle args = new Bundle(); @@ -1821,7 +1841,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. avatarContainer.setVisibility(expanded ? INVISIBLE : VISIBLE); nameTextView[1].setVisibility(expanded ? INVISIBLE : VISIBLE); onlineTextView[1].setVisibility(expanded ? INVISIBLE : VISIBLE); - onlineTextView[2].setVisibility(expanded ? INVISIBLE : VISIBLE); + mediaCounterTextView.setVisibility(expanded ? INVISIBLE : VISIBLE); callItem.setVisibility(expanded || !callItemVisible ? GONE : INVISIBLE); videoCallItem.setVisibility(expanded || !videoCallItemVisible ? GONE : INVISIBLE); editItem.setVisibility(expanded || !editItemVisible ? GONE : INVISIBLE); @@ -1846,6 +1866,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (!searchMode) { searchItem.clearFocusOnSearchView(); } + if (searchMode) { + searchItem.getSearchField().setText(""); + } return searchExpandTransition(searchMode); } @@ -1857,6 +1880,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
searchItem.setContentDescription(LocaleController.getString("SearchInSettings", R.string.SearchInSettings)); searchItem.setSearchFieldHint(LocaleController.getString("SearchInSettings", R.string.SearchInSettings)); sharedMediaLayout.getSearchItem().setVisibility(View.GONE); + if (expandPhoto) { + searchItem.setVisibility(View.GONE); + } } videoCallItem = menu.addItem(video_call_item, R.drawable.profile_video); @@ -1875,7 +1901,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. scrollTo = layoutManager.findFirstVisibleItemPosition(); View topView = layoutManager.findViewByPosition(scrollTo); if (topView != null) { - scrollToPosition = topView.getTop(); + scrollToPosition = topView.getTop() - listView.getPaddingTop(); } else { scrollTo = -1; } @@ -1884,7 +1910,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. scrollTo = -1; } - createActionBarMenu(); + createActionBarMenu(false); listAdapter = new ListAdapter(context); searchAdapter = new SearchAdapter(context); @@ -1895,7 +1921,6 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. private boolean ignoreLayout; private boolean firstLayout = true; - private Paint whitePaint = new Paint(); private Paint grayPaint = new Paint(); @Override @@ -2019,7 +2044,13 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. listView.setBottomGlowOffset(0); } int currentPaddingTop = listView.getPaddingTop(); - View view = listView.getChildAt(0); + View view = null; + for (int i = 0; i < listView.getChildCount(); i++) { + if (listView.getChildAdapterPosition(listView.getChildAt(i)) == 0) { + view = listView.getChildAt(i); + break; + } + } int pos = RecyclerView.NO_POSITION; int top = 0; if (view != null) { @@ -2068,7 +2099,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} super.requestLayout(); } - + @Override public void onDraw(Canvas c) { whitePaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); @@ -2081,18 +2112,46 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. grayPaint.setAlpha((int) (255 * listView.getAlpha())); } - int top = listView.getTop(); + float minBottom = listView.getY(); + float minTop = 0; int count = listView.getChildCount(); - Paint paint; + boolean animationIsRunning = listView.getItemAnimator().isRunning(); + if (animationIsRunning) { + c.drawRect(listView.getX(), listView.getY(), listView.getX() + listView.getMeasuredWidth(), listView.getY() + listView.getMeasuredHeight(), grayPaint); + } for (int a = 0; a <= count; a++) { if (a < count) { View child = listView.getChildAt(a); - int bottom = listView.getTop() + child.getBottom(); - c.drawRect(listView.getX(), top, listView.getX() + listView.getMeasuredWidth(), bottom, child.getBackground() != null ? grayPaint : whitePaint); - top = bottom; + float top = listView.getY() + child.getY(); + float bottom = listView.getY() + child.getY() + child.getMeasuredHeight(); + if (top < minTop || minTop == 0) { + minTop = top; + } + if (child.getBackground() != null) { + if (!animationIsRunning) { + c.drawRect(listView.getX(), top, listView.getX() + listView.getMeasuredWidth(), bottom, grayPaint); + } + } else if (!(animationIsRunning && child.getY() == 0)) { + boolean useAlpha = listView.getChildAdapterPosition(child) == sharedMediaRow && child.getAlpha() != 1f; + if (useAlpha) { + whitePaint.setAlpha((int) (255 * listView.getAlpha() * child.getAlpha())); + } + c.drawRect(listView.getX(), top, listView.getX() + listView.getMeasuredWidth(), bottom, whitePaint); + if (useAlpha) { + whitePaint.setAlpha((int) (255 * listView.getAlpha())); + } + } + + if (bottom > minBottom) { + minBottom = bottom; + } } else { - if (top < listView.getBottom()) { - c.drawRect(listView.getX(), top, listView.getX() + listView.getMeasuredWidth(), 
listView.getBottom(), grayPaint); + if (openAnimationInProgress || searchViewTransition != null) { + c.drawRect(listView.getX(), listView.getY(), listView.getX() + listView.getMeasuredWidth(), minTop, whitePaint); + } + + if (minBottom < listView.getBottom()) { + c.drawRect(listView.getX(), minBottom, listView.getX() + listView.getMeasuredWidth(), listView.getBottom(), grayPaint); } } } @@ -2101,6 +2160,33 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. c.drawRect(0, top + extraHeight + searchTransitionOffset, getMeasuredWidth(), top + getMeasuredHeight(), whitePaint); } } + + @Override + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + if (profileTransitionInProgress && parentLayout.fragmentsStack.size() > 1) { + BaseFragment fragment = parentLayout.fragmentsStack.get(parentLayout.fragmentsStack.size() - 2); + if (fragment instanceof ChatActivity) { + ChatActivity chatActivity = (ChatActivity) fragment; + FragmentContextView fragmentContextView = chatActivity.getFragmentContextView(); + + if (fragmentContextView != null && fragmentContextView.getCurrentStyle() == 3) { + float progress = extraHeight / AndroidUtilities.dpf2(fragmentContextView.getStyleHeight()); + if (progress > 1f) { + progress = 1f; + } + canvas.save(); + canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); + fragmentContextView.setDrawOverlay(true); + fragmentContextView.setCollapseTransition(true, extraHeight, progress); + fragmentContextView.draw(canvas); + fragmentContextView.setCollapseTransition(false, extraHeight, progress); + fragmentContextView.setDrawOverlay(false); + canvas.restore(); + } + } + } + } }; fragmentView.setWillNotDraw(false); FrameLayout frameLayout = (FrameLayout) fragmentView; @@ -2170,16 +2256,79 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} return result; } + + @Override + public boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (getItemAnimator().isRunning() && child.getBackground() == null && child.getTranslationY() != 0) { + boolean useAlpha = listView.getChildAdapterPosition(child) == sharedMediaRow && child.getAlpha() != 1f; + if (useAlpha) { + whitePaint.setAlpha((int) (255 * listView.getAlpha() * child.getAlpha())); + } + canvas.drawRect(listView.getX(), child.getY(), listView.getX() + listView.getMeasuredWidth(), child.getY() + child.getHeight(), whitePaint); + if (useAlpha) { + whitePaint.setAlpha((int) (255 * listView.getAlpha())); + } + } + return super.drawChild(canvas, child, drawingTime); + + } }; listView.setVerticalScrollBarEnabled(false); - if (imageUpdater == null) { - listView.setItemAnimator(null); - listView.setLayoutAnimation(null); - } else { - DefaultItemAnimator itemAnimator = (DefaultItemAnimator) listView.getItemAnimator(); - itemAnimator.setSupportsChangeAnimations(false); - itemAnimator.setDelayAnimations(false); - } + DefaultItemAnimator defaultItemAnimator = new DefaultItemAnimator() { + + int animationIndex = -1; + int account; + + @Override + protected void onAllAnimationsDone() { + super.onAllAnimationsDone(); + NotificationCenter.getInstance(account = currentAccount).onAnimationFinish(animationIndex); + } + + @Override + public void runPendingAnimations() { + boolean removalsPending = !mPendingRemovals.isEmpty(); + boolean movesPending = !mPendingMoves.isEmpty(); + boolean changesPending = !mPendingChanges.isEmpty(); + boolean additionsPending = !mPendingAdditions.isEmpty(); + if (removalsPending || movesPending || additionsPending || changesPending) { + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, 1f); + valueAnimator.addUpdateListener(valueAnimator1 -> listView.invalidate()); + valueAnimator.setDuration(getMoveDuration()); + valueAnimator.start(); + animationIndex = NotificationCenter.getInstance(account = 
currentAccount).setAnimationInProgress(animationIndex, null); + } + super.runPendingAnimations(); + } + + @Override + protected long getAddAnimationDelay(long removeDuration, long moveDuration, long changeDuration) { + return 0; + } + + @Override + protected long getMoveAnimationDelay() { + return 0; + } + + @Override + public long getMoveDuration() { + return 220; + } + + @Override + public long getRemoveDuration() { + return 220; + } + + @Override + public long getAddDuration() { + return 220; + } + }; + listView.setItemAnimator(defaultItemAnimator); + defaultItemAnimator.setSupportsChangeAnimations(false); + defaultItemAnimator.setDelayAnimations(false); listView.setClipToPadding(false); listView.setHideIfEmpty(false); @@ -2284,7 +2433,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. AlertsCreator.showCustomNotificationsDialog(ProfileActivity.this, did, -1, null, currentAccount, param -> listAdapter.notifyItemChanged(notificationsRow)); } else if (position == unblockRow) { getMessagesController().unblockPeer(user_id); - AlertsCreator.showSimpleToast(ProfileActivity.this, LocaleController.getString("UserUnblocked", R.string.UserUnblocked)); + if (BulletinFactory.canShowBulletin(ProfileActivity.this)) { + BulletinFactory.createBanBulletin(ProfileActivity.this, false).show(); + } } else if (position == sendMessageRow) { onWriteButtonClick(); } else if (position == reportRow) { @@ -2321,7 +2472,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
presentFragment(fragment); } } else if (position == joinRow) { - getMessagesController().addUserToChat(currentChat.id, getUserConfig().getCurrentUser(), null, 0, null, ProfileActivity.this, null); + getMessagesController().addUserToChat(currentChat.id, getUserConfig().getCurrentUser(), 0, null, ProfileActivity.this, null); NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.closeSearchByActiveAction); } else if (position == subscribersRow) { Bundle args = new Bundle(); @@ -2424,7 +2575,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. LocaleController.getString("DebugMenuReadAllDialogs", R.string.DebugMenuReadAllDialogs), SharedConfig.pauseMusicOnRecord ? LocaleController.getString("DebugMenuDisablePauseMusic", R.string.DebugMenuDisablePauseMusic) : LocaleController.getString("DebugMenuEnablePauseMusic", R.string.DebugMenuEnablePauseMusic), BuildVars.DEBUG_VERSION && !AndroidUtilities.isTablet() && Build.VERSION.SDK_INT >= 23 ? (SharedConfig.smoothKeyboard ? LocaleController.getString("DebugMenuDisableSmoothKeyboard", R.string.DebugMenuDisableSmoothKeyboard) : LocaleController.getString("DebugMenuEnableSmoothKeyboard", R.string.DebugMenuEnableSmoothKeyboard)) : null, - Build.VERSION.SDK_INT >= 29 ? (SharedConfig.chatBubbles ? "Disable chat bubbles" : "Enable chat bubbles") : null + BuildVars.DEBUG_PRIVATE_VERSION ? (SharedConfig.disableVoiceAudioEffects ? "Enable voip audio effects" : "Disable voip audio effects") : null }; builder.setItems(items, (dialog, which) -> { if (which == 0) { @@ -2469,7 +2620,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
getParentActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); } } else if (which == 13) { - SharedConfig.toggleChatBubbles(); + SharedConfig.toggleDisableVoiceAudioEffects(); } }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); @@ -2655,7 +2806,13 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (!AndroidUtilities.isTablet() && !isInLandscapeMode && avatarImage.getImageReceiver().hasNotThumb()) { openingAvatar = true; allowPullingDown = true; - View child = listView.getChildAt(0); + View child = null; + for (int i = 0; i < listView.getChildCount(); i++) { + if (listView.getChildAdapterPosition(listView.getChildAt(i)) == 0) { + child = listView.getChildAt(i); + break; + } + } if (child != null) { RecyclerView.ViewHolder holder = listView.findContainingViewHolder(child); if (holder != null) { @@ -2739,11 +2896,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } for (int a = 0; a < onlineTextView.length; a++) { onlineTextView[a] = new SimpleTextView(context); - if (a == 2) { - onlineTextView[a].setTextColor(Theme.getColor(Theme.key_player_actionBarSubtitle)); - } else { - onlineTextView[a].setTextColor(Theme.getColor(Theme.key_avatar_subtitleInProfileBlue)); - } + onlineTextView[a].setTextColor(Theme.getColor(Theme.key_avatar_subtitleInProfileBlue)); onlineTextView[a].setTextSize(14); onlineTextView[a].setGravity(Gravity.LEFT); onlineTextView[a].setAlpha(a == 0 || a == 2 ? 0.0f : 1.0f); @@ -2752,6 +2905,18 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } frameLayout.addView(onlineTextView[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 118, 0, a == 0 ? 
48 : 8, 0)); } + mediaCounterTextView = new AudioPlayerAlert.ClippingTextViewSwitcher(context) { + @Override + protected TextView createTextView() { + TextView textView = new TextView(context); + textView.setTextColor(Theme.getColor(Theme.key_player_actionBarSubtitle)); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setGravity(Gravity.LEFT); + return textView; + } + }; + mediaCounterTextView.setAlpha(0.0f); + frameLayout.addView(mediaCounterTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 118, 0, 8, 0)); updateProfileData(); writeButton = new ImageView(context); @@ -2802,7 +2967,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. needLayout(false); if (scrollTo != -1) { - layoutManager.scrollToPositionWithOffset(scrollTo, scrollToPosition); + //layoutManager.scrollToPositionWithOffset(scrollTo, scrollToPosition); if (writeButtonTag != null) { writeButton.setTag(0); @@ -2890,8 +3055,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. nameTextView[1].setTranslationY(nameTextViewY); onlineTextView[1].setTranslationX(onlineTextViewX); onlineTextView[1].setTranslationY(onlineTextViewY); - onlineTextView[2].setTranslationX(onlineTextViewX); - onlineTextView[2].setTranslationY(onlineTextViewY); + mediaCounterTextView.setTranslationX(onlineTextViewX); + mediaCounterTextView.setTranslationY(onlineTextViewY); final Object onlineTextViewTag = onlineTextView[1].getTag(); int statusColor; if (onlineTextViewTag instanceof String) { @@ -3187,7 +3352,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
channelParticipant1.channelParticipant.banned_rights = rightsBanned; channelParticipant1.channelParticipant.admin_rights = rightsAdmin; channelParticipant1.channelParticipant.rank = rank; - } else if (participant instanceof TLRPC.ChatParticipant) { + } else if (participant != null) { TLRPC.ChatParticipant newParticipant; if (rights == 1) { newParticipant = new TLRPC.TL_chatParticipantAdmin(); @@ -3247,7 +3412,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } private boolean processOnClickOrPress(final int position) { - if (position == usernameRow) { + if (position == usernameRow || position == setUsernameRow) { final String username; if (user_id != 0) { final TLRPC.User user = getMessagesController().getUser(user_id); @@ -3279,7 +3444,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. }); showDialog(builder.create()); return true; - } else if (position == phoneRow) { + } else if (position == phoneRow || position == numberRow) { final TLRPC.User user = getMessagesController().getUser(user_id); if (user == null || user.phone == null || user.phone.length() == 0 || getParentActivity() == null) { return false; @@ -3288,16 +3453,18 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); ArrayList items = new ArrayList<>(); final ArrayList actions = new ArrayList<>(); - if (userInfo != null && userInfo.phone_calls_available) { - items.add(LocaleController.getString("CallViaTelegram", R.string.CallViaTelegram)); - actions.add(2); - if (Build.VERSION.SDK_INT >= 18 && userInfo.video_calls_available) { - items.add(LocaleController.getString("VideoCallViaTelegram", R.string.VideoCallViaTelegram)); - actions.add(3); + if (position == phoneRow) { + if (userInfo != null && userInfo.phone_calls_available) { + items.add(LocaleController.getString("CallViaTelegram", R.string.CallViaTelegram)); + actions.add(2); + if (Build.VERSION.SDK_INT >= 18 && userInfo.video_calls_available) { + items.add(LocaleController.getString("VideoCallViaTelegram", R.string.VideoCallViaTelegram)); + actions.add(3); + } } + items.add(LocaleController.getString("Call", R.string.Call)); + actions.add(0); } - items.add(LocaleController.getString("Call", R.string.Call)); - actions.add(0); items.add(LocaleController.getString("Copy", R.string.Copy)); actions.add(1); builder.setItems(items.toArray(new CharSequence[0]), (dialogInterface, i) -> { @@ -3325,7 +3492,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. }); showDialog(builder.create()); return true; - } else if (position == channelInfoRow || position == userInfoRow || position == locationRow) { + } else if (position == channelInfoRow || position == userInfoRow || position == locationRow || position == bioRow) { + if (position == bioRow && (userInfo == null || TextUtils.isEmpty(userInfo.about))) { + return false; + } AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); builder.setItems(new CharSequence[]{LocaleController.getString("Copy", R.string.Copy)}, (dialogInterface, i) -> { try { @@ -3514,7 +3684,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
animators.add(ObjectAnimator.ofFloat(mediaSearchItem, View.TRANSLATION_Y, visible ? 0.0f : AndroidUtilities.dp(10))); animators.add(ObjectAnimator.ofFloat(actionBar, ACTIONBAR_HEADER_PROGRESS, visible ? 1.0f : 0.0f)); animators.add(ObjectAnimator.ofFloat(onlineTextView[1], View.ALPHA, visible ? 0.0f : 1.0f)); - animators.add(ObjectAnimator.ofFloat(onlineTextView[2], View.ALPHA, visible ? 1.0f : 0.0f)); + animators.add(ObjectAnimator.ofFloat(mediaCounterTextView, View.ALPHA, visible ? 1.0f : 0.0f)); if (visible) { animators.add(ObjectAnimator.ofFloat(this, HEADER_SHADOW, 0.0f)); } @@ -3581,7 +3751,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. fragment.setDelegate((users, fwdCount) -> { for (int a = 0, N = users.size(); a < N; a++) { TLRPC.User user = users.get(a); - getMessagesController().addUserToChat(chat_id, user, chatInfo, fwdCount, null, ProfileActivity.this, null); + getMessagesController().addUserToChat(chat_id, user, fwdCount, null, ProfileActivity.this, null); } }); presentFragment(fragment); @@ -3599,10 +3769,16 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. return; } - View child = listView.getChildAt(0); - RecyclerListView.Holder holder = (RecyclerListView.Holder) listView.findContainingViewHolder(child); - int top = child.getTop(); int newOffset = 0; + View child = null; + for (int i = 0; i < listView.getChildCount(); i++) { + if (listView.getChildAdapterPosition(listView.getChildAt(i)) == 0) { + child = listView.getChildAt(i); + break; + } + } + RecyclerListView.Holder holder = child == null ? null : (RecyclerListView.Holder) listView.findContainingViewHolder(child); + int top = child == null ? 0 : child.getTop(); int adapterPosition = holder != null ? holder.getAdapterPosition() : RecyclerView.NO_POSITION; if (top >= 0 && adapterPosition == 0) { newOffset = top; @@ -3628,27 +3804,27 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} private void updateSelectedMediaTabText() { - if (sharedMediaLayout == null || onlineTextView[2] == null) { + if (sharedMediaLayout == null || mediaCounterTextView == null) { return; } - int id = sharedMediaLayout.getSelectedTab(); + int id = sharedMediaLayout.getClosestTab(); int[] mediaCount = sharedMediaPreloader.getLastMediaCount(); if (id == 0) { - onlineTextView[2].setText(LocaleController.formatPluralString("Media", mediaCount[MediaDataController.MEDIA_PHOTOVIDEO])); + mediaCounterTextView.setText(LocaleController.formatPluralString("Media", mediaCount[MediaDataController.MEDIA_PHOTOVIDEO])); } else if (id == 1) { - onlineTextView[2].setText(LocaleController.formatPluralString("Files", mediaCount[MediaDataController.MEDIA_FILE])); + mediaCounterTextView.setText(LocaleController.formatPluralString("Files", mediaCount[MediaDataController.MEDIA_FILE])); } else if (id == 2) { - onlineTextView[2].setText(LocaleController.formatPluralString("Voice", mediaCount[MediaDataController.MEDIA_AUDIO])); + mediaCounterTextView.setText(LocaleController.formatPluralString("Voice", mediaCount[MediaDataController.MEDIA_AUDIO])); } else if (id == 3) { - onlineTextView[2].setText(LocaleController.formatPluralString("Links", mediaCount[MediaDataController.MEDIA_URL])); + mediaCounterTextView.setText(LocaleController.formatPluralString("Links", mediaCount[MediaDataController.MEDIA_URL])); } else if (id == 4) { - onlineTextView[2].setText(LocaleController.formatPluralString("MusicFiles", mediaCount[MediaDataController.MEDIA_MUSIC])); + mediaCounterTextView.setText(LocaleController.formatPluralString("MusicFiles", mediaCount[MediaDataController.MEDIA_MUSIC])); } else if (id == 5) { - onlineTextView[2].setText(LocaleController.formatPluralString("GIFs", mediaCount[MediaDataController.MEDIA_GIF])); + mediaCounterTextView.setText(LocaleController.formatPluralString("GIFs", mediaCount[MediaDataController.MEDIA_GIF])); } else if (id == 6) { - 
onlineTextView[2].setText(LocaleController.formatPluralString("CommonGroups", userInfo.common_chats_count)); + mediaCounterTextView.setText(LocaleController.formatPluralString("CommonGroups", userInfo.common_chats_count)); } else if (id == 7) { - onlineTextView[2].setText(onlineTextView[1].getText()); + mediaCounterTextView.setText(onlineTextView[1].getText()); } } @@ -3792,8 +3968,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. nameTextView[1].setTranslationY(newTop + h - AndroidUtilities.dpf2(38f) - nameTextView[1].getBottom() + additionalTranslationY); onlineTextView[1].setTranslationX(AndroidUtilities.dpf2(16f) - onlineTextView[1].getLeft()); onlineTextView[1].setTranslationY(newTop + h - AndroidUtilities.dpf2(18f) - onlineTextView[1].getBottom() + additionalTranslationY); - onlineTextView[2].setTranslationX(onlineTextView[1].getTranslationX()); - onlineTextView[2].setTranslationY(onlineTextView[1].getTranslationY()); + mediaCounterTextView.setTranslationX(onlineTextView[1].getTranslationX()); + mediaCounterTextView.setTranslationY(onlineTextView[1].getTranslationY()); } } else { if (isPulledDown) { @@ -3849,8 +4025,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. nameTextView[1].setTranslationY(nameY); onlineTextView[1].setTranslationX(onlineX); onlineTextView[1].setTranslationY(onlineY); - onlineTextView[2].setTranslationX(onlineX); - onlineTextView[2].setTranslationY(onlineY); + mediaCounterTextView.setTranslationX(onlineX); + mediaCounterTextView.setTranslationY(onlineY); } } } @@ -3920,8 +4096,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
onlineTextView[a].setTranslationX(onlineX); onlineTextView[a].setTranslationY(onlineY); if (a == 1) { - onlineTextView[2].setTranslationX(onlineX); - onlineTextView[2].setTranslationY(onlineY); + mediaCounterTextView.setTranslationX(onlineX); + mediaCounterTextView.setTranslationY(onlineY); } } nameTextView[a].setScaleX(nameScale); @@ -4003,7 +4179,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. width2 = onlineTextView[1].getPaint().measureText(onlineTextView[1].getText().toString()); layoutParams = (FrameLayout.LayoutParams) onlineTextView[1].getLayoutParams(); - FrameLayout.LayoutParams layoutParams2 = (FrameLayout.LayoutParams) onlineTextView[2].getLayoutParams(); + FrameLayout.LayoutParams layoutParams2 = (FrameLayout.LayoutParams) mediaCounterTextView.getLayoutParams(); prevWidth = layoutParams.width; layoutParams2.rightMargin = layoutParams.rightMargin = (int) Math.ceil(onlineTextView[1].getTranslationX() + AndroidUtilities.dp(8) + AndroidUtilities.dp(40) * (1.0f - diff)); if (width < width2) { @@ -4013,7 +4189,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } if (prevWidth != layoutParams.width) { onlineTextView[1].requestLayout(); - onlineTextView[2].requestLayout(); + mediaCounterTextView.requestLayout(); } } @@ -4081,10 +4257,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. updateProfileData(); } if ((mask & MessagesController.UPDATE_MASK_CHAT) != 0) { - updateRowsIds(); - if (listAdapter != null) { - listAdapter.notifyDataSetChanged(); - } + updateListAnimated(); } if (infoChanged) { if (listView != null) { @@ -4107,7 +4280,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
updateOnlineCount(); updateProfileData(); } else if (id == NotificationCenter.contactsDidLoad) { - createActionBarMenu(); + createActionBarMenu(true); } else if (id == NotificationCenter.encryptedChatCreated) { if (creatingChat) { AndroidUtilities.runOnUIThread(() -> { @@ -4123,19 +4296,24 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. TLRPC.EncryptedChat chat = (TLRPC.EncryptedChat) args[0]; if (currentEncryptedChat != null && chat.id == currentEncryptedChat.id) { currentEncryptedChat = chat; - updateRowsIds(); - if (listAdapter != null) { - listAdapter.notifyDataSetChanged(); - } + updateListAnimated(); } } else if (id == NotificationCenter.blockedUsersDidLoad) { boolean oldValue = userBlocked; userBlocked = getMessagesController().blockePeers.indexOfKey(user_id) >= 0; if (oldValue != userBlocked) { - createActionBarMenu(); - updateRowsIds(); - if (listAdapter != null) { - listAdapter.notifyDataSetChanged(); + createActionBarMenu(true); + updateListAnimated(); + } + } else if (id == NotificationCenter.groupCallUpdated) { + Integer chatId = (Integer) args[0]; + if (currentChat != null && chatId == currentChat.id && ChatObject.canManageCalls(currentChat)) { + TLRPC.ChatFull chatFull = MessagesController.getInstance(currentAccount).getChatFull(chatId); + if (chatFull != null) { + chatInfo = chatFull; + } + if (chatInfo != null && (chatInfo.call == null && !hasVoiceChatItem || chatInfo.call != null && hasVoiceChatItem)) { + createActionBarMenu(false); } } } else if (id == NotificationCenter.chatInfoDidLoad) { @@ -4155,17 +4333,14 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} fetchUsersFromChannelInfo(); updateOnlineCount(); - updateRowsIds(); if (avatarsViewPager != null) { avatarsViewPager.setChatInfo(chatInfo); } - if (listAdapter != null) { - listAdapter.notifyDataSetChanged(); - } + updateListAnimated(); TLRPC.Chat newChat = getMessagesController().getChat(chat_id); if (newChat != null) { currentChat = newChat; - createActionBarMenu(); + createActionBarMenu(true); } if (currentChat.megagroup && (loadChannelParticipants || !byChannelUsers)) { getChannelParticipants(true); @@ -4177,10 +4352,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. TLRPC.BotInfo info = (TLRPC.BotInfo) args[0]; if (info.user_id == user_id) { botInfo = info; - updateRowsIds(); - if (listAdapter != null) { - listAdapter.notifyDataSetChanged(); - } + updateListAnimated(); } } else if (id == NotificationCenter.userInfoDidLoad) { int uid = (Integer) args[0]; @@ -4192,20 +4364,17 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } } else { if (!openAnimationInProgress && !callItemVisible) { - createActionBarMenu(); + createActionBarMenu(true); } else { recreateMenuAfterAnimation = true; } - updateRowsIds(); - if (listAdapter != null) { - try { - listAdapter.notifyDataSetChanged(); - } catch (Exception e) { - FileLog.e(e); - } - } + updateListAnimated(); sharedMediaLayout.setCommonGroupsCount(userInfo.common_chats_count); updateSelectedMediaTabText(); + if (sharedMediaPreloader == null || sharedMediaPreloader.isMediaWasLoaded()) { + resumeDelayedFragmentAnimation(); + needLayout(true); + } } } } else if (id == NotificationCenter.didReceiveNewMessages) { @@ -4234,6 +4403,14 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} } + @Override + public boolean needDelayOpenAnimation() { + if (playProfileAnimation == 0) { + return true; + } + return false; + } + @Override public void mediaCountUpdated() { if (sharedMediaLayout != null && sharedMediaPreloader != null) { @@ -4241,6 +4418,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } updateSharedMediaRows(); updateSelectedMediaTabText(); + + if (userInfo != null) { + resumeDelayedFragmentAnimation(); + } } @Override @@ -4319,15 +4500,14 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (listAdapter == null) { return; } - int sharedMediaRowPrev = sharedMediaRow; - updateRowsIds(); - if (sharedMediaRowPrev != sharedMediaRow) { - listAdapter.notifyDataSetChanged(); - } + updateListAnimated(); } + public boolean isFragmentOpened; + @Override protected void onTransitionAnimationStart(boolean isOpen, boolean backward) { + isFragmentOpened = isOpen; if ((!isOpen && backward || isOpen && !backward) && playProfileAnimation != 0 && allowProfileAnimation && !isPulledDown) { openAnimationInProgress = true; } @@ -4347,14 +4527,13 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (!backward) { if (playProfileAnimation != 0 && allowProfileAnimation) { openAnimationInProgress = false; + checkListViewScroll(); if (recreateMenuAfterAnimation) { - createActionBarMenu(); + createActionBarMenu(true); } } if (!fragmentOpened) { fragmentOpened = true; -// firstLayout = true; -// lastMeasuredContentHeight = -1; fragmentView.requestLayout(); } } @@ -4458,6 +4637,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
fragmentView.invalidate(); } + boolean profileTransitionInProgress; + @Override protected AnimatorSet onCustomTransitionAnimation(final boolean isOpen, final Runnable callback) { if (playProfileAnimation != 0 && allowProfileAnimation && !isPulledDown) { @@ -4565,6 +4746,11 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } animatorSet.playTogether(animators); } + profileTransitionInProgress = true; + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, 1f); + valueAnimator.addUpdateListener(valueAnimator1 -> fragmentView.invalidate()); + animatorSet.playTogether(valueAnimator); + animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { @@ -4582,6 +4768,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. avatarsViewPager.resetCurrentItem(); avatarsViewPager.setVisibility(View.VISIBLE); } + profileTransitionInProgress = false; + fragmentView.invalidate(); } }); animatorSet.setInterpolator(playProfileAnimation == 2 ? CubicBezierInterpolator.DEFAULT : new DecelerateInterpolator()); @@ -4703,7 +4891,11 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. private void kickUser(int uid) { if (uid != 0) { - getMessagesController().deleteUserFromChat(chat_id, getMessagesController().getUser(uid), chatInfo); + TLRPC.User user = getMessagesController().getUser(uid); + getMessagesController().deleteUserFromChat(chat_id, user, chatInfo); + if (currentChat != null && user != null && BulletinFactory.canShowBulletin(this)) { + BulletinFactory.createRemoveFromChatBulletin(this, user.first_name, currentChat.title).show(); + } } else { getNotificationCenter().removeObserver(this, NotificationCenter.closeChats); if (AndroidUtilities.isTablet()) { @@ -5053,10 +5245,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} if (thumbLocation != null && setAvatarRow != -1 || thumbLocation == null && setAvatarRow == -1) { int prevValue = setAvatarRow; - updateRowsIds(); - if (prevValue != setAvatarRow) { - listAdapter.notifyDataSetChanged(); - } + updateListAnimated(); needLayout(true); } getFileLoader().loadFile(imageLocation, user, null, 0, 1); @@ -5273,7 +5462,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } } - private void createActionBarMenu() { + private void createActionBarMenu(boolean animated) { if (actionBar == null || otherItem == null) { return; } @@ -5336,42 +5525,51 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. otherItem.addSubItem(add_shortcut, R.drawable.msg_home, LocaleController.getString("AddShortcut", R.string.AddShortcut)); } } else if (chat_id != 0) { - if (chat_id > 0) { - TLRPC.Chat chat = getMessagesController().getChat(chat_id); - if (ChatObject.isChannel(chat)) { - if (ChatObject.hasAdminRights(chat) || chat.megagroup && ChatObject.canChangeChatInfo(chat)) { - editItemVisible = true; + TLRPC.Chat chat = getMessagesController().getChat(chat_id); + hasVoiceChatItem = false; + if (ChatObject.isChannel(chat)) { + if (ChatObject.hasAdminRights(chat) || chat.megagroup && ChatObject.canChangeChatInfo(chat)) { + editItemVisible = true; + } + if (chatInfo != null) { + if (chat.megagroup && ChatObject.canManageCalls(chat) && chatInfo.call == null) { + otherItem.addSubItem(call_item, R.drawable.msg_voicechat, LocaleController.getString("StartVoipChat", R.string.StartVoipChat)); + hasVoiceChatItem = true; } - if (chatInfo != null && chatInfo.can_view_stats) { + if (chatInfo.can_view_stats) { otherItem.addSubItem(statistics, R.drawable.msg_stats, LocaleController.getString("Statistics", R.string.Statistics)); } - if (chat.megagroup) { - canSearchMembers = true; - otherItem.addSubItem(search_members, R.drawable.msg_search, LocaleController.getString("SearchMembers", R.string.SearchMembers)); - 
if (!chat.creator && !chat.left && !chat.kicked) { - otherItem.addSubItem(leave_group, R.drawable.msg_leave, LocaleController.getString("LeaveMegaMenu", R.string.LeaveMegaMenu)); - } - } else { - if (!TextUtils.isEmpty(chat.username)) { - otherItem.addSubItem(share, R.drawable.msg_share, LocaleController.getString("BotShare", R.string.BotShare)); - } - if (chatInfo != null && chatInfo.linked_chat_id != 0) { - otherItem.addSubItem(view_discussion, R.drawable.msg_discussion, LocaleController.getString("ViewDiscussion", R.string.ViewDiscussion)); - } - if (!currentChat.creator && !currentChat.left && !currentChat.kicked) { - otherItem.addSubItem(leave_group, R.drawable.msg_leave, LocaleController.getString("LeaveChannelMenu", R.string.LeaveChannelMenu)); - } + } + if (chat.megagroup) { + canSearchMembers = true; + otherItem.addSubItem(search_members, R.drawable.msg_search, LocaleController.getString("SearchMembers", R.string.SearchMembers)); + if (!chat.creator && !chat.left && !chat.kicked) { + otherItem.addSubItem(leave_group, R.drawable.msg_leave, LocaleController.getString("LeaveMegaMenu", R.string.LeaveMegaMenu)); } } else { - if (ChatObject.canChangeChatInfo(chat)) { - editItemVisible = true; + if (!TextUtils.isEmpty(chat.username)) { + otherItem.addSubItem(share, R.drawable.msg_share, LocaleController.getString("BotShare", R.string.BotShare)); } - if (!ChatObject.isKickedFromChat(chat) && !ChatObject.isLeftFromChat(chat)) { - canSearchMembers = true; - otherItem.addSubItem(search_members, R.drawable.msg_search, LocaleController.getString("SearchMembers", R.string.SearchMembers)); + if (chatInfo != null && chatInfo.linked_chat_id != 0) { + otherItem.addSubItem(view_discussion, R.drawable.msg_discussion, LocaleController.getString("ViewDiscussion", R.string.ViewDiscussion)); + } + if (!currentChat.creator && !currentChat.left && !currentChat.kicked) { + otherItem.addSubItem(leave_group, R.drawable.msg_leave, LocaleController.getString("LeaveChannelMenu", 
R.string.LeaveChannelMenu)); } - otherItem.addSubItem(leave_group, R.drawable.msg_leave, LocaleController.getString("DeleteAndExit", R.string.DeleteAndExit)); } + } else { + if (chatInfo != null && ChatObject.canManageCalls(chat) && chatInfo.call == null) { + otherItem.addSubItem(call_item, R.drawable.msg_voicechat, LocaleController.getString("StartVoipChat", R.string.StartVoipChat)); + hasVoiceChatItem = true; + } + if (ChatObject.canChangeChatInfo(chat)) { + editItemVisible = true; + } + if (!ChatObject.isKickedFromChat(chat) && !ChatObject.isLeftFromChat(chat)) { + canSearchMembers = true; + otherItem.addSubItem(search_members, R.drawable.msg_search, LocaleController.getString("SearchMembers", R.string.SearchMembers)); + } + otherItem.addSubItem(leave_group, R.drawable.msg_leave, LocaleController.getString("DeleteAndExit", R.string.DeleteAndExit)); } otherItem.addSubItem(add_shortcut, R.drawable.msg_home, LocaleController.getString("AddShortcut", R.string.AddShortcut)); } @@ -5396,6 +5594,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (callItemVisible) { if (callItem.getVisibility() != View.VISIBLE) { callItem.setVisibility(View.VISIBLE); + if (animated) { + callItem.setAlpha(0); + callItem.animate().alpha(1f).setDuration(150).start(); + } } } else { if (callItem.getVisibility() != View.GONE) { @@ -5405,6 +5607,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (videoCallItemVisible) { if (videoCallItem.getVisibility() != View.VISIBLE) { videoCallItem.setVisibility(View.VISIBLE); + if (animated) { + videoCallItem.setAlpha(0); + videoCallItem.animate().alpha(1f).setDuration(150).start(); + } } } else { if (videoCallItem.getVisibility() != View.GONE) { @@ -5414,6 +5620,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
if (editItemVisible) { if (editItem.getVisibility() != View.VISIBLE) { editItem.setVisibility(View.VISIBLE); + if (animated) { + editItem.setAlpha(0); + editItem.animate().alpha(1f).setDuration(150).start(); + } } } else { if (editItem.getVisibility() != View.GONE) { @@ -5424,12 +5634,18 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (avatarsViewPagerIndicatorView.isIndicatorFullyVisible()) { if (editItemVisible) { editItem.setVisibility(View.GONE); + editItem.animate().cancel(); + editItem.setAlpha(1f); } if (callItemVisible) { callItem.setVisibility(View.GONE); + callItem.animate().cancel(); + callItem.setAlpha(1f); } if (videoCallItemVisible) { videoCallItem.setVisibility(View.GONE); + videoCallItem.animate().cancel(); + videoCallItem.setAlpha(1f); } } } @@ -5494,6 +5710,22 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } else { VoIPHelper.permissionDenied(getParentActivity(), null, requestCode); } + } else if (requestCode == 103) { + if (currentChat == null) { + return; + } + boolean allGranted = true; + for (int a = 0; a < grantResults.length; a++) { + if (grantResults[a] != PackageManager.PERMISSION_GRANTED) { + allGranted = false; + break; + } + } + if (grantResults.length > 0 && allGranted) { + VoIPHelper.startCall(currentChat, true, getParentActivity()); + } else { + VoIPHelper.permissionDenied(getParentActivity(), null, requestCode); + } } } @@ -5593,6 +5825,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } searchItem.setAlpha(progressHalf); topView.invalidate(); + fragmentView.invalidate(); }); valueAnimator.addListener(new AnimatorListenerAdapter() { @@ -5604,6 +5837,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
searchItem.requestFocusOnSearchView(); } needLayout(true); + searchViewTransition = null; + fragmentView.invalidate(); } }); @@ -5676,68 +5911,66 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. req.video_start_ts = videoStartTimestamp; req.flags |= 4; } - getConnectionsManager().sendRequest(req, (response, error) -> { - AndroidUtilities.runOnUIThread(() -> { - if (error == null) { - TLRPC.User user = getMessagesController().getUser(getUserConfig().getClientUserId()); + getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (error == null) { + TLRPC.User user = getMessagesController().getUser(getUserConfig().getClientUserId()); + if (user == null) { + user = getUserConfig().getCurrentUser(); if (user == null) { - user = getUserConfig().getCurrentUser(); - if (user == null) { - return; - } - getMessagesController().putUser(user, false); - } else { - getUserConfig().setCurrentUser(user); + return; } - TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; - ArrayList sizes = photos_photo.photo.sizes; - TLRPC.PhotoSize small = FileLoader.getClosestPhotoSizeWithSize(sizes, 150); - TLRPC.PhotoSize big = FileLoader.getClosestPhotoSizeWithSize(sizes, 800); - TLRPC.VideoSize videoSize = photos_photo.photo.video_sizes.isEmpty() ? 
null : photos_photo.photo.video_sizes.get(0); - user.photo = new TLRPC.TL_userProfilePhoto(); - user.photo.photo_id = photos_photo.photo.id; - if (small != null) { - user.photo.photo_small = small.location; - } - if (big != null) { - user.photo.photo_big = big.location; - } - - if (small != null && avatar != null) { - File destFile = FileLoader.getPathToAttach(small, true); - File src = FileLoader.getPathToAttach(avatar, true); - src.renameTo(destFile); - String oldKey = avatar.volume_id + "_" + avatar.local_id + "@50_50"; - String newKey = small.location.volume_id + "_" + small.location.local_id + "@50_50"; - ImageLoader.getInstance().replaceImageInCache(oldKey, newKey, ImageLocation.getForUser(user, false), true); - } - if (big != null && avatarBig != null) { - File destFile = FileLoader.getPathToAttach(big, true); - File src = FileLoader.getPathToAttach(avatarBig, true); - src.renameTo(destFile); - } - if (videoSize != null && videoPath != null) { - File destFile = FileLoader.getPathToAttach(videoSize, "mp4", true); - File src = new File(videoPath); - src.renameTo(destFile); - } - - getMessagesStorage().clearUserPhotos(user.id); - ArrayList users = new ArrayList<>(); - users.add(user); - getMessagesStorage().putUsersAndChats(users, null, false, true); + getMessagesController().putUser(user, false); + } else { + getUserConfig().setCurrentUser(user); + } + TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; + ArrayList sizes = photos_photo.photo.sizes; + TLRPC.PhotoSize small = FileLoader.getClosestPhotoSizeWithSize(sizes, 150); + TLRPC.PhotoSize big = FileLoader.getClosestPhotoSizeWithSize(sizes, 800); + TLRPC.VideoSize videoSize = photos_photo.photo.video_sizes.isEmpty() ? 
null : photos_photo.photo.video_sizes.get(0); + user.photo = new TLRPC.TL_userProfilePhoto(); + user.photo.photo_id = photos_photo.photo.id; + if (small != null) { + user.photo.photo_small = small.location; + } + if (big != null) { + user.photo.photo_big = big.location; } - allowPullingDown = !AndroidUtilities.isTablet() && !isInLandscapeMode && avatarImage.getImageReceiver().hasNotThumb(); - avatar = null; - avatarBig = null; - updateProfileData(); - showAvatarProgress(false, true); - getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, MessagesController.UPDATE_MASK_ALL); - getNotificationCenter().postNotificationName(NotificationCenter.mainUserInfoChanged); - getUserConfig().saveConfig(true); - }); - }); + if (small != null && avatar != null) { + File destFile = FileLoader.getPathToAttach(small, true); + File src = FileLoader.getPathToAttach(avatar, true); + src.renameTo(destFile); + String oldKey = avatar.volume_id + "_" + avatar.local_id + "@50_50"; + String newKey = small.location.volume_id + "_" + small.location.local_id + "@50_50"; + ImageLoader.getInstance().replaceImageInCache(oldKey, newKey, ImageLocation.getForUser(user, false), true); + } + if (big != null && avatarBig != null) { + File destFile = FileLoader.getPathToAttach(big, true); + File src = FileLoader.getPathToAttach(avatarBig, true); + src.renameTo(destFile); + } + if (videoSize != null && videoPath != null) { + File destFile = FileLoader.getPathToAttach(videoSize, "mp4", true); + File src = new File(videoPath); + src.renameTo(destFile); + } + + getMessagesStorage().clearUserPhotos(user.id); + ArrayList users = new ArrayList<>(); + users.add(user); + getMessagesStorage().putUsersAndChats(users, null, false, true); + } + + allowPullingDown = !AndroidUtilities.isTablet() && !isInLandscapeMode && avatarImage.getImageReceiver().hasNotThumb(); + avatar = null; + avatarBig = null; + updateProfileData(); + showAvatarProgress(false, true); + 
getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, MessagesController.UPDATE_MASK_ALL); + getNotificationCenter().postNotificationName(NotificationCenter.mainUserInfoChanged); + getUserConfig().saveConfig(true); + })); } else { allowPullingDown = false; avatar = smallSize.location; @@ -5864,10 +6097,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. while ((count = origin.read(data, 0, data.length)) != -1) { out.write(data, 0, count); } - if (origin != null) { - origin.close(); - origin = null; - } + origin.close(); + origin = null; } finished[0] = true; } catch (Exception e) { @@ -7085,7 +7316,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. arrayList.add(new ThemeDescription(null, 0, null, null, null, themeDelegate, Theme.key_profile_status)); arrayList.add(new ThemeDescription(null, 0, null, null, null, themeDelegate, Theme.key_avatar_subtitleInProfileBlue)); - arrayList.add(new ThemeDescription(onlineTextView[2], ThemeDescription.FLAG_TEXTCOLOR, null, null, null, themeDelegate, Theme.key_player_actionBarSubtitle)); + arrayList.add(new ThemeDescription(mediaCounterTextView.getTextView(), ThemeDescription.FLAG_TEXTCOLOR, null, null, null, themeDelegate, Theme.key_player_actionBarSubtitle)); + arrayList.add(new ThemeDescription(mediaCounterTextView.getNextTextView(), ThemeDescription.FLAG_TEXTCOLOR, null, null, null, themeDelegate, Theme.key_player_actionBarSubtitle)); arrayList.add(new ThemeDescription(topView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_avatar_backgroundActionBarBlue)); arrayList.add(new ThemeDescription(listView, ThemeDescription.FLAG_SELECTOR, null, null, null, null, Theme.key_listSelector)); @@ -7167,4 +7399,123 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
return arrayList; } + + public void updateListAnimated() { + if (listAdapter == null) { + updateRowsIds(); + return; + } + + DiffCallback diffCallback = new DiffCallback(); + diffCallback.oldRowCount = rowCount; + diffCallback.fillPositions(diffCallback.oldPositionToItem); + updateRowsIds(); + diffCallback.fillPositions(diffCallback.newPositionToItem); + DiffUtil.calculateDiff(diffCallback).dispatchUpdatesTo(listAdapter); + if (listView != null && layoutManager != null && listView.getChildCount() > 0) { + View view = null; + int position = -1; + for (int i = 0; i < listView.getChildCount(); i++) { + position = listView.getChildAdapterPosition(listView.getChildAt(i)); + if (position != RecyclerListView.NO_POSITION) { + view = listView.getChildAt(i); + break; + } + } + if (view != null) { + layoutManager.scrollToPositionWithOffset(position, view.getTop() - listView.getPaddingTop()); + } + } + } + + private class DiffCallback extends DiffUtil.Callback { + + int oldRowCount; + + SparseIntArray oldPositionToItem = new SparseIntArray(); + SparseIntArray newPositionToItem = new SparseIntArray(); + + @Override + public int getOldListSize() { + return oldRowCount; + } + + @Override + public int getNewListSize() { + return rowCount; + } + + @Override + public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { + return oldPositionToItem.get(oldItemPosition) == newPositionToItem.get(newItemPosition); + } + + @Override + public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { + return oldPositionToItem.get(oldItemPosition) == newPositionToItem.get(newItemPosition); + } + + public void fillPositions(SparseIntArray sparseIntArray) { + sparseIntArray.clear(); + int pointer = 0; + put(++pointer, setAvatarRow, sparseIntArray); + put(++pointer, setAvatarSectionRow, sparseIntArray); + put(++pointer, numberSectionRow, sparseIntArray); + put(++pointer, numberRow, sparseIntArray); + put(++pointer, setUsernameRow, sparseIntArray); + put(++pointer, 
bioRow, sparseIntArray); + put(++pointer, settingsSectionRow, sparseIntArray); + put(++pointer, settingsSectionRow2, sparseIntArray); + put(++pointer, notificationRow, sparseIntArray); + put(++pointer, languageRow, sparseIntArray); + put(++pointer, privacyRow, sparseIntArray); + put(++pointer, dataRow, sparseIntArray); + put(++pointer, chatRow, sparseIntArray); + put(++pointer, filtersRow, sparseIntArray); + put(++pointer, devicesRow, sparseIntArray); + put(++pointer, devicesSectionRow, sparseIntArray); + put(++pointer, helpHeaderRow, sparseIntArray); + put(++pointer, questionRow, sparseIntArray); + put(++pointer, faqRow, sparseIntArray); + put(++pointer, policyRow, sparseIntArray); + put(++pointer, helpSectionCell, sparseIntArray); + put(++pointer, debugHeaderRow, sparseIntArray); + put(++pointer, sendLogsRow, sparseIntArray); + put(++pointer, clearLogsRow, sparseIntArray); + put(++pointer, switchBackendRow, sparseIntArray); + put(++pointer, versionRow, sparseIntArray); + put(++pointer, emptyRow, sparseIntArray); + put(++pointer, bottomPaddingRow, sparseIntArray); + put(++pointer, infoHeaderRow, sparseIntArray); + put(++pointer, phoneRow, sparseIntArray); + put(++pointer, locationRow, sparseIntArray); + put(++pointer, userInfoRow, sparseIntArray); + put(++pointer, channelInfoRow, sparseIntArray); + put(++pointer, usernameRow, sparseIntArray); + put(++pointer, notificationsDividerRow, sparseIntArray); + put(++pointer, notificationsRow, sparseIntArray); + put(++pointer, infoSectionRow, sparseIntArray); + put(++pointer, sendMessageRow, sparseIntArray); + put(++pointer, reportRow, sparseIntArray); + put(++pointer, settingsTimerRow, sparseIntArray); + put(++pointer, settingsKeyRow, sparseIntArray); + put(++pointer, secretSettingsSectionRow, sparseIntArray); + put(++pointer, membersHeaderRow, sparseIntArray); + put(++pointer, addMemberRow, sparseIntArray); + put(++pointer, subscribersRow, sparseIntArray); + put(++pointer, administratorsRow, sparseIntArray); + 
put(++pointer, blockedUsersRow, sparseIntArray); + put(++pointer, membersSectionRow, sparseIntArray); + put(++pointer, sharedMediaRow, sparseIntArray); + put(++pointer, unblockRow, sparseIntArray); + put(++pointer, joinRow, sparseIntArray); + put(++pointer, lastSectionRow, sparseIntArray); + } + + private void put(int id, int position, SparseIntArray sparseIntArray) { + if (position >= 0) { + sparseIntArray.put(position, id); + } + } + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java index e52f38d23..ee8df7c4b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java @@ -549,6 +549,7 @@ public class ProfileNotificationsActivity extends BaseFragment implements Notifi editor.putString("sound_" + dialog_id, "NoSound"); editor.putString("sound_path_" + dialog_id, "NoSound"); } + getNotificationsController().deleteNotificationChannel(dialog_id); } else if (requestCode == 13) { if (name != null) { editor.putString("ringtone_" + dialog_id, name); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java index 171537ed1..6e2db298b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java @@ -12,6 +12,7 @@ import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.util.SparseIntArray; +import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; @@ -211,7 +212,7 @@ public class StatisticActivity extends BaseFragment implements NotificationCente if (recentPostsAll.size() > 0) { int lastPostId = 
recentPostsAll.get(0).counters.msg_id; int count = recentPostsAll.size(); - getMessagesStorage().getMessages(-chat.id, 0, false, count, lastPostId, 0, 0, classGuid, 0, true, false, 0, 0); + getMessagesStorage().getMessages(-chat.id, 0, false, count, lastPostId, 0, 0, classGuid, 0, true, false, 0, 0, true); } AndroidUtilities.runOnUIThread(() -> { @@ -410,7 +411,7 @@ public class StatisticActivity extends BaseFragment implements NotificationCente imageView.playAnimation(); TextView loadingTitle = new TextView(context); - loadingTitle.setTextSize(20); + loadingTitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); loadingTitle.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); loadingTitle.setTextColor(Theme.getColor(Theme.key_player_actionBarTitle)); loadingTitle.setTag(Theme.key_player_actionBarTitle); @@ -418,7 +419,7 @@ public class StatisticActivity extends BaseFragment implements NotificationCente loadingTitle.setGravity(Gravity.CENTER_HORIZONTAL); TextView loadingSubtitle = new TextView(context); - loadingSubtitle.setTextSize(15); + loadingSubtitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); loadingSubtitle.setTextColor(Theme.getColor(Theme.key_player_actionBarSubtitle)); loadingSubtitle.setTag(Theme.key_player_actionBarSubtitle); loadingSubtitle.setText(LocaleController.getString("LoadingStatsDescription", R.string.LoadingStatsDescription)); @@ -703,7 +704,7 @@ public class StatisticActivity extends BaseFragment implements NotificationCente @Override public long getItemId(int position) { - if (position >= recentPostsStartRow && position <= recentPostsEndRow) { + if (position >= recentPostsStartRow && position < recentPostsEndRow) { return recentPostsLoaded.get(position - recentPostsStartRow).counters.msg_id; } if (position == growCell) { @@ -1290,7 +1291,7 @@ public class StatisticActivity extends BaseFragment implements NotificationCente frameLayout.addView(progressView, LayoutHelper.createFrame(44, 44, Gravity.CENTER, 0, 0, 0, 60)); 
errorTextView = new TextView(context); - errorTextView.setTextSize(15); + errorTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); frameLayout.addView(errorTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 30)); progressView.setVisibility(View.GONE); @@ -2382,9 +2383,9 @@ public class StatisticActivity extends BaseFragment implements NotificationCente title[i * 2 + j] = new TextView(context); primary[i * 2 + j].setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - primary[i * 2 + j].setTextSize(17); - title[i * 2 + j].setTextSize(13); - secondary[i * 2 + j].setTextSize(13); + primary[i * 2 + j].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); + title[i * 2 + j].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); + secondary[i * 2 + j].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); secondary[i * 2 + j].setPadding(AndroidUtilities.dp(4), 0, 0, 0); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java index 25ce88e67..77fdf34d5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java @@ -22,7 +22,6 @@ import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.LinearLayout; import android.widget.TextView; -import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.MediaDataController; @@ -50,6 +49,7 @@ import org.telegram.ui.Cells.TextCheckCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.TextSettingsCell; import org.telegram.ui.Components.Bulletin; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.NumberTextView; import org.telegram.ui.Components.RecyclerListView; @@ -634,7 +634,7 @@ public class StickersActivity extends 
BaseFragment implements NotificationCenter android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText("label", String.format(Locale.US, "https://" + MessagesController.getInstance(currentAccount).linkPrefix + "/addstickers/%s", stickerSet.set.short_name)); clipboard.setPrimaryClip(clip); - Toast.makeText(getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + BulletinFactory.createCopyLinkBulletin(StickersActivity.this).show(); } catch (Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java index a7cbe10c7..ec6fd5e3c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java @@ -32,7 +32,6 @@ import android.view.ViewGroup; import android.view.inputmethod.EditorInfo; import android.widget.LinearLayout; import android.widget.TextView; -import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; @@ -56,6 +55,7 @@ import org.telegram.ui.Cells.TextSettingsCell; import org.telegram.ui.Cells.ThemePreviewMessagesCell; import org.telegram.ui.Cells.ThemesHorizontalListCell; import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.EditTextBoldCursor; import org.telegram.ui.Components.LayoutHelper; @@ -113,7 +113,9 @@ public class ThemeSetUrlActivity extends BaseFragment implements NotificationCen android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip 
= android.content.ClipData.newPlainText("label", url); clipboard.setPrimaryClip(clip); - Toast.makeText(getParentActivity(), LocaleController.getString("LinkCopied", R.string.LinkCopied), Toast.LENGTH_SHORT).show(); + if (BulletinFactory.canShowBulletin(ThemeSetUrlActivity.this)) { + BulletinFactory.createCopyLinkBulletin(ThemeSetUrlActivity.this).show(); + } } catch (Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java index 2d19ad84b..056d6b5ce 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java @@ -17,16 +17,15 @@ import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.drawable.GradientDrawable; -import android.net.Uri; import android.os.Build; import android.os.PowerManager; -import android.provider.Settings; import android.text.TextUtils; import android.transition.ChangeBounds; import android.transition.TransitionManager; import android.transition.TransitionSet; import android.transition.TransitionValues; import android.transition.Visibility; +import android.util.TypedValue; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; @@ -70,6 +69,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.DarkAlertDialog; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AlertsCreator; import org.telegram.ui.Components.BackgroundGradientDrawable; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.CubicBezierInterpolator; @@ -99,7 +99,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification private final static int STATE_FULLSCREEN = 1; private final static int STATE_FLOATING = 2; - private int currentAccount; + private 
final int currentAccount; Activity activity; @@ -228,9 +228,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification if (VoIPService.getSharedInstance() == null) { return; } - VoIPFragment fragment = new VoIPFragment(); + VoIPFragment fragment = new VoIPFragment(account); fragment.activity = activity; - fragment.currentAccount = account; instance = fragment; VoIPWindowView windowView = new VoIPWindowView(activity, !transitionFromPip) { @Override @@ -380,7 +379,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification fragmentView.requestLayout(); } - public VoIPFragment() { + public VoIPFragment(int account) { + currentAccount = account; currentUser = MessagesController.getInstance(currentAccount).getUser(UserConfig.getInstance(currentAccount).getClientUserId()); callingUser = VoIPService.getSharedInstance().getUser(); VoIPService.getSharedInstance().registerStateListener(this); @@ -404,15 +404,13 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification @Override public void onStateChanged(int state) { - AndroidUtilities.runOnUIThread(() -> { - if (currentState != state) { - previousState = currentState; - currentState = state; - if (windowView != null) { - updateViewState(); - } + if (currentState != state) { + previousState = currentState; + currentState = state; + if (windowView != null) { + updateViewState(); } - }); + } } @Override @@ -436,7 +434,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification @Override public void onSignalBarsCountChanged(int count) { - AndroidUtilities.runOnUIThread(() -> statusTextView.setSignalBarCount(count)); + if (statusTextView != null) { + statusTextView.setSignalBarCount(count); + } } @Override @@ -651,7 +651,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification emojiRationalTextView = new TextView(context); 
emojiRationalTextView.setText(LocaleController.formatString("CallEmojiKeyTooltip", R.string.CallEmojiKeyTooltip, UserObject.getFirstName(callingUser))); - emojiRationalTextView.setTextSize(16); + emojiRationalTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); emojiRationalTextView.setTextColor(Color.WHITE); emojiRationalTextView.setGravity(Gravity.CENTER); emojiRationalTextView.setVisibility(View.GONE); @@ -671,7 +671,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification final StringBuilder builder = new StringBuilder(callingUserTitleText); builder.append(", "); - if (service.call != null && service.call.video) { + if (service.privateCall != null && service.privateCall.video) { builder.append(LocaleController.getString("VoipInVideoCallBranding", R.string.VoipInVideoCallBranding)); } else { builder.append(LocaleController.getString("VoipInCallBranding", R.string.VoipInCallBranding)); @@ -697,7 +697,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification callingUserPhotoViewMini.setVisibility(View.GONE); callingUserTitle = new TextView(context); - callingUserTitle.setTextSize(24); + callingUserTitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 24); callingUserTitle.setText(ContactsController.formatName(callingUser.first_name, callingUser.last_name)); callingUserTitle.setShadowLayer(AndroidUtilities.dp(3), 0, AndroidUtilities.dp(.666666667f), 0x4C000000); callingUserTitle.setTextColor(Color.WHITE); @@ -783,7 +783,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification frameLayout.addView(speakerPhoneIcon, LayoutHelper.createFrame(56, 56, Gravity.TOP | Gravity.RIGHT)); speakerPhoneIcon.setOnClickListener(view -> { if (VoIPService.getSharedInstance() != null) { - VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(activity); + VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(activity, false); } }); @@ -815,7 +815,7 @@ public class VoIPFragment 
implements VoIPBaseService.StateListener, Notification VoIPService service = VoIPService.getSharedInstance(); if (service != null) { if (!isVideoCall) { - isVideoCall = service.call != null && service.call.video; + isVideoCall = service.privateCall != null && service.privateCall.video; } initRenderers(); } @@ -871,6 +871,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification VoIPPiPView.bottomInset = instance.lastInsets.getSystemWindowInsetBottom(); } } + if (VoIPPiPView.getInstance() == null) { + return; + } speakerPhoneIcon.animate().alpha(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); backIcon.animate().alpha(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); @@ -1175,7 +1178,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification lockOnScreen = true; statusLayoutOffset = AndroidUtilities.dp(24); acceptDeclineView.setRetryMod(false); - if (service != null && service.call.video) { + if (service != null && service.privateCall.video) { if (currentUserIsVideo && callingUser.photo != null) { showCallingAvatarMini = true; } else { @@ -1417,7 +1420,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification notificationsLayout.removeNotification("video"); } - if (notificationsLayout.getChildCount() == 0 && callingUserIsVideo && service.call != null && !service.call.video && !service.sharedUIParams.tapToVideoTooltipWasShowed) { + if (notificationsLayout.getChildCount() == 0 && callingUserIsVideo && service.privateCall != null && !service.privateCall.video && !service.sharedUIParams.tapToVideoTooltipWasShowed) { service.sharedUIParams.tapToVideoTooltipWasShowed = true; tapToVideoTooltip.showForView(bottomButtons[1], true); } else if (notificationsLayout.getChildCount() != 0) { @@ -1758,7 +1761,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } if (currentState == 
VoIPService.STATE_WAITING_INCOMING || currentState == VoIPService.STATE_BUSY) { - if (service.call != null && service.call.video && currentState == VoIPService.STATE_WAITING_INCOMING) { + if (service.privateCall != null && service.privateCall.video && currentState == VoIPService.STATE_WAITING_INCOMING) { if (currentUserIsVideo || callingUserIsVideo) { setFrontalCameraAction(bottomButtons[0], service, animated); if (uiVisible) { @@ -1831,7 +1834,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } view.announceForAccessibility(text); } - serviceInstance.setMicMute(micMute); + serviceInstance.setMicMute(micMute, false, true); previousState = currentState; updateViewState(); } @@ -1856,7 +1859,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && activity.checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { activity.requestPermissions(new String[]{Manifest.permission.CAMERA}, 102); } else { - if (service.call != null && !service.call.video && !callingUserIsVideo && !service.sharedUIParams.cameraAlertWasShowed) { + if (service.privateCall != null && !service.privateCall.video && !callingUserIsVideo && !service.sharedUIParams.cameraAlertWasShowed) { AlertDialog.Builder builder = new AlertDialog.Builder(activity); builder.setMessage(LocaleController.getString("VoipSwitchToVideoCall", R.string.VoipSwitchToVideoCall)); builder.setPositiveButton(LocaleController.getString("VoipSwitch", R.string.VoipSwitch), (dialogInterface, i) -> { @@ -1901,17 +1904,17 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification private void setSpeakerPhoneAction(VoIPToggleButton bottomButton, VoIPService service, boolean animated) { if (service.isBluetoothOn()) { bottomButton.setData(R.drawable.calls_bluetooth, Color.WHITE, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.12f)), 
LocaleController.getString("VoipAudioRoutingBluetooth", R.string.VoipAudioRoutingBluetooth), false, animated); - bottomButton.setChecked(false); + bottomButton.setChecked(false, animated); } else if (service.isSpeakerphoneOn()) { bottomButton.setData(R.drawable.calls_speaker, Color.BLACK, Color.WHITE, LocaleController.getString("VoipSpeaker", R.string.VoipSpeaker), false, animated); - bottomButton.setChecked(true); + bottomButton.setChecked(true, animated); } else { bottomButton.setData(R.drawable.calls_speaker, Color.WHITE, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.12f)), LocaleController.getString("VoipSpeaker", R.string.VoipSpeaker), false, animated); - bottomButton.setChecked(false); + bottomButton.setChecked(false, animated); } bottomButton.setOnClickListener(view -> { if (VoIPService.getSharedInstance() != null) { - VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(activity); + VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(activity, false); } }); } @@ -1963,7 +1966,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification if (!currentUserIsVideo) { currentUserIsVideo = true; if (!service.isSpeakerphoneOn()) { - VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(activity); + VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(activity, false); } service.requestVideoCall(); service.setVideoState(Instance.VIDEO_STATE_ACTIVE); @@ -2101,16 +2104,12 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification @SuppressLint("InlinedApi") private void requestInlinePermissions() { - new AlertDialog.Builder(activity).setTitle(LocaleController.getString("AppName", R.string.AppName)) - .setMessage(LocaleController.getString("PermissionDrawAboveOtherApps", R.string.PermissionDrawAboveOtherApps)) - .setPositiveButton(LocaleController.getString("PermissionOpenSettings", R.string.PermissionOpenSettings), (dialog, which) -> { - if (activity != 
null) { - try { - activity.startActivity(new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + activity.getPackageName()))); - } catch (Exception e) { - FileLog.e(e); - } - } - }).show(); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + AlertsCreator.createDrawOverlayPermissionDialog(activity, (dialogInterface, i) -> { + if (windowView != null) { + windowView.finish(); + } + }).show(); + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoIPPermissionActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/VoIPPermissionActivity.java index e1f9aed20..78ff9b407 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/VoIPPermissionActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoIPPermissionActivity.java @@ -19,7 +19,7 @@ public class VoIPPermissionActivity extends Activity { super.onCreate(savedInstanceState); VoIPService service = VoIPService.getSharedInstance(); - boolean isVideoCall = service != null && service.call != null && service.call.video; + boolean isVideoCall = service != null && service.privateCall != null && service.privateCall.video; ArrayList permissions = new ArrayList<>(); if (checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { diff --git a/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java b/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java index 4a72d1074..3cd7621b6 100644 --- a/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java +++ b/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java @@ -248,10 +248,6 @@ class AndroidVideoDecoder implements VideoDecoder, VideoSink { Logging.e(TAG, "decode() - key frame required first"); return VideoCodecStatus.NO_OUTPUT; } - if (!frame.completeFrame) { - Logging.e(TAG, "decode() - complete frame required first"); - return VideoCodecStatus.NO_OUTPUT; - } } int index; diff --git a/TMessagesProj/src/main/java/org/webrtc/CameraSession.java 
b/TMessagesProj/src/main/java/org/webrtc/CameraSession.java index 8130f0a4e..8d137854d 100644 --- a/TMessagesProj/src/main/java/org/webrtc/CameraSession.java +++ b/TMessagesProj/src/main/java/org/webrtc/CameraSession.java @@ -10,7 +10,10 @@ package org.webrtc; +import android.content.Context; import android.graphics.Matrix; +import android.view.WindowManager; +import android.view.Surface; interface CameraSession { enum FailureType { ERROR, DISCONNECTED } @@ -36,6 +39,21 @@ interface CameraSession { */ void stop(); + static int getDeviceOrientation(Context context) { + final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); + switch (wm.getDefaultDisplay().getRotation()) { + case Surface.ROTATION_90: + return 90; + case Surface.ROTATION_180: + return 180; + case Surface.ROTATION_270: + return 270; + case Surface.ROTATION_0: + default: + return 0; + } + } + static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix( TextureBufferImpl buffer, boolean mirror, int rotation) { final Matrix transformMatrix = new Matrix(); diff --git a/TMessagesProj/src/main/java/org/webrtc/CandidatePairChangeEvent.java b/TMessagesProj/src/main/java/org/webrtc/CandidatePairChangeEvent.java index 395b629c5..b8e6685a8 100644 --- a/TMessagesProj/src/main/java/org/webrtc/CandidatePairChangeEvent.java +++ b/TMessagesProj/src/main/java/org/webrtc/CandidatePairChangeEvent.java @@ -20,12 +20,20 @@ public final class CandidatePairChangeEvent { public final int lastDataReceivedMs; public final String reason; + /** + * An estimate from the ICE stack on how long it was disconnected before + * changing to the new candidate pair in this event. + * The first time a candidate pair is signaled the value will be 0. 
+ */ + public final int estimatedDisconnectedTimeMs; + @CalledByNative - CandidatePairChangeEvent( - IceCandidate local, IceCandidate remote, int lastDataReceivedMs, String reason) { + CandidatePairChangeEvent(IceCandidate local, IceCandidate remote, int lastDataReceivedMs, + String reason, int estimatedDisconnectedTimeMs) { this.local = local; this.remote = remote; this.lastDataReceivedMs = lastDataReceivedMs; this.reason = reason; + this.estimatedDisconnectedTimeMs = estimatedDisconnectedTimeMs; } } diff --git a/TMessagesProj/src/main/java/org/webrtc/EncodedImage.java b/TMessagesProj/src/main/java/org/webrtc/EncodedImage.java index f2ee569e2..a6eef67da 100644 --- a/TMessagesProj/src/main/java/org/webrtc/EncodedImage.java +++ b/TMessagesProj/src/main/java/org/webrtc/EncodedImage.java @@ -54,7 +54,6 @@ public class EncodedImage implements RefCounted { public final long captureTimeNs; public final FrameType frameType; public final int rotation; - public final boolean completeFrame; public final @Nullable Integer qp; // TODO(bugs.webrtc.org/9378): Use retain and release from jni code. 
@@ -71,7 +70,7 @@ public class EncodedImage implements RefCounted { @CalledByNative private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth, int encodedHeight, long captureTimeNs, FrameType frameType, int rotation, - boolean completeFrame, @Nullable Integer qp) { + @Nullable Integer qp) { this.buffer = buffer; this.encodedWidth = encodedWidth; this.encodedHeight = encodedHeight; @@ -79,7 +78,6 @@ public class EncodedImage implements RefCounted { this.captureTimeNs = captureTimeNs; this.frameType = frameType; this.rotation = rotation; - this.completeFrame = completeFrame; this.qp = qp; this.refCountDelegate = new RefCountDelegate(releaseCallback); } @@ -114,11 +112,6 @@ public class EncodedImage implements RefCounted { return rotation; } - @CalledByNative - private boolean getCompleteFrame() { - return completeFrame; - } - @CalledByNative private @Nullable Integer getQp() { return qp; @@ -136,7 +129,6 @@ public class EncodedImage implements RefCounted { private long captureTimeNs; private EncodedImage.FrameType frameType; private int rotation; - private boolean completeFrame; private @Nullable Integer qp; private Builder() {} @@ -178,11 +170,6 @@ public class EncodedImage implements RefCounted { return this; } - public Builder setCompleteFrame(boolean completeFrame) { - this.completeFrame = completeFrame; - return this; - } - public Builder setQp(@Nullable Integer qp) { this.qp = qp; return this; @@ -190,7 +177,7 @@ public class EncodedImage implements RefCounted { public EncodedImage createEncodedImage() { return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs, - frameType, rotation, completeFrame, qp); + frameType, rotation, qp); } } } diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java index cdabee2b0..1ff8ce04b 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java +++ 
b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java @@ -336,6 +336,7 @@ class HardwareVideoEncoder implements VideoEncoder { final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer(); final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer; + //TODO back to texture buffer // If input resolution changed, restart the codec with the new resolution. final int frameWidth = videoFrame.getBuffer().getWidth(); @@ -370,7 +371,6 @@ class HardwareVideoEncoder implements VideoEncoder { int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2; EncodedImage.Builder builder = EncodedImage.builder() .setCaptureTimeNs(videoFrame.getTimestampNs()) - .setCompleteFrame(true) .setEncodedWidth(videoFrame.getBuffer().getWidth()) .setEncodedHeight(videoFrame.getBuffer().getHeight()) .setRotation(videoFrame.getRotation()); diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java index 8093aeece..76d55e051 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java @@ -157,18 +157,13 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { } private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) { - for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { - MediaCodecInfo info = null; - try { - info = MediaCodecList.getCodecInfoAt(i); - } catch (IllegalArgumentException e) { - Logging.e(TAG, "Cannot retrieve encoder codec info", e); - } - + ArrayList infos = MediaCodecUtils.getSortedCodecsList(); + int count = infos.size(); + for (int i = 0; i < count; ++i) { + MediaCodecInfo info = infos.get(i); if (info == null || !info.isEncoder()) { continue; } - if (isSupportedCodec(info, type)) { return info; } diff --git 
a/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java b/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java index 83bd34462..f959a9a53 100644 --- a/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java +++ b/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java @@ -13,8 +13,16 @@ package org.webrtc; import android.annotation.TargetApi; import android.media.MediaCodecInfo; import android.media.MediaCodecInfo.CodecCapabilities; +import android.media.MediaCodecList; import android.os.Build; + +import org.telegram.messenger.FileLog; + import androidx.annotation.Nullable; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.Map; @@ -67,6 +75,24 @@ class MediaCodecUtils { } } + public static ArrayList getSortedCodecsList() { + ArrayList result = new ArrayList<>(); + try { + int numberOfCodecs = MediaCodecList.getCodecCount(); + for (int a = 0; a < numberOfCodecs; a++) { + try { + result.add(MediaCodecList.getCodecInfoAt(a)); + } catch (IllegalArgumentException e) { + Logging.e(TAG, "Cannot retrieve codec info", e); + } + } + Collections.sort(result, (o1, o2) -> o1.getName().compareTo(o2.getName())); + } catch (Exception e) { + FileLog.e(e); + } + return result; + } + static @Nullable Integer selectColorFormat( int[] supportedColorFormats, CodecCapabilities capabilities) { for (int supportedColorFormat : supportedColorFormats) { diff --git a/TMessagesProj/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java index 7edeae237..c31982c64 100644 --- a/TMessagesProj/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -89,18 +89,13 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory { return null; } - for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { - MediaCodecInfo info = 
null; - try { - info = MediaCodecList.getCodecInfoAt(i); - } catch (IllegalArgumentException e) { - Logging.e(TAG, "Cannot retrieve decoder codec info", e); - } - + ArrayList infos = MediaCodecUtils.getSortedCodecsList(); + int codecCount = infos.size(); + for (int i = 0; i < codecCount; ++i) { + MediaCodecInfo info = infos.get(i); if (info == null || info.isEncoder()) { continue; } - if (isSupportedCodec(info, type)) { return info; } diff --git a/TMessagesProj/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java b/TMessagesProj/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java index eea9cd585..1968577ce 100644 --- a/TMessagesProj/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java +++ b/TMessagesProj/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java @@ -112,7 +112,10 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver implements Netwo public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) { // A link property change may indicate the IP address changes. // so forward the new NetworkInformation to the observer. - Logging.d(TAG, "link properties changed: " + linkProperties.toString()); + // + // linkProperties.toString() has PII that cannot be redacted + // very reliably, so do not include in log. + Logging.d(TAG, "link properties changed"); onNetworkChanged(network); } diff --git a/TMessagesProj/src/main/java/org/webrtc/RtpTransceiver.java b/TMessagesProj/src/main/java/org/webrtc/RtpTransceiver.java index 021cc90bc..aff1bfbde 100644 --- a/TMessagesProj/src/main/java/org/webrtc/RtpTransceiver.java +++ b/TMessagesProj/src/main/java/org/webrtc/RtpTransceiver.java @@ -200,9 +200,9 @@ public class RtpTransceiver { * sendrecv, sendonly, recvonly, or inactive. 
* https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction */ - public void setDirection(RtpTransceiverDirection rtpTransceiverDirection) { + public boolean setDirection(RtpTransceiverDirection rtpTransceiverDirection) { checkRtpTransceiverExists(); - nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection); + return nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection); } /** @@ -260,6 +260,6 @@ public class RtpTransceiver { private static native RtpTransceiverDirection nativeCurrentDirection(long rtpTransceiver); private static native void nativeStopInternal(long rtpTransceiver); private static native void nativeStopStandard(long rtpTransceiver); - private static native void nativeSetDirection( + private static native boolean nativeSetDirection( long rtpTransceiver, RtpTransceiverDirection rtpTransceiverDirection); } diff --git a/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java b/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java index 21017a4a1..05921a889 100644 --- a/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java +++ b/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java @@ -75,6 +75,11 @@ public class ScreenCapturerAndroid implements VideoCapturer, VideoSink { } } + @Nullable + public MediaProjection getMediaProjection() { + return mediaProjection; + } + @Override // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. 
@SuppressWarnings("NoSynchronizedMethodCheck") diff --git a/TMessagesProj/src/main/java/org/webrtc/SessionDescription.java b/TMessagesProj/src/main/java/org/webrtc/SessionDescription.java index 62601f0bf..be89599a5 100644 --- a/TMessagesProj/src/main/java/org/webrtc/SessionDescription.java +++ b/TMessagesProj/src/main/java/org/webrtc/SessionDescription.java @@ -22,7 +22,8 @@ public class SessionDescription { public static enum Type { OFFER, PRANSWER, - ANSWER; + ANSWER, + ROLLBACK; public String canonicalForm() { return name().toLowerCase(Locale.US); diff --git a/TMessagesProj/src/main/java/org/webrtc/SurfaceTextureHelper.java b/TMessagesProj/src/main/java/org/webrtc/SurfaceTextureHelper.java index 6e604fa32..ad3be54b1 100644 --- a/TMessagesProj/src/main/java/org/webrtc/SurfaceTextureHelper.java +++ b/TMessagesProj/src/main/java/org/webrtc/SurfaceTextureHelper.java @@ -19,8 +19,6 @@ import android.os.Handler; import android.os.HandlerThread; import androidx.annotation.Nullable; import java.util.concurrent.Callable; - -import org.telegram.messenger.FileLog; import org.webrtc.EglBase.Context; import org.webrtc.TextureBufferImpl.RefCountMonitor; import org.webrtc.VideoFrame.TextureBuffer; @@ -201,6 +199,10 @@ public class SurfaceTextureHelper { oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); surfaceTexture = new SurfaceTexture(oesTextureId); setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> { + if (hasPendingTexture) { + Logging.d(TAG, "A frame is already pending, dropping frame."); + } + hasPendingTexture = true; tryDeliverTextureFrame(); }, handler); diff --git a/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java b/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java index 1a3ccde31..08edde3f6 100644 --- a/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java @@ -11,14 +11,15 @@ package 
org.webrtc.audio; import android.content.Context; +import android.media.AudioAttributes; import android.media.AudioDeviceInfo; import android.media.AudioManager; import android.os.Build; +import androidx.annotation.RequiresApi; +import java.util.concurrent.ScheduledExecutorService; import org.webrtc.JniCommon; import org.webrtc.Logging; -import androidx.annotation.RequiresApi; - /** * AudioDeviceModule implemented using android.media.AudioRecord as input and * android.media.AudioTrack as output. @@ -32,6 +33,7 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { public static class Builder { private final Context context; + private ScheduledExecutorService scheduler; private final AudioManager audioManager; private int inputSampleRate; private int outputSampleRate; @@ -46,6 +48,7 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported(); private boolean useStereoInput; private boolean useStereoOutput; + private AudioAttributes audioAttributes; private Builder(Context context) { this.context = context; @@ -54,6 +57,11 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); } + public Builder setScheduler(ScheduledExecutorService scheduler) { + this.scheduler = scheduler; + return this; + } + /** * Call this method if the default handling of querying the native sample rate shall be * overridden. Can be useful on some devices where the available Android APIs are known to @@ -187,11 +195,19 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { return this; } + /** + * Set custom {@link AudioAttributes} to use. + */ + public Builder setAudioAttributes(AudioAttributes audioAttributes) { + this.audioAttributes = audioAttributes; + return this; + } + /** * Construct an AudioDeviceModule based on the supplied arguments. 
The caller takes ownership * and is responsible for calling release(). */ - public AudioDeviceModule createAudioDeviceModule() { + public JavaAudioDeviceModule createAudioDeviceModule() { Logging.d(TAG, "createAudioDeviceModule"); if (useHardwareNoiseSuppressor) { Logging.d(TAG, "HW NS will be used."); @@ -209,11 +225,15 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { } Logging.d(TAG, "HW AEC will not be used."); } - final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource, - audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback, - useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); + ScheduledExecutorService executor = this.scheduler; + if (executor == null) { + executor = WebRtcAudioRecord.newDefaultScheduler(); + } + final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, + audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, + samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack( - context, audioManager, audioTrackErrorCallback, audioTrackStateCallback); + context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput); } diff --git a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java index 33b9b39ea..51be90aa0 100644 --- a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -22,7 +22,6 @@ import android.os.Build; import android.os.Process; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; - import java.lang.System; import 
java.nio.ByteBuffer; import java.util.Arrays; @@ -32,7 +31,10 @@ import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; @@ -91,12 +93,12 @@ class WebRtcAudioRecord { private @Nullable AudioRecordThread audioThread; private @Nullable AudioDeviceInfo preferredDevice; - private @Nullable ScheduledExecutorService executor; + private final ScheduledExecutorService executor; private @Nullable ScheduledFuture future; private volatile boolean microphoneMute; - private boolean audioSourceMatchesRecordingSession; - private boolean isAudioConfigVerified; + private final AtomicReference audioSourceMatchesRecordingSessionRef = + new AtomicReference<>(); private byte[] emptyBytes; private final @Nullable AudioRecordErrorCallback errorCallback; @@ -180,14 +182,15 @@ class WebRtcAudioRecord { @CalledByNative WebRtcAudioRecord(Context context, AudioManager audioManager) { - this(context, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT, - null /* errorCallback */, null /* stateCallback */, null /* audioSamplesReadyCallback */, - WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, + DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, + null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), WebRtcAudioEffects.isNoiseSuppressorSupported()); } - public WebRtcAudioRecord(Context context, AudioManager audioManager, int audioSource, - int audioFormat, @Nullable AudioRecordErrorCallback errorCallback, + public WebRtcAudioRecord(Context 
context, ScheduledExecutorService scheduler, + AudioManager audioManager, int audioSource, int audioFormat, + @Nullable AudioRecordErrorCallback errorCallback, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { @@ -198,6 +201,7 @@ class WebRtcAudioRecord { throw new IllegalArgumentException("HW NS not supported"); } this.context = context; + this.executor = scheduler; this.audioManager = audioManager; this.audioSource = audioSource; this.audioFormat = audioFormat; @@ -228,7 +232,7 @@ class WebRtcAudioRecord { // checked before using the returned value of isAudioSourceMatchingRecordingSession(). @CalledByNative boolean isAudioConfigVerified() { - return isAudioConfigVerified; + return audioSourceMatchesRecordingSessionRef.get() != null; } // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay when @@ -237,7 +241,8 @@ class WebRtcAudioRecord { // enabled in WebRtcAudioRecord to ensure that the returned value is valid. @CalledByNative boolean isAudioSourceMatchingRecordingSession() { - if (!isAudioConfigVerified) { + Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get(); + if (audioSourceMatchesRecordingSession == null) { Logging.w(TAG, "Audio configuration has not yet been verified"); return false; } @@ -299,6 +304,7 @@ class WebRtcAudioRecord { // Throws IllegalArgumentException. audioRecord = createAudioRecordOnMOrHigher( audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); if (preferredDevice != null) { setPreferredDevice(preferredDevice); } @@ -307,6 +313,7 @@ class WebRtcAudioRecord { // Throws UnsupportedOperationException. 
audioRecord = createAudioRecordOnLowerThanM( audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); } } catch (IllegalArgumentException | UnsupportedOperationException e) { // Report of exception message is sufficient. Example: "Cannot create AudioRecord". @@ -325,7 +332,7 @@ class WebRtcAudioRecord { // Check number of active recording sessions. Should be zero but we have seen conflict cases // and adding a log for it can help us figure out details about conflicting sessions. final int numActiveRecordingSessions = - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (numActiveRecordingSessions != 0) { // Log the conflict as a warning since initialization did in fact succeed. Most likely, the // upcoming call to startRecording() will fail under these conditions. @@ -372,7 +379,7 @@ class WebRtcAudioRecord { } audioThread = new AudioRecordThread("AudioRecordJavaThread"); audioThread.start(); - scheduleLogRecordingConfigurationsTask(); + scheduleLogRecordingConfigurationsTask(audioRecord); return true; } @@ -387,10 +394,6 @@ class WebRtcAudioRecord { } future = null; } - if (executor != null) { - executor.shutdownNow(); - executor = null; - } audioThread.stopThread(); if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); @@ -443,8 +446,8 @@ class WebRtcAudioRecord { @TargetApi(Build.VERSION_CODES.N) // Checks the number of active recording sessions and logs the states of all active sessions. - // Returns number of active sessions. - private int logRecordingConfigurations(boolean verifyAudioConfig) { + // Returns number of active sessions. Note that this could occur on arbituary thread. 
+ private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher"); return 0; @@ -452,6 +455,7 @@ class WebRtcAudioRecord { if (audioRecord == null) { return 0; } + // Get a list of the currently active audio recording configurations of the device (can be more // than one). An empty list indicates there is no recording active when queried. List configs = audioManager.getActiveRecordingConfigurations(); @@ -464,10 +468,9 @@ class WebRtcAudioRecord { // to the AudioRecord instance) is matching what the audio recording configuration lists // as its client parameters. If these do not match, recording might work but under invalid // conditions. - audioSourceMatchesRecordingSession = + audioSourceMatchesRecordingSessionRef.set( verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(), - audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs); - isAudioConfigVerified = true; + audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs)); } } return numActiveRecordingSessions; @@ -502,12 +505,13 @@ class WebRtcAudioRecord { audioRecord.release(); audioRecord = null; } + audioSourceMatchesRecordingSessionRef.set(null); } private void reportWebRtcAudioRecordInitError(String errorMessage) { Logging.e(TAG, "Init recording error: " + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (errorCallback != null) { errorCallback.onWebRtcAudioRecordInitError(errorMessage); } @@ -517,7 +521,7 @@ class WebRtcAudioRecord { AudioRecordStartErrorCode errorCode, String errorMessage) { Logging.e(TAG, "Start recording error: " + errorCode + ". 
" + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (errorCallback != null) { errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage); } @@ -565,18 +569,18 @@ class WebRtcAudioRecord { // Use an ExecutorService to schedule a task after a given delay where the task consists of // checking (by logging) the current status of active recording sessions. - private void scheduleLogRecordingConfigurationsTask() { + private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) { Logging.d(TAG, "scheduleLogRecordingConfigurationsTask"); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { return; } - if (executor != null) { - executor.shutdownNow(); - } - executor = Executors.newSingleThreadScheduledExecutor(); Callable callable = () -> { - logRecordingConfigurations(true /* verifyAudioConfig */); + if (this.audioRecord == audioRecord) { + logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */); + } else { + Logging.d(TAG, "audio record has changed"); + } return "Scheduled task is done"; }; @@ -705,4 +709,22 @@ class WebRtcAudioRecord { return "INVALID"; } } + + private static final AtomicInteger nextSchedulerId = new AtomicInteger(0); + + static ScheduledExecutorService newDefaultScheduler() { + AtomicInteger nextThreadId = new AtomicInteger(0); + return Executors.newScheduledThreadPool(0, new ThreadFactory() { + /** + * Constructs a new {@code Thread} + */ + @Override + public Thread newThread(Runnable r) { + Thread thread = Executors.defaultThreadFactory().newThread(r); + thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s", + nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement())); + return thread; + } + }); + } } diff --git a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java 
b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java index a92a34280..196346fb6 100644 --- a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -71,6 +71,7 @@ class WebRtcAudioTrack { private ByteBuffer byteBuffer; + private @Nullable final AudioAttributes audioAttributes; private @Nullable AudioTrack audioTrack; private @Nullable AudioTrackThread audioThread; private final VolumeLogger volumeLogger; @@ -162,15 +163,17 @@ class WebRtcAudioTrack { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { - this(context, audioManager, null /* errorCallback */, null /* stateCallback */); + this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, + null /* stateCallback */); } WebRtcAudioTrack(Context context, AudioManager audioManager, - @Nullable AudioTrackErrorCallback errorCallback, + @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, @Nullable AudioTrackStateCallback stateCallback) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; + this.audioAttributes = audioAttributes; this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.volumeLogger = new VolumeLogger(audioManager); @@ -231,8 +234,8 @@ class WebRtcAudioTrack { // supersede the notion of stream types for defining the behavior of audio playback, // and to allow certain platforms or routing policies to use this information for more // refined volume or routing decisions. - audioTrack = - createAudioTrackOnLollipopOrHigher(sampleRate, channelConfig, minBufferSizeInBytes); + audioTrack = createAudioTrackOnLollipopOrHigher( + sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes); } else { // Use default constructor for API levels below 21. 
audioTrack = @@ -383,8 +386,8 @@ class WebRtcAudioTrack { // It allows certain platforms or routing policies to use this information for more // refined volume or routing decisions. @TargetApi(Build.VERSION_CODES.LOLLIPOP) - private static AudioTrack createAudioTrackOnLollipopOrHigher( - int sampleRateInHz, int channelConfig, int bufferSizeInBytes) { + private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz, + int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { Logging.d(TAG, "createAudioTrackOnLollipopOrHigher"); // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control // performance when Android O is supported. Add some logging in the mean time. @@ -394,11 +397,26 @@ class WebRtcAudioTrack { if (sampleRateInHz != nativeOutputSampleRate) { Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native"); } + + AudioAttributes.Builder attributesBuilder = + new AudioAttributes.Builder() + .setUsage(DEFAULT_USAGE) + .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH); + + if (overrideAttributes != null) { + if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) { + attributesBuilder.setUsage(overrideAttributes.getUsage()); + } + if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) { + attributesBuilder.setContentType(overrideAttributes.getContentType()); + } + + attributesBuilder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy()) + .setFlags(overrideAttributes.getFlags()); + } + // Create an audio track where the audio usage is for VoIP and the content type is speech. 
- return new AudioTrack(new AudioAttributes.Builder() - .setUsage(DEFAULT_USAGE) - .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH) - .build(), + return new AudioTrack(attributesBuilder.build(), new AudioFormat.Builder() .setEncoding(AudioFormat.ENCODING_PCM_16BIT) .setSampleRate(sampleRateInHz) diff --git a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioEffects.java b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioEffects.java index db5b72ac7..08be8a926 100644 --- a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioEffects.java +++ b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioEffects.java @@ -18,6 +18,8 @@ import android.os.Build; import androidx.annotation.Nullable; import java.util.List; import java.util.UUID; + +import org.telegram.messenger.SharedConfig; import org.webrtc.Logging; // This class wraps control of three different platform effects. Supported @@ -224,7 +226,7 @@ public class WebRtcAudioEffects { aec = AcousticEchoCanceler.create(audioSession); if (aec != null) { boolean enabled = aec.getEnabled(); - boolean enable = shouldEnableAec && canUseAcousticEchoCanceler(); + boolean enable = shouldEnableAec && canUseAcousticEchoCanceler() && !SharedConfig.disableVoiceAudioEffects; if (aec.setEnabled(enable) != AudioEffect.SUCCESS) { Logging.e(TAG, "Failed to set the AcousticEchoCanceler state"); } @@ -242,7 +244,7 @@ public class WebRtcAudioEffects { ns = NoiseSuppressor.create(audioSession); if (ns != null) { boolean enabled = ns.getEnabled(); - boolean enable = shouldEnableNs && canUseNoiseSuppressor(); + boolean enable = shouldEnableNs && canUseNoiseSuppressor() && !SharedConfig.disableVoiceAudioEffects; if (ns.setEnabled(enable) != AudioEffect.SUCCESS) { Logging.e(TAG, "Failed to set the NoiseSuppressor state"); } diff --git a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioManager.java b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioManager.java index 
b057c3a45..aa8f666d2 100644 --- a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioManager.java +++ b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioManager.java @@ -216,7 +216,7 @@ public class WebRtcAudioManager { if (blacklisted) { Logging.d(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!"); } - return blacklisted; + return true;//blacklisted; } private void storeAudioParameters() { diff --git a/TMessagesProj/src/main/res/drawable-hdpi/menu_nearby.png b/TMessagesProj/src/main/res/drawable-hdpi/menu_nearby.png new file mode 100644 index 000000000..99d7f030f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/menu_nearby.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/menu_nearby_ny.png b/TMessagesProj/src/main/res/drawable-hdpi/menu_nearby_ny.png new file mode 100644 index 000000000..2b15ae217 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/menu_nearby_ny.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_endcall.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_endcall.png new file mode 100644 index 000000000..3981f2e91 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_endcall.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_invited.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_invited.png new file mode 100644 index 000000000..d249ab40d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_invited.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_photoeditor.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_photoeditor.png new file mode 100644 index 000000000..e8d5fdac2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_photoeditor.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_replace.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_replace.png new file mode 100644 index 000000000..4ac9346dc Binary files /dev/null 
and b/TMessagesProj/src/main/res/drawable-hdpi/msg_replace.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_sendphoto.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_sendphoto.png new file mode 100644 index 000000000..623c23a27 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_sendphoto.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_muted.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_muted.png new file mode 100644 index 000000000..02228c816 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_muted.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_pip.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_pip.png new file mode 100644 index 000000000..4cd0cc91e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_pip.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_unmuted.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_unmuted.png new file mode 100644 index 000000000..543e9ca81 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_unmuted.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voicechat.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voicechat.png new file mode 100644 index 000000000..ce27764e1 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voicechat.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voice_muted_large.png b/TMessagesProj/src/main/res/drawable-hdpi/voice_muted_large.png new file mode 100644 index 000000000..a97716823 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/voice_muted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voice_unmuted_large.png b/TMessagesProj/src/main/res/drawable-hdpi/voice_unmuted_large.png new file mode 100644 index 000000000..a3e47bb2a Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-hdpi/voice_unmuted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voicechat_active.png b/TMessagesProj/src/main/res/drawable-hdpi/voicechat_active.png new file mode 100644 index 000000000..4694cd3dd Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/voicechat_active.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voicechat_muted.png b/TMessagesProj/src/main/res/drawable-hdpi/voicechat_muted.png new file mode 100644 index 000000000..15c159ad0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/voicechat_muted.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/menu_nearby.png b/TMessagesProj/src/main/res/drawable-mdpi/menu_nearby.png new file mode 100644 index 000000000..574d1b60f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/menu_nearby.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/menu_nearby_ny.png b/TMessagesProj/src/main/res/drawable-mdpi/menu_nearby_ny.png new file mode 100644 index 000000000..0b620c540 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/menu_nearby_ny.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_endcall.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_endcall.png new file mode 100644 index 000000000..f7cca8409 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_endcall.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_invited.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_invited.png new file mode 100644 index 000000000..667fd89a0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_invited.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_photoeditor.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_photoeditor.png new file mode 100644 index 000000000..61e08c678 Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-mdpi/msg_photoeditor.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_replace.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_replace.png new file mode 100644 index 000000000..0d97939e5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_replace.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_sendphoto.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_sendphoto.png new file mode 100644 index 000000000..350be4368 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_sendphoto.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_muted.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_muted.png new file mode 100644 index 000000000..cfc7f3a94 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_muted.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_pip.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_pip.png new file mode 100644 index 000000000..943f8089f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_pip.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_unmuted.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_unmuted.png new file mode 100644 index 000000000..7b6c005a4 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_unmuted.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voicechat.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voicechat.png new file mode 100644 index 000000000..ee9796da2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voicechat.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voice_muted_large.png b/TMessagesProj/src/main/res/drawable-mdpi/voice_muted_large.png new file mode 100644 index 000000000..1058cb8a2 Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-mdpi/voice_muted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voice_unmuted_large.png b/TMessagesProj/src/main/res/drawable-mdpi/voice_unmuted_large.png new file mode 100644 index 000000000..e3b4c8e38 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/voice_unmuted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voicechat_active.png b/TMessagesProj/src/main/res/drawable-mdpi/voicechat_active.png new file mode 100644 index 000000000..8818203a6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/voicechat_active.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voicechat_muted.png b/TMessagesProj/src/main/res/drawable-mdpi/voicechat_muted.png new file mode 100644 index 000000000..1ab729af0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/voicechat_muted.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/menu_nearby.png b/TMessagesProj/src/main/res/drawable-xhdpi/menu_nearby.png new file mode 100644 index 000000000..4bbb58edf Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/menu_nearby.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/menu_nearby_ny.png b/TMessagesProj/src/main/res/drawable-xhdpi/menu_nearby_ny.png new file mode 100644 index 000000000..a9a30c82c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/menu_nearby_ny.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_endcall.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_endcall.png new file mode 100644 index 000000000..81cc06d2e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_endcall.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_invited.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_invited.png new file mode 100644 index 000000000..d37f1d35b Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg_invited.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photoeditor.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_photoeditor.png new file mode 100644 index 000000000..a9e7ef7d4 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_photoeditor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_replace.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_replace.png new file mode 100644 index 000000000..09ace2a39 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_replace.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_sendphoto.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_sendphoto.png new file mode 100644 index 000000000..97ae7f91f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_sendphoto.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_muted.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_muted.png new file mode 100644 index 000000000..62f3ed026 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_muted.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_pip.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_pip.png new file mode 100644 index 000000000..c68f0b070 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_pip.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_unmuted.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_unmuted.png new file mode 100644 index 000000000..65e6bd5ce Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_unmuted.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voicechat.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voicechat.png new file mode 100644 index 000000000..f50cdaa8d Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voicechat.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voice_muted_large.png b/TMessagesProj/src/main/res/drawable-xhdpi/voice_muted_large.png new file mode 100644 index 000000000..22ba9a38e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voice_muted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voice_unmuted_large.png b/TMessagesProj/src/main/res/drawable-xhdpi/voice_unmuted_large.png new file mode 100644 index 000000000..d0d558592 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voice_unmuted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_active.png b/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_active.png new file mode 100644 index 000000000..a32ac2c43 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_active.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_muted.png b/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_muted.png new file mode 100644 index 000000000..5ff067acd Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_muted.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/menu_nearby.png b/TMessagesProj/src/main/res/drawable-xxhdpi/menu_nearby.png new file mode 100644 index 000000000..51dbe4268 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/menu_nearby.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/menu_nearby_ny.png b/TMessagesProj/src/main/res/drawable-xxhdpi/menu_nearby_ny.png new file mode 100644 index 000000000..55b278290 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/menu_nearby_ny.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_endcall.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_endcall.png new file mode 100644 index 000000000..833167f28 Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_endcall.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_invited.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_invited.png new file mode 100644 index 000000000..85a240de7 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_invited.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photoeditor.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photoeditor.png new file mode 100644 index 000000000..a3266e86b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photoeditor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_replace.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_replace.png new file mode 100644 index 000000000..722c9155a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_replace.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_sendphoto.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_sendphoto.png new file mode 100644 index 000000000..b96917111 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_sendphoto.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_muted.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_muted.png new file mode 100644 index 000000000..85970746e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_muted.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_pip.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_pip.png new file mode 100644 index 000000000..403aab0ba Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_pip.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_unmuted.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_unmuted.png new file mode 100644 index 000000000..b8574698d Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_unmuted.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voicechat.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voicechat.png new file mode 100644 index 000000000..91691c984 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voicechat.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_muted_large.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_muted_large.png new file mode 100644 index 000000000..91c8f27fe Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_muted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_unmuted_large.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_unmuted_large.png new file mode 100644 index 000000000..335279299 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_unmuted_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_active.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_active.png new file mode 100644 index 000000000..489f515dc Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_active.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_muted.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_muted.png new file mode 100644 index 000000000..39f5a18bf Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_muted.png differ diff --git a/TMessagesProj/src/main/res/raw/group_pip_delete_icon.json b/TMessagesProj/src/main/res/raw/group_pip_delete_icon.json new file mode 100644 index 000000000..4360f7f75 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/group_pip_delete_icon.json @@ -0,0 +1 @@ +{"v":"5.5.7","meta":{"g":"LottieFiles AE 0.1.20","a":"","k":"","d":"","tc":""},"fr":60,"ip":0,"op":66,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":3,"nm":"Null 
2","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[130,130,100],"ix":6}},"ao":0,"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":3,"nm":"NULL BODY","parent":1,"sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.278,"y":1},"o":{"x":0.267,"y":0},"t":0,"s":[-4,16,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.243,"y":0.79},"o":{"x":0.32,"y":0},"t":13,"s":[-4,-17.846,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.582,"y":1},"o":{"x":0.167,"y":0.397},"t":23,"s":[-2.462,26.769,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":41,"s":[-0.923,36,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.233,"y":1},"o":{"x":0.312,"y":0},"t":49,"s":[-4,-2.476,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.72,"y":1},"o":{"x":0.28,"y":0},"t":55,"s":[-4,20.615,0],"to":[0,0,0],"ti":[0,0,0]},{"t":60,"s":[-4,16,0]}],"ix":2},"a":{"a":0,"k":[50,50,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":0,"s":[100,100,100]},{"t":14,"s":[100,100,100]}],"ix":6}},"ao":0,"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Cover","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":3,"s":[0]},{"i":{"x":[0.4],"y":[0.947]},"o":{"x":[0.4],"y":[0]},"t":10,"s":[3]},{"i":{"x":[0.568],"y":[1]},"o":{"x":[0.347],"y":[0.736]},"t":25,"s":[-63]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":36,"s":[-65]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":53,"s":[4]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":59,"s":[-3]},{"t":66,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.4,"y":0},"t":0,"s":[-74.01,-61.26,0],"to":[0,0,0],"ti":[0.27,0.054,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.4,"y":0},"t":7,"s":[-70.974,-54.262,0],"to":[-62.49,-12.49,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":25
,"s":[-123.119,-4.001,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.6,"y":0},"t":33,"s":[-122.321,-4.149,0],"to":[2.679,-59.447,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":51,"s":[-73.131,-44.749,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.3,"y":0},"t":56,"s":[-75.833,-86.099,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.167,"y":0},"t":61,"s":[-74.117,-58.185,0],"to":[0,0,0],"ti":[0,0,0]},{"t":66,"s":[-74.01,-61.26,0]}],"ix":2},"a":{"a":0,"k":[-74.01,-61.26,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.6,"y":0},"t":36,"s":[{"i":[[10.825,0],[0,0],[0,-10.825],[0,0],[0,0],[0,-3.589],[0,0],[-3.589,0],[0,0],[0,0],[0,3.589],[0,0],[3.589,0],[0,0],[0,0]],"o":[[0,0],[-10.825,0],[0,0],[0,0],[-3.589,0],[0,0],[0,3.589],[0,0],[0,0],[3.589,0],[0,0],[0,-3.589],[0,0],[0,0],[0,-10.825]],"v":[[21.578,-107.8],[-23.608,-107.8],[-43.208,-88.2],[-43.218,-74.343],[-86.612,-74.341],[-93.11,-67.844],[-93.11,-67.217],[-86.612,-60.719],[57.688,-60.719],[86.592,-60.719],[93.09,-67.217],[93.09,-67.844],[86.592,-74.341],[41.17,-74.343],[41.178,-88.2]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.167,"y":0.167},"t":42,"s":[{"i":[[10.825,0],[0,0],[0,-10.825],[0,0],[0,0],[0,-3.589],[0,0],[-3.589,0],[-8.855,1.229],[0,0],[0,3.589],[0,0],[3.589,0],[17.652,1.484],[0,0]],"o":[[0,0],[-10.825,0],[0,0],[0,0],[-3.589,0],[0,0],[0,3.589],[0,0],[11.767,-1.633],[3.589,0],[0,0],[0,-3.589],[0,0],[-0.877,-3.413],[0,-10.825]],"v":[[21.578,-107.8],[-23.608,-107.8],[-43.208,-88.2],[-43.218,-74.343],[-86.612,-74.341],[-93.11,-67.844],[-93.11,-67.217],[-86.612,-60.719],[57.695,-61.78],[84.867,-67.095],[91.365,-73.593],[91.365,-74.219],[84.867,-80.717],[41.17,-74.343],[41.178,-88.2]],"c":true}]},{"t":53,"s":[{"i":[[10.825,0],[0,0],[0,-10.825],[0,0],[0,0],[0,-3.589],[0,0],[-3.589,0],[0,0],[0,0],[0,3.589],[0,0],[3.589,0],[0,0],[0,0]],"o":
[[0,0],[-10.825,0],[0,0],[0,0],[-3.589,0],[0,0],[0,3.589],[0,0],[0,0],[3.589,0],[0,0],[0,-3.589],[0,0],[0,0],[0,-10.825]],"v":[[21.578,-107.8],[-23.608,-107.8],[-43.208,-88.2],[-43.218,-74.343],[-86.612,-74.341],[-93.11,-67.844],[-93.11,-67.217],[-86.612,-60.719],[57.688,-60.719],[86.592,-60.719],[93.09,-67.217],[93.09,-67.844],[86.592,-74.341],[41.17,-74.343],[41.178,-88.2]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[85,-60.5],"ix":2},"a":{"a":0,"k":[85,-60.5],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":1,"k":[{"i":{"x":[0.398],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":53,"s":[0]},{"i":{"x":[0.446],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":60,"s":[-3]},{"t":66,"s":[0]}],"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Bucket","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.563],"y":[1]},"o":{"x":[0.468],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.217],"y":[1]},"o":{"x":[0.253],"y":[0]},"t":11,"s":[10]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.676],"y":[0]},"t":40,"s":[-10]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.438],"y":[0]},"t":53,"s":[6]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":61,"s":[-2]},{"t":66,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.643,"y":1},"o":{"x":0.305,"y":0},"t":0,"s":[54.001,141.8,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.365,"y":0},"t":8,"s":[54.001,143.8,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":49,"s":[54.001,141.8,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":55,"s":[58.616,150.108,0],"to":[0,0,0],"ti":[0,0,0]},{"t":60,"s":[54.001,141.8,0]}],"ix":2},"a":{"a":0,"k":[0.001,107.8,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.69,0.69,0.69],"y":[1,1,1]},"o":{"x":[0.31,0.31,0.31],"y":[0,0,0]},"t":49,"s":[100,100,100]},{"i":{"x":[0.69,0.69,0.69],"y":[1,1,1]},"o":{"x":[0.31,0.31,0.31],"y":[0,0,0]},"t":55,"s":[103,98,100]},{"t":61,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":47,"s":[{"i":[[-4.147,0],[0,-4.492],[0,0],[4.147,0],[0,4.492],[0,0]],"o":[[4.492,0],[0,0],[-0.502,4.011],[-4.492,0],[0,0],[0.502,-4.011]],"v":[[34.584,-17.495],[42.718,-9.361],[42.655,70.889],[34.584,78.002],[26.45,69.868],[26.514,-10.382]],"c":true}]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":53,"s":[{"i":[[-4.147,0],[-0.91,-4.576],[4.056,-29.534],[4.147,0],[-0.953,4.894],[4.031,30.227]],"o":[[4.492,0],[2.634,13.242],[-0.473,3.446],[-4.492,0],[3.611,-18.55],[-0.375,-2.816]],"v":[[36.834,-7.165],[44.968,0.273],[44.905,73.656],[36.834,80.161],[29.186,71.491],[28.764,-0.66]],"c":true}]}
,{"t":59,"s":[{"i":[[-4.147,0],[0,-4.492],[0,0],[4.147,0],[0,4.492],[0,0]],"o":[[4.492,0],[0,0],[-0.502,4.011],[-4.492,0],[0,0],[0.502,-4.011]],"v":[[34.584,-17.495],[42.718,-9.361],[42.655,70.889],[34.584,78.002],[26.45,69.868],[26.514,-10.382]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":47,"s":[{"i":[[-4.147,0],[0,-4.492],[0,0],[4.147,0],[0,4.492],[0,0]],"o":[[4.492,0],[0,0],[-0.502,4.011],[-4.492,0],[0,0],[0.502,-4.011]],"v":[[-0.01,-17.495],[8.124,-9.361],[8.061,70.889],[-0.01,78.002],[-8.144,69.868],[-8.08,-10.382]],"c":true}]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":53,"s":[{"i":[[-4.147,0],[0,-4.108],[0,0],[4.147,0],[0,4.108],[0,0]],"o":[[4.492,0],[0,0],[-0.502,3.667],[-4.492,0],[0,0],[0.502,-3.667]],"v":[[-0.01,-7.165],[8.124,0.273],[8.061,73.656],[-0.01,80.161],[-8.144,72.723],[-8.08,-0.66]],"c":true}]},{"t":59,"s":[{"i":[[-4.147,0],[0,-4.492],[0,0],[4.147,0],[0,4.492],[0,0]],"o":[[4.492,0],[0,0],[-0.502,4.011],[-4.492,0],[0,0],[0.502,-4.011]],"v":[[-0.01,-17.495],[8.124,-9.361],[8.061,70.889],[-0.01,78.002],[-8.144,69.868],[-8.08,-10.382]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":1,"k":[{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":47,"s":[{"i":[[-4.147,0],[0,-4.492],[0,0],[4.147,0],[0,4.492],[0,0]],"o":[[4.492,0],[0,0],[-0.502,4.011],[-4.492,0],[0,0],[0.502,-4.011]],"v":[[-34.604,-17.495],[-26.47,-9.361],[-26.533,70.889],[-34.604,78.002],[-42.738,69.868],[-42.674,-10.382]],"c":true}]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":53,"s":[{"i":[[-4.147,0],[1.575,-4.735],[-2.876,-16.233],[4.147,0],[2.042,7.062],[-5.389,19.281]],"o":[[4.492,0],[-6.166,18.546],[0.562,3.175],[-4.492,0],[-5.318,-18.394],[1.321,-4.725]],"v":[[-36.854,-7.165],[-28.72,0.273],[-29.997,72.177],[-36.854,80.161],[-44.988,72.723],[-44.924,-0.66]],"c":true}]},{"t":59,"s":[{"i":[[-4.147,0],[0,-4.492],[0,0],[4.147,0],[0,4.492],[0,0]],"o":[[4.492,0],[0,0],[-0.502,4.011],[-4.492,0],[0,0],[0.502,-4.011]],"v":[[-34.604,-17.495],[-26.47,-9.361],[-26.533,70.889],[-34.604,78.002],[-42.738,69.868],[-42.674,-10.382]],"c":true}]}],"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - Group","hd":false},{"ind":3,"ty":"sh","ix":4,"ks":{"a":1,"k":[{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":47,"s":[{"i":[[0,0],[0,0],[0,0],[-11.45,0],[0,0],[0,12.124]],"o":[[0,0],[0,0],[0.768,11.927],[0,0],[11.264,-0.813],[0,0]],"v":[[74.676,-47.138],[-74.675,-47.138],[-74.624,86.367],[-52.951,107.8],[54.435,107.747],[74.676,84.798]],"c":true}]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":53,"s":[{"i":[[11.177,53.32],[10.354,0.235],[-5.958,-17.164],[-11.45,0],[0,0],[-3.347,11.559]],"o":[[-5.259,0.235],[-15.373,48.633],[3.561,10.258],[0,0],[11.264,-0.773],[8.333,-28.779]],"v":[[74.796,-36.05],[-74.555,-36.05],[-74.624,87.421],[-52.951,107.8],[54.435,107.749],[74.676,85.93]],"c":true}]},{"t":59,"s":[{"i":[[0,0],[0,0],[0,0],[-11.45,0],[0,0],[0,12.124]],"o":[[0,0],[0,0],[0.768,11.927],[0,0],[11.264,-0.813],[0,0]],"v":[[74.676,-47.138],[-74.675,-47.138],[-74.624,86.367],[-52.951,107.8],[54.435,107.747],[74.676,84.798]],"c":true}]}],"ix":2},"nm":"Path 
4","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":5,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/ic_ban.json b/TMessagesProj/src/main/res/raw/ic_ban.json new file mode 100644 index 000000000..83b4988d3 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/ic_ban.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Hand","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[25]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":15,"s":[-7]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":24,"s":[8]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":33,"s":[-6]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":42,"s":[6]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":51,"s":[-2]},{"t":58,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[333.789,483.2,0],"to":[0,0,0],"ti":[21.789,-1.6,0]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":7,"s":[287.789,405.4,0],"to":[-21.789,1.6,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.3,"y":0},"t":14,"s":[257.789,458.2,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":23,"s":[296.789,436.2,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":32,"s":[258.789,436.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":41,"s":[295.289,435.7,0],"to":[0
,0,0],"ti":[0,0,0]},{"t":50,"s":[274.789,435.2,0]}],"ix":2},"a":{"a":0,"k":[18.789,179.2,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":7,"s":[100,100,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":41,"s":[100,100,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":50,"s":[75,75,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":58,"s":[105,105,100]},{"t":65,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":3,"s":[{"i":[[-12,0],[-2.268,-2.058],[-2.569,-2.661],[-3.58,3.35],[0,0],[-12,0],[0,-1.945],[0,0],[-0.675,1.224],[-4.934,4.763],[-9.82,1.175],[-3.705,-4.215],[0,0],[0.075,0.236],[-3.964,5.79],[-12,0],[-3.945,-7.024],[0.478,-11.952],[0,0],[0,0],[-7.5,-6],[9,-9],[0,0],[16.5,0],[0,0],[0,33],[0,0],[0,0],[-3.304,2.15]],"o":[[12,0],[0,0],[2.569,2.661],[3.58,-3.35],[0,-1.945],[12,0],[0,0],[0.2,1.163],[0,0],[4.934,-4.763],[11.915,-1.426],[3.705,4.215],[0.2,1.102],[0,0],[3.964,-5.79],[12,0],[6.703,11.934],[-0.246,6.141],[0,0],[9,-4.5],[9,9],[0,0],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0,0],[3.304,-2.15]],"v":[[-137.024,-32.115],[-120.926,-27.119],[-114.374,-20.093],[-101.753,-21.378],[-97.832,-28.387],[-77.95,-38.569],[-55.836,-25.596],[-53.285,-19.19],[-47.519,-18.581],[-38.33,-30.772],[-20.595,-39.749],[-0.376,-33.63],[6.997,-24.238],[12.15,-24.002],[16.974,-34.054],[41.164,-42.476],[62.982,-28.625],[69.641,14.894],[68.91,25.149],[87.911,12.693],[114.911,15.693],[114.911,47.193],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.45,-14.301],[-158.956,-21.686],[-153.978,-28.232]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.3,"y":0},"t":14,"s":[{"i":[[-12,0],[-1.484,-1.608],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2
.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.807,2.142],[-2.142,-11.807],[0,0],[0,0],[0,0],[-7.5,-6],[6.09,-13.826],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[-2.142,-11.807],[11.807,-2.142],[0,0],[0,0],[0,0],[9,-4.5],[9,9],[-8.425,19.128],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.468],[-113.986,-21.716],[-105.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[14.748,-15.213],[7.009,-114.559],[25.131,-140.714],[51.286,-122.592],[66.386,6.36],[66.758,52.39],[101.144,16.117],[128.144,19.117],[128.539,51.663],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":23,"s":[{"i":[[-12,0],[-1.484,-1.608],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.91,-1.47],[1.47,-11.91],[1.128,-8.151],[0,0],[0,0],[-7.5,-6],[7.321,-11.784],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[1.47,-11.91],[11.91,1.47],[0,0],[-2.487,17.978],[0,0],[9,-4.5],[9,9],[-4.861,11.035],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.467],[-113.986,-21.716],[-105.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[13.725,-15.339],[28.458,-120.718],[53.5
44,-140.293],[73.119,-115.206],[65.669,-4.134],[65.824,55.271],[111.682,19.114],[138.682,22.114],[138.91,54.217],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":32,"s":[{"i":[[-12,0],[-1.484,-1.607],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.902,1.41],[-1.41,-11.902],[-0.107,-14.099],[0,0],[0,0],[-7.5,-6],[8.657,-9.569],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[-1.41,-11.902],[11.902,-1.41],[0,0],[0.074,9.737],[0,0],[9,-4.5],[9,9],[-0.993,2.255],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.468],[-113.986,-21.716],[-105.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[14.539,-15.239],[4.449,-114.618],[24.122,-139.577],[49.082,-119.904],[64.953,8.185],[64.81,58.397],[123.114,22.365],[150.114,25.365],[150.16,56.988],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.4,"y":0},"t":43,"s":[{"i":[[-12,0],[-1.484,-1.608],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.675,-2.776],[2.776,-11.675],[0,0],[0,0],[0,0],[-7.5,-6],[9,-9],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[2.776,-11.675],[11.675,2.776],[0,0],[0,0],[0,0],[9,-4.5],[
9,9],[0,0],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.467],[-113.986,-21.716],[-105.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[14.748,-15.213],[32.077,-123.393],[59.172,-140.078],[75.857,-112.983],[64.769,11.349],[64.55,59.2],[126.05,23.2],[153.05,26.2],[153.05,57.7],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0},"t":50,"s":[{"i":[[-12,0],[0,-11.98],[0,0],[0.2,6.789],[0,0],[-12,0],[0,-11.98],[0,0],[-0.675,7.538],[0,0],[-12,0],[0,-11.98],[0,0],[0.075,6.539],[0,0],[-12,0],[0,-11.98],[0,0],[0,0],[0,0],[-7.5,-5.99],[9,-8.985],[0,0],[16.5,0],[0,0],[0,32.946],[0,0],[0,0],[0,0]],"o":[[12,0],[0,0],[0.325,7.288],[0,0],[0,-11.98],[12,0],[0,0],[0.2,7.163],[0,0],[0,-11.98],[12,0],[0,0],[0.2,6.789],[0,0],[0,-11.98],[12,0],[0,0],[0,0],[0,0],[9,-4.493],[9,8.985],[0,0],[-12,11.98],[0,0],[-33,0],[0,0],[0,0],[0,0],[0,-11.98]],"v":[[-137.95,-118.11],[-115.45,-95.647],[-115.45,-20.77],[-100.45,-20.77],[-100.45,-140.573],[-77.95,-163.036],[-55.45,-140.573],[-55.45,-20.77],[-40.45,-20.77],[-40.45,-155.548],[-17.95,-178.011],[4.55,-155.548],[4.55,-20.77],[19.55,-20.77],[19.55,-110.622],[42.05,-133.085],[64.55,-110.622],[66.54,-2.102],[67.267,37.505],[106.736,2.2],[133.736,5.195],[135.587,41.371],[51.05,161.93],[7.55,179.9],[-100.45,179.9],[-160.45,119.999],[-160.45,-13.544],[-160.45,-58.182],[-160.45,-95.647]],"c":true}]},{"t":58,"s":[{"i":[[-12,0],[0,-11.98],[0,0],[0.2,6.789],[0,0],[-12,0],[0,-11.98],[0,0],[-0.675,7.538],[0,0],[-12,0],[0,-11.98],[0,0],[0.075,6.539],[0,0],[-12,0],[0,-11.98],[0,0],[0,0],[0,0],[-7.5,-5.99],[9,-8.985],[0,0],[16.5,0],[0,0],[0,32.946],[0,0],[0,0],[0,0]],"o":[[12,0],[0,0],[0.325,7.288],[0,0],[0,-11.98],[12,0],[0,0],[0.2,7.163],[0,0],[0,-11.98],[12,0],[0,0],[0
.2,6.789],[0,0],[0,-11.98],[12,0],[0,0],[0,0],[0,0],[9,-4.493],[9,8.985],[0,0],[-12,11.98],[0,0],[-33,0],[0,0],[0,0],[0,0],[0,-11.98]],"v":[[-137.95,-118.11],[-115.45,-95.647],[-115.45,-20.77],[-100.45,-20.77],[-100.45,-140.573],[-77.95,-163.036],[-55.45,-140.573],[-55.45,-20.77],[-40.45,-20.77],[-40.45,-155.548],[-17.95,-178.011],[4.55,-155.548],[4.55,-20.77],[19.55,-20.77],[19.55,-110.622],[42.05,-133.085],[64.55,-110.622],[64.55,14.449],[64.55,60.097],[126.05,24.156],[153.05,27.151],[153.05,58.6],[51.05,161.93],[7.55,179.9],[-100.45,179.9],[-160.45,119.999],[-160.45,-13.544],[-160.45,-58.182],[-160.45,-95.647]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Block-user","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":125,"st":5,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/ic_download.json b/TMessagesProj/src/main/res/raw/ic_download.json index 6aa34a572..eab3454c4 100644 --- a/TMessagesProj/src/main/res/raw/ic_download.json +++ b/TMessagesProj/src/main/res/raw/ic_download.json @@ -1 +1 @@ -{"v":"5.7.1","fr":60,"ip":0,"op":65,"w":512,"h":512,"nm":"Download","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":3,"nm":"NULL ALL","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[170,170,100],"ix":6}},"ao":0,"ip":0,"op":65,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":3,"nm":"NULL 
ALL","parent":1,"sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.5,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[1,97,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.7,"y":0},"t":16,"s":[1,23,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":0.7},"o":{"x":0.167,"y":0.167},"t":28,"s":[1,37,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[1,37,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":51,"s":[1,39,0],"to":[0,0,0],"ti":[0,0,0]},{"t":60,"s":[1,35,0]}],"ix":2},"a":{"a":0,"k":[50,50,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.5,0.5,0.5],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":13,"s":[100,100,100]}],"ix":6}},"ao":0,"ip":0,"op":65,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Splash","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[0.732,26.307,0],"to":[0,0,0],"ti":[0,0,0]},{"t":39,"s":[0.732,6.307,0]}],"ix":2},"a":{"a":0,"k":[0.732,-19.693,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[2.114,-97.858],[0.095,-22.349]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-114.75,-74.514],[-62.785,-19.693]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[116.215,-74.514],[64.25,-19.693]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.5],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":26,"s":[100]},{"t":40,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.5],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":23,"s":[100]},{"t":36,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":4,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false}],"ip":0,"op":65,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Arrow 
2","parent":2,"sr":1,"ks":{"o":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":39,"s":[0]},{"t":43,"s":[100]}],"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":35,"s":[48.979,-117.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":49,"s":[48.979,-79.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":59,"s":[48.979,-105.2,0],"to":[0,0,0],"ti":[0,0,0]},{"t":65,"s":[48.979,-99.7,0]}],"ix":2},"a":{"a":0,"k":[-0.021,-112.7,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":35,"s":[0,0,100]},{"t":49,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":35,"s":[{"i":[[1.348,0],[0,0],[0,-0.925],[0,0],[0,0],[0,-1.599],[-0.889,-0.611],[0,0],[-1.619,0.769],[0,0],[1.648,1.13],[1.258,0],[0,0],[0,0]],"o":[[0,0],[-1.258,0.203],[0,0],[0,0],[-2.214,0.276],[0,0.863],[0,0],[1.617,0.771],[0,0],[1.281,-1.27],[-0.89,-0.61],[0,0],[0,0],[-0.295,-0.863]],"v":[[16.4,-109.275],[-16.187,-109.223],[-18.382,-107.321],[-18.44,-98.335],[-46.356,-98.283],[-50.249,-95.078],[-48.86,-92.776],[-2.719,-61.155],[2.677,-61.152],[49.382,-93.227],[48.83,-97.382],[45.475,-98.335],[19.19,-98.335],[19.173,-107.769]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":47,"s":[{"i":[[1.328,0],[0,0],[0,-1.397],[0,0],[0,0],[0,-2.414],[-0.876,-0.922],[0,0],[-1.595,1.161],[0,0],[1.624,1.706],[1.24,0],[0,0],[0,0]],"o":[[0,0],[-1.24,0.306],[0,0],[0,0],[-2.181,0.416],[0,1.303],[0,0],[1.593,1.164],[0,0],[1.262,-1.917],[-0.877,-0.921],[0,0],[0,0],[-0.291,-1.303]],"v":[[16.187,-107.7],[-15.916,-107.622],[-18.079,-104.75],[-18.163,-92.62],[-45.664,-92.541],[-49.5,-87.703],[-48.132,-84.228],[-2.676,-36.498],[2.64,-36.493],[48.651,-84.909],[48.108,-91.181],[44.803,-92.62],[18.908,-92.62],[18.918,-105.426]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x"
:0.3,"y":0},"t":56,"s":[{"i":[[1.392,0],[0,0],[0,-1.317],[0,0],[0,0],[0,-2.276],[-0.918,-0.869],[0,0],[-1.672,1.095],[0,0],[1.702,1.609],[1.299,0],[0,0],[0,0]],"o":[[0,0],[-1.299,0.288],[0,0],[0,0],[-2.286,0.393],[0,1.229],[0,0],[1.669,1.098],[0,0],[1.322,-1.807],[-0.919,-0.869],[0,0],[0,0],[-0.305,-1.229]],"v":[[16.863,-112.7],[-16.776,-112.627],[-19.042,-109.918],[-19.042,-94.182],[-47.859,-94.107],[-51.878,-89.545],[-50.445,-86.269],[-2.814,-41.262],[2.757,-41.258],[50.969,-86.91],[50.4,-92.825],[46.937,-94.182],[19.803,-94.182],[19.725,-110.556]],"c":true}]},{"t":63,"s":[{"i":[[1.392,0],[0,0],[0,-1.392],[0,0],[0,0],[0,-2.406],[-0.918,-0.919],[0,0],[-1.672,1.157],[0,0],[1.702,1.7],[1.299,0],[0,0],[0,0]],"o":[[0,0],[-1.299,0.305],[0,0],[0,0],[-2.286,0.415],[0,1.299],[0,0],[1.669,1.16],[0,0],[1.322,-1.91],[-0.919,-0.918],[0,0],[0,0],[-0.305,-1.299]],"v":[[16.863,-112.7],[-16.776,-112.622],[-19.042,-109.76],[-19.042,-93.128],[-47.859,-93.049],[-51.878,-88.228],[-50.445,-84.765],[-2.814,-37.198],[2.757,-37.194],[50.969,-85.443],[50.4,-91.694],[46.937,-93.128],[19.803,-93.128],[19.725,-110.434]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":36,"op":65,"st":-3,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Mask","parent":7,"td":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[0,5.5,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[87.75,-56.25],[87.75,56.25],[-87.75,56.25],[-87.75,-56.25]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[4.25,77.75],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Arrow","parent":2,"tt":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.485,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[48.979,-7.327,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.7,"y":0},"t":13,"s":[48.979,-28.327,0],"to":[0,0,0],"ti":[0,0,0]},{"t":28,"s":[48.979,126.673,0]}],"ix":2},"a":{"a":0,"k":[-0.021,-36.327,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.5,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[1.196,0],[0,0],[0,-1.339],[0,0],[0,0],[0,-2.315],[-0.789,-0.884],[0,0],[-1.436,1.113],[0,0],[1.462,1.636],[1.116,0],[0,0],[0,0]],"o":[[0,0],[-1.116,0.293],[0,0],[0,0],[-1.964,0.399],[0,1.25],[0,0],[1.434,1.117],[0,0],[1.136,-1.838],[-0.789,-0.884],[0,0],[0,0],[-0.262,-1.25]],"v":[[14.517,-112.7],[-14.384,-112.625],[-16.331,-109.87],[-16.331,-93.864],[-41.089,
-93.788],[-44.542,-89.148],[-43.31,-85.815],[-2.388,-40.035],[2.397,-40.031],[43.819,-86.467],[43.33,-92.483],[40.354,-93.864],[17.043,-93.864],[16.976,-110.519]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":10,"s":[{"i":[[1.392,0],[0,0],[0,-1.008],[0,0],[0,0],[0,-1.741],[-0.918,-0.665],[0,0],[-1.672,0.837],[0,0],[1.702,1.231],[1.299,0],[0,0],[0,0]],"o":[[0,0],[-1.299,0.221],[0,0],[0,0],[-2.286,0.3],[0,0.94],[0,0],[1.669,0.84],[0,0],[1.322,-1.383],[-0.919,-0.665],[0,0],[0,0],[-0.305,-0.94]],"v":[[16.905,-116.22],[-16.735,-116.164],[-19,-114.092],[-19.042,-98.532],[-47.859,-98.474],[-51.878,-94.984],[-50.445,-92.477],[-2.814,-58.043],[2.757,-58.04],[50.969,-92.968],[50.4,-97.493],[46.937,-98.532],[19.803,-98.532],[19.767,-114.58]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":22,"s":[{"i":[[1.288,0],[0,0],[0,-1.544],[0,0],[0,0],[0,-2.669],[-0.85,-1.019],[0,0],[-1.547,1.283],[0,0],[1.575,1.887],[1.202,0],[0,0],[0,0]],"o":[[0,0],[-1.202,0.338],[0,0],[0,0],[-2.116,0.46],[0,1.441],[0,0],[1.544,1.287],[0,0],[1.224,-2.119],[-0.85,-1.019],[0,0],[0,0],[-0.282,-1.441]],"v":[[15.623,-121.095],[-15.512,-121.009],[-17.609,-117.833],[-17.609,-99.38],[-44.28,-99.292],[-48,-93.943],[-46.673,-90.101],[-2.589,-37.326],[2.567,-37.32],[47.19,-90.854],[46.663,-97.789],[43.457,-99.38],[18.344,-99.38],[18.272,-118.581]],"c":true}]},{"t":28,"s":[{"i":[[1.288,0],[0,0],[0,-1.166],[0,0],[0,0],[0,-2.015],[-0.85,-0.77],[0,0],[-1.547,0.969],[0,0],[1.575,1.424],[1.202,0],[0,0],[0,0]],"o":[[0,0],[-1.202,0.255],[0,0],[0,0],[-2.116,0.348],[0,1.088],[0,0],[1.544,0.972],[0,0],[1.224,-1.6],[-0.85,-0.769],[0,0],[0,0],[-0.282,-1.088]],"v":[[15.623,-100.25],[-15.512,-100.185],[-17.609,-97.787],[-17.609,-83.857],[-44.28,-83.791],[-48,-79.752],[-46.673,-76.852],[-2.589,-37.01],[2.567,-37.006],[47.19,-77.42],[46.663,-82.656],[43.457,-83.857],[18.344,-83.857],[18.272,-98.352]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":29,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Box","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[49,125.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[49,145.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.652,"y":1},"o":{"x":0.31,"y":0},"t":39,"s":[49,118.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.539,"y":1},"o":{"x":0.3,"y":0},"t":51,"s":[49,130.7,0],"to":[0,0,0],"ti":[0,0,0]},{"t":59,"s":[49,125.7,0]}],"ix":2},"a":{"a":0,"k":[0,112.7,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":0,"s":[100,100,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":21,"s":[98,102,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":29,"s":[102,94,100]},{"i":{"x":[0.5,0.5,0.5],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":38,"s":[98,103,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":49,"s":[101,98,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":58,"s":[100,102,100]},{"t":65,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[{"i":[[0,0],[-3.596,-3.225],[0,0],[1.585,0],[0,0],[-0.398,0.367],[0,0],[-4.925,0]],"o":[[4.831,0],[0,0],[0.555,0.551]
,[0,0],[-1.172,0],[0,0],[3.62,-3.34],[0,0]],"v":[[44.75,5.076],[57.837,10.085],[71.528,22.366],[70.874,24.075],[-71.62,24.075],[-72.285,22.375],[-59.164,10.27],[-45.874,5.076]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[0,0],[-4.135,-3.779],[0,0],[1.822,0],[0,0],[-0.457,0.43],[0,0],[-5.664,0]],"o":[[5.555,0],[0,0],[0.638,0.646],[0,0],[-1.347,0],[0,0],[4.163,-3.913],[0,0]],"v":[[51.461,1.127],[66.51,6.996],[85.13,26.083],[84.378,28.085],[-84.97,28.085],[-85.734,26.093],[-68.031,7.213],[-52.748,1.127]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[{"i":[[0,0],[-3.095,-2.541],[0,0],[1.364,0],[0,0],[-0.342,0.289],[0,0],[-4.239,0]],"o":[[4.158,0],[0,0],[0.478,0.434],[0,0],[-1.008,0],[0,0],[3.116,-2.631],[0,0]],"v":[[38.496,5.076],[49.76,9.023],[61.544,18.699],[60.981,20.045],[-61.66,20.045],[-62.232,18.706],[-50.94,9.169],[-39.501,5.076]],"c":true}]},{"t":51,"s":[{"i":[[0,0],[-3.596,-3.225],[0,0],[1.585,0],[0,0],[-0.398,0.367],[0,0],[-4.925,0]],"o":[[4.831,0],[0,0],[0.555,0.551],[0,0],[-1.172,0],[0,0],[3.62,-3.34],[0,0]],"v":[[44.75,5.076],[57.837,10.085],[71.528,22.366],[70.874,24.075],[-71.62,24.075],[-72.285,22.375],[-59.164,10.27],[-45.874,5.076]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[{"i":[[0,0],[-1.949,-1.877],[0,0],[0,-9.077],[0,0],[6.313,0],[0,0],[1.403,1.154],[-2.751,3.344],[0,0],[-1.711,-1.69],[0,0],[-1.688,2.115]],"o":[[1.877,-1.949],[0,0],[4.795,4.618],[0,0],[0,6.313],[0,0],[-1.817,0],[-3.344,-2.751],[0,0],[1.902,-1.335],[0,0],[2.115,1.688],[0,0]],"v":[[23.881,41.681],[30.809,41.551],[56.745,66.531],[64.25,84.177],[64.25,85.189],[52.819,96.619],[-56.35,96.619],[-61.33,94.834],[-62.405,83.799],[-39.063,55.703],[-32.808,56.232],[-11.102,77.635],[-4.216,76.861]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[0,0],[-2.154,-1.669],[0,0],[0,-8.067],[0,0],[6.976,0],[0,0],[1.551,1.026],[-3.04,2.972],[0,0],[-1.891,-1.502],[0,0],[-1.865,1.88]],"o":[[2.075,-1.732],[0,0],[5.299,4.105],[0,0],[0,5.611],[0,0],[-2.008,0],[-3.695,-2.445],[0,0],[2.101,-1.186],[0,0],[2.338,1.5],[0,0]],"v":[[26.389,47.791],[34.045,47.675],[62.705,69.877],[70.997,85.561],[70.997,86.46],[58.367,96.619],[-62.268,96.619],[-67.771,95.032],[-68.959,85.225],[-43.165,60.254],[-36.254,60.724],[-12.268,79.746],[-4.659,79.058]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[{"i":[[0,0],[-1.896,-2.055],[0,0],[0,-9.936],[0,0],[6.141,0],[0,0],[1.365,1.263],[-2.676,3.66],[0,0],[-1.665,-1.851],[0,0],[-1.642,2.316]],"o":[[1.826,-2.134],[0,0],[4.665,5.055],[0,0],[0,6.91],[0,0],[-1.767,0],[-3.253,-3.011],[0,0],[1.85,-1.461],[0,0],[2.058,1.848],[0,0]],"v":[[23.232,36.481],[29.972,36.338],[55.203,63.683],[62.503,83],[62.503,84.107],[51.383,96.619],[-54.818,96.619],[-59.663,94.665],[-60.708,82.585],[-38.001,51.83],[-31.916,52.409],[-10.8,75.838],[-4.101,74.991]],"c":true}]},{"t":51,"s":[{"i":[[0,0],[-1.949,-1.877],[0,0],[0,-9.077],[0,0],[6.313,0],[0,0],[1.403,1.154],[-2.751,3.344],[0,0],[-1.711,-1.69],[0,0],[-1.688,2.115]],"o":[[1.877,-1.949],[0,0],[4.795,4.618],[0,0],[0,6.313],[0,0],[-1.817,0],[-3.344,-2.751],[0,0],[1.902,-1.335],[0,0],[2.1
15,1.688],[0,0]],"v":[[23.881,41.681],[30.809,41.551],[56.745,66.531],[64.25,84.177],[64.25,85.189],[52.819,96.619],[-56.35,96.619],[-61.33,94.834],[-62.405,83.799],[-39.063,55.703],[-32.808,56.232],[-11.102,77.635],[-4.216,76.861]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[{"i":[[8.999,0],[0,0],[6.986,-5.673],[0,-13.376],[0,0],[-16.237,0],[0,0],[0,16.237],[0,0],[25.148,20.42]],"o":[[0,0],[-9,0],[-25.145,20.419],[0,0],[0,16.237],[0,0],[16.237,0],[0,0],[0,-13.377],[-6.986,-5.672]],"v":[[45.368,-14.7],[-45.371,-14.7],[-70.082,-5.93],[-107.8,44.763],[-107.8,83.3],[-78.4,112.7],[78.4,112.7],[107.8,83.3],[107.8,44.763],[70.077,-5.932]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[9.944,0],[0,0],[7.72,-5.673],[0,-13.376],[0,0],[-17.942,0],[0,0],[0,16.237],[0,0],[27.79,20.42]],"o":[[0,0],[-9.945,0],[-27.786,20.419],[0,0],[0,16.237],[0,0],[17.942,0],[0,0],[0,-13.377],[-7.72,-5.672]],"v":[[50.132,-14.7],[-50.136,-14.7],[-77.443,-5.93],[-119.122,44.763],[-119.122,83.3],[-86.634,112.7],[86.634,112.7],[119.122,83.3],[119.122,44.763],[77.437,-5.932]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[{"i":[[8.754,0],[0,0],[6.796,-5.673],[0,-13.376],[0,0],[-15.796,0],[0,0],[0,16.237],[0,0],[24.465,20.42]],"o":[[0,0],[-8.755,0],[-24.461,20.419],[0,0],[0,16.237],[0,0],[15.796,0],[0,0],[0,-13.377],[-6.796,-5.672]],"v":[[44.134,-14.7],[-44.138,-14.7],[-68.177,-5.93],[-104.869,44.763],[-104.869,83.3],[-76.268,112.7],[76.268,112.7],[104.869,83.3],[104.869,44.763],[68.172,-5.932]],"c":true}]},{"t":51,"s":[{"i":[[8.999,0],[0,0],[6.986,-5.673],[0,-13.376],[0,0],[-16.237,0],[0,0],[0,16.237],[0,0],[25.148,20.42]],"o":[[0,0],[-9,0],[-25.145,20.419],[0,0],[0,16.237],[0,0],[16.237,0],[0,0],[0,-13.377],[-6.986,-5.672]],"v":[[45.368,-14.7],[-45.371,-14.7],[-70.082,-5.93],[-107.8,44.763],[-107.8,83.3],[-78.4,112.7],[78.4,112
.7],[107.8,83.3],[107.8,44.763],[70.077,-5.932]],"c":true}]}],"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":65,"st":0,"bm":0}],"markers":[]} \ No newline at end of file +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Arrow","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":7,"s":[0]},{"i":{"x":[0.648],"y":[0.924]},"o":{"x":[0.306],"y":[0]},"t":18,"s":[9]},{"i":{"x":[0.703],"y":[1]},"o":{"x":[0.341],"y":[0.13]},"t":23,"s":[4]},{"t":30,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.702,"y":1},"o":{"x":0.347,"y":0.132},"t":7,"s":[15.431,17.394,0],"to":[0.374,8.268,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":18,"s":[25.911,-57.131,0],"to":[-32.111,59.554,0],"ti":[0,0,0]},{"t":30,"s":[-19.569,193.434,0]}],"ix":2},"a":{"a":0,"k":[0,172.5,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":7,"s":[{"i":[[1.646,-2.075],[0,0],[0,0],[0.758,0.657],[0,0],[-1.822,-0.052],[0,0],[0,0],[-1.107,0],[0,0],[-0.383,-0.692],[0,0],[0,0]],"o":[[0,0],[0,0],[-0.682,0.7],[0,0],[-2.41,-2.438],[0,0],[0,0],[0,-0.623],[0,0],[1.234,0],[0,0],[0,0],[2.704,0.052]],"v":[[34.137,17.418],[34.137,17.418],[1.148,42.228],[-1.883,42.228],[-34.998,18.04],[-32.881,12.437],[-14.435,12.437],[-14.435,1.854],[-12.392,-0.013],[12.38,-0.013],[14.679,1.2
32],[14.679,12.437],[32.021,12.437]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":18,"s":[{"i":[[3.149,-3.969],[0,0],[0,0],[1.449,1.257],[0,0],[-3.486,-0.099],[0,0],[0,0],[-2.117,0],[0,0],[-0.733,-1.323],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.304,1.34],[0,0],[-4.611,-4.664],[0,0],[0,0],[0,-1.191],[0,0],[2.361,0],[0,0],[0,0],[5.173,0.099]],"v":[[66.043,13.73],[66.043,13.73],[2.937,61.189],[-2.86,61.189],[-66.206,14.921],[-62.157,4.203],[-26.871,4.203],[-26.871,-16.041],[-22.963,-19.614],[24.424,-19.614],[28.821,-17.232],[28.821,4.203],[61.994,4.203]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":22,"s":[{"i":[[2.584,-4.433],[0,0],[0,0],[1.399,1.404],[0,0],[-2.86,-0.111],[0,0],[0,0],[-1.756,0],[0,0],[-0.608,-1.478],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.259,1.496],[0,0],[-3.783,-5.209],[0,0],[0,0],[0,-1.33],[0,0],[1.959,0],[0,0],[0,0],[4.244,0.111]],"v":[[53.833,19.72],[53.833,19.72],[0.313,79.816],[-5.283,79.816],[-54.315,15.43],[-50.993,3.462],[-22.187,6.522],[-17.075,-45.122],[-13.833,-49.111],[24.753,-44.465],[28.4,-41.805],[24.013,6.522],[50.511,9.081]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.167},"t":24,"s":[{"i":[[2.922,-4.562],[0,0],[0,0],[1.573,1.445],[0,0],[-3.235,-0.114],[0,0],[0,0],[-1.891,0],[0,0],[-0.655,-1.521],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.416,1.54],[0,0],[-4.278,-5.361],[0,0],[0,0],[0,-1.369],[0,0],[2.109,0],[0,0],[0,0],[4.8,0.114]],"v":[[61.222,18.762],[61.222,18.762],[-0.598,36.781],[-6.89,36.781],[-61.49,20.131],[-57.734,7.812],[-23.743,7.812],[-20.36,-33.625],[-16.87,-37.731],[25.454,-37.731],[29.381,-34.994],[25.998,7.812],[57.465,7.812]],"c":true}]},{"t":30,"s":[{"i":[[3.294,-4.705],[0,0],[0,0],[1.764,1.49],[0,0],[-3.646,-0.118],[0,0],[0,0],[-2.039,0],[0,0],[-0.706,-1.568],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.588,1.588],[0,0],[-4.823,-5.529],[0,0],[0,0],[0,-1.412],[0,0],[2.274,0],[0,0],[0,0],[5.411,0.118]],"v":[[69.117,20.523],[69.117,20.523],[2.775,91.1],[-4.283,91.1],[-69.213,21.934],[-64.978,9.231
],[-25.456,9.231],[-25.456,-14.765],[-21.691,-19],[23.948,-19],[28.183,-16.177],[28.183,9.231],[64.883,9.231]],"c":true}]}],"ix":2},"nm":"Arrow","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":7,"op":25,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Box","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":4,"s":[-3]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.576],"y":[0]},"t":20,"s":[3]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":31,"s":[-3]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":42,"s":[5]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":51,"s":[-1]},{"t":60,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[256,513.5,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":18,"s":[256,423.5,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":28,"s":[256,500.5,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":38,"s":[256,454.5,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0},"t":48,"s":[256,476.5,0],"to":[0,0,0],"ti":[0,0,0]},{"t":60,"s":[256,468.5,0]}],"ix":2},"a":{"a":0,"k":[0,172.5,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.704,0.704,0.704],"y":[1,1,1]},"o":{"x":[0.314,0.314,0.314],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":18,"s":[100,100,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t
":28,"s":[102,98,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":38,"s":[98,102,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":48,"s":[101,99,100]},{"t":60,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[-7.162,0],[0,0],[-4.565,-0.036],[0,0]],"o":[[0,0],[5.209,-0.032],[0,0],[6.511,0],[0,0],[0,0]],"v":[[-66.473,-47.14],[-46.939,-47.298],[-30.01,-47.35],[93.701,-47.35],[109.328,-47.287],[128.861,-47.14]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":15,"s":[{"i":[[0,0],[0,0],[-7.162,0],[0,0],[-4.565,-0.086],[0,0]],"o":[[0,0],[5.209,-0.075],[0,0],[6.511,0],[0,0],[0,0]],"v":[[-66.473,-122.56],[-46.939,-122.934],[-30.01,-123.058],[93.701,-123.058],[109.328,-122.909],[128.861,-122.56]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[0,0],[0,0],[-9.406,0],[0,0],[-3.761,-7.707],[0,0]],"o":[[0,0],[6.841,-7.085],[0,0],[6.062,-0.564],[0,0],[0,0]],"v":[[-129.935,-95.945],[-92.053,-139.279],[-69.821,-151.088],[106.043,-150.874],[117.123,-134.712],[124.062,-97.966]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":40,"s":[{"i":[[0,0],[0,0],[-8.25,0],[0,0],[-5.258,-2.955],[0,0]],"o":[[0,0],[6,-2.568],[0,0],[7.5,0],[0,0],[0,0]],"v":[[-112.5,-125.378],[-90,-138.219],[-70.5,-142.5],[72,-142.5],[90,-137.363],[112.5,-125.378]],"c":true}]},{"t":50,"s":[{"i":[[0,0],[0,0],[-8.25,0],[0,0],[-5.258,-5.177],[0,0]],"o":[[0,0],[6,-4.5],[0,0],[7.5,0],[0,0],[0,0]],"v":[[-112.5,-112.5],[-90,-135],[-70.5,-142.5],[72,-142.5],[90,-133.5],[112.5,-112.5]],"c":true}]}],"ix":2},"nm":"Hole","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[1.983,-1.844],[0,0],[0,0],[1.284,0.502],[0,0],[-2.489,0.062],[0,0],[0,0],[-1.388,0.059],[0,0],[-0.567,-0.562],[0,0],[0,0]],"o":[[0,0],[0,0],[-0.994,0.636],[0,0],[-3.589,-1.914],[0,0],[0,0],[-0.078,-0.524],[0,0],[1.548,-0.066],[0,0],[0,0],[3.691,-0.113]],"v":[[142.204,-15.903],[142.204,-15.903],[100.93,12.241],[96.125,12.446],[48.096,-11.367],[50.278,-16.21],[77.188,-17.356],[75.864,-26.271],[78.193,-27.953],[109.268,-29.277],[112.307,-28.351],[113.71,-18.911],[138.698,-19.975]],"c":true}]},{"i":{"x":0.2,"y":1},"o":{"x":0.6,"y":0},"t":7,"s":[{"i":[[0.008,-0.01],[0,0],[0,0],[0.004,0.003],[0,0],[-0.009,0],[0,0],[0,0],[-0.005,0],[0,0],[-0.002,-0.003],[0,0],[0,0]],"o":[[0,0],[0,0],[-0.004,0.003],[0,0],[-0.012,-0.011],[0,0],[0,0],[0,-0.003],[0,0],[0.006,0],[0,0],[0,0],[0.014,0]],"v":[[-39.057,-56.844],[-39.057,-56.844],[-39.226,-56.699],[-39.244,-56.699],[-39.389,-56.842],[-39.379,-56.868],[-39.295,-56.867],[-39.295,-56.917],[-39.285,-56.925],[-39.176,-56.925],[-39.165,-56.919],[-39.165,-56.867],[-39.068,-56.867]],"c":true}]},{"i":{"x":0.5,"y":1},"o":{"x":0.6,"y":0},"t":20,"s":[{"i":[[0.008,-0.01],[0,0],[0,0],[0.004,0.003],[0,0],[-0.009,0],[0,0],[0,0],[-0.005,0],[0,0],[-0.002,-0.003],[0,0],[0,0]],"o":[[0,0],[0,0],[-0.004,0.003],[0,0],[-0.012,-0.011],[0,0],[0,0],[0,-0.003],[0,0],[0.006,0],[0,0],[0,0],[0.014,0]],"v":[[-19.057,-66.844],[-19.057,-66.844],[-19.226,-66.699],[-19.244,-66.699],[-19.389,-66.842],[-19.379,-66.868],[-19.295,-66.867],[-19.295,-66.917],[-19.285,-66.925],[-19.176,-66.925],[-19.165,-66.919],[-19.165,-66.867],[-19.068,-66.867]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[3.902,-5.333],[0,0],[0,0],[2.09,1.689],[0,0],[-4.32,-0.133],[0,0],[0,0],[-2.415,0],[0,0],[-0.836,-1.778],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.881,1.8],[0,0],[-5.713,-6.266],[0,0],[0,0],[0,-1.6],[0,0],[2.694,0],[0,0],[0,0],[6.41,0.133]],"v":[[64
.27,55.084],[64.27,55.084],[-14.32,135.079],[-22.68,135.079],[-99.598,56.684],[-94.582,42.285],[-47.762,42.285],[-47.762,15.086],[-43.303,10.287],[10.762,10.287],[15.778,13.487],[15.778,42.285],[59.254,42.285]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":40,"s":[{"i":[[3.5,-5],[0,0],[0,0],[1.875,1.583],[0,0],[-3.875,-0.125],[0,0],[0,0],[-2.167,0],[0,0],[-0.75,-1.667],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.687,1.687],[0,0],[-5.125,-5.875],[0,0],[0,0],[0,-1.5],[0,0],[2.417,0],[0,0],[0,0],[5.75,0.125]],"v":[[71.955,-2.383],[71.955,-2.383],[1.455,72.617],[-6.045,72.617],[-75.045,-0.883],[-70.545,-14.383],[-28.545,-14.383],[-28.545,-39.883],[-24.545,-44.383],[23.955,-44.383],[28.455,-41.383],[28.455,-14.383],[67.455,-14.383]],"c":true}]},{"t":50,"s":[{"i":[[3.5,-5],[0,0],[0,0],[1.875,1.583],[0,0],[-3.875,-0.125],[0,0],[0,0],[-2.167,0],[0,0],[-0.75,-1.667],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.687,1.687],[0,0],[-5.125,-5.875],[0,0],[0,0],[0,-1.5],[0,0],[2.417,0],[0,0],[0,0],[5.75,0.125]],"v":[[73.5,19.5],[73.5,19.5],[3,94.5],[-4.5,94.5],[-73.5,21],[-69,7.5],[-27,7.5],[-27,-18],[-23,-22.5],[25.5,-22.5],[30,-19.5],[30,7.5],[69,7.5]],"c":true}]}],"ix":2},"nm":"Arrow","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[1.454,4.596],[10.137,2.931],[16.448,0],[0,0],[13.734,-2.186],[7.392,-7.241],[-0.491,-6.001],[0,0],[-17.692,1.509],[0,0],[0,9.28],[0,0]],"o":[[-1.367,-4.32],[-10.137,-2.931],[0,0],[-16.448,0],[-7.71,1.227],[-6.53,6.397],[0,0],[0.758,9.275],[0,0],[25.472,-1.621],[0,0],[0,-6.004]],"v":[[171.59,-33.324],[155.221,-46.107],[105.654,-48.213],[-47.857,-48.1],[-122.25,-43.071],[-146.784,-26.585],[-157.085,0.667],[-146.928,74.623],[-110.583,86.914],[135.192,79.652],[177.344,43.703],[175.416,-15.637]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":15,"s":[{"i":[[1.428,10.893],[6.883,10],[27.575,2.631],[0,0],[13.698,-2.37],[0,0],[0,-14.231],[0,0],[-24.331,0.387],[0,0],[-0.528,31.37],[0,0]],"o":[[-1.343,-10.238],[-8.377,-12.171],[0,0],[-16.156,0],[-11.244,1.946],[-10.281,9.056],[0,0],[2.098,31.207],[0,0],[24.968,0],[0,0],[0,-14.231]],"v":[[174.355,-58.154],[160.962,-90.728],[109.025,-124.16],[-43.771,-124.834],[-114.715,-123.474],[-141.606,-111.609],[-157.761,-75.385],[-158.178,88.76],[-92.806,141.994],[118.294,142.98],[173.468,115.095],[176.305,-23.583]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[7.997,10.562],[0,0],[17.245,0],[0,0],[12.542,-13.193],[0,0],[4.123,-16.322],[-24.875,-59.752],[-28.258,1.094],[0,0],[-7.73,28.961],[14.747,61.906]],"o":[[0,0],[-7.187,-11.354],[0,0],[-17.245,0],[0,0],[-10.974,11.544],[-16.171,64.022],[8.442,20.278],[0,0],[24.078,-1.159],[17.331,-64.933],[-4.962,-20.829]],"v":[[155.255,-121.044],[129.63,-157.775],[101.426,-178.379],[-75.2,-178.766],[-117.023,-157.813],[-155.153,-121.044],[-176.062,-78.604],[-170.344,129.579],[-118.052,172.615],[119.215,171.402],[175.635,127.549],[174.555,-75.968]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":40,"s":[{"i":[[10.5,8.889],[0,0],[16.5,0],[0,0],[12,-10.158],[0,0],[0,-16.5],[0,0],[-25.5,0],[0,0],[0,25.5],[0,0]],"o":[[0,0],[-12,-10.158],[0,0]
,[-16.5,0],[0,0],[-10.5,8.889],[0,0],[0,25.5],[0,0],[25.5,0],[0,0],[0,-16.5]],"v":[[148.5,-118.389],[117.279,-147.462],[76.201,-162.881],[-72,-162.832],[-114.613,-147.366],[-148.5,-118.389],[-165,-78],[-165,127.5],[-120,172.5],[120,172.5],[165,127.5],[165,-78]],"c":true}]},{"t":50,"s":[{"i":[[10.5,10.5],[0,0],[16.5,0],[0,0],[12,-12],[0,0],[0,-16.5],[0,0],[-25.5,0],[0,0],[0,25.5],[0,0]],"o":[[0,0],[-12,-12],[0,0],[-16.5,0],[0,0],[-10.5,10.5],[0,0],[0,25.5],[0,0],[25.5,0],[0,0],[0,-16.5]],"v":[[148.5,-120],[115.5,-154.5],[72,-172.5],[-72,-172.5],[-115.5,-154.5],[-148.5,-120],[-165,-78],[-165,127.5],[-120,172.5],[120,172.5],[165,127.5],[165,-78]],"c":true}]}],"ix":2},"nm":"Box","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/ic_save_to_gallery.json b/TMessagesProj/src/main/res/raw/ic_save_to_gallery.json new file mode 100644 index 000000000..6aa34a572 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/ic_save_to_gallery.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":65,"w":512,"h":512,"nm":"Download","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":3,"nm":"NULL ALL","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[170,170,100],"ix":6}},"ao":0,"ip":0,"op":65,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":3,"nm":"NULL 
ALL","parent":1,"sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.5,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[1,97,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.7,"y":0},"t":16,"s":[1,23,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":0.7},"o":{"x":0.167,"y":0.167},"t":28,"s":[1,37,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[1,37,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":51,"s":[1,39,0],"to":[0,0,0],"ti":[0,0,0]},{"t":60,"s":[1,35,0]}],"ix":2},"a":{"a":0,"k":[50,50,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.5,0.5,0.5],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":13,"s":[100,100,100]}],"ix":6}},"ao":0,"ip":0,"op":65,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Splash","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[0.732,26.307,0],"to":[0,0,0],"ti":[0,0,0]},{"t":39,"s":[0.732,6.307,0]}],"ix":2},"a":{"a":0,"k":[0.732,-19.693,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[2.114,-97.858],[0.095,-22.349]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-114.75,-74.514],[-62.785,-19.693]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[116.215,-74.514],[64.25,-19.693]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.5],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":26,"s":[100]},{"t":40,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.5],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":23,"s":[100]},{"t":36,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":4,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false}],"ip":0,"op":65,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Arrow 
2","parent":2,"sr":1,"ks":{"o":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":39,"s":[0]},{"t":43,"s":[100]}],"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":35,"s":[48.979,-117.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":49,"s":[48.979,-79.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":59,"s":[48.979,-105.2,0],"to":[0,0,0],"ti":[0,0,0]},{"t":65,"s":[48.979,-99.7,0]}],"ix":2},"a":{"a":0,"k":[-0.021,-112.7,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":35,"s":[0,0,100]},{"t":49,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":35,"s":[{"i":[[1.348,0],[0,0],[0,-0.925],[0,0],[0,0],[0,-1.599],[-0.889,-0.611],[0,0],[-1.619,0.769],[0,0],[1.648,1.13],[1.258,0],[0,0],[0,0]],"o":[[0,0],[-1.258,0.203],[0,0],[0,0],[-2.214,0.276],[0,0.863],[0,0],[1.617,0.771],[0,0],[1.281,-1.27],[-0.89,-0.61],[0,0],[0,0],[-0.295,-0.863]],"v":[[16.4,-109.275],[-16.187,-109.223],[-18.382,-107.321],[-18.44,-98.335],[-46.356,-98.283],[-50.249,-95.078],[-48.86,-92.776],[-2.719,-61.155],[2.677,-61.152],[49.382,-93.227],[48.83,-97.382],[45.475,-98.335],[19.19,-98.335],[19.173,-107.769]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":47,"s":[{"i":[[1.328,0],[0,0],[0,-1.397],[0,0],[0,0],[0,-2.414],[-0.876,-0.922],[0,0],[-1.595,1.161],[0,0],[1.624,1.706],[1.24,0],[0,0],[0,0]],"o":[[0,0],[-1.24,0.306],[0,0],[0,0],[-2.181,0.416],[0,1.303],[0,0],[1.593,1.164],[0,0],[1.262,-1.917],[-0.877,-0.921],[0,0],[0,0],[-0.291,-1.303]],"v":[[16.187,-107.7],[-15.916,-107.622],[-18.079,-104.75],[-18.163,-92.62],[-45.664,-92.541],[-49.5,-87.703],[-48.132,-84.228],[-2.676,-36.498],[2.64,-36.493],[48.651,-84.909],[48.108,-91.181],[44.803,-92.62],[18.908,-92.62],[18.918,-105.426]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x"
:0.3,"y":0},"t":56,"s":[{"i":[[1.392,0],[0,0],[0,-1.317],[0,0],[0,0],[0,-2.276],[-0.918,-0.869],[0,0],[-1.672,1.095],[0,0],[1.702,1.609],[1.299,0],[0,0],[0,0]],"o":[[0,0],[-1.299,0.288],[0,0],[0,0],[-2.286,0.393],[0,1.229],[0,0],[1.669,1.098],[0,0],[1.322,-1.807],[-0.919,-0.869],[0,0],[0,0],[-0.305,-1.229]],"v":[[16.863,-112.7],[-16.776,-112.627],[-19.042,-109.918],[-19.042,-94.182],[-47.859,-94.107],[-51.878,-89.545],[-50.445,-86.269],[-2.814,-41.262],[2.757,-41.258],[50.969,-86.91],[50.4,-92.825],[46.937,-94.182],[19.803,-94.182],[19.725,-110.556]],"c":true}]},{"t":63,"s":[{"i":[[1.392,0],[0,0],[0,-1.392],[0,0],[0,0],[0,-2.406],[-0.918,-0.919],[0,0],[-1.672,1.157],[0,0],[1.702,1.7],[1.299,0],[0,0],[0,0]],"o":[[0,0],[-1.299,0.305],[0,0],[0,0],[-2.286,0.415],[0,1.299],[0,0],[1.669,1.16],[0,0],[1.322,-1.91],[-0.919,-0.918],[0,0],[0,0],[-0.305,-1.299]],"v":[[16.863,-112.7],[-16.776,-112.622],[-19.042,-109.76],[-19.042,-93.128],[-47.859,-93.049],[-51.878,-88.228],[-50.445,-84.765],[-2.814,-37.198],[2.757,-37.194],[50.969,-85.443],[50.4,-91.694],[46.937,-93.128],[19.803,-93.128],[19.725,-110.434]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":36,"op":65,"st":-3,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Mask","parent":7,"td":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[0,5.5,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[87.75,-56.25],[87.75,56.25],[-87.75,56.25],[-87.75,-56.25]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[4.25,77.75],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Arrow","parent":2,"tt":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.485,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[48.979,-7.327,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.7,"y":0},"t":13,"s":[48.979,-28.327,0],"to":[0,0,0],"ti":[0,0,0]},{"t":28,"s":[48.979,126.673,0]}],"ix":2},"a":{"a":0,"k":[-0.021,-36.327,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.5,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[1.196,0],[0,0],[0,-1.339],[0,0],[0,0],[0,-2.315],[-0.789,-0.884],[0,0],[-1.436,1.113],[0,0],[1.462,1.636],[1.116,0],[0,0],[0,0]],"o":[[0,0],[-1.116,0.293],[0,0],[0,0],[-1.964,0.399],[0,1.25],[0,0],[1.434,1.117],[0,0],[1.136,-1.838],[-0.789,-0.884],[0,0],[0,0],[-0.262,-1.25]],"v":[[14.517,-112.7],[-14.384,-112.625],[-16.331,-109.87],[-16.331,-93.864],[-41.089,
-93.788],[-44.542,-89.148],[-43.31,-85.815],[-2.388,-40.035],[2.397,-40.031],[43.819,-86.467],[43.33,-92.483],[40.354,-93.864],[17.043,-93.864],[16.976,-110.519]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":10,"s":[{"i":[[1.392,0],[0,0],[0,-1.008],[0,0],[0,0],[0,-1.741],[-0.918,-0.665],[0,0],[-1.672,0.837],[0,0],[1.702,1.231],[1.299,0],[0,0],[0,0]],"o":[[0,0],[-1.299,0.221],[0,0],[0,0],[-2.286,0.3],[0,0.94],[0,0],[1.669,0.84],[0,0],[1.322,-1.383],[-0.919,-0.665],[0,0],[0,0],[-0.305,-0.94]],"v":[[16.905,-116.22],[-16.735,-116.164],[-19,-114.092],[-19.042,-98.532],[-47.859,-98.474],[-51.878,-94.984],[-50.445,-92.477],[-2.814,-58.043],[2.757,-58.04],[50.969,-92.968],[50.4,-97.493],[46.937,-98.532],[19.803,-98.532],[19.767,-114.58]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":22,"s":[{"i":[[1.288,0],[0,0],[0,-1.544],[0,0],[0,0],[0,-2.669],[-0.85,-1.019],[0,0],[-1.547,1.283],[0,0],[1.575,1.887],[1.202,0],[0,0],[0,0]],"o":[[0,0],[-1.202,0.338],[0,0],[0,0],[-2.116,0.46],[0,1.441],[0,0],[1.544,1.287],[0,0],[1.224,-2.119],[-0.85,-1.019],[0,0],[0,0],[-0.282,-1.441]],"v":[[15.623,-121.095],[-15.512,-121.009],[-17.609,-117.833],[-17.609,-99.38],[-44.28,-99.292],[-48,-93.943],[-46.673,-90.101],[-2.589,-37.326],[2.567,-37.32],[47.19,-90.854],[46.663,-97.789],[43.457,-99.38],[18.344,-99.38],[18.272,-118.581]],"c":true}]},{"t":28,"s":[{"i":[[1.288,0],[0,0],[0,-1.166],[0,0],[0,0],[0,-2.015],[-0.85,-0.77],[0,0],[-1.547,0.969],[0,0],[1.575,1.424],[1.202,0],[0,0],[0,0]],"o":[[0,0],[-1.202,0.255],[0,0],[0,0],[-2.116,0.348],[0,1.088],[0,0],[1.544,0.972],[0,0],[1.224,-1.6],[-0.85,-0.769],[0,0],[0,0],[-0.282,-1.088]],"v":[[15.623,-100.25],[-15.512,-100.185],[-17.609,-97.787],[-17.609,-83.857],[-44.28,-83.791],[-48,-79.752],[-46.673,-76.852],[-2.589,-37.01],[2.567,-37.006],[47.19,-77.42],[46.663,-82.656],[43.457,-83.857],[18.344,-83.857],[18.272,-98.352]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":29,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Box","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[49,125.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[49,145.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.652,"y":1},"o":{"x":0.31,"y":0},"t":39,"s":[49,118.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.539,"y":1},"o":{"x":0.3,"y":0},"t":51,"s":[49,130.7,0],"to":[0,0,0],"ti":[0,0,0]},{"t":59,"s":[49,125.7,0]}],"ix":2},"a":{"a":0,"k":[0,112.7,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":0,"s":[100,100,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":21,"s":[98,102,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":29,"s":[102,94,100]},{"i":{"x":[0.5,0.5,0.5],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":38,"s":[98,103,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":49,"s":[101,98,100]},{"i":{"x":[0.68,0.68,0.68],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":58,"s":[100,102,100]},{"t":65,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[{"i":[[0,0],[-3.596,-3.225],[0,0],[1.585,0],[0,0],[-0.398,0.367],[0,0],[-4.925,0]],"o":[[4.831,0],[0,0],[0.555,0.551]
,[0,0],[-1.172,0],[0,0],[3.62,-3.34],[0,0]],"v":[[44.75,5.076],[57.837,10.085],[71.528,22.366],[70.874,24.075],[-71.62,24.075],[-72.285,22.375],[-59.164,10.27],[-45.874,5.076]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[0,0],[-4.135,-3.779],[0,0],[1.822,0],[0,0],[-0.457,0.43],[0,0],[-5.664,0]],"o":[[5.555,0],[0,0],[0.638,0.646],[0,0],[-1.347,0],[0,0],[4.163,-3.913],[0,0]],"v":[[51.461,1.127],[66.51,6.996],[85.13,26.083],[84.378,28.085],[-84.97,28.085],[-85.734,26.093],[-68.031,7.213],[-52.748,1.127]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[{"i":[[0,0],[-3.095,-2.541],[0,0],[1.364,0],[0,0],[-0.342,0.289],[0,0],[-4.239,0]],"o":[[4.158,0],[0,0],[0.478,0.434],[0,0],[-1.008,0],[0,0],[3.116,-2.631],[0,0]],"v":[[38.496,5.076],[49.76,9.023],[61.544,18.699],[60.981,20.045],[-61.66,20.045],[-62.232,18.706],[-50.94,9.169],[-39.501,5.076]],"c":true}]},{"t":51,"s":[{"i":[[0,0],[-3.596,-3.225],[0,0],[1.585,0],[0,0],[-0.398,0.367],[0,0],[-4.925,0]],"o":[[4.831,0],[0,0],[0.555,0.551],[0,0],[-1.172,0],[0,0],[3.62,-3.34],[0,0]],"v":[[44.75,5.076],[57.837,10.085],[71.528,22.366],[70.874,24.075],[-71.62,24.075],[-72.285,22.375],[-59.164,10.27],[-45.874,5.076]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[{"i":[[0,0],[-1.949,-1.877],[0,0],[0,-9.077],[0,0],[6.313,0],[0,0],[1.403,1.154],[-2.751,3.344],[0,0],[-1.711,-1.69],[0,0],[-1.688,2.115]],"o":[[1.877,-1.949],[0,0],[4.795,4.618],[0,0],[0,6.313],[0,0],[-1.817,0],[-3.344,-2.751],[0,0],[1.902,-1.335],[0,0],[2.115,1.688],[0,0]],"v":[[23.881,41.681],[30.809,41.551],[56.745,66.531],[64.25,84.177],[64.25,85.189],[52.819,96.619],[-56.35,96.619],[-61.33,94.834],[-62.405,83.799],[-39.063,55.703],[-32.808,56.232],[-11.102,77.635],[-4.216,76.861]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[0,0],[-2.154,-1.669],[0,0],[0,-8.067],[0,0],[6.976,0],[0,0],[1.551,1.026],[-3.04,2.972],[0,0],[-1.891,-1.502],[0,0],[-1.865,1.88]],"o":[[2.075,-1.732],[0,0],[5.299,4.105],[0,0],[0,5.611],[0,0],[-2.008,0],[-3.695,-2.445],[0,0],[2.101,-1.186],[0,0],[2.338,1.5],[0,0]],"v":[[26.389,47.791],[34.045,47.675],[62.705,69.877],[70.997,85.561],[70.997,86.46],[58.367,96.619],[-62.268,96.619],[-67.771,95.032],[-68.959,85.225],[-43.165,60.254],[-36.254,60.724],[-12.268,79.746],[-4.659,79.058]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[{"i":[[0,0],[-1.896,-2.055],[0,0],[0,-9.936],[0,0],[6.141,0],[0,0],[1.365,1.263],[-2.676,3.66],[0,0],[-1.665,-1.851],[0,0],[-1.642,2.316]],"o":[[1.826,-2.134],[0,0],[4.665,5.055],[0,0],[0,6.91],[0,0],[-1.767,0],[-3.253,-3.011],[0,0],[1.85,-1.461],[0,0],[2.058,1.848],[0,0]],"v":[[23.232,36.481],[29.972,36.338],[55.203,63.683],[62.503,83],[62.503,84.107],[51.383,96.619],[-54.818,96.619],[-59.663,94.665],[-60.708,82.585],[-38.001,51.83],[-31.916,52.409],[-10.8,75.838],[-4.101,74.991]],"c":true}]},{"t":51,"s":[{"i":[[0,0],[-1.949,-1.877],[0,0],[0,-9.077],[0,0],[6.313,0],[0,0],[1.403,1.154],[-2.751,3.344],[0,0],[-1.711,-1.69],[0,0],[-1.688,2.115]],"o":[[1.877,-1.949],[0,0],[4.795,4.618],[0,0],[0,6.313],[0,0],[-1.817,0],[-3.344,-2.751],[0,0],[1.902,-1.335],[0,0],[2.1
15,1.688],[0,0]],"v":[[23.881,41.681],[30.809,41.551],[56.745,66.531],[64.25,84.177],[64.25,85.189],[52.819,96.619],[-56.35,96.619],[-61.33,94.834],[-62.405,83.799],[-39.063,55.703],[-32.808,56.232],[-11.102,77.635],[-4.216,76.861]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[{"i":[[8.999,0],[0,0],[6.986,-5.673],[0,-13.376],[0,0],[-16.237,0],[0,0],[0,16.237],[0,0],[25.148,20.42]],"o":[[0,0],[-9,0],[-25.145,20.419],[0,0],[0,16.237],[0,0],[16.237,0],[0,0],[0,-13.377],[-6.986,-5.672]],"v":[[45.368,-14.7],[-45.371,-14.7],[-70.082,-5.93],[-107.8,44.763],[-107.8,83.3],[-78.4,112.7],[78.4,112.7],[107.8,83.3],[107.8,44.763],[70.077,-5.932]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":30,"s":[{"i":[[9.944,0],[0,0],[7.72,-5.673],[0,-13.376],[0,0],[-17.942,0],[0,0],[0,16.237],[0,0],[27.79,20.42]],"o":[[0,0],[-9.945,0],[-27.786,20.419],[0,0],[0,16.237],[0,0],[17.942,0],[0,0],[0,-13.377],[-7.72,-5.672]],"v":[[50.132,-14.7],[-50.136,-14.7],[-77.443,-5.93],[-119.122,44.763],[-119.122,83.3],[-86.634,112.7],[86.634,112.7],[119.122,83.3],[119.122,44.763],[77.437,-5.932]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[{"i":[[8.754,0],[0,0],[6.796,-5.673],[0,-13.376],[0,0],[-15.796,0],[0,0],[0,16.237],[0,0],[24.465,20.42]],"o":[[0,0],[-8.755,0],[-24.461,20.419],[0,0],[0,16.237],[0,0],[15.796,0],[0,0],[0,-13.377],[-6.796,-5.672]],"v":[[44.134,-14.7],[-44.138,-14.7],[-68.177,-5.93],[-104.869,44.763],[-104.869,83.3],[-76.268,112.7],[76.268,112.7],[104.869,83.3],[104.869,44.763],[68.172,-5.932]],"c":true}]},{"t":51,"s":[{"i":[[8.999,0],[0,0],[6.986,-5.673],[0,-13.376],[0,0],[-16.237,0],[0,0],[0,16.237],[0,0],[25.148,20.42]],"o":[[0,0],[-9,0],[-25.145,20.419],[0,0],[0,16.237],[0,0],[16.237,0],[0,0],[0,-13.377],[-6.986,-5.672]],"v":[[45.368,-14.7],[-45.371,-14.7],[-70.082,-5.93],[-107.8,44.763],[-107.8,83.3],[-78.4,112.7],[78.4,112
.7],[107.8,83.3],[107.8,44.763],[70.077,-5.932]],"c":true}]}],"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":65,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/ic_save_to_music.json b/TMessagesProj/src/main/res/raw/ic_save_to_music.json new file mode 100644 index 000000000..91c44b36c --- /dev/null +++ b/TMessagesProj/src/main/res/raw/ic_save_to_music.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Box","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":4,"s":[-6]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.576],"y":[0]},"t":18,"s":[8]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":28,"s":[-5]},{"i":{"x":[0.69],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":37,"s":[6]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":47,"s":[-3]},{"t":58,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[256,513,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":16,"s":[260,418,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":25,"s":[256,506,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":35,"s":[257,449,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0},"t":45,"s":[256,476,0],"to":[0,0,0],"ti":[0,0,0]},{"t":57,"s":[256,468,0]}],"ix":2},"a":{"a":0,"k":[0,172.5,0],"ix":1}
,"s":{"a":1,"k":[{"i":{"x":[0.704,0.704,0.704],"y":[1,1,1]},"o":{"x":[0.314,0.314,0.314],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":16,"s":[100,100,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":25,"s":[102,98,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":35,"s":[98,102,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":45,"s":[101,99,100]},{"t":57,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[-7.162,0],[0,0],[-4.565,-0.036],[0,0]],"o":[[0,0],[5.209,-0.032],[0,0],[6.511,0],[0,0],[0,0]],"v":[[-66.473,-47.14],[-46.939,-47.298],[-30.01,-47.35],[93.701,-47.35],[109.328,-47.287],[128.861,-47.14]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.6,"y":0},"t":13,"s":[{"i":[[0,0],[0,0],[-7.162,0],[0,0],[-4.565,-0.073],[0,0]],"o":[[0,0],[5.209,-0.063],[0,0],[6.511,0],[0,0],[0,0]],"v":[[-66.473,-100.161],[-46.939,-100.478],[-30.01,-100.583],[93.701,-100.583],[109.328,-100.457],[128.861,-100.161]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.167,"y":0.167},"t":21,"s":[{"i":[[0,0],[0,0],[-6.803,0],[0,0],[-3.524,-2.208],[0,0]],"o":[[0,0],[4.948,-2.029],[0,0],[5.28,-0.16],[0,0],[0,0]],"v":[[-75.732,-90.545],[-52.735,-102.928],[-36.655,-106.31],[97.894,-106.418],[104.687,-103.031],[108.887,-91.117]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":27,"s":[{"i":[[0,0],[0,0],[-10.41,0],[0,0],[-4.162,-8.075],[0,0]],"o":[[0,0],[7.571,-7.423],[0,0],[6.709,-0.591],[0,0],[0,0]],"v":[[-146.668,-66.261],[-101.696,-109.906],[-77.09,-122.277],[115.956,-122.174],[129.805,-105.121],[142.774,-66.221]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":37,"s":[{"i":[[0,0],[0,0],[-8.25,0],[0,0],[-5.258,-2.542],[0,0]],"o":[[0,0],[6,-2.21]
,[0,0],[7.5,0],[0,0],[0,0]],"v":[[-112.5,-132.52],[-96.393,-143.659],[-77.677,-147.509],[80.586,-148.125],[96.207,-144.454],[112.5,-132.52]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0},"t":47,"s":[{"i":[[0,0],[0,0],[-8.578,0],[0,0],[-5.467,-4.882],[0,0]],"o":[[0,0],[6.239,-4.244],[0,0],[7.798,0],[0,0],[0,0]],"v":[[-116.976,-96.281],[-93.58,-117.5],[-73.305,-124.574],[74.864,-124.574],[93.58,-116.086],[116.976,-96.281]],"c":true}]},{"t":56,"s":[{"i":[[0,0],[0,0],[-8.25,0],[0,0],[-5.258,-5.177],[0,0]],"o":[[0,0],[6,-4.5],[0,0],[7.5,0],[0,0],[0,0]],"v":[[-112.5,-112.5],[-90,-135],[-70.5,-142.5],[72,-142.5],[90,-133.5],[112.5,-112.5]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.5,"y":1},"o":{"x":0.6,"y":0},"t":19,"s":[{"i":[[0,0],[0,0],[0,-0.003],[0.054,0],[0,0.041],[-0.054,0],[-0.017,-0.011],[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0.041],[-0.054,0],[0,-0.041],[0.024,0],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[-18.105,-75.025],[-18.204,-75.025],[-18.204,-74.825],[-18.301,-74.751],[-18.398,-74.825],[-18.301,-74.898],[-18.238,-74.881],[-18.238,-75.025],[-18.238,-75.086],[-18.238,-75.087],[-18.105,-75.087]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":27,"s":[{"i":[[0,0],[0,0],[-0.041,-1.176],[33.522,-1.179],[0.64,18.19],[-33.522,1.179],[-10.674,-4.359],[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0.64,18.19],[-33.522,1.179],[-0.64,-18.19],[14.718,-0.518],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[71.39,13.233],[10.081,15.39],[13.215,104.894],[-45.964,140.051],[-107.819,109.25],[-48.281,74.178],[-9.297,80.388],[-11.557,16.151],[-12.517,-11.118],[-12.542,-11.827],[70.406,-14.745]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":37,"s":[{"i":[[0,0],[0,0],[0,-1.697],[26.805,0],[0,26.239],[-26.805,0],[-8.403,-6.821],[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,26.239],[-26.805,0],[0,-26.239],[11.769,0],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[75.312,-47.536],[26.287,
-47.536],[26.274,81.571],[-21.977,129.221],[-70.512,81.712],[-21.977,34.202],[8.984,45.125],[8.984,-47.536],[8.984,-86.872],[8.984,-87.893],[75.312,-87.893]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0},"t":47,"s":[{"i":[[0,0],[0,0],[0,-1.36],[30.771,0],[0,21.031],[-30.771,0],[-9.646,-5.467],[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,21.03],[-30.771,0],[0,-21.031],[13.51,0],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[86.195,-15.853],[29.916,-15.853],[29.902,87.625],[-25.488,125.817],[-81.204,87.738],[-25.488,49.659],[10.053,58.414],[10.053,-15.853],[10.053,-47.381],[10.053,-48.2],[86.195,-48.2]],"c":true}]},{"t":56,"s":[{"i":[[0,0],[0,0],[0,-1.442],[29.594,0],[0,22.299],[-29.594,0],[-9.277,-5.797],[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,22.299],[-29.594,0],[0,-22.3],[12.993,0],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[82.897,-27.219],[28.771,-27.219],[28.758,82.503],[-24.513,123],[-78.097,82.623],[-24.513,42.246],[9.668,51.529],[9.668,-27.219],[9.668,-60.649],[9.668,-61.518],[82.897,-61.518]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[1.454,4.596],[10.137,2.931],[16.448,0],[0,0],[13.734,-2.186],[7.392,-7.241],[-0.491,-6.001],[0,0],[-17.692,1.509],[0,0],[0,9.28],[0,0]],"o":[[-1.367,-4.32],[-10.137,-2.931],[0,0],[-16.448,0],[-7.71,1.227],[-6.53,6.397],[0,0],[0.758,9.275],[0,0],[25.472,-1.621],[0,0],[0,-6.004]],"v":[[171.59,-33.324],[155.221,-46.107],[105.654,-48.213],[-47.857,-48.1],[-122.25,-43.071],[-146.784,-26.585],[-157.085,0.667],[-146.928,74.623],[-110.583,86.914],[135.192,79.652],[177.344,43.703],[175.416,-15.637]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":13,"s":[{"i":[[1.428,9.231],[6.883,8.474],[27.575,2.23],[0,0],[13.698,-2.008],[0,0],[0,-12.059],[0,0],[-24.331,0.328],[0,0],[-0.528,26.582],[0,0]],"o":[[-1.343,-8.676],[-8.377,-10.313],[0,0],[-16.156,0],[-11.244,1.649],[-10.281,7.674],[0,0],[2.098,26.444],[0,0],[24.968,0],[0,0],[0,-12.059]],"v":[[174.355,-45.586],[160.962,-73.188],[109.025,-101.517],[-43.771,-102.089],[-114.715,-100.936],[-141.606,-90.882],[-157.761,-60.187],[-158.178,78.904],[-92.806,124.012],[118.294,124.848],[173.468,101.22],[176.305,-16.292]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":27,"s":[{"i":[[8.228,9.626],[0,0],[17.742,0],[0,0],[12.903,-12.025],[0,0],[4.241,-14.876],[-25.591,-54.461],[-29.073,0.997],[0,0],[-7.953,26.396],[15.172,56.424]],"o":[[0,0],[-7.394,-10.348],[0,0],[-17.742,0],[0,0],[-11.29,10.522],[-16.637,58.352],[8.685,18.482],[0,0],[24.772,-1.057],[17.831,-59.183],[-5.105,-18.984]],"v":[[159.762,-94.942],[133.399,-128.421],[104.382,-147.2],[-77.335,-147.552],[-120.364,-128.455],[-159.593,-94.942],[-181.104,-56.26],[-175.221,133.487],[-121.422,172.711],[122.684,171.606],[180.729,131.637],[179.619,-53.857]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":37,"s":[{"i":[[10.5,9.023],[0,0],[16.5,0],[0,0],[12,-10.312],[0,0],[0,-16.749],[0,0],[-25.5,0],[0,0],[0,25.885],[0,0]],"o":[[0,0],[-12,-10.312],[
0,0],[-16.5,0],[0,0],[-10.5,9.023],[0,0],[0,25.885],[0,0],[25.5,0],[0,0],[0,-16.749]],"v":[[148.5,-122.775],[117.279,-152.287],[76.201,-167.939],[-72,-167.889],[-114.613,-152.19],[-148.5,-122.775],[-165,-81.778],[-165,126.821],[-120,172.5],[120,172.5],[165,126.821],[165,-81.778]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0},"t":47,"s":[{"i":[[10.918,9.902],[0,0],[17.156,0],[0,0],[12.477,-11.317],[0,0],[0,-15.561],[0,0],[-26.514,0],[0,0],[0,24.049],[0,0]],"o":[[0,0],[-12.477,-11.317],[0,0],[-17.156,0],[0,0],[-10.918,9.902],[0,0],[0,24.049],[0,0],[26.514,0],[0,0],[0,-15.561]],"v":[[154.408,-103.354],[120.095,-135.891],[74.864,-152.866],[-74.864,-152.866],[-120.095,-135.891],[-154.408,-103.354],[-171.564,-63.744],[-171.564,130.061],[-124.774,172.5],[124.774,172.5],[171.564,130.061],[171.564,-63.744]],"c":true}]},{"t":56,"s":[{"i":[[10.5,10.5],[0,0],[16.5,0],[0,0],[12,-12],[0,0],[0,-16.5],[0,0],[-25.5,0],[0,0],[0,25.5],[0,0]],"o":[[0,0],[-12,-12],[0,0],[-16.5,0],[0,0],[-10.5,10.5],[0,0],[0,25.5],[0,0],[25.5,0],[0,0],[0,-16.5]],"v":[[148.5,-120],[115.5,-154.5],[72,-172.5],[-72,-172.5],[-115.5,-154.5],[-148.5,-120],[-165,-78],[-165,127.5],[-120,172.5],[120,172.5],[165,127.5],[165,-78]],"c":true}]}],"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Arrow","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":5,"s":[0]},{"i":{"x":[0.648],"y":[0.924]},"o":{"x":[0.306],"y":[0]},"t":16,"s":[9]},{"i":{"x":[0.703],"y":[1]},"o":{"x":[0.341],"y":[0.13]},"t":21,"s":[4]},{"t":28,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.702,"y":1},"o":{"x":0.347,"y":0.132},"t":5,"s":[15.455,18.901,0],"to":[0.374,8.268,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.472,"y":0},"t":16,"s":[25.935,-55.624,0],"to":[-32.111,59.554,0],"ti":[0,0,0]},{"t":28,"s":[-19.545,194.941,0]}],"ix":2},"a":{"a":0,"k":[0,172.5,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0},"t":5,"s":[{"i":[[1.646,-2.075],[0,0],[0,0],[0.758,0.657],[0,0],[-1.822,-0.052],[0,0],[0,0],[-1.107,0],[0,0],[-0.383,-0.692],[0,0],[0,0]],"o":[[0,0],[0,0],[-0.682,0.7],[0,0],[-2.41,-2.438],[0,0],[0,0],[0,-0.623],[0,0],[1.234,0],[0,0],[0,0],[2.704,0.052]],"v":[[34.137,17.418],[34.137,17.418],[1.148,42.228],[-1.883,42.228],[-34.998,18.04],[-32.881,12.437],[-14.435,12.437],[-14.435,1.854],[-12.392,-0.013],[12.38,-0.013],[14.679,1.232],[14.679,12.437],[32.021,12.437]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[2.016,-3.888],[0,0],[0,0],[0.928,1.231],[0,0],[-2.232,-0.097],[0,0],[0,0],[-1.356,0],[0,0],[-0.469,-1.296],[0,0],[0,0]],"o":[[0,0],[0,0],[-0.835,1.312],[0,0],[-2.952,-4.568],[0,0],[0,0],[0,-1.166],[0,0],[1.512,0],[0,0],[0,0],[3.312,0.097]],"v":[[41.998,13.889],[41.998,13.889],[1.588,60.371],[-2.123,60.371],[-42.687,15.055],[-40.094,4.558],[-17.499,4.558],[-17.499,-15.269],[-14.997,-18.768],[15.348,-18.768],[18.163,-16.436],[18.163,4.558],[39.405,4.558]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":16,"s":[{"i":[[3.412,-3.969],[0,0],[0,0],[1.57,1.257],
[0,0],[-3.778,-0.099],[0,0],[0,0],[-2.294,0],[0,0],[-0.794,-1.323],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.413,1.34],[0,0],[-4.996,-4.664],[0,0],[0,0],[0,-1.191],[0,0],[2.559,0],[0,0],[0,0],[5.606,0.099]],"v":[[71.633,13.73],[71.633,13.73],[3.25,61.189],[-3.031,61.189],[-71.674,14.921],[-67.287,4.203],[-29.051,4.203],[-29.051,-16.041],[-24.816,-19.614],[26.535,-19.614],[31.299,-17.232],[31.299,4.203],[67.246,4.203]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":21,"s":[{"i":[[2.584,-4.433],[0,0],[0,0],[1.399,1.404],[0,0],[-2.86,-0.111],[0,0],[0,0],[-1.756,0],[0,0],[-0.608,-1.478],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.259,1.496],[0,0],[-3.783,-5.209],[0,0],[0,0],[0,-1.33],[0,0],[1.959,0],[0,0],[0,0],[4.244,0.111]],"v":[[53.122,21.622],[53.122,21.622],[0.313,79.816],[-5.283,79.816],[-54.315,15.43],[-50.993,3.462],[-22.187,6.522],[-15.774,-53.001],[-12.532,-56.991],[26.054,-52.345],[29.701,-49.685],[23.302,8.424],[49.8,10.984]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":23,"s":[{"i":[[2.966,-4.58],[0,0],[0,0],[1.596,1.45],[0,0],[-3.284,-0.114],[0,0],[0,0],[-1.909,0],[0,0],[-0.661,-1.527],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.436,1.546],[0,0],[-4.343,-5.381],[0,0],[0,0],[0,-1.374],[0,0],[2.129,0],[0,0],[0,0],[4.873,0.114]],"v":[[61.743,21.03],[61.743,21.03],[3.254,43.44],[-3.129,43.44],[-62.344,18.936],[-58.53,6.571],[-23.948,7.982],[-20.992,-32.394],[-17.469,-36.516],[24.919,-34.373],[28.883,-31.626],[25.932,8.859],[57.929,10.039]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.167},"t":25,"s":[{"i":[[3.197,-1.233],[0,0],[0,0],[1.715,0.39],[0,0],[-3.539,-0.031],[0,0],[0,0],[-2,0],[0,0],[-0.692,-0.411],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.543,0.416],[0,0],[-4.681,-1.448],[0,0],[0,0],[0,-0.37],[0,0],[2.231,0],[0,0],[0,0],[5.252,0.031]],"v":[[66.934,-12.341],[66.934,-12.341],[2.439,5.849],[-4.419,5.849],[-67.179,-12.243],[-63.069,-15.57],[-25.009,-15.46],[-24.134,-23.077],[-20.441,-24.186],[24.236,-24.019],[28.39,-23.279],[27.516,-15
.392],[62.824,-15.299]],"c":true}]},{"t":28,"s":[{"i":[[3.294,-4.705],[0,0],[0,0],[1.764,1.49],[0,0],[-3.646,-0.118],[0,0],[0,0],[-2.039,0],[0,0],[-0.706,-1.568],[0,0],[0,0]],"o":[[0,0],[0,0],[-1.588,1.588],[0,0],[-4.823,-5.529],[0,0],[0,0],[0,-1.412],[0,0],[2.274,0],[0,0],[0,0],[5.411,0.118]],"v":[[69.117,20.523],[69.117,20.523],[2.775,91.1],[-4.283,91.1],[-69.213,21.934],[-64.978,9.231],[-25.456,9.231],[-25.456,-14.765],[-21.691,-19],[23.948,-19],[28.183,-16.177],[28.183,9.231],[64.883,9.231]],"c":true}]}],"ix":2},"nm":"Arrow","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":7,"op":25,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/ic_unban.json b/TMessagesProj/src/main/res/raw/ic_unban.json new file mode 100644 index 000000000..7146942a2 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/ic_unban.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Finger 
4","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.448],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.1],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":6,"s":[-20.696]},{"i":{"x":[0.512],"y":[1]},"o":{"x":[0.682],"y":[0]},"t":16,"s":[14]},{"i":{"x":[0.573],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":28,"s":[-12]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":42,"s":[4]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":49,"s":[-2]},{"t":55,"s":[0]}],"ix":10},"p":{"a":0,"k":[114.598,1.901,0],"ix":2},"a":{"a":0,"k":[114.598,1.901,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.448,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[-11.366,-31.215],[6.103,12.041],[16.567,24.423],[-8.643,6.215],[-5.392,-5.871],[-23.672,-31.37],[-2.728,-15.469]],"o":[[-6.148,-33.394],[-6.103,-12.041],[-4.255,-6.139],[8.643,-6.215],[5.392,5.871],[18.084,23.965],[5.771,32.728]],"v":[[54.332,-30.222],[35.956,-98.375],[1.951,-153.07],[5.351,-177.586],[28.683,-173.292],[74.204,-114.262],[114.598,1.901]],"c":false}]},{"i":{"x":0.1,"y":1},"o":{"x":0.3,"y":0},"t":5,"s":[{"i":[[-11.366,-31.215],[10.173,10.986],[20.844,8.144],[-1.245,10.573],[-7.971,0.018],[-32.608,-42.365],[2.588,-44.206]],"o":[[1.112,-21.079],[-7.288,-7.87],[-7.402,-1.002],[1.245,-10.573],[7.971,-0.018],[12.272,15.944],[-1.942,33.176]],"v":[[54.332,-30.222],[34.998,-89.545],[-2.65,-110.614],[-20.383,-128.082],[-1.47,-142.405],[85.897,-94.403],[114.598,1.901]],"c":false}]},{"i":{"x":0.512,"y":1},"o":{"x":0.682,"y":0},"t":15,"s":[{"i":[[-2.538,31.79],[4.122,12.855],[12.496,26.735],[-9.518,4.77],[-4.395,-6.65],[-18.412,-34.719],[-2.728,-15.469]],"o":[[-1.236,-32.513],[-4.122,-12.855],[-3.231,-6.735],[9.518,-4.77],[4.395,6.65],[14.066,26.523],[5.771,32.728]],"v":[[55.679,-15.51],[36.474,-101.368],[11.547,-160.753],[18.781,-184.423],[43.375,-176.595],[76.753,-111.006],[114.598,1.901]],"c":false}]},{"i":{"x":0
.573,"y":1},"o":{"x":0.6,"y":0},"t":27,"s":[{"i":[[-11.366,-31.215],[7.892,10.952],[20.145,21.566],[-7.578,7.478],[-6.235,-4.966],[-28.24,-27.33],[-2.728,-15.469]],"o":[[-6.148,-33.394],[-7.892,-10.952],[-5.154,-5.407],[7.578,-7.478],[6.235,4.966],[21.573,20.879],[5.771,32.728]],"v":[[54.332,-30.222],[33.035,-91.789],[-9.022,-140.566],[-9.456,-165.312],[14.26,-164.68],[68.365,-113.402],[114.598,1.901]],"c":false}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":41,"s":[{"i":[[-11.366,-31.215],[4.579,12.699],[13.443,26.272],[-9.341,5.107],[-4.63,-6.488],[-19.642,-34.039],[-2.728,-15.469]],"o":[[-6.148,-33.394],[-4.579,-12.699],[-3.469,-6.615],[9.341,-5.107],[4.63,6.488],[15.005,26.004],[5.771,32.728]],"v":[[55.845,-19.82],[35.933,-100.559],[8.9,-159.016],[15.284,-182.929],[37.913,-175.803],[75.843,-111.631],[114.598,1.901]],"c":false}]},{"t":48,"s":[{"i":[[-11.366,-31.215],[6.103,12.041],[16.567,24.423],[-8.643,6.215],[-5.392,-5.871],[-23.672,-31.37],[-2.728,-15.469]],"o":[[-6.148,-33.394],[-6.103,-12.041],[-4.255,-6.139],[8.643,-6.215],[5.392,5.871],[18.084,23.965],[5.771,32.728]],"v":[[54.332,-30.222],[35.956,-98.375],[1.951,-153.07],[5.351,-177.586],[28.683,-173.292],[74.204,-114.262],[114.598,1.901]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Finger 
3","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.448],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.1],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":6,"s":[-10]},{"i":{"x":[0.512],"y":[1]},"o":{"x":[0.682],"y":[0]},"t":16,"s":[24]},{"i":{"x":[0.573],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":28,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":42,"s":[8]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":49,"s":[-5]},{"t":55,"s":[0]}],"ix":10},"p":{"a":0,"k":[32.844,-22.381,0],"ix":2},"a":{"a":0,"k":[32.844,-22.381,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.448,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[-8.38,-3.399],[4.477,9.792],[5.244,8.403],[-9.125,7.064],[-6.956,-8.663],[-9.118,-12.633],[-11.366,-31.215]],"o":[[-2.826,-9.955],[-4.477,-9.792],[-5.244,-8.403],[9.125,-7.064],[6.956,8.663],[10.828,15.003],[-6.148,-33.394]],"v":[[11.923,-21.183],[-8.328,-72.495],[-31.897,-112.182],[-30.595,-138.927],[-4.462,-133.419],[21.041,-99.549],[54.332,-30.222]],"c":false}]},{"i":{"x":0.1,"y":1},"o":{"x":0.3,"y":0},"t":5,"s":[{"i":[[-8.38,-3.399],[10.148,0.558],[9.705,1.981],[-1.158,11.481],[-11.047,1.181],[-15.44,-2.083],[0.208,-38.332]],"o":[[-1.373,-10.818],[-10.751,-0.591],[-9.705,-1.981],[1.158,-11.481],[9.859,-1.054],[18.336,2.474],[-6.148,-33.394]],"v":[[11.923,-21.183],[0.478,-49.478],[-42.052,-56.189],[-64.151,-72.827],[-42.166,-87.99],[0.66,-88.224],[54.332,-30.222]],"c":false}]},{"i":{"x":0.512,"y":1},"o":{"x":0.682,"y":0},"t":15,"s":[{"i":[[-8.38,-3.399],[4.004,9.995],[4.836,8.644],[-9.452,6.619],[-6.534,-8.986],[-8.503,-13.055],[-9.534,-30.699]],"o":[[-0.618,-17.913],[-4.004,-9.995],[-4.836,-8.644],[9.452,-6.619],[6.534,8.986],[10.098,15.504],[5.626,12.422]],"v":[[14.308,-16.676],[-6.113,-74.494],[-27.759,-115.263],[-25.179,-141.915],[0.66,-135.164],[24.515,-100.113],[57.963,-34.473]],"c":false}]},{"i":{"x":0.573,"y
":1},"o":{"x":0.6,"y":0},"t":27,"s":[{"i":[[-8.38,-3.399],[9.448,3.06],[9.163,1.732],[-2.483,10.085],[-10.613,-0.798],[-14.524,-2.929],[-7.391,-29.408]],"o":[[-4.316,-15.79],[-9.448,-3.06],[-9.163,-1.732],[2.483,-10.085],[10.613,0.798],[32.223,6.499],[-6.148,-33.394]],"v":[[11.923,-21.183],[-7.124,-61.32],[-48.145,-73.503],[-63.767,-90.489],[-40.679,-103.825],[-0.459,-99.628],[54.332,-30.222]],"c":false}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":41,"s":[{"i":[[-8.38,-3.399],[3.829,10.063],[4.685,8.727],[-9.566,6.454],[-6.376,-9.098],[-8.275,-13.201],[-11.366,-31.215]],"o":[[-2.826,-9.955],[-3.829,-10.063],[-4.685,-8.727],[9.566,-6.454],[6.376,9.098],[9.827,15.677],[-6.148,-33.394]],"v":[[13.059,-18.128],[-8.609,-73.881],[-29.542,-115.02],[-26.5,-141.624],[-0.781,-134.424],[22.46,-98.964],[56.254,-30.774]],"c":false}]},{"t":48,"s":[{"i":[[-8.38,-3.399],[4.477,9.792],[5.244,8.403],[-9.125,7.064],[-6.956,-8.663],[-9.118,-12.633],[-11.366,-31.215]],"o":[[-2.826,-9.955],[-4.477,-9.792],[-5.244,-8.403],[9.125,-7.064],[6.956,8.663],[10.828,15.003],[-6.148,-33.394]],"v":[[11.923,-21.183],[-8.328,-72.495],[-31.897,-112.182],[-30.595,-138.927],[-4.462,-133.419],[21.041,-99.549],[54.332,-30.222]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Finger 
2","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.1],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":8,"s":[-7]},{"i":{"x":[0.512],"y":[1]},"o":{"x":[0.582],"y":[0]},"t":19,"s":[16]},{"i":{"x":[0.56],"y":[1]},"o":{"x":[0.428],"y":[0]},"t":31,"s":[-9]},{"t":45,"s":[0]}],"ix":10},"p":{"a":0,"k":[6.05,1.252,0],"ix":2},"a":{"a":0,"k":[6.05,1.252,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.448,"y":1},"o":{"x":0.3,"y":0},"t":1,"s":[{"i":[[0.137,0.108],[10.146,-1.789],[12.364,-2.18],[2.43,0.281],[2.472,5.615],[-7.134,4.435],[-4.911,2.422],[-9.302,-0.137],[-8.38,-3.399]],"o":[[-8.172,-6.431],[-16.891,2.978],[-2.541,0.448],[-4.967,-0.574],[-3.682,-8.362],[11.128,-6.918],[4.911,-2.422],[4.502,0.066],[-2.826,-9.955]],"v":[[-5.262,23.013],[-37.935,12.475],[-90.16,38.006],[-97.45,38.944],[-109.638,29.512],[-100.744,8.526],[-48.276,-29.46],[-26.654,-34.496],[11.923,-21.183]],"c":false}]},{"i":{"x":0.1,"y":1},"o":{"x":0.3,"y":0},"t":9,"s":[{"i":[[0.137,0.108],[10.146,-1.789],[11.76,-4.395],[2.441,-0.167],[3.454,5.071],[-3.655,7.563],[-4.911,2.422],[-9.302,-0.137],[-8.38,-3.399]],"o":[[-8.172,-6.431],[-16.891,2.978],[-2.416,0.903],[-4.988,0.34],[-5.143,-7.552],[7.78,-16.096],[4.911,-2.422],[4.502,0.066],[-2.826,-9.955]],"v":[[-5.262,23.013],[-42.796,7.852],[-82.325,35.819],[-89.873,38.163],[-103.649,31.347],[-98.486,9.165],[-59.42,-32.344],[-37.797,-37.38],[11.923,-21.183]],"c":false}]},{"i":{"x":0.327,"y":1},"o":{"x":0.582,"y":0},"t":20,"s":[{"i":[[0.161,0.068],[10.162,1.699],[12.397,1.985],[2.205,1.06],[0.499,6.115],[-8.192,1.857],[-5.475,0.041],[-9.302,-0.137],[-8.38,-3.399]],"o":[[-12.922,-5.45],[-19.063,-3.187],[-2.547,-0.408],[-4.506,-2.167],[-0.744,-9.107],[12.779,-2.897],[5.475,-0.041],[4.502,0.066],[-2.826,-9.955]],"v":[[3.832,24.725],[-51.58,8.325],[-108.023,3.979],[-115.219,2.482],[-123.65
1,-10.419],[-108.305,-28.399],[-43.702,-33.649],[-25.973,-34.093],[11.671,-22.618]],"c":false}]},{"i":{"x":0.377,"y":1},"o":{"x":0.574,"y":0},"t":32,"s":[{"i":[[0.098,0.145],[9.114,-4.805],[12.363,2.189],[2.187,1.097],[0.398,6.122],[-8.309,1.231],[-4.997,2.239],[-9.299,0.279],[-8.38,-3.399]],"o":[[-6.18,-9.172],[-20.93,11.035],[-2.54,-0.45],[-4.47,-2.241],[-0.593,-9.118],[15.165,-2.246],[6.509,-2.916],[6.794,-0.204],[-2.826,-9.955]],"v":[[-3.901,29.742],[-37.733,20.088],[-99.494,35.454],[-106.663,33.838],[-114.881,20.8],[-99.801,2.602],[-48.58,-19.896],[-27.654,-25.209],[11.923,-21.183]],"c":false}]},{"t":46,"s":[{"i":[[0.137,0.108],[10.146,-1.789],[12.364,-2.18],[2.43,0.281],[2.472,5.615],[-7.134,4.435],[-4.911,2.422],[-9.302,-0.137],[-8.38,-3.399]],"o":[[-8.172,-6.431],[-16.891,2.978],[-2.541,0.448],[-4.967,-0.574],[-3.682,-8.362],[11.128,-6.918],[4.911,-2.422],[4.502,0.066],[-2.826,-9.955]],"v":[[-5.262,23.013],[-37.935,12.475],[-90.16,38.006],[-97.45,38.944],[-109.638,29.512],[-100.744,8.526],[-48.276,-29.46],[-26.654,-34.496],[11.923,-21.183]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Finger 
1","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.1],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":8,"s":[1]},{"i":{"x":[0.512],"y":[1]},"o":{"x":[0.582],"y":[0]},"t":19,"s":[-13]},{"i":{"x":[0.56],"y":[1]},"o":{"x":[0.318],"y":[0]},"t":31,"s":[2]},{"t":45,"s":[0]}],"ix":10},"p":{"a":0,"k":[-28.732,174.215,0],"ix":2},"a":{"a":0,"k":[-28.732,174.215,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.448,"y":1},"o":{"x":0.3,"y":0},"t":1,"s":[{"i":[[8.72,4.39],[13.528,24.627],[9.861,18.015],[-5.147,4.615],[-9.827,-5.748],[-9.206,-8.36],[-11.234,-1.539]],"o":[[-17.456,-8.788],[-9.536,-17.359],[-9.861,-18.015],[5.147,-4.615],[12.428,7.269],[9.206,8.36],[11.234,1.539]],"v":[[-35.206,174.215],[-81.683,124.093],[-109.032,73.324],[-110.412,41.89],[-90.385,40.908],[-57.05,74.684],[-28.732,87.682]],"c":false}]},{"i":{"x":0.1,"y":1},"o":{"x":0.3,"y":0},"t":9,"s":[{"i":[[9.494,2.274],[13.528,24.627],[2.994,21.775],[-6.742,2.842],[-6.344,-10.148],[-9.205,-8.36],[-11.234,-1.539]],"o":[[-27.125,-6.498],[-9.536,-17.359],[-2.994,-21.775],[6.742,-2.842],[6.472,10.353],[9.206,8.36],[11.234,1.539]],"v":[[-35.206,174.215],[-86.886,126.251],[-105.013,70.784],[-93.171,39.598],[-71.876,45.564],[-55.167,80.033],[-26.003,88.105]],"c":false}]},{"i":{"x":0.327,"y":1},"o":{"x":0.582,"y":0},"t":20,"s":[{"i":[[8.403,4.098],[17.484,23.261],[14.827,12.02],[-2.618,6.479],[-11.346,0.977],[-15.857,-13.437],[-12.008,-5.487]],"o":[[-16.262,-7.93],[-11.667,-15.522],[-10.951,-8.878],[2.618,-6.479],[14.348,-1.236],[17.23,14.601],[9.867,4.509]],"v":[[-29.395,175.543],[-75.808,123.005],[-105.486,91.783],[-119.01,68.045],[-102.784,55.03],[-58.533,72.404],[-8.914,90.493]],"c":false}]},{"i":{"x":0.359,"y":1},"o":{"x":0.464,"y":0},"t":32,"s":[{"i":[[9.318,2.913],[15.713,23.294],[16.272,18.042],[-3.9,4.009],[-11.824,-3.124],[-7.764,-4.41
8],[-9.501,-0.041]],"o":[[-24.538,-7.672],[-14.483,-21.471],[-13.951,-15.468],[4.82,-4.955],[20.847,5.508],[14.774,8.407],[11.339,0.049]],"v":[[-35.206,174.215],[-83.405,124.884],[-112.131,78.667],[-117.109,48.637],[-94.593,46.279],[-57.942,73.399],[-23.48,86.601]],"c":false}]},{"t":46,"s":[{"i":[[8.72,4.39],[13.528,24.627],[9.861,18.015],[-5.147,4.615],[-9.827,-5.748],[-9.206,-8.36],[-11.234,-1.539]],"o":[[-17.456,-8.788],[-9.536,-17.359],[-9.861,-18.015],[5.147,-4.615],[12.428,7.269],[9.206,8.36],[11.234,1.539]],"v":[[-35.206,174.215],[-81.683,124.093],[-109.032,73.324],[-110.412,41.89],[-90.385,40.908],[-57.05,74.684],[-28.732,87.682]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Main","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.448],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.1],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":8,"s":[-10]},{"i":{"x":[0.502],"y":[1]},"o":{"x":[0.799],"y":[0]},"t":20,"s":[16]},{"i":{"x":[0.573],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":32,"s":[-14]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":43,"s":[5]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":50,"s":[-3]},{"t":56,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.448,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[270.199,435.622,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.1,"y":1},"o":{"x":0.3,"y":0},"t":6,"s":[270.199,451.622,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.533,"y":1},"o":{"x":0.83,"y":0},"t":17,"s":[282.199,369.622,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.6,"y":0},"t":29,"s":[263.199,471.622,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.167,"y":0},"t":40,"s":[270.199,423.622,0],"to":[0,0,0],"ti":[0,0,0]},{"t":49,"s":[270.199,435.622,0]}],"ix":2},"a":{"a":0,"k":[14.199,179.622,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.516,0.516,0.7],"y":[1,1,1]},"o":{"x":[0.528,0.528,0.3],"y":[0,0,0]},"t":20,"s":[100,100,100]},{"i":{"x":[0.2,0.2,0.2],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":32,"s":[110,110,100]},{"t":46,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.448,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[-15.798,-1.442],[-8.396,-11.621],[-2.728,-15.469],[12.09,-26.714],[7.467,-14.931],[21.431,-0.415],[0,0],[8.72,4.39],[-22.934,-3.016],[-3.035,15.728],[7.956,6.698]],"o":[[-2.826,-9.955],[37.668,-19.778],[5.771,32.728],[-4.914,10.857],[-9.588,19.171],[0,0],[-9.761,0.189],[-17.456,-8.788],[11.242,1.478],[3.035,-15.728],[-12.488,-10.513]],"v":[[11.923,-21.183],[54.332,-30.222],[114.598,1.901],[88.818,109.553],[70.246,148.236],[19.
796,180.091],[-7.067,180.611],[-35.206,174.215],[-28.732,87.682],[3.543,65.947],[-5.262,23.013]],"c":true}]},{"i":{"x":0.1,"y":1},"o":{"x":0.3,"y":0},"t":9,"s":[{"i":[[-15.798,-1.442],[-8.396,-11.621],[-2.728,-15.469],[12.09,-26.714],[7.467,-14.931],[21.431,-0.415],[0,0],[8.72,4.39],[-22.934,-3.016],[-3.035,15.728],[7.956,6.698]],"o":[[-2.826,-9.955],[37.668,-19.778],[5.771,32.728],[-4.914,10.857],[-9.588,19.171],[0,0],[-9.761,0.189],[-17.456,-8.788],[11.242,1.478],[3.035,-15.728],[-12.488,-10.513]],"v":[[11.923,-21.183],[54.332,-30.222],[114.598,1.901],[88.818,109.553],[70.246,148.236],[19.796,180.091],[-7.067,180.611],[-35.206,174.215],[-28.732,87.682],[-3.546,62.784],[-5.262,23.013]],"c":true}]},{"i":{"x":0.327,"y":1},"o":{"x":0.582,"y":0},"t":20,"s":[{"i":[[-15.798,-1.442],[-8.396,-11.621],[-2.728,-15.469],[12.09,-26.714],[7.467,-14.931],[21.431,-0.415],[0,0],[8.72,4.39],[-22.934,-3.016],[-3.035,15.728],[7.956,6.698]],"o":[[-2.826,-9.955],[37.668,-19.778],[5.771,32.728],[-4.914,10.857],[-9.588,19.171],[0,0],[-9.761,0.189],[-17.456,-8.788],[11.242,1.478],[3.035,-15.728],[-12.488,-10.513]],"v":[[11.923,-21.183],[54.332,-30.222],[114.598,1.901],[88.818,109.553],[70.246,148.236],[19.796,180.091],[-7.067,180.611],[-35.206,174.215],[-28.732,87.682],[5.603,65.877],[-7.119,20.165]],"c":true}]},{"i":{"x":0.359,"y":1},"o":{"x":0.464,"y":0},"t":32,"s":[{"i":[[-15.798,-1.442],[-8.396,-11.621],[-2.728,-15.469],[12.09,-26.714],[7.467,-14.931],[21.431,-0.415],[0,0],[8.72,4.39],[-22.934,-3.016],[-3.035,15.728],[14.439,3.071]],"o":[[-2.826,-9.955],[37.668,-19.778],[5.771,32.728],[-4.914,10.857],[-9.588,19.171],[0,0],[-9.761,0.189],[-17.456,-8.788],[11.242,1.478],[3.035,-15.728],[-15.967,-3.396]],"v":[[11.923,-21.183],[54.332,-30.222],[114.598,1.901],[88.818,109.553],[70.246,148.236],[19.796,180.091],[-7.067,180.611],[-35.206,174.215],[-35.222,83.745],[-4.22,64.012],[-18.055,23.689]],"c":true}]},{"t":46,"s":[{"i":[[-15.798,-1.442],[-8.396,-11.621],[-2.728,-15.469],[12.09,-26.714]
,[7.467,-14.931],[21.431,-0.415],[0,0],[8.72,4.39],[-22.934,-3.016],[-3.035,15.728],[7.956,6.698]],"o":[[-2.826,-9.955],[37.668,-19.778],[5.771,32.728],[-4.914,10.857],[-9.588,19.171],[0,0],[-9.761,0.189],[-17.456,-8.788],[11.242,1.478],[3.035,-15.728],[-12.488,-10.513]],"v":[[11.923,-21.183],[54.332,-30.222],[114.598,1.901],[88.818,109.553],[70.246,148.236],[19.796,180.091],[-7.067,180.611],[-35.206,174.215],[-28.732,87.682],[3.543,65.947],[-5.262,23.013]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/pip_video_request.svg b/TMessagesProj/src/main/res/raw/pip_video_request.svg new file mode 100644 index 000000000..608530932 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/pip_video_request.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/pip_voice_request.svg b/TMessagesProj/src/main/res/raw/pip_voice_request.svg new file mode 100644 index 000000000..59b1063fb --- /dev/null +++ b/TMessagesProj/src/main/res/raw/pip_voice_request.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/voice_muted.json b/TMessagesProj/src/main/res/raw/voice_muted.json new file mode 100644 index 000000000..f94984dfc --- /dev/null +++ b/TMessagesProj/src/main/res/raw/voice_muted.json @@ -0,0 +1 @@ 
+{"v":"5.7.1","fr":60,"ip":0,"op":29,"w":104,"h":114,"nm":"voice_unmuted","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Line","sr":1,"ks":{"o":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":0,"s":[0]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":1,"s":[100]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":27,"s":[100]},{"t":28,"s":[0]}],"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[51.991,59.074,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":1,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-45.93,-46.085],[-45.07,-45.165]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-45.991,-46.074],[45.989,46.076]],"c":false}]},{"t":27,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[45.884,45.989],[45.989,46.076]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":12,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Line","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":36,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Combined 
Shape","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[52,36,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-9.96,0],[0,9.96],[0,0],[9.96,0],[0,-9.96],[0,0]],"o":[[9.96,0],[0,0],[0,-9.96],[-9.96,0],[0,0],[0,9.96]],"v":[[0,36],[18,18],[18,-18],[0,-36],[-18,-18],[-18,18]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[-0.09,0.7],[0.008,-1.268],[-0.007,-2.696],[0.002,-2.408],[0,0],[0,0],[0,0]],"o":[[0,0],[-0.02,3.118],[0.009,3.216],[0,0],[0,0],[0,0],[0,-0.73]],"v":[[-17.87,-20.149],[-17.992,-17.998],[-18,-10.015],[-18.001,-6.404],[-18,-3.309],[-18,-14.622],[-18,-17.999]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[-0.09,0.7],[-3.654,-3.665],[2.414,-2.225],[2.062,2.261],[0,0],[0,0],[0,0]],"o":[[0,0],[2.672,2.68],[-2.867,2.642],[0,0],[0,0],[0,0],[0,-0.73]],"v":[[-17.87,-20.149],[-7.578,-9.868],[-7.571,-1.111],[-16.233,-1.566],[-18,-3.309],[-18,-14.622],[-18,-17.999]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[-0.09,0.7],[-0.75,-0.772],[1.731,-2.08],[2.499,2.558],[0,0],[0,0],[0,0]],"o":[[0,0],[1.62,1.667],[-1.955,2.349],[0,0],[0,0],[0,0],[0,-0.73]],"v":[[-17.87,-20.149],[13.867,11.584],[13.956,19.33],[4.876,19.535],[-18,-3.309],[-18,-14.622],[-18,-17.999]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[-0.09,0.7],[0,0],[0,0],[2.33,-3.03],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,4.13],[0,0],[0,0],[0,0],[0,-0.73]],"v":[[-17.87,-20.149],[18,15.721],[18,18.001],[14.28,28.971],[-18,-3.309],[-18,-14.622],[-18,-17.999]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":17,"s":[{"i":[[-0.09,0.7],[0,0],[0,0],[2.33,-3.03],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,4.
13],[0,0],[0,0],[0,0],[0,-0.73]],"v":[[-17.87,-20.149],[18,15.721],[18,18.001],[14.28,28.971],[-18,-3.309],[-18,-14.622],[-18,-17.999]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[0.831,0.837],[0,0],[0,0],[2.33,-3.03],[0,0],[-3.451,3.66],[0,0]],"o":[[0,0],[0,0],[0,4.13],[0,0],[0,0],[3.156,-3.346],[-1.031,-1.032]],"v":[[-17.87,-20.149],[18,15.721],[18,18.001],[14.28,28.971],[-13.375,1.316],[-12.218,-6.123],[-4.719,-6.999]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":22,"s":[{"i":[[0.831,0.837],[0,0],[0,0],[2.33,-3.03],[0,0],[-3.451,3.66],[0,0]],"o":[[0,0],[0,0],[0,4.13],[0,0],[0,0],[3.156,-3.346],[-1.031,-1.032]],"v":[[-17.87,-20.149],[18,15.721],[18,18.001],[14.28,28.971],[4.922,19.613],[6.079,12.174],[13.578,11.298]],"c":true}]},{"t":23,"s":[{"i":[[0.831,0.837],[0,0],[0,0],[2.33,-3.03],[0,0],[-1.422,5.045],[0,0]],"o":[[0,0],[0,0],[0,4.13],[0,0],[0,0],[0.881,-3.123],[-1.031,-1.032]],"v":[[-17.87,-20.149],[18,15.721],[18,18.001],[14.28,28.971],[14.281,28.956],[17.266,22.924],[17.992,15.72]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"mm","mm":3,"nm":"Merge Paths 1","mn":"ADBE Vector Filter - Merge","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Combined Shape","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-0.09,0.7],[0,0],[0,0],[2.33,-3.03],[0,0],[0,0]],"o":[[0,0],[0,0],[0,4.13],[0,0],[0,0],[0,-0.73]],"v":[[-17.87,-20.149],[18,15.721],[18,18.001],[14.28,28.971],[-18,-3.309],[-18,-17.999]],"c":true},"ix":2},"nm":"Path 2","mn":"ADBE Vector 
Shape - Group","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":1,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":36,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Shape 2","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[52.006,84,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[3.66,0],[0.48,-2.94],[14.82,0],[2.46,14.1],[2.94,0],[-0.54,-3.6],[-18.12,-2.58],[0,0],[-3.3,0],[0,3.3],[0,0],[-2.94,18]],"o":[[-2.94,0],[-2.46,14.1],[-14.82,0],[-0.48,-2.94],[-3.66,0],[2.94,18],[0,0],[0,3.3],[3.3,0],[0,0],[18.12,-2.58],[0.6,-3.6]],"v":[[35.458,-30],[29.578,-24.9],[-0.002,0],[-29.582,-24.9],[-35.462,-30],[-41.462,-23.16],[-6.002,11.52],[-6.002,24],[-0.002,30],[5.998,24],[5.998,11.52],[41.458,-23.16]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[-1.22,4.09],[0.336,-0.84],[1,-1.58],[0,0]],"o":[[0,0],[-0.817,2.043],[-0.947,1.497],[2.71,-3.16]],"v":[[28.77,-21.497],[27.865,-18.894],[25.222,-13.778],[22.78,-10.517]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[-1.22,4.09],[0,0],[2.72,-3.04],[0,0]],"o":[[0,0],[-1.75,3.74],[0,0],[2.71,-3.16]],"v":[[28.77,-21.497],[38.06,-12.207],[31.3,-1.997],[22.78,-10.517]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":23,"s":[{"i":[[-1.22,4.09],[0,0],[2.72,-3.04],[0,0]],"o":[[0,0],[-1.75,3.74],[0,0],[2.71,-3.16]],"v":[[28.77,-21.497],[38.06,-12.207],[31.3,-1.997],[22.78,-10.517]],"c":true}]},{"t":24,"s":[{"i":[[-1.22,4.09],[0.336,-0.84],[1,-1.58],[0,0]],"o":[[0,0],[-0.817,2.043],[-0.947,1.497],[2.71,-3.16]],"v":[[28.77,-21.497],[27.865,-18.894],[25.222,-13.778],[22.78,-10.517]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"mm","mm":3,"nm":"Merge Paths 1","mn":"ADBE Vector Filter - Merge","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":36,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/voice_outlined.json b/TMessagesProj/src/main/res/raw/voice_outlined.json new file mode 100644 index 000000000..c08e144a2 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/voice_outlined.json @@ -0,0 +1 @@ 
+{"v":"5.7.1","fr":60,"ip":0,"op":24,"w":92,"h":107,"nm":"voice_unmuted_outlined","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Shape 1","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[46,38.522,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[88,88,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[0,0],[0,0],[0,-5],[-0.95,-0.95],[-0.31,-0.29],[-5.33,0],[0,0],[0,0],[5.2,-0.6],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[0,5.01],[0.28,0.31],[2.139,1.709],[2.91,0],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,0.607],[-13.54,9.067],[-13.54,16.737],[-10.05,25.707],[-9.16,26.607],[0.13,30.127],[6.09,28.697],[12.23,34.837],[2.59,38.347],[0,38.497],[-22,16.737]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[0,0],[0,0],[-1.41,-1.42],[-0.95,-0.95],[-0.301,-0.287],[-5.576,-0.044],[0,0],[0,0],[5.2,-0.61],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[1.42,1.42],[0.28,0.31],[2.014,1.579],[2.91,0.023],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,13.759],[-14.07,21.689],[-12.02,23.749],[-10.05,25.709],[-9.16,26.599],[0.13,30.129],[6.09,28.699],[12.23,34.829],[2.59,38.349],[0,38.499],[-22,16.739]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[0,0],[0,0],[-1.41,-1.42],[-0.95,-0.95],[-0.301,-0.287],[-5.576,-0.044],[0,0],[0,0],[5.2,-0.61],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[1.42,1.42],[0.28,0.31],[2.014,1.579],[2.91,0.023],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,13.759],[-14.07,21.689],[-12.02,23.749],[-10.05,25.709],[-9.16,26.599],[0.13,30.129],[6.09,28.699],[12.23,34.829],[2.59,38.349],[0,38.499],[-22,16.739]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[0,0],[-1.41,-1.42],[-0.95,-0.95],[-0.31,-0.28],[-2.63,-2.66],[0,0],[0,0],[-0.81,-0.8],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[1.42,1.42],[
0.28,0.31],[1.779,1.8],[1.96,1.96],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,13.759],[-14.07,21.689],[-12.02,23.749],[-10.05,25.709],[-9.16,26.599],[-3.31,32.449],[0.5,36.259],[1.64,37.399],[2.59,38.349],[0,38.499],[-22,16.739]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[0,0],[0,0],[-1.41,-1.42],[-0.95,-0.95],[-0.31,-0.28],[-2.63,-2.66],[0,0],[0,0],[-0.81,-0.8],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[1.42,1.42],[0.28,0.31],[1.779,1.8],[1.96,1.96],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,13.759],[-14.07,21.689],[-12.02,23.749],[-10.05,25.709],[-9.16,26.599],[-3.31,32.449],[0.5,36.259],[1.64,37.399],[2.59,38.349],[0,38.499],[-22,16.739]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":16,"s":[{"i":[[0,0],[0,0],[-0.01,-5],[-0.95,-0.95],[-0.31,-0.29],[-2.63,-2.66],[0,0],[0,0],[-0.81,-0.8],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[0.01,5.01],[0.28,0.31],[1.752,1.82],[1.96,1.96],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,0.607],[-13.53,9.067],[-13.53,16.737],[-10.05,25.707],[-9.16,26.607],[-3.31,32.457],[0.5,36.257],[1.64,37.397],[2.59,38.347],[0,38.497],[-22,16.737]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":19,"s":[{"i":[[0,0],[0,0],[-0.01,-5],[-0.95,-0.95],[-0.31,-0.29],[-2.63,-2.66],[0,0],[0,0],[-0.81,-0.8],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[0.01,5.01],[0.28,0.31],[1.752,1.82],[1.96,1.96],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,0.607],[-13.53,9.067],[-13.53,16.737],[-10.05,25.707],[-9.16,26.607],[-3.31,32.457],[0.5,36.257],[1.64,37.397],[2.59,38.347],[0,38.497],[-22,16.737]],"c":true}]},{"t":20,"s":[{"i":[[0,0],[0,0],[0,-5],[-0.95,-0.95],[-0.31,-0.29],[-5.33,0],[0,0],[0,0],[5.2,-0.6],[0.88,0],[0,12.02]],"o":[[0,0],[0,0],[0,5.01],[0.28,0.31],[2.139,1.709],[2.91,0],[0,0],[0,0],[-0.85,0.1],[-12.15,0],[0,0]],"v":[[-22,0.607],[-13.54,9.067],[-13.54,16.737],[-10.05,25.707],[-9.16,26.607],[0.13,30.127],[6.09,28.697],[12.23,34.837],[2.59,38.347],[0,3
8.497],[-22,16.737]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12.23,34.83],[6.09,28.7],[13.53,17.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12.23,34.83],[6.09,28.7],[13.53,17.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12.23,34.83],[6.09,28.7],[13.53,17.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12
.23,34.83],[6.09,28.7],[13.53,17.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12.23,34.83],[6.09,28.7],[13.53,17.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":16,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12.23,34.83],[6.09,28.7],[13.53,17.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":19,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12.23,34.83],[6.09,28.7],[13.53,17.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]},{"t":20,"s":[{"i":[[-12.15,0],[0,-12.02],[0,0],[5.89,-3.9],[0,0],[-0.15,5.05],[0,0],[0,0],[7.48,0],[0.22,-7.2],[0,0],[0,0],[0,0],[0,0]],"o":[[12.15,0],[0,0],[0,7.54],[0,0],[4.3,-2.15],[0,0],[0,0],[0,-7.39],[-7.34,0],[0,0],[0,0],[0,0],[0,0],[0,-12.02]],"v":[[0,-38.5],[22,-16.74],[22,16.74],[12.23,34.83],[6.09,28.7],[13.53,17
.16],[13.54,16.74],[13.54,-16.74],[0,-30.13],[-13.53,-17.16],[-13.54,-16.74],[-13.54,9.07],[-22,0.61],[-22,-16.74]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"mm","mm":1,"nm":"Merge Paths 1","mn":"ADBE Vector Filter - Merge","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[113.043,113.043],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":30,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Path 2","sr":1,"ks":{"o":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":0,"s":[0]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":1,"s":[100]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":22,"s":[100]},{"t":23,"s":[0]}],"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[45.97,55.077,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":1,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-41.538,-41.6],[-40.837,-40.9]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":12,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-41.722,-41.657],[41.718,41.653]],"c":false}]},{"t":22,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[41.684,41.593],[41.718,41.653]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":8.5,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path 2","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":60,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Shape 2","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[46,73.5,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[6.05,-6.29]],"o":[[0,9.42],[0,0]],"v":[[35,-17.5],[25.23,6.76]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[0,0],[6.05,-6.29]],"o":[[0,9.42],[0,0]],"v":[[35,-17.5],[25.23,6.76]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":20,"s":[{"i":[[0,0],[6.05,-6.29]],"o":[[0,9.42],[0,0]],"v":[[35,-17.5],[25.23,6.76]],"c":false}]},{"t":21,"s":[{"i":[[0,0],[6.05,-6.29]],"o":[[0,9.42],[0,0]],"v":[[35,-17.5],[25.23,6.76]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0.01,-0.01],[3.37,0],[0,19.33]],"o":[[-9.96,10.2],[-19.33,0],[0,0]],"v":[[25.23,6.76],[0,17.5],[-35,-17.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9.5,"s":[{"i":[[0.015,-0.01],[3.37,0],[0,19.33]],"o":[[-6.665,5.595],[-19.33,0],[0,0]],"v":[[17.995,12.098],[0,17.5],[-35,-17.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[0.02,-0.01],[3.37,0],[0,19.33]],"o":[[-3.37,0.99],[-19.33,0],[0,0]],"v":[[10.01,16.06],[0,17.5],[-35,-17.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":20,"s":[{"i":[[0.02,-0.01],[3.37,0],[0,19.33]],"o":[[-3.37,0.99],[-19.33,0],[0,0]],"v":[[10.01,16.06],[0,17.5],[-35,-17.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":20.5,"s":[{"i":[[0.015,-0.01],[3.37,0],[0,19.33]],"o":[[-6.665,5.595],[-19.33,0],[0,0]],"v":[[17.995,12.098],[0,17.5],[-35,-17.5]],"c":false}]},{"t":21,"s":[{"i":[[0.01,-0.01],[3.37,0],[0,19.33]],"o":[[-9.96,10.2],[-19.33,0],[0,0]],"v":[[25.23,6.76],[0,17.5],[-35,-17.5]],"c":false}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"mm","mm":1,"nm":"Merge Paths 1","mn":"ADBE Vector Filter - Merge","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":8.5,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":60,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Rectangle","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[46,98.521,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-2.76,0],[0,-2.77],[0,0],[2.76,0],[0,2.77],[0,0]],"o":[[2.76,0],[0,0],[0,2.77],[-2.76,0],[0,0],[0,-2.77]],"v":[[0,-8.5],[5,-3.48],[5,3.48],[0,8.5],[-5,3.48],[-5,-3.48]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":60,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/voicechat_connecting.mp3 b/TMessagesProj/src/main/res/raw/voicechat_connecting.mp3 new file mode 100644 index 000000000..b5cd7af45 Binary files /dev/null and b/TMessagesProj/src/main/res/raw/voicechat_connecting.mp3 differ diff --git a/TMessagesProj/src/main/res/raw/voicechat_join.mp3 b/TMessagesProj/src/main/res/raw/voicechat_join.mp3 new file mode 100644 index 000000000..2a1bbb464 Binary files /dev/null and b/TMessagesProj/src/main/res/raw/voicechat_join.mp3 differ diff --git a/TMessagesProj/src/main/res/raw/voicechat_leave.mp3 b/TMessagesProj/src/main/res/raw/voicechat_leave.mp3 new file mode 100644 index 000000000..942dd0c65 Binary files /dev/null and b/TMessagesProj/src/main/res/raw/voicechat_leave.mp3 differ diff --git a/TMessagesProj/src/main/res/raw/voip_group_removed.json 
b/TMessagesProj/src/main/res/raw/voip_group_removed.json new file mode 100644 index 000000000..83b4988d3 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/voip_group_removed.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Hand","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[25]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":15,"s":[-7]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":24,"s":[8]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":33,"s":[-6]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":42,"s":[6]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":51,"s":[-2]},{"t":58,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[333.789,483.2,0],"to":[0,0,0],"ti":[21.789,-1.6,0]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":7,"s":[287.789,405.4,0],"to":[-21.789,1.6,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.3,"y":0},"t":14,"s":[257.789,458.2,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":23,"s":[296.789,436.2,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":32,"s":[258.789,436.7,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":41,"s":[295.289,435.7,0],"to":[0,0,0],"ti":[0,0,0]},{"t":50,"s":[274.789,435.2,0]}],"ix":2},"a":{"a":0,"k":[18.789,179.2,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":7,"s":[100,100,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":41,"s":[100,100,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":50,"s":[75,75,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":58,"s":[105,105,100]},
{"t":65,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":3,"s":[{"i":[[-12,0],[-2.268,-2.058],[-2.569,-2.661],[-3.58,3.35],[0,0],[-12,0],[0,-1.945],[0,0],[-0.675,1.224],[-4.934,4.763],[-9.82,1.175],[-3.705,-4.215],[0,0],[0.075,0.236],[-3.964,5.79],[-12,0],[-3.945,-7.024],[0.478,-11.952],[0,0],[0,0],[-7.5,-6],[9,-9],[0,0],[16.5,0],[0,0],[0,33],[0,0],[0,0],[-3.304,2.15]],"o":[[12,0],[0,0],[2.569,2.661],[3.58,-3.35],[0,-1.945],[12,0],[0,0],[0.2,1.163],[0,0],[4.934,-4.763],[11.915,-1.426],[3.705,4.215],[0.2,1.102],[0,0],[3.964,-5.79],[12,0],[6.703,11.934],[-0.246,6.141],[0,0],[9,-4.5],[9,9],[0,0],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0,0],[3.304,-2.15]],"v":[[-137.024,-32.115],[-120.926,-27.119],[-114.374,-20.093],[-101.753,-21.378],[-97.832,-28.387],[-77.95,-38.569],[-55.836,-25.596],[-53.285,-19.19],[-47.519,-18.581],[-38.33,-30.772],[-20.595,-39.749],[-0.376,-33.63],[6.997,-24.238],[12.15,-24.002],[16.974,-34.054],[41.164,-42.476],[62.982,-28.625],[69.641,14.894],[68.91,25.149],[87.911,12.693],[114.911,15.693],[114.911,47.193],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.45,-14.301],[-158.956,-21.686],[-153.978,-28.232]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.3,"y":0},"t":14,"s":[{"i":[[-12,0],[-1.484,-1.608],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.807,2.142],[-2.142,-11.807],[0,0],[0,0],[0,0],[-7.5,-6],[6.09,-13.826],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[-2.142,-11.807],[11.807,-2.142],[0,0],[0,0],[0,0],[9,-4.5],[9,9],[-8.425,19.128],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.468],[-113.986,-21.716],[-1
05.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[14.748,-15.213],[7.009,-114.559],[25.131,-140.714],[51.286,-122.592],[66.386,6.36],[66.758,52.39],[101.144,16.117],[128.144,19.117],[128.539,51.663],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":23,"s":[{"i":[[-12,0],[-1.484,-1.608],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.91,-1.47],[1.47,-11.91],[1.128,-8.151],[0,0],[0,0],[-7.5,-6],[7.321,-11.784],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[1.47,-11.91],[11.91,1.47],[0,0],[-2.487,17.978],[0,0],[9,-4.5],[9,9],[-4.861,11.035],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.467],[-113.986,-21.716],[-105.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[13.725,-15.339],[28.458,-120.718],[53.544,-140.293],[73.119,-115.206],[65.669,-4.134],[65.824,55.271],[111.682,19.114],[138.682,22.114],[138.91,54.217],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.4,"y":0},"t":32,"s":[{"i":[[-12,0],[-1.484,-1.607],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.902,1.41],[-1.41,-11.902],[-0.107,-14.099],[0,0],[0,0],[-7.5,-6],[8.657,-9.569],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.
48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[-1.41,-11.902],[11.902,-1.41],[0,0],[0.074,9.737],[0,0],[9,-4.5],[9,9],[-0.993,2.255],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.468],[-113.986,-21.716],[-105.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[14.539,-15.239],[4.449,-114.618],[24.122,-139.577],[49.082,-119.904],[64.953,8.185],[64.81,58.397],[123.114,22.365],[150.114,25.365],[150.16,56.988],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.4,"y":0},"t":43,"s":[{"i":[[-12,0],[-1.484,-1.608],[0,0],[0.2,1.102],[-4.144,3.986],[-12,0],[-2.345,-3.595],[0,0],[-0.675,1.224],[-3.57,5.124],[-12,0],[-4.079,-4.454],[0,0],[0.075,1.062],[0,0],[-11.675,-2.776],[2.776,-11.675],[0,0],[0,0],[0,0],[-7.5,-6],[9,-9],[0,0],[16.5,0],[0,0],[0,33],[0,0],[-0.497,3.294],[-1.48,1.886]],"o":[[12,0],[1.484,1.608],[0.325,1.183],[0,0],[4.144,-3.986],[12,0],[2.345,3.595],[0.2,1.163],[0,0],[3.57,-5.124],[12,0],[4.079,4.454],[0.2,1.102],[0,0],[2.776,-11.675],[11.675,2.776],[0,0],[0,0],[0,0],[9,-4.5],[9,9],[0,0],[-12,12],[0,0],[-33,0],[0,0],[0,0],[0.497,-3.294],[1.48,-1.886]],"v":[[-137.462,-32.356],[-116.426,-25.467],[-113.986,-21.716],[-105.79,-21.536],[-99.564,-31.313],[-77.95,-38.569],[-55.314,-30.133],[-49.822,-20.66],[-44.684,-21.296],[-38.56,-32.51],[-17.418,-40.066],[3.417,-32.443],[10.548,-16.365],[14.748,-15.213],[32.077,-123.393],[59.172,-140.078],[75.857,-112.983],[64.769,11.349],[64.55,59.2],[126.05,23.2],[153.05,26.2],[153.05,57.7],[51.05,161.2],[7.55,179.2],[-100.45,179.2],[-160.45,119.2],[-160.547,-6.017],[-160.288,-17.007],[-157.16,-25.406]],"c":true}]},{"i":{"x":0.3,"y":1},"o
":{"x":0.167,"y":0},"t":50,"s":[{"i":[[-12,0],[0,-11.98],[0,0],[0.2,6.789],[0,0],[-12,0],[0,-11.98],[0,0],[-0.675,7.538],[0,0],[-12,0],[0,-11.98],[0,0],[0.075,6.539],[0,0],[-12,0],[0,-11.98],[0,0],[0,0],[0,0],[-7.5,-5.99],[9,-8.985],[0,0],[16.5,0],[0,0],[0,32.946],[0,0],[0,0],[0,0]],"o":[[12,0],[0,0],[0.325,7.288],[0,0],[0,-11.98],[12,0],[0,0],[0.2,7.163],[0,0],[0,-11.98],[12,0],[0,0],[0.2,6.789],[0,0],[0,-11.98],[12,0],[0,0],[0,0],[0,0],[9,-4.493],[9,8.985],[0,0],[-12,11.98],[0,0],[-33,0],[0,0],[0,0],[0,0],[0,-11.98]],"v":[[-137.95,-118.11],[-115.45,-95.647],[-115.45,-20.77],[-100.45,-20.77],[-100.45,-140.573],[-77.95,-163.036],[-55.45,-140.573],[-55.45,-20.77],[-40.45,-20.77],[-40.45,-155.548],[-17.95,-178.011],[4.55,-155.548],[4.55,-20.77],[19.55,-20.77],[19.55,-110.622],[42.05,-133.085],[64.55,-110.622],[66.54,-2.102],[67.267,37.505],[106.736,2.2],[133.736,5.195],[135.587,41.371],[51.05,161.93],[7.55,179.9],[-100.45,179.9],[-160.45,119.999],[-160.45,-13.544],[-160.45,-58.182],[-160.45,-95.647]],"c":true}]},{"t":58,"s":[{"i":[[-12,0],[0,-11.98],[0,0],[0.2,6.789],[0,0],[-12,0],[0,-11.98],[0,0],[-0.675,7.538],[0,0],[-12,0],[0,-11.98],[0,0],[0.075,6.539],[0,0],[-12,0],[0,-11.98],[0,0],[0,0],[0,0],[-7.5,-5.99],[9,-8.985],[0,0],[16.5,0],[0,0],[0,32.946],[0,0],[0,0],[0,0]],"o":[[12,0],[0,0],[0.325,7.288],[0,0],[0,-11.98],[12,0],[0,0],[0.2,7.163],[0,0],[0,-11.98],[12,0],[0,0],[0.2,6.789],[0,0],[0,-11.98],[12,0],[0,0],[0,0],[0,0],[9,-4.493],[9,8.985],[0,0],[-12,11.98],[0,0],[-33,0],[0,0],[0,0],[0,0],[0,-11.98]],"v":[[-137.95,-118.11],[-115.45,-95.647],[-115.45,-20.77],[-100.45,-20.77],[-100.45,-140.573],[-77.95,-163.036],[-55.45,-140.573],[-55.45,-20.77],[-40.45,-20.77],[-40.45,-155.548],[-17.95,-178.011],[4.55,-155.548],[4.55,-20.77],[19.55,-20.77],[19.55,-110.622],[42.05,-133.085],[64.55,-110.622],[64.55,14.449],[64.55,60.097],[126.05,24.156],[153.05,27.151],[153.05,58.6],[51.05,161.93],[7.55,179.9],[-100.45,179.9],[-160.45,119.999],[-160.45,-13.544],[-160.45,-58.182],
[-160.45,-95.647]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Block-user","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":125,"st":5,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/voip_invite.json b/TMessagesProj/src/main/res/raw/voip_invite.json new file mode 100644 index 000000000..bdf0dd946 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/voip_invite.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Wibe","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[0,0,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[97.32,97.32,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":17,"s":[{"i":[[-64.7,64.701],[64.7,64.7],[64.7,-64.7],[-64.7,-64.7]],"o":[[64.7,-64.7],[-64.7,-64.7],[-64.7,64.7],[64.7,64.7]],"v":[[117.011,117.011],[117.011,-117.011],[-117.011,-117.011],[-117.011,117.011]],"c":true}]},{"t":40,"s":[{"i":[[-95.125,95.125],[95.125,95.125],[95.125,-95.125],[-95.125,-95.125]],"o":[[95.125,-95.125],[-95.125,-95.125],[-95.125,95.125],[95.125,95.125]],"v":[[172.035,172.035],[172.035,-172.035],[-172.035,-172.035],[-172.035,172.035]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":17,"s":[20]},{"t":38,"s":[0]}],"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":18,"op":41,"st":-24,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Circle","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.643],"y":[0.971]},"o":{"x":[0.279],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.2],"y":[0.912]},"o":{"x":[0.364],"y":[-0.016]},"t":8,"s":[-68.075]},{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.5],"y":[1.166]},"t":27,"s":[220]},{"i":{"x":[0.5],"y":[1]},"o":{"x":[0.757],"y":[0]},"t":34,"s":[225]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":48,"s":[365]},{"t":60,"s":[360]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":35,"s":[256,256,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":44,"s":[256,264.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":51,"s":[256,248.5,0],"to":[0,0,0],"ti":[0,0,0]},{"t":58,"s":[256,256,0]}],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[0.866,0.866,1]},"o":{"x":[0.38,0.38,0.38],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"i":{"x":[0.538,0.538,0.538],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[-0.67,-0.67,0]},"t":8,"s":[98,98,100]},{"i":{"x":[0.523,0.523,0.1],"y":[1.588,1.588,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":17,"s":[87,87,100]},{"i":{"x":[0.562,0.562,0.7],"y":[1,1,1]},"o":{"x":[0.475,0.475,0.3],"y":[0.455,0.455,0]},"t":34,"s":[105,105,100]},{"i":{"x":[0.438,0.438,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t
":45,"s":[90,90,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":52,"s":[105,105,100]},{"t":60,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":2,"s":[{"i":[[0,0],[-4.5,-4.5],[0,0],[4.5,-4.5],[0,0],[0,0],[0,0],[0,0],[0,0],[4.5,4.5],[0,0],[-4.5,4.5],[0,0],[0,0],[0,0],[0,0]],"o":[[4.5,-4.5],[0,0],[4.5,4.5],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.5,4.5],[0,0],[-4.5,-4.5],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[21,-39],[36,-39],[40.5,-34.5],[40.5,-19.5],[14.223,6.777],[13.264,7.736],[13.159,7.841],[11.433,9.567],[-15,36],[-30,36],[-34.5,31.5],[-34.5,16.5],[-7.407,-10.593],[-6.061,-11.939],[-5.899,-12.101],[-4.246,-13.754]],"c":true}]},{"i":{"x":0.505,"y":1},"o":{"x":0.525,"y":0},"t":8,"s":[{"i":[[0,0],[-4.5,-4.5],[0,0],[4.5,-4.5],[0,0],[0,0],[0,0],[0,0],[0,0],[4.5,4.5],[0,0],[-4.5,4.5],[0,0],[0,0],[0,0],[0,0]],"o":[[4.5,-4.5],[0,0],[4.5,4.5],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.5,4.5],[0,0],[-4.5,-4.5],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[17.25,-34.75],[32.25,-34.75],[36.75,-30.25],[36.75,-15.25],[14.223,6.777],[13.264,7.736],[13.159,7.841],[11.433,9.567],[-9.5,30],[-24.5,30],[-29,25.5],[-29,10.5],[-7.407,-10.593],[-6.061,-11.939],[-5.899,-12.101],[-4.246,-13.754]],"c":true}]},{"i":{"x":0.505,"y":1},"o":{"x":0.5,"y":0},"t":29,"s":[{"i":[[0,0],[-0.112,-9.077],[0,0],[7.954,-1.109],[0,0],[0,0],[0,0],[0,0],[0,0],[8.008,0.597],[0,0],[1.616,6.891],[-0.222,2.299],[0,0],[0,0],[-3.54,0.207]],"o":[[6.989,0.945],[0,0],[-1.541,7.404],[0,0],[0,0],[0,0],[0,0],[0,0],[1.223,6.541],[0,0],[-8.008,-0.597],[0,0],[0.32,-3.308],[0,0],[0,0],[1.898,-0.111]],"v":[[30.566,-40.803],[45.776,-28.007],[46.778,-21.441],[34.73,-11.672],[0.233,-18.289],[-2.364,-17.506],[-2.55,-17.427],[-3.288,-14.769],[13.051,58.936],[1.03,70.407],[-4.699,69.469],[-16.629,54.68],[-32.653,-35.377],[-29.425,-43.123],[-27.831,-44.79],[-19.19,-48.174]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.5,"y"
:0},"t":34,"s":[{"i":[[0,0],[-0.112,-9.077],[0,0],[7.954,-1.109],[0,0],[0,0],[0,0],[0,0],[0,0],[8.008,0.597],[0,0],[1.616,6.891],[-0.222,2.299],[0,0],[0,0],[-3.54,0.207]],"o":[[6.989,0.945],[0,0],[-1.541,7.404],[0,0],[0,0],[0,0],[0,0],[0,0],[1.223,6.541],[0,0],[-8.008,-0.597],[0,0],[0.32,-3.308],[0,0],[0,0],[1.898,-0.111]],"v":[[30.566,-40.803],[45.776,-28.007],[46.778,-21.441],[34.73,-11.672],[0.233,-18.289],[-2.364,-17.506],[-2.55,-17.427],[-3.288,-14.769],[13.051,58.936],[1.03,70.407],[-4.699,69.469],[-16.629,54.68],[-32.653,-35.377],[-29.425,-43.123],[-27.831,-44.79],[-19.19,-48.174]],"c":true}]},{"i":{"x":0.5,"y":1},"o":{"x":0.5,"y":0},"t":50,"s":[{"i":[[0,0],[-4.465,-4.535],[0,0],[4.535,-4.465],[0,0],[0,0],[0,0],[0,0],[0,0],[4.465,4.535],[0,0],[-4.535,4.465],[0,0],[0,0],[0,0],[0,0]],"o":[[4.535,-4.465],[0,0],[4.465,4.535],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.535,4.465],[0,0],[-4.465,-4.535],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[12.628,-29.642],[27.627,-29.525],[32.092,-24.99],[31.976,-9.991],[14.16,6.862],[13.193,7.813],[13.087,7.918],[11.348,9.63],[-6.151,26.934],[-21.151,26.818],[-25.616,22.283],[-25.499,7.283],[-7.335,-10.675],[-5.978,-12.011],[-5.816,-12.171],[-4.15,-13.812]],"c":true}]},{"t":60,"s":[{"i":[[0,0],[-4.5,-4.5],[0,0],[4.5,-4.5],[0,0],[0,0],[0,0],[0,0],[0,0],[4.5,4.5],[0,0],[-4.5,4.5],[0,0],[0,0],[0,0],[0,0]],"o":[[4.5,-4.5],[0,0],[4.5,4.5],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.5,4.5],[0,0],[-4.5,-4.5],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[21,-39],[36,-39],[40.5,-34.5],[40.5,-19.5],[14.223,6.777],[13.264,7.736],[13.159,7.841],[11.433,9.567],[-15,36],[-30,36],[-34.5,31.5],[-34.5,16.5],[-7.407,-10.593],[-6.061,-11.939],[-5.899,-12.101],[-4.246,-13.754]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":2,"s":[{"i":[[0,0],[-28.5,-28.5],[27.625,-29.25],[0,0],[6,5.437],[-6,6],[0,0],[18,18],[18,-18],[0,0],[6,6],[-6,6]],"o":[[28.5,-28.5],[28.5,28.5],[0,0],[-6,6],[-6.287,-5.698],[0,0],[18,-18],[-18,-18],[0,0],[-6,6],[-6,-6],[0,0]],"v":[[-15,-88.5],[88.5,-88.5],[89.125,15.375],[75.875,29.625],[57,30],[57,10.5],[70.5,-3],[70.5,-69],[4.5,-69],[-9,-55.5],[-28.5,-55.5],[-28.5,-75]],"c":true}]},{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":8,"s":[{"i":[[0,0],[-28.5,-28.5],[27.625,-29.25],[0,0],[6,5.437],[-6,6],[0,0],[18,18],[18,-18],[0,0],[6,6],[-6,6]],"o":[[28.5,-28.5],[28.5,28.5],[0,0],[-6,6],[-6.287,-5.698],[0,0],[18,-18],[-18,-18],[0,0],[-6,6],[-6,-6],[0,0]],"v":[[-22.5,-81],[81,-81],[81.625,22.875],[68.375,37.125],[49.5,37.5],[49.5,18],[63,4.5],[63,-61.5],[-3,-61.5],[-16.5,-48],[-36,-48],[-36,-67.5]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.5,"y":0},"t":29,"s":[{"i":[[0,0],[-28.5,-28.5],[27.625,-29.25],[0,0],[6,5.437],[-6,6],[0,0],[18,18],[18,-18],[0,0],[6,6],[-6,6]],"o":[[28.5,-28.5],[28.5,28.5],[0,0],[-6,6],[-6.287,-5.698],[0,0],[18,-18],[-18,-18],[0,0],[-6,6],[-6,-6],[0,0]],"v":[[16.228,-118.186],[119.728,-118.186],[120.353,-14.311],[107.103,-0.061],[88.228,0.314],[88.228,-19.186],[101.728,-32.686],[101.728,-98.686],[35.728,-98.686],[22.228,-85.186],[2.728,-85.186],[2.728,-104.686]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.5,"y":0},"t":34,"s":[{"i":[[0,0],[-28.5,-28.5],[27.625,-29.25],[0,0],[6,5.437],[-6,6],[0,0],[18,18],[18,-18],[0,0],[6,6],[-6,6]],"o":[[28.5,-28.5],[28.5,28.5],[0,0],[-6,6],[-6.287,-5.698],[0,0],[18,-18],[-18,-18],[0,0],[-6,6],[-6,-6],[0,0]],"v":[[16.228,-118.186],[119.728,-118.186],[120.353,-14.311],[107.103,-0.061],[88.228,0.314],[88.228,-19.186],[101.728,-32.686],[101.728,-98.686],[35.728,-98.686],[22.228,-85.186],[2.728,-85.186],[2.728,-104.686]],"c":true}]},{"i":{"x":0.5,"y":1},"o":{"x":0.5,"y":0},"t":50,"s":[{"i":[[0,0],[-28.5,-28.5],
[27.625,-29.25],[0,0],[6,5.437],[-8.827,8.541],[0,0],[18,18],[18,-18],[0,0],[6,6],[-7.327,7.666]],"o":[[28.5,-28.5],[28.5,28.5],[0,0],[-8.952,8.916],[-6.287,-5.698],[0,0],[18,-18],[-18,-18],[0,0],[-8.327,7.416],[-6,-6],[0,0]],"v":[[-33.673,-69.666],[69.827,-69.666],[70.452,34.209],[57.202,48.459],[40.202,46.334],[38.327,29.334],[51.827,15.834],[51.827,-50.166],[-14.173,-50.166],[-27.673,-36.666],[-44.048,-38.916],[-47.173,-56.166]],"c":true}]},{"t":60,"s":[{"i":[[0,0],[-28.5,-28.5],[27.625,-29.25],[0,0],[6,5.437],[-6,6],[0,0],[18,18],[18,-18],[0,0],[6,6],[-6,6]],"o":[[28.5,-28.5],[28.5,28.5],[0,0],[-6,6],[-6.287,-5.698],[0,0],[18,-18],[-18,-18],[0,0],[-6,6],[-6,-6],[0,0]],"v":[[-15,-88.5],[88.5,-88.5],[89.125,15.375],[75.875,29.625],[57,30],[57,10.5],[70.5,-3],[70.5,-69],[4.5,-69],[-9,-55.5],[-28.5,-55.5],[-28.5,-75]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":2,"s":[{"i":[[-6,-6],[6,-6],[0,0],[-18,-18],[-18,16.5],[0,0],[-6,-6],[6,-6],[0,0],[28.5,28.5],[-28.5,28.5],[0,0]],"o":[[6,6],[0,0],[-18,18],[18,18],[0,0],[6,-6],[6,6],[0,0],[-28.5,28.5],[-28.5,-28.5],[0,0],[6,-6]],"v":[[-51,-31.5],[-51,-12],[-64.5,0],[-64.5,66],[-1.5,67.5],[13.5,52.5],[33,52.5],[33,72],[19.5,85.5],[-84,85.5],[-84,-18],[-70.5,-31.5]],"c":true}]},{"i":{"x":0.2,"y":1},"o":{"x":0.3,"y":0},"t":8,"s":[{"i":[[-6,-6],[6,-6],[0,0],[-18,-18],[-18,16.5],[0,0],[-6,-6],[6,-6],[0,0],[28.5,28.5],[-28.5,28.5],[0,0]],"o":[[6,6],[0,0],[-18,18],[18,18],[0,0],[6,-6],[6,6],[0,0],[-28.5,28.5],[-28.5,-28.5],[0,0],[6,-6]],"v":[[-46,-35],[-46,-15.5],[-59.5,-3.5],[-59.5,62.5],[3.5,64],[18.5,49],[38,49],[38,68.5],[24.5,82],[-79,82],[-79,-21.5],[-65.5,-35]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.5,"y":0},"t":29,"s":[{"i":[[-6,-6],[6,-6],[0,0],[-18,-18],[-18,16.5],[0,0],[-6,-6],[6,-6],[0,0],[28.5,28.5],[-28.5,28.5],[0,0]],"o":[[6,6],[0,0],[-18,18],[18,18],[0,0],[6,-6],[6,6],[0,0],[-28.5,28.5],[
-28.5,-28.5],[0,0],[6,-6]],"v":[[-84.414,3.275],[-84.414,22.775],[-97.914,34.775],[-97.914,100.775],[-34.914,102.275],[-19.914,87.275],[-0.414,87.275],[-0.414,106.775],[-13.914,120.275],[-117.414,120.275],[-117.414,16.775],[-103.914,3.275]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.5,"y":0},"t":34,"s":[{"i":[[-6,-6],[6,-6],[0,0],[-18,-18],[-18,16.5],[0,0],[-6,-6],[6,-6],[0,0],[28.5,28.5],[-28.5,28.5],[0,0]],"o":[[6,6],[0,0],[-18,18],[18,18],[0,0],[6,-6],[6,6],[0,0],[-28.5,28.5],[-28.5,-28.5],[0,0],[6,-6]],"v":[[-84.414,3.275],[-84.414,22.775],[-97.914,34.775],[-97.914,100.775],[-34.914,102.275],[-19.914,87.275],[-0.414,87.275],[-0.414,106.775],[-13.914,120.275],[-117.414,120.275],[-117.414,16.775],[-103.914,3.275]],"c":true}]},{"i":{"x":0.5,"y":1},"o":{"x":0.5,"y":0},"t":50,"s":[{"i":[[-6,-6],[7,-7.932],[0,0],[-18,-18],[-18,16.5],[0,0],[-6,-6],[7.375,-7.432],[0,0],[28.5,28.5],[-28.5,28.5],[0,0]],"o":[[6,6],[0,0],[-18,18],[18,18],[0,0],[8,-7.432],[6,6],[0,0],[-28.5,28.5],[-28.5,-28.5],[0,0],[6.625,-6.807]],"v":[[-45.5,-39.568],[-41.125,-24.193],[-54.125,-11.068],[-54.125,54.932],[8.875,56.432],[24.375,41.807],[38.875,45.182],[43.375,60.932],[29.875,74.432],[-73.625,74.432],[-73.625,-29.068],[-60.375,-42.943]],"c":true}]},{"t":60,"s":[{"i":[[-6,-6],[6,-6],[0,0],[-18,-18],[-18,16.5],[0,0],[-6,-6],[6,-6],[0,0],[28.5,28.5],[-28.5,28.5],[0,0]],"o":[[6,6],[0,0],[-18,18],[18,18],[0,0],[6,-6],[6,6],[0,0],[-28.5,28.5],[-28.5,-28.5],[0,0],[6,-6]],"v":[[-51,-31.5],[-51,-12],[-64.5,0],[-64.5,66],[-1.5,67.5],[13.5,52.5],[33,52.5],[33,72],[19.5,85.5],[-84,85.5],[-84,-18],[-70.5,-31.5]],"c":true}]}],"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":3,"ty":"sh","ix":4,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.49,"y":0},"t":34,"s":[{"i":[[-70.5,70.5],[70.5,70.5],[70.5,-70.5],[-70.5,-70.5]],"o":[[70.5,-70.5],[-70.5,-70.5],[-70.5,70.5],[70.5,70.5]],"v":[[127.5,127.5],[127.5,-127.5],[-127.5,-127.5],[-127.5,127.5]],"c":true}]},{"i":{"x":0.5,"y":1},"o":{"x":0.5,"y":0},"t":50,"s":[{"i":[[-54.833,54.833],[54.833,54.833],[54.833,-54.833],[-54.833,-54.833]],"o":[[54.833,-54.833],[-54.833,-54.833],[-54.833,54.833],[54.833,54.833]],"v":[[99.167,99.167],[99.167,-99.167],[-99.167,-99.167],[-99.167,99.167]],"c":true}]},{"t":60,"s":[{"i":[[-70.5,70.5],[70.5,70.5],[70.5,-70.5],[-70.5,-70.5]],"o":[[70.5,-70.5],[-70.5,-70.5],[-70.5,70.5],[70.5,70.5]],"v":[[127.5,127.5],[127.5,-127.5],[-127.5,-127.5],[-127.5,127.5]],"c":true}]}],"ix":2},"nm":"Path 4","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Copy-link","np":5,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":122,"st":-4,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/voip_muted.json b/TMessagesProj/src/main/res/raw/voip_muted.json new file mode 100644 index 000000000..28b8e150b --- /dev/null +++ b/TMessagesProj/src/main/res/raw/voip_muted.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Piece 
Right","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[96.759,15.833,0],"ix":2},"a":{"a":0,"k":[96.759,15.833,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":20,"s":[{"i":[[8.644,0],[0.966,-3.318],[1.662,-4.808],[0,0],[0.11,0.518]],"o":[[-8.444,0],[-0.831,5.305],[18.025,10.092],[0.708,-2.076],[1.164,-8.455]],"v":[[99.74,-8.289],[85.957,-2.911],[86.259,-1.293],[113.616,12.213],[113.87,7.792]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":28,"s":[{"i":[[8.644,0],[0.997,-6.797],[2.159,-4.606],[0,0],[-1.829,11.439]],"o":[[-8.444,0],[-0.831,5.305],[14.357,15.633],[6.005,-9.368],[1.164,-8.455]],"v":[[99.74,-8.289],[83.172,3.482],[76.832,24.147],[98.367,47.597],[113.87,7.792]],"c":true}]},{"t":34,"s":[{"i":[[8.644,0],[0.997,-6.797],[1.662,-4.808],[0,0],[-1.829,11.439]],"o":[[-8.444,0],[-0.831,5.305],[15.921,14.037],[4.987,-9.947],[1.164,-8.455]],"v":[[99.74,-8.289],[83.172,3.482],[79.515,18.9],[103.397,39.955],[113.87,7.792]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":20,"op":120,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Line 
Arrow","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":19,"s":[-16]},{"t":25,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":19,"s":[-5.688,-11.647,0],"to":[0,0,0],"ti":[0,0,0]},{"t":25,"s":[-0.042,7.25,0]}],"ix":2},"a":{"a":0,"k":[-0.042,7.25,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-127.583,-120.167],[127.5,134.667]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.48],"y":[0]},"t":16,"s":[50]},{"t":25,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.48],"y":[0]},"t":16,"s":[50]},{"t":25,"s":[100]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":33,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Top","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":15,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":21,"s":[-5]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":28,"s":[4]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":34,"s":[-2]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":39,"s":[1]},{"t":43,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":9,"s":[-1.33,-78.668,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":16,"s":[-1.33,61.332,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":23,"s":[-1.33,30.332,0],"to":[0,0,0],"ti":[0,0,0]},{"t":28,"s":[-1.33,41.332,0]}],"ix":2},"a":{"a":0,"k":[-1.33,41.332,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.46,"y":0},"t":9,"s":[{"i":[[0,0],[-2.251,-5.541],[-9.065,-4.075],[-9.932,10.778]],"o":[[0,0],[2.251,5.541],[11.001,4.945],[0,0]],"v":[[-33.983,-59.845],[-32.058,-42.66],[-14.872,-27.336],[24.81,-35.502]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[0,0],[-1.768,-5.154],[-8.639,-5.404],[-8.878,13.879]],"o":[[0,0],[1.855,9.217],[10.281,6.332],[0,0]],"v":[[-31.008,-100.209],[-29.478,-75.002],[-13.537,-53.455],[23.7,-63.849]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[0,0],[-1.019,-4.553],[-7.979,-7.462],[-7.245,18.683]],"o":[[0,0],[1.242,14.911],[9.164,8.48],[0,0]],"v":[[-26.829,-99.882],[-25.911,-62.248],[-11.898,-31.061],[21.551,-44.907]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[0,0],[-0.645,-4.253],[-7.649,-8.492],[-6.428,21.085]],"o":[[0,0],[0.935,17.758],[8.606,9.554],[0,0]],"v":[[-25.968,-57.736],[-25.355,-13.889],
[-12.306,22.117],[19.25,6.546]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[0,0],[-0.171,-1.406],[-14.005,-7.213],[-10.18,17.533]],"o":[[0,0],[0.935,17.004],[15.187,7.906],[0,0]],"v":[[-44.05,-37.796],[-43.743,1.971],[-21.059,33.86],[35.072,21.043]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.687,"y":0},"t":16,"s":[{"i":[[0,0],[0,0],[-22.327,-6.582],[-15.799,15.779]],"o":[[0,0],[1.28,16.631],[24.057,7.092],[0,0]],"v":[[-68.775,-27.951],[-68.506,9.802],[-32.67,39.658],[56.366,28.201]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.366,"y":0},"t":18,"s":[{"i":[[0,0],[0,0],[-22.327,-6.582],[-15.799,15.779]],"o":[[0,0],[1.28,16.631],[24.057,7.092],[0,0]],"v":[[-68.775,-27.951],[-68.506,9.802],[-32.67,39.658],[56.366,28.201]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":20,"s":[{"i":[[0,0],[0,0],[-18.81,-8.744],[-18.679,12.082]],"o":[[0,0],[0.415,18.724],[16.226,7.117],[0,0]],"v":[[-56.9,-23.648],[-56.812,-5.783],[-26.882,36.498],[33.444,31.867]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":21,"s":[{"i":[[0,0],[0,0],[-17.754,-9.393],[-14.326,8.948]],"o":[[0,0],[0.155,19.353],[13.875,7.124],[0,0]],"v":[[-53.335,-22.357],[-53.302,-10.462],[-25.144,35.55],[27.514,34.722]],"c":true}]},{"i":{"x":0.1,"y":1},"o":{"x":0.167,"y":0.167},"t":22,"s":[{"i":[[0,0],[0,0],[-17.411,-9.604],[-11.248,5.23]],"o":[[0,0],[0.071,19.557],[13.111,7.127],[0,0]],"v":[[-52.175,-21.937],[-52.16,-11.983],[-24.579,35.241],[21.832,37.335]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":23,"s":[{"i":[[0,0],[0,0],[-17.362,-9.634],[-10.401,2.783]],"o":[[0,0],[0.059,19.586],[13.002,7.127],[0,0]],"v":[[-52.01,-21.877],[-51.998,-12.2],[-24.499,35.197],[19.245,38.033]],"c":true}]},{"t":25,"s":[{"i":[[0,0],[0,0],[-17.122,-9.782],[-10.14,2.155]],"o":[[0,0],[0,19.729],[12.468,7.129],[0,0]],"v":[[-51.2,-21.583],[-51.2,-13.263],[-24.104,34.982],[9.475,40.121]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.46,"y":0},"t":9,"s":[{"i":[[-1.227,4.669],[3.756,5.815],[17.828,0.454],[4.589,-6.289],[0,0],[-17.639,-6.973],[-0.804,-0.31],[-0.486,0.269]],"o":[[1.768,-6.728],[-5.082,-7.869],[-16.694,-0.425],[-2.57,3.522],[0,0],[0.878,0.347],[5.063,1.953],[1.811,-1.004]],"v":[[29.744,-44.09],[27.95,-68.716],[-1.483,-83.563],[-29.667,-71.438],[-33.951,-59.897],[15.218,-38.966],[17.744,-37.978],[24.972,-35.625]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[-0.971,5.146],[2.949,5.409],[16.537,0.423],[3.864,-9.565],[0,0],[-16.338,-10.338],[-0.745,-0.46],[-0.45,0.401]],"o":[[1.396,-7.153],[-3.991,-11.459],[-15.464,-0.396],[-2.018,3.275],[0,0],[0.814,0.515],[4.69,2.895],[1.85,-1.88]],"v":[[28.224,-76.581],[26.816,-113.451],[-0.883,-134.741],[-27.54,-116.855],[-30.979,-100.285],[14.563,-69.254],[16.903,-67.791],[23.85,-64.032]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[-0.573,5.883],[1.701,4.778],[14.538,0.375],[2.74,-14.64],[0,0],[-14.322,-15.55],[-0.653,-0.691],[-0.394,0.606]],"o":[[0.82,-7.811],[-2.301,-17.019],[-13.557,-0.35],[-1.164,2.894],[0,0],[0.713,0.774],[4.111,4.354],[1.91,-3.238]],"v":[[25.441,-64.059],[24.629,-119.894],[-0.383,-151.165],[-24.672,-124.355],[-26.804,-99.996],[13.12,-53.32],[15.171,-51.119],[21.683,-45.182]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[-0.375,6.252],[1.076,4.463],[13.538,0.351],[2.178,-17.177],[0,0],[-13.314,-18.156],[-0.607,-0.807],[-0.366,0.708]],"o":[[0.533,-8.14],[-1.457,-19.799],[-12.604,-0.326],[-0.737,2.703],[0,0],[0.663,0.904],[3.822,5.084],[1.94,-3.917]],"v":[[22.822,-15.815],[22.308,-81.133],[-1.36,-117.394],[-24.467,-86.123],[-25.944,-57.87],[11.17,-3.371],[13.077,-0.801],[19.372,6.224]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[-0.168,4.265],[0.285,1.475],[23.536,0.116],[2.62,-15.8
59],[0,0],[-23.075,-16.557],[-1.052,-0.736],[-0.639,0.641]],"o":[[0.202,-5.145],[-0.386,-17.888],[-21.846,-0.108],[-0.195,0.893],[0,0],[1.149,0.824],[6.624,4.636],[3.876,-3.888]],"v":[[41.126,0.651],[40.99,-59.206],[-1.333,-91.691],[-43.027,-63.247],[-44.009,-37.918],[20.314,11.781],[23.619,14.125],[35.284,20.75]],"c":true}]},{"i":{"x":0.6,"y":1},"o":{"x":0.687,"y":0},"t":16,"s":[{"i":[[-0.102,3.285],[0,0],[37.182,0],[3.808,-15.209],[0,0],[-36.434,-15.767],[-1.66,-0.701],[-0.999,0.618]],"o":[[0.114,-3.666],[0,-16.944],[-34.494,0],[0,0],[0,0],[1.814,0.785],[10.458,4.415],[6.267,-3.874]],"v":[[65.886,8.781],[65.886,-48.379],[-1.31,-79],[-67.61,-51.951],[-68.71,-28.067],[32.85,19.262],[38.068,21.494],[56.701,27.922]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.366,"y":0},"t":18,"s":[{"i":[[-0.102,3.285],[0,0],[37.182,0],[3.808,-15.209],[0,0],[-36.434,-15.767],[-1.66,-0.701],[-0.999,0.618]],"o":[[0.114,-3.666],[0,-16.944],[-34.494,0],[0,0],[0,0],[1.814,0.785],[10.458,4.415],[6.267,-3.874]],"v":[[65.886,8.781],[65.886,-48.379],[-1.31,-79],[-67.61,-51.951],[-68.71,-28.067],[32.85,19.262],[38.068,21.494],[56.701,27.922]],"c":true}]},{"i":{"x":0.1,"y":1},"o":{"x":0.167,"y":0.167},"t":20,"s":[{"i":[[-0.033,1.066],[0,0],[30.704,0],[3.144,-21.624],[0,0],[-11.816,-5.114],[-0.538,-0.227],[-0.324,0.2]],"o":[[0.037,-1.189],[0,-24.091],[-28.484,0],[0,0],[0,0],[0.588,0.255],[3.392,1.432],[2.033,-1.257]],"v":[[53.807,-23.785],[54.166,-88.504],[-1.323,-132.041],[-56.073,-93.583],[-56.247,-78.377],[30.734,-29.858],[33.021,-28.541],[51.299,-17.238]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":23,"s":[{"i":[[-0.005,0.151],[0,0],[28.037,0],[2.871,-24.265],[0,0],[-1.68,-0.727],[-0.077,-0.032],[-0.046,0.028]],"o":[[0.005,-0.169],[0,-27.033],[-26.01,0],[0,0],[0,0],[0.084,0.036],[0.482,0.204],[0.289,-0.179]],"v":[[49.34,-13.828],[49.34,-105.026],[-1.329,-153.881],[-51.322,-110.726],[-51.302,-101.317],[30.858,-30.231],[31.938,-29.291],[48.374,-13.486]],"c":true}]},{"t":25,"s
":[{"i":[[0,0],[0,0],[27.595,0],[2.826,-24.703],[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,-27.521],[-25.6,0],[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[48.54,-14.921],[48.54,-107.763],[-1.33,-157.5],[-50.535,-113.566],[-50.211,-113.243],[30.762,-32.622],[31.642,-31.746],[47.972,-15.487]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":10,"op":120,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Middle","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-1.333,91.333,0],"ix":2},"a":{"a":0,"k":[-1.333,91.333,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":1,"s":[{"i":[[0,0],[-80.083,0.001],[0,0]],"o":[[0,0],[83.334,-0.001],[0,0]],"v":[[-101.333,7.5],[-2.583,91.333],[98.667,7.5]],"c":false}]},{"i":{"x":0.5,"y":1},"o":{"x":0.3,"y":0},"t":4,"s":[{"i":[[0,0],[-41.278,0.001],[0,0]],"o":[[0,0],[42.953,-0.001],[0,0]],"v":[[-52.877,7.5],[-1.978,91.333],[50.21,7.5]],"c":false}]},{"i":{"x":0.537,"y":1},"o":{"x":0.652,"y":0},"t":11,"s":[{"i":[[0,0],[-66.655,0.001],[0,0]],"o":[[0,0],[69.361,-0.001],[0,0]],"v":[[-84.565,-11.485],[-2.374,91.333],[81.899,-11.485]],"c":false}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":17,"s":[{"i":[[0,0],[-90.825,0.001],[0,0]],"o":[[0,0],[94.511,-0.001],[0,0]],"v":[[-121.962,36.773],[-1.515,103.641],[117.013,36.155]],"c":false}]},{"t":24,"s":[{"i":[[0,0],[-80.083,0.001],[0,0]],"o":[[0,0],[83.334,-0.001],[0,0]],"v":[[-10
1.333,7.5],[-2.583,91.333],[98.667,7.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":32,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"tm","s":{"a":0,"k":0,"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":19,"s":[100]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":20,"s":[91]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":21,"s":[84]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":22,"s":[82]},{"t":24,"s":[71]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - 
Trim","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Bottom","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.377],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[180]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.5],"y":[0]},"t":10,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":19,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":26,"s":[6]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":32,"s":[-4]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":37,"s":[2]},{"t":41,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[254.667,457.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":8,"s":[254.667,297.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":16,"s":[254.667,477.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":23,"s":[254.667,377.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.167,"y":0},"t":29,"s":[254.667,417.25,0],"to":[0,0,0],"ti":[0,0,0]},{"t":34,"s":[254.667,397.25,0]}],"ix":2},"a":{"a":0,"k":[-1.333,141.25,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.3,0.3,0.3],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":6,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":11,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-1.333,141.25],[-1.333,96.333]],"c":false}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":17,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-0.807,130.716],[-0.798,106.517]],"c":false}]},{"t":23,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-1.333,141.25],[-1.333,96.333]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":33,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0}],"markers":[{"tm":0,"cm":"1","dr":0},{"tm":45,"cm":"2","dr":0}]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/voip_unmuted.json b/TMessagesProj/src/main/res/raw/voip_unmuted.json new file mode 100644 index 000000000..308d93f7b --- /dev/null +++ b/TMessagesProj/src/main/res/raw/voip_unmuted.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":120,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Shape Layer 1","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.317,"y":1},"o":{"x":0.532,"y":0},"t":17,"s":[256,372,0],"to":[0,0,0],"ti":[0,0,0]},{"t":24,"s":[256,253,0]}],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-2,-186],[-2.334,-263.997]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":16,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[90,-67],"ix":2},"a":{"a":0,"k":[0,-187],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":90.869,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 5","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-2,-186],[-2.12,-259]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":16,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[-90,-67],"ix":2},"a":{"a":0,"k":[0,-187],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":-89.597,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 4","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-7.882,-177.913],[-23.321,-250.087]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":16,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[-80,-157],"ix":2},"a":{"a":0,"k":[0,-187],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":-36.027,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 3","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[4.459,-178.366],[19.316,-250.867]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":16,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[80,-157],"ix":2},"a":{"a":0,"k":[0,-187],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":40.236,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 2","np":2,"cix":2,"bm":0,"ix":4,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-2,-186],[-1,-243.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":16,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,-187],"ix":2},"a":{"a":0,"k":[0,-187],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":2,"cix":2,"bm":0,"ix":5,"mn":"ADBE Vector Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.33],"y":[0]},"t":20,"s":[0]},{"t":30,"s":[100]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.33],"y":[0]},"t":17,"s":[0]},{"t":27,"s":[100]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":6,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - 
Trim","hd":false}],"ip":18,"op":31,"st":-1,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Top","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.382],"y":[1]},"o":{"x":[0.534],"y":[0]},"t":3,"s":[0]},{"t":18,"s":[180]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.4,"y":0},"t":2,"s":[-1.293,-17.921,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.31,"y":0},"t":12,"s":[-1.293,-123.921,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":18,"s":[-1.293,-46.921,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":23,"s":[-1.293,-63.921,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.167,"y":0},"t":28,"s":[-1.293,-52.921,0],"to":[0,0,0],"ti":[0,0,0]},{"t":32,"s":[-1.293,-57.921,0]}],"ix":2},"a":{"a":0,"k":[172.332,-134.171,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.1,"y":1},"o":{"x":0.35,"y":0},"t":3,"s":[{"i":[[-60.047,-1.953],[-7.601,8.812],[11.216,6.808],[60.074,-0.721],[8.451,-1.873],[-9.93,-10.711]],"o":[[60.047,1.953],[7.601,-8.812],[-11.346,-6.887],[-60.16,0.722],[-8.451,1.873],[9.93,10.711]],"v":[[171.71,-35.897],[306.13,-57.407],[300.717,-77.461],[173.346,-86.203],[47.99,-78.856],[38.107,-55.969]],"c":true}]},{"i":{"x":0.65,"y":1},"o":{"x":0.35,"y":0},"t":15,"s":[{"i":[[-27.519,0],[0,27.521],[0,0],[27.519,0],[0,-27.521],[0,0]],"o":[[27.519,0],[0,0],[0,-27.521],[-27.519,0],[0,0],[0,27.521]],"v":[[172.332,-34.697],[222.065,-84.434],[222.065,-183.908],[172.332,-233.645],[122.599,-183.908],[122.599,-84.434]],"c":true}]},{"i":{"x":0.65,"y":1},"o":{"x":0.35,"y":0},"t":19,"s":[{"i":[[-36.915,0],[-4.805,15.85],[7.712,20.051],[36.915,0],[6.531,-18.949],[-5.986,-14.65]],"o":[[36.915,0],[4.805,-15.85],[-7.712,-20.051],[-36.915,0],[-6.531,18.949],[5.986,14.65]],"v":[[172.284,-91.046],[238.998,-126.696],[238.998,-197.995],[172.284,-233.645],[105.57,-197.995],[105.57,-126.6
96]],"c":true}]},{"i":{"x":0.65,"y":1},"o":{"x":0.35,"y":0},"t":24,"s":[{"i":[[-26.398,0],[0,29.885],[0,0],[26.398,0],[0,-29.885],[0,0]],"o":[[26.398,0],[0,0],[0,-29.885],[-26.398,0],[0,0],[0,29.885]],"v":[[172.332,-26.155],[220.04,-80.163],[220.04,-188.179],[172.332,-242.188],[124.625,-188.179],[124.625,-80.163]],"c":true}]},{"i":{"x":0.65,"y":1},"o":{"x":0.35,"y":0},"t":29,"s":[{"i":[[-28.659,0],[0,25.792],[0,0],[28.659,0],[0,-25.792],[0,0]],"o":[[28.659,0],[0,0],[0,-25.792],[-28.659,0],[0,0],[0,25.792]],"v":[[172.332,-47.194],[224.125,-93.807],[224.125,-187.032],[172.332,-233.645],[120.54,-187.032],[120.54,-93.807]],"c":true}]},{"t":33,"s":[{"i":[[-27.519,0],[0,27.521],[0,0],[27.519,0],[0,-27.521],[0,0]],"o":[[27.519,0],[0,0],[0,-27.521],[-27.519,0],[0,0],[0,27.521]],"v":[[172.332,-34.697],[222.065,-84.434],[222.065,-183.908],[172.332,-233.645],[122.599,-183.908],[122.599,-84.434]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Middle","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-1.333,91.333,0],"ix":2},"a":{"a":0,"k":[-1.333,91.333,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.4,"y":0},"t":3,"s":[{"i":[[0,0],[-101.927,0],[0,0]],"o":[[0,0],[106.064,0],[0,0]],"v":[[-128.61,52.347],[-2.924,91.334],[125.943,52.347]],"c":false}]},{"i":{"x":0.2,"y":1},"o":{"x":0.167,"y":0.167},"t":6,"s":[{"i":[[0,0],[-93.549,0.001],[0,0]],"o":[[0,0],[97.346,-0.001],[0,0]],"v":[[-130.147,55.017],[-2.794,91.334],[127.481,54.017]],"c":false}]},{"i":{"x":0.4,"y":1},"o":{"x":0.31,"y":0},"t":14,"s":[{"i":[[0,0],[-73.41,0.001],[0,0]],"o":[[0,0],[76.39,-0.001],[0,0]],"v":[[-93,-16.848],[-2.479,91.333],[90.333,-16.848]],"c":false}]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":20,"s":[{"i":[[0,0],[-83.212,1.774],[0,0]],"o":[[0,0],[82.815,-1.765],[0,0]],"v":[[-117.667,30.388],[-2.788,91.334],[115,30.388]],"c":false}]},{"i":{"x":0.67,"y":1},"o":{"x":0.31,"y":0},"t":25,"s":[{"i":[[0,0],[-75.011,0.001],[0,0]],"o":[[0,0],[78.056,-0.001],[0,0]],"v":[[-95,0.091],[-2.504,91.333],[92.333,0.091]],"c":false}]},{"i":{"x":0.67,"y":1},"o":{"x":0.33,"y":0},"t":30,"s":[{"i":[[0,0],[-84.354,0.001],[0,0]],"o":[[0,0],[87.778,-0.001],[0,0]],"v":[[-106.667,13.292],[-2.65,91.333],[104,13.292]],"c":false}]},{"t":34,"s":[{"i":[[0,0],[-80.083,0.001],[0,0]],"o":[[0,0],[83.334,-0.001],[0,0]],"v":[[-101.333,7.5],[-2.583,91.333],[98.667,7.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":32,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Bottom","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.2,"y":1},"o":{"x":0.4,"y":0},"t":0,"s":[254.667,507.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.31,"y":0},"t":11,"s":[254.667,363.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":17,"s":[254.667,457.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.68,"y":1},"o":{"x":0.31,"y":0},"t":22,"s":[254.667,376.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":27,"s":[254.667,411.25,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":32,"s":[254.667,391.75,0],"to":[0,0,0],"ti":[0,0,0]},{"t":36,"s":[254.667,397.25,0]}],"ix":2},"a":{"a":0,"k":[-1.333,141.25,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.2,0.2,0.2],"y":[1,1,1]},"o":{"x":[0.32,0.32,0.32],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":5,"s":[100,100,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-1.333,141.25],[-1.333,96.333]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":33,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE 
Vector Group","hd":false}],"ip":0,"op":120,"st":0,"bm":0}],"markers":[{"tm":0,"cm":"1","dr":0},{"tm":45,"cm":"2","dr":0}]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/write_contacts_fab_icon.json b/TMessagesProj/src/main/res/raw/write_contacts_fab_icon.json new file mode 100644 index 000000000..91fae9392 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/write_contacts_fab_icon.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":49,"w":512,"h":512,"nm":"Icon","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":3,"nm":"Null 6","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[120,120,100],"ix":6}},"ao":0,"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":3,"nm":"NULL ROTATION","parent":1,"sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.68],"y":[1]},"o":{"x":[0.32],"y":[0]},"t":23,"s":[0]},{"i":{"x":[0.68],"y":[1]},"o":{"x":[0.32],"y":[0]},"t":33.568,"s":[3]},{"i":{"x":[0.68],"y":[1]},"o":{"x":[0.32],"y":[0]},"t":39.334,"s":[2]},{"i":{"x":[0.68],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":44.137,"s":[0]},{"t":48.94140625,"s":[-4]}],"ix":10},"p":{"a":0,"k":[-0.472,-2.472,0],"ix":2},"a":{"a":0,"k":[50,50,0],"ix":1},"s":{"a":0,"k":[94.34,94.34,100],"ix":6}},"ao":0,"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Head","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":34.148,"s":[-12.538,-36.252,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":38.516,"s":[-8.117,-30.695,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":43.234,"s":[-8.112,-38.174,0],"to":[0,0,0],"ti":[0,0,0]},{"t":48.125,"s":[-8.429,-38.524,0]}],"ix":2},"a":{"a":0,"k":[-8.429,-38.524,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":
0.7,"y":1},"o":{"x":0.3,"y":0},"t":34.148,"s":[{"i":[[-16.467,0.336],[-1.344,-21.682],[21.682,-1.344],[1.344,21.682]],"o":[[21.719,-0.443],[1.344,21.682],[-21.682,1.344],[-1.344,-21.682]],"v":[[-15.056,-69.361],[36.497,-38.542],[-5.169,-6.068],[-59.616,-37.891]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":38.432,"s":[{"i":[[-18.763,0],[0,-18.738],[18.763,0],[0,18.738]],"o":[[18.763,0],[0,18.738],[-18.763,0],[0,-18.738]],"v":[[-8.429,-76.118],[25.544,-42.19],[-8.429,-8.263],[-42.401,-42.19]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":43.234,"s":[{"i":[[-23.106,0],[0,-20.772],[23.106,0],[0,20.772]],"o":[[23.106,0],[0,20.772],[-23.106,0],[0,-20.772]],"v":[[-8.429,-70.29],[33.408,-32.678],[-8.429,4.934],[-50.265,-32.678]],"c":true}]},{"t":46.96875,"s":[{"i":[[-21.723,0],[0,-21.723],[21.723,0],[0,21.723]],"o":[[21.723,0],[0,21.723],[-21.723,0],[0,-21.723]],"v":[[-8.429,-77.857],[30.905,-38.524],[-8.429,0.81],[-47.762,-38.524]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":35,"op":122,"st":-14,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Body","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.592],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":34.148,"s":[-7.287]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":39.244,"s":[-2]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":44.631,"s":[0]},{"t":49,"s":[4]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.486,"y":1},"o":{"x":0.201,"y":0.771},"t":34.148,"s":[56.671,152.969,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":38.432,"s":[41.215,115.097,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":43.234,"s":[42.391,137.091,0],"to":[0,0,0],"ti":[0,0,0]},{"t":47.078125,"s":[35.978,130.089,0]}],"ix":2},"a":{"a":0,"k":[-8.429,73.857,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.3,0.3,0.3],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":31.338,"s":[63.6,0,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":37.643,"s":[103.88,121.9,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":42.012,"s":[109.18,103.88,100]},{"t":46.37890625,"s":[106,106,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":34.148,"s":[{"i":[[0,0],[0,0],[0,-27.069],[0,0],[4.483,0],[0,0],[0,5.475],[0,0],[-47.453,0.433]],"o":[[0,0],[48.466,0],[0,0],[0,5.475],[0,0],[-4.483,0],[0,0],[0,-26.798],[0,0]],"v":[[-11.301,7.589],[-9.855,7.583],[72.293,74.435],[72.293,84.128],[64.176,94.04],[-83.652,91.199],[-91.77,81.287],[-91.77,71.594],[-11.301,7.589]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":38.432,"s":[{"i":[[0,0],[0,0],[-0.784,-23.184],[0,0],[4.483,0],[0,0],[0,5.917],[0,0],[-47.453,0.468]],"o":[[0,0],[48.466,0],[0,0],[0,5.917],[0,0],[-4.483,0],[0,0],[1.604,-22.612],[0,0]],"v":[[-10.528,20.974],[-9.082,20.967],[61.9,75.286],[61.9,85.762],[53.783,96.476],[-70.459,96.476],[-78.576,8
5.762],[-78.576,75.286],[-10.528,20.974]],"c":true}]},{"t":43.234375,"s":[{"i":[[0,0],[0,0],[0,-22.167],[0,0],[4.483,0],[0,0],[0,4.483],[0,0],[-47.453,0.354]],"o":[[0,0],[48.466,0],[0,0],[0,4.483],[0,0],[-4.483,0],[0,0],[0,-21.945],[0,0]],"v":[[-9.875,17.672],[-8.429,17.667],[61.81,57.803],[61.81,65.74],[53.692,73.857],[-70.549,73.857],[-78.667,65.74],[-78.667,57.803],[-9.875,17.672]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":35,"op":122,"st":-14,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Plus Person","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.654],"y":[1.068]},"o":{"x":[0.312],"y":[0]},"t":28.777,"s":[0]},{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.343],"y":[0.097]},"t":32,"s":[-21]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":37.555,"s":[4]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":43.533,"s":[-7]},{"t":48.0390625,"s":[4]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.211,"y":1},"o":{"x":0.354,"y":0},"t":29.691,"s":[116.741,5.86,0],"to":[1.7,2.585,0],"ti":[0,0,0]},{"i":{"x":0.232,"y":1},"o":{"x":0.3,"y":0},"t":38.43,"s":[145.021,75.433,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.167,"y":0},"t":42.799,"s":[141.076,47.345,0],"to":[0,0,0],"ti":[0,0,0]},{"t":47.1640625,"s":[140.906,55.366,0]}],"ix":2},"a":{"a":0,"k":[-50.491,-44.113,0],"ix":1},"s":{"a":0,"k":[-106,106,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":30.377,"s":[{"i":[[0,0],[0
,0]],"o":[[0,0],[0,0]],"v":[[-78.417,-44.113],[-22.565,-44.113]],"c":false}]},{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":34.061,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-75.071,-44.113],[-25.911,-44.113]],"c":false}]},{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":39.303,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-79.119,-44.113],[-21.863,-44.113]],"c":false}]},{"t":44.544921875,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-78.417,-44.113],[-22.565,-44.113]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":14,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Line","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":30.377,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-50.491,-72.039],[-50.491,-16.187]],"c":false}]},{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":34.061,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-50.491,-79.017],[-50.491,-9.21]],"c":false}]},{"i":{"x":0.68,"y":1},"o":{"x":0.32,"y":0},"t":39.303,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-50.491,-69.343],[-50.491,-18.884]],"c":false}]},{"t":44.544921875,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-50.491,-72.039],[-50.491,-16.187]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":14,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Line","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":30,"op":122,"st":-14,"bm":0},{"ddd":0,"ind":6,"ty":3,"nm":"Null 3","parent":2,"sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[90]},{"t":9.4765625,"s":[0]}],"ix":10},"p":{"a":0,"k":[63.22,44.14,0],"ix":2},"a":{"a":0,"k":[50,50,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.509,0.509,0.167],"y":[0,0,0]},"t":5.766,"s":[-106,106,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.511,0.511,0.3],"y":[0,0,0]},"t":15.373,"s":[-132.5,132.5,100]},{"t":32.994140625,"s":[-106,106,100]}],"ix":6}},"ao":0,"ip":0,"op":35,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Plus","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[116.741,5.86,0],"ix":2},"a":{"a":0,"k":[-50.491,-44.113,0],"ix":1},"s":{"a":0,"k":[-106,106,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-78.417,-44.113],[-22.565,-44.113]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":9.477,"s":[0]},{"t":13.25390625,"s":[100]}],"ix":1},"e":{"a":0,"k":0,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":14,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Line","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-50.491,-72.039],[-50.491,-16.187]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":0,"k":0,"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":25.889,"s":[0]},{"t":29.046875,"s":[100]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":14,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Line","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":30,"st":0,"bm":0},{"ddd":0,"ind":8,"ty":4,"nm":"Bottom","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0.6],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":5.766,"s":[78]},{"i":{"x":[0.5],"y":[0.888]},"o":{"x":[0.5],"y":[0]},"t":15.623,"s":[111]},{"i":{"x":[0.342],"y":[1]},"o":{"x":[0.452],"y":[0.161]},"t":23.059,"s":[52]},{"t":40.353515625,"s":[-34]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":0.877},"o":{"x":0.3,"y":0},"t":0,"s":[-31.096,136.191,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.6,"y":0.892},"o":{"x":0.4,"y":0.126},"t":5.766,"s":[-67.138,14.608,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.5,"y":0.885},"o":{"x":0.5,"y":0.153},"t":15.623,"s":[53.862,14.608,0],"to":[-10.362,-50.608,0],"ti":[0,0,0]},{"i":{"x":0.537,"y":0.93},"o":{"x":0.624,"y":0.077},"t":23.059,"s":[-3.137,-31.393,0],"to":[0,0,0],"ti":[-45.284,-31.783,0]},{"t":34.587890625,"s":[31.727,192.821,0]}],"ix":2},"a":{"a":0,"k":[-62.138,70.608,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.229,"y":1},"o":{"x":0.3,"y":0},"t":5.766,"s":[{"i":[[0,0],[0,0],[3.048,0],[0,0],[0,3.085],[0,0],[0,0],[-2.155,2.163],[0,0]],"o":[[0,0],[-2.155,2.163],[0,0],[-3.085,0],[0,0],[0,0],[0,-3.059],[0,0],[0,0]],"v":[[61.933,-21.302],[-26.266,67.23],[-34.393,70.608],[-56.553,70.608],[-62.138,65.022],[-62.138,43.874],[-62.138,43.874],[-58.772,35.719],[29.933,-53.316]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":15.623,"s":[{"i":[[-10.532,-2.083],[0,0],[3.048,0],[0,0],[0,3.085],[0,0],[0,0],[-2.155,2.163],[-1.7,-0.081]],"o":[[0.356,1.596],[-2.155,2.163],[0,0],[-3.085,0],[0,0],[0,0],[0,-3.059],[0,0],[7.589,28.094]],"v":[[57.4,-6.619],[-30.157,70.341],[-38.283,73.719],[-56.553,70.608],[-62.138,65.022],[-66.028,46.986],[-66.028,46.986],[-62.662,38.83],[16.173,-49.421]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.979,
"y":0},"t":23.059,"s":[{"i":[[-10.863,-2.85],[0,0],[3.048,0],[0,0],[0,3.085],[0,0],[0,0],[-2.155,2.163],[-4.966,3.836]],"o":[[-2.32,2.607],[-2.155,2.163],[0,0],[-3.085,0],[0,0],[0,0],[0,-3.059],[0,0],[2.173,10.968]],"v":[[61.933,-21.302],[-26.266,67.23],[-34.393,70.608],[-54.98,69.074],[-60.566,63.489],[-62.138,43.874],[-62.138,43.874],[-58.772,35.719],[29.933,-53.316]],"c":true}]},{"t":34.587890625,"s":[{"i":[[-11.919,-43.105],[0,0],[4.938,3.23],[0,0],[0,0],[6.648,4.812],[0,0],[-3.666,3.437],[-31.842,-2.557]],"o":[[9.2,33.271],[-2.155,2.163],[0,0],[0,0],[0,0],[-7.527,-5.449],[2.14,-5.313],[0,0],[13.388,1.075]],"v":[[41.133,39.594],[35.387,90.297],[24.29,94.419],[-34.18,58.692],[-48.823,50.115],[-82.77,29.164],[-86.244,15.143],[-75.931,4.616],[-23.061,-10.839]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle-Copy-3","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":35,"st":0,"bm":0},{"ddd":0,"ind":9,"ty":4,"nm":"Top","parent":8,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[63.409,-54.825,0],"ix":2},"a":{"a":0,"k":[63.409,-54.825,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.229,"y":1},"o":{"x":0.3,"y":0},"t":5.766,"s":[{"i":[[0,0],[0,0],[-4.488,-4.504],[0,0],[4.488,-4.504],[0,0]],"o":[[0,0],[4.488,-4.504],[0,0],[4.488,4.504],[0,0],[0,0]],"v":[[38.156,-61.569],[53.345,-76.815],[69.598,-76.815],[85.297,-61.06],[85.297,-44.749],[70.06,-29.458]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":16,"s":[{"i":[[3.111,21.585],[0,0],[-10.146,-6.772],[-3.475,-7.408],[5.812,-5.833],[4.071,-2.666]],"o":[[3.112,-4.119],[5.812,-5.833],[8.915,5.749],[5.836,11.717],[0,0],[-14.23,0.042]],"v":[[25.233,-58.115],[37.103,-72.724],[61.137,-75.785],[82.792,-51.762],[81.137,-27.295],[67.255,-15.789]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.979,"y":0},"t":23.059,"s":[{"i":[[-2.294,8.965],[0,0],[-7.868,-4.577],[-2.817,-4.66],[4.488,-4.504],[3.228,-3.153]],"o":[[2.894,-2.657],[4.488,-4.504],[5.547,3.227],[2.817,4.66],[0,0],[-11.858,-1.833]],"v":[[38.156,-61.569],[50.32,-75.93],[68.841,-76.594],[85.297,-61.06],[86.469,-43.766],[70.06,-29.458]],"c":true}]},{"t":34.587890625,"s":[{"i":[[-10.848,11.661],[-13.647,-2.884],[-4.605,-4.827],[6.013,-10.982],[5.64,-0.912],[12.819,6.608]],"o":[[8.678,-9.329],[12.755,2.696],[7.084,7.425],[-3.308,6.042],[-5.185,0.838],[-22.812,-11.758]],"v":[[-2.696,-48.929],[30.849,-57.589],[61.289,-40.975],[69.451,-9.824],[52.397,0.575],[23.028,-3.013]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - 
Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle-Copy-3","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":35,"st":0,"bm":0}],"markers":[{"tm":51,"cm":"1","dr":0}]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/write_contacts_fab_icon_reverse.json b/TMessagesProj/src/main/res/raw/write_contacts_fab_icon_reverse.json new file mode 100644 index 000000000..7f7c64566 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/write_contacts_fab_icon_reverse.json @@ -0,0 +1 @@ +{"v":"5.7.1","fr":60,"ip":0,"op":36,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":3,"nm":"Null 2","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":0,"k":[120,120,100],"ix":6}},"ao":0,"ip":0,"op":120,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Head 
2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-8.429,-38.524,0],"ix":2},"a":{"a":0,"k":[-8.429,-38.524,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.4,"y":0},"t":0,"s":[{"i":[[-21.723,0],[0,-21.723],[21.723,0],[0,21.723]],"o":[[21.723,0],[0,21.723],[-21.723,0],[0,-21.723]],"v":[[-8.429,-77.857],[30.905,-38.524],[-8.429,0.81],[-47.762,-38.524]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":7.414,"s":[{"i":[[-23.783,0],[0,-11.771],[23.783,0],[0,11.771]],"o":[[23.783,0],[0,11.771],[-23.783,0],[0,-11.771]],"v":[[-5.479,-14.693],[37.585,6.621],[-5.479,27.935],[-48.543,6.621]],"c":true}]},{"t":14.830078125,"s":[{"i":[[-17.964,0],[0,-22.456],[17.964,0],[0,22.456]],"o":[[17.964,0],[0,22.456],[-17.964,0],[0,-22.456]],"v":[[2.11,-175.185],[34.636,-134.525],[2.11,-93.865],[-30.417,-134.525]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":15,"st":-51,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Body 
2","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":1.854,"s":[0]},{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":9.268,"s":[-8]},{"t":16.68359375,"s":[2]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.4,"y":0},"t":0,"s":[-8.429,73.857,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":7.414,"s":[-8.429,108.857,0],"to":[0,0,0],"ti":[0,0,0]},{"t":15,"s":[-12.179,53.857,0]}],"ix":2},"a":{"a":0,"k":[-8.429,73.857,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.4,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[0,-22.167],[0,0],[4.483,0],[0,0],[0,4.483],[0,0],[-47.453,0.354]],"o":[[0,0],[48.466,0],[0,0],[0,4.483],[0,0],[-4.483,0],[0,0],[0,-21.945],[0,0]],"v":[[-9.875,17.672],[-8.429,17.667],[61.81,57.803],[61.81,65.74],[53.692,73.857],[-70.549,73.857],[-78.667,65.74],[-78.667,57.803],[-9.875,17.672]],"c":true}]},{"i":{"x":0.833,"y":0.82},"o":{"x":0.3,"y":0},"t":7.414,"s":[{"i":[[0,0],[0,0],[0,-8.968],[0,0],[10.089,-0.439],[0,0],[0,1.814],[0,0],[-51.235,0.143]],"o":[[0,0],[52.328,0],[0,0],[0,1.814],[0,0],[-7.526,-0.95],[0,0],[0,-8.878],[0,0]],"v":[[-8.471,47.293],[-6.91,47.291],[68.115,60.142],[68.115,63.353],[53.999,68.952],[-71.652,69.577],[-83.556,63.353],[-83.556,60.142],[-8.471,47.293]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.18},"t":10.195,"s":[{"i":[[0,0],[0,0],[0.627,-17.978],[0,0],[8.234,-4.779],[4.916,8.584],[1.556,6.714],[0,0],[-47.495,0.938]],"o":[[0,0],[48.687,-2.133],[0,0],[-1.939,6.17],[-9.891,8.711],[-4.408,-2.194],[0,0],[-2.459,-17.223],[0,0]],"v":[[0.129,-19.016],[3.589,-19.222],[57.088,51.783],[53.89,65.519],[36.027,81.999],[-41.386,79.436],[-52.042,64.203],[-55.017,50.49],[-4.942,-18.815]],"c":true}]},{"t":14.830078125,"s":[{"i":[[0,0],[0,0],[4.484,-25.333],[0,0],[5.619,-9.118],[8.961,15.647],[2.837,10.745],[0,0],[-37.94,2.083]],
"o":[[0,0],[36.927,0.974],[0,0],[-3.534,9.754],[-7.022,11.394],[-1.843,-3.218],[0,0],[-3.147,-27.7],[0,0]],"v":[[3.796,-72.739],[8.817,-73.113],[37.69,33.503],[30.931,56.395],[16.636,87.276],[-17.203,84.22],[-29.428,59.84],[-31.998,38.356],[-10.61,-71.295]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":15,"st":-51,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Plus 2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.733],"y":[0.837]},"o":{"x":[0.413],"y":[0]},"t":0,"s":[90]},{"i":{"x":[0.638],"y":[1]},"o":{"x":[0.417],"y":[-0.08]},"t":5.561,"s":[76.14]},{"t":13.90234375,"s":[142]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.31,"y":0},"t":0.928,"s":[84.509,-3.613,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.535,"y":0.604},"o":{"x":0.417,"y":0},"t":5.561,"s":[83.787,7.318,0],"to":[0,0,0],"ti":[13.308,-11.065,0]},{"i":{"x":0.535,"y":1},"o":{"x":0.226,"y":0.077},"t":10.195,"s":[55.242,26.612,0],"to":[-3.001,2.495,0],"ti":[2.608,-2.722,0]},{"t":13.90234375,"s":[45.766,-41.188,0]}],"ix":2},"a":{"a":0,"k":[-50.491,-44.113,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.614,0.614,0.29],"y":[1,1,1]},"o":{"x":[0.31,0.31,0.31],"y":[0,0,0]},"t":6.488,"s":[100,100,100]},{"t":12.9765625,"s":[0,0,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-78.417,-44.113],[-22.565,-44.113]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":14,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Line","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-50.491,-72.039],[-50.491,-16.187]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":14,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Line","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":12,"st":-40,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Top","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.29,"y":1},"o":{"x":0.4,"y":0},"t":15.756,"s":[107.25,-97.959,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0,"y":1},"o":{"x":0.3,"y":0},"t":25.023,"s":[21.044,-7.523,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0,"y":1},"o":{"x":0.167,"y":0},"t":33,"s":[63.409,-54.825,0],"to":[0,0,0],"ti":[0,0,0]},{"t":36,"s":[63.826,-55.034,0]}],"ix":2},"a":{"a":0,"k":[63.409,-54.825,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.29,"y":1},"o":{"x":0.4,"y":0},"t":17.609,"s":[{"i":[[-1.292,19.708],[-12.097,12.979],[-9.767,-0.419],[0.287,-9.878],[10.245,-10.617],[10.387,-0.012]],"o":[[0.524,-7.992],[12.097,-12.979],[10.493,0.45],[-0.363,12.492],[-10.795,11.186],[-8.234,0.01]],"v":[[36.438,-53.239],[50.003,-81.736],[81.802,-94.858],[105.468,-70.595],[90.399,-39.487],[60.903,-26.81]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.3,"y":0},"t":24.098,"s":[{"i":[[0,0],[-5.789,2.953],[-5.107,-2.041],[-3.199,-9.875],[2.869,-4.743],[0,0]],"o":[[0,0],[7.186,-3.665],[9.848,3.936],[2.223,6.862],[-3.729,6.165],[0,0]],"v":[[20.6,-77.99],[35.79,-93.236],[53.607,-92.45],[99.108,-48.105],[98.224,-30.203],[84.012,-15.91]],"c":true}]},{"t":33,"s":[{"i":[[0,0],[0,0],[-4.488,-4.504],[0,0],[4.488,-4.504],[0,0]],"o":[[0,0],[4.488,-4.504],[0,0],[4.488,4.504],[0,0],[0,0]],"v":[[38.156,-61.569],[53.345,-76.815],[69.598,-76.815],[85.297,-61.06],[85.297,-44.749],[70.06,-29.458]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - 
Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle-Copy-3","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":15,"op":121,"st":-25,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Bottom","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.252],"y":[1]},"o":{"x":[0.586],"y":[0]},"t":14.83,"s":[-44]},{"t":32.439453125,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":15,"s":[-2.269,8.646,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":22,"s":[-5.519,42.646,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.3,"y":0},"t":31,"s":[-8.435,-5.354,0],"to":[0,0,0],"ti":[0,0,0]},{"t":36,"s":[-12.602,11.146,0]}],"ix":2},"a":{"a":0,"k":[-0.102,8.646,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.29,"y":1},"o":{"x":0.4,"y":0},"t":14.83,"s":[{"i":[[25.55,-30.97],[16.767,-8.433],[11.674,-4.106],[7.346,-0.171],[1.518,5.211],[0,0],[0,0],[-2.526,3.9],[-13.242,7.13]],"o":[[-7.407,8.979],[-8.516,4.283],[0,0],[-7.826,0.182],[-1.518,-5.211],[0,0],[1.026,-4.028],[19.583,-30.24],[23.662,-23.489]],"v":[[77.869,-19.776],[11.958,43.232],[-15.176,55.98],[-37.467,60.767],[-53.007,48.469],[-51.165,31.03],[-50.483,27.098],[-37.5,1.636],[29.389,-67.4]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":24.098,"s":[{"i":[[0,0],[0,0],[3.048,0],[0,0],[1.046,2.636],[0,0],[0,0],[-2.155,2.163],[0,0]],"o":[[0,0],[-2.155,2.163],[0,0],[-2.683,-0.916],[0,0],[0,0],[0,-3.059],[0,0],[0,0]],"v":[[29.961,42.238],[-20.03,80.726],[-28.156,84.104],[-56.553,70.608],[-62.138,65.022],[-78.008,38.935],[-78.008,38.935],[-74.642,30.78],[-34.754,-18.818]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":3
3,"s":[{"i":[[0,0],[0,0],[3.048,0],[0,0],[0,3.085],[0,0],[0,0],[-2.155,2.163],[0,0]],"o":[[0,0],[-2.155,2.163],[0,0],[-3.085,0],[0,0],[0,0],[0,-3.059],[0,0],[0,0]],"v":[[61.933,-21.302],[-26.266,67.23],[-34.393,70.608],[-56.553,70.608],[-62.138,65.022],[-62.138,43.874],[-62.138,43.874],[-58.772,35.719],[29.933,-53.316]],"c":true}]},{"t":36,"s":[{"i":[[0,0],[0,0],[3.048,0],[0,0],[0,3.085],[0,0],[0,0],[-2.155,2.163],[0,0]],"o":[[0,0],[-2.155,2.163],[0,0],[-3.085,0],[0,0],[0,0],[0,-3.059],[0,0],[0,0]],"v":[[61.933,-21.302],[-26.266,67.23],[-34.393,70.608],[-56.553,70.608],[-61.929,64.918],[-61.721,43.874],[-61.721,43.874],[-58.772,35.719],[29.933,-53.316]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle-Copy-3","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":15,"op":121,"st":-25,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/values/ids.xml b/TMessagesProj/src/main/res/values/ids.xml index 28cef5e45..d9ade7562 100644 --- a/TMessagesProj/src/main/res/values/ids.xml +++ b/TMessagesProj/src/main/res/values/ids.xml @@ -10,6 +10,7 @@ + diff --git a/TMessagesProj/src/main/res/values/strings.xml b/TMessagesProj/src/main/res/values/strings.xml index 2d518a831..d5c59cd78 100644 --- a/TMessagesProj/src/main/res/values/strings.xml +++ b/TMessagesProj/src/main/res/values/strings.xml @@ -165,6 +165,7 @@ Pin Unpin Archive + Archive Unarchive Archived Chats Delete chat @@ -567,6 +568,7 @@ Slow Mode is enabled. You can\'t select more items. 
Sorry, this text is too long to send as one message.\n\nSlow Mode is enabled. You can\'t send more than one message at once. **%1$s** promoted to admin + **%1$s** removed from **%2$s** New Poll New Quiz @@ -718,6 +720,7 @@ Add admins Send anonymously Ban users + Manage voice chats Add users Title: %1$s Title @@ -732,8 +735,15 @@ Edited messages Pinned messages Leaving members + Voice chats un1 set the slow mode timer to %1$s un1 disabled the slow mode + un1 started a voice chat + un1 ended the voice chat + un1 muted un2 + un1 unmuted un2 + un1 allowed new participants to speak + un1 muted new participants New Broadcast List Enter list name @@ -935,6 +945,13 @@ Edit Message Edit Caption Tap to edit media + Edit this photo + Edit this video + Replace photo + Replace video + Replace media + Replace file + Replace audio file Scroll down for bots %1$s Sorry, editing time expired. @@ -1003,6 +1020,8 @@ Tap for photo, hold for video Tap to view as a list. Send without sound + Send as a new photo + Replace photo Send Now Reschedule Today @@ -1126,6 +1145,10 @@ %1$s changed the group photo for %2$s %1$s changed the group video for %2$s %1$s invited %3$s to the group %2$s + %1$s invited %3$s to a voice chat in the group %2$s + %1$s invited you to a voice chat in the group %2$s + %1$s started a voice chat in the group %2$s + %1$s ended a voice chat in the group %2$s %1$s returned to the group %2$s %1$s joined the group %2$s %1$s removed %3$s from the group %2$s @@ -1262,6 +1285,7 @@ JOIN Link copied to clipboard Link copied to clipboard.\nThis link will only work for members of this chat. + This link will only work for members of this chat. Unfortunately, you can\'t access this message. You are not a member of the chat where it was posted. 
Phone copied to clipboard Email copied to clipboard @@ -1814,6 +1838,10 @@ In-Chat Sounds Default Default + Default + In-App Default + %1$s In-App + Silent Smart Notifications Exceptions Add an Exception @@ -2654,6 +2682,7 @@ Disk and network usage Storage Usage Data Usage + Storage Path Mobile Wi-Fi Roaming @@ -2852,6 +2881,14 @@ un1 is now within %1$s from you You are now within %1$s from un1 un1 is now within %1$s from un2 + un1 invited you to this group + un1 invited you to this channel + un1 started a voice chat + You started a voice chat + Voice chat finished (%s) + un1 invited un2 to the voice chat + You invited un2 to the voice chat + un1 invited you to the voice chat You allowed this bot to message you when you logged in on %1$s. %1$s received the following documents: %2$s Personal details @@ -3026,9 +3063,12 @@ Telegram needs access to your location so that you can share it with your friends. Telegram needs access to your location. Telegram needs access to draw above other apps to play videos in Picture-in-Picture mode. + Overlay mode lets you use push-to-talk and see who is talking even when you are outside of Telegram. SETTINGS Please allow Telegram to be shown on the lock screen so that calls can work properly. To share your live location in this chat, Telegram needs access to your location all the time, including while the app is in the background.\n\nWe will access your location only for the duration you choose, and you can stop sharing it any time. We won\'t use your location for any purpose other than sharing it in this chat. + Picture-in-Picture + Voice Chat Overlay Growth Followers @@ -3141,9 +3181,14 @@ Telegram Call Telegram Video Call Ongoing Telegram call + Ongoing Voice Chat End call Another call in progress You currently have an ongoing call with **%1$s**. Would you like to hang up and start a new one with **%2$s**? + End call with **%1$s** and start voice chat in **%2$s**? 
+ Another voice chat in progress + Leave voice chat in **%1$s** and start a new one with **%2$s**? + Leave voice chat in **%1$s** and call **%2$s**? Voice calls Ringtone You can customize the ringtone used when this contact calls you on Telegram. @@ -3160,7 +3205,9 @@ Answer Decline You\'re currently offline. Please connect to the Internet in order to make calls. + You\'re currently offline. Please connect to the Internet to join voice chats. You currently have airplane mode enabled. Please turn it off or connect to Wi-Fi in order to make calls. + You currently have airplane mode enabled. Please turn it off or connect to Wi-Fi to join voice chats. Offline Airplane Mode Settings @@ -3226,6 +3273,11 @@ Video was pixelated Tap here to turn on your camera Unmute + or hold and talk + You\'re Live + Muted by admin + Muted + You are in Listen Only Mode Mute Start Video Stop Video @@ -3244,6 +3296,61 @@ Are you sure you want to video call **%1$s**? Video Call Reconnecting + Start Voice chat + Voice Chat + Do you want to start a voice chat in this group? + Manage Voice Chats + Join + %1$s members talking + %1$s member talking + %1$s members talking + %1$s members talking + %1$s members talking + %1$s members talking + speaking + listening + invited + Leave + End + Mute microphone + Are you sure you want to mute **%1$s** in this voice chat? + Remove member + Do you want to remove %1$s from the group chat? + **%1$s** removed from the group. + **%1$s** can now speak. + **%1$s** is now muted in this chat. + Mute + New participants can speak + New paricipants are muted + Share invite link + End voice chat + Connecting... + Leave voice chat + Do you want to leave this voice chat? + End voice chat + Do you want to end this voice chat? + End voice chat + VIEW VOICE CHAT + Voice Chat + Open voice chat + The selected user is already in this voice chat. + Sorry, you can\'t join voice chats as an anonymous administrator. + un1 invited un2 to the voice chat + Join voice chat + Hey! 
Join our voice chat: %1$s + Invite Members + Remove + Allow to speak + Search members to invite... + Copy Invite Link + Invite link copied to clipboard. + Unmute + Voice chat ended. Start a new one? + Add Member + Do you want to add **%1$s** to **%2$s**? + You invited **%1$s** to the voice chat. + Add + Nobody talking Message %1$s Voice call %1$s @@ -3327,6 +3434,12 @@ %1$d members %1$d members %1$d members + %1$d participants + %1$d participant + %1$d participants + %1$d participants + %1$d participants + %1$d participants and %1$d more are typing and %1$d more are typing and %1$d more are typing @@ -3751,6 +3864,8 @@ Scheduled for %s Go back Open navigation menu + Open menu + Close menu %2$s by %1$s More options Play @@ -3898,4 +4013,5 @@ \'Remind today at\' HH:mm \'Remind on\' MMM d \'at\' HH:mm \'Remind on\' MMM d yyyy \'at\' HH:mm + Enable diff --git a/build.gradle b/build.gradle index 9fe22834c..775045dd5 100644 --- a/build.gradle +++ b/build.gradle @@ -6,7 +6,7 @@ buildscript { google() } dependencies { - classpath 'com.android.tools.build:gradle:4.1.0' + classpath 'com.android.tools.build:gradle:4.1.1' classpath 'com.google.gms:google-services:4.3.4' } }